text stringlengths 1 1.05M |
|---|
package io.dronefleet.mavlink.slugs;
import io.dronefleet.mavlink.annotations.MavlinkFieldInfo;
import io.dronefleet.mavlink.annotations.MavlinkMessageBuilder;
import io.dronefleet.mavlink.annotations.MavlinkMessageInfo;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.util.Objects;
/**
 * Control for camara.
 */
@MavlinkMessageInfo(
        id = 188,
        crc = 5,
        description = "Control for camara."
)
public final class SlugsConfigurationCamera {
    private final int target;

    private final int idorder;

    private final int order;

    private SlugsConfigurationCamera(int target, int idorder, int order) {
        this.target = target;
        this.idorder = idorder;
        this.order = order;
    }

    /**
     * Returns a builder instance for this message.
     */
    @MavlinkMessageBuilder
    public static Builder builder() {
        return new Builder();
    }

    /**
     * The system setting the commands
     */
    @MavlinkFieldInfo(
            position = 1,
            unitSize = 1,
            description = "The system setting the commands"
    )
    public final int target() {
        return this.target;
    }

    /**
     * ID 0: brightness 1: aperture 2: iris 3: ICR 4: backlight
     */
    @MavlinkFieldInfo(
            position = 2,
            unitSize = 1,
            description = "ID 0: brightness 1: aperture 2: iris 3: ICR 4: backlight"
    )
    public final int idorder() {
        return this.idorder;
    }

    /**
     * 1: up/on 2: down/off 3: auto/reset/no action
     */
    @MavlinkFieldInfo(
            position = 3,
            unitSize = 1,
            description = "1: up/on 2: down/off 3: auto/reset/no action"
    )
    public final int order() {
        return this.order;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || !getClass().equals(o.getClass())) return false;
        SlugsConfigurationCamera other = (SlugsConfigurationCamera) o;
        // Primitive comparison: Objects.deepEquals on int fields needlessly
        // autoboxed each value; the result is identical.
        return target == other.target
                && idorder == other.idorder
                && order == other.order;
    }

    @Override
    public int hashCode() {
        int result = 0;
        // Integer.hashCode(x) == Objects.hashCode(Integer.valueOf(x)) but
        // avoids boxing; the computed hash is unchanged.
        result = 31 * result + Integer.hashCode(target);
        result = 31 * result + Integer.hashCode(idorder);
        result = 31 * result + Integer.hashCode(order);
        return result;
    }

    @Override
    public String toString() {
        return "SlugsConfigurationCamera{target=" + target
                 + ", idorder=" + idorder
                 + ", order=" + order + "}";
    }

    /**
     * Builder for {@link SlugsConfigurationCamera}.
     */
    public static final class Builder {
        private int target;

        private int idorder;

        private int order;

        /**
         * The system setting the commands
         */
        @MavlinkFieldInfo(
                position = 1,
                unitSize = 1,
                description = "The system setting the commands"
        )
        public final Builder target(int target) {
            this.target = target;
            return this;
        }

        /**
         * ID 0: brightness 1: aperture 2: iris 3: ICR 4: backlight
         */
        @MavlinkFieldInfo(
                position = 2,
                unitSize = 1,
                description = "ID 0: brightness 1: aperture 2: iris 3: ICR 4: backlight"
        )
        public final Builder idorder(int idorder) {
            this.idorder = idorder;
            return this;
        }

        /**
         * 1: up/on 2: down/off 3: auto/reset/no action
         */
        @MavlinkFieldInfo(
                position = 3,
                unitSize = 1,
                description = "1: up/on 2: down/off 3: auto/reset/no action"
        )
        public final Builder order(int order) {
            this.order = order;
            return this;
        }

        public final SlugsConfigurationCamera build() {
            return new SlugsConfigurationCamera(target, idorder, order);
        }
    }
}
|
<filename>src/stratification/loader.ts
/* *****************************************************************************
* Caleydo - Visualization for Molecular Biology - http://caleydo.org
* Copyright (c) The Caleydo Team. All rights reserved.
* Licensed under the new BSD license, available at http://caleydo.org/license
**************************************************************************** */
/**
* Created by <NAME> on 04.08.2014.
*/
import {AppContext} from '../app/AppContext';
import {ParseRangeUtils, Range1DGroup, Range, CompositeRange1D} from '../range';
import {IStratificationDataDescription} from './IStratification';
import {IDTypeManager} from '../idtype';
export interface ILoadedStratification {
readonly rowIds: Range;
readonly rows: string[];
readonly range: CompositeRange1D;
}
export interface IStratificationLoader {
(desc: IStratificationDataDescription): Promise<ILoadedStratification>;
}
/**
 * Builds a composite 1D range named `name` from an array of raw group
 * descriptors (each with `name`, optional `color`, and a range-like `range`).
 */
function createRangeFromGroups(name: string, groups: any[]) {
  const groupRanges = groups.map((group) => {
    const dim = ParseRangeUtils.parseRangeLike(group.range).dim(0);
    return new Range1DGroup(group.name, group.color || 'gray', dim);
  });
  return CompositeRange1D.composite(name, groupRanges);
}
export class StratificationLoaderUtils {
  /**
   * Creates a loader that fetches the stratification from `/dataset/<id>`.
   * The resulting promise is cached so repeated calls issue only one request.
   */
  static viaAPILoader(): IStratificationLoader {
    let _data: Promise<ILoadedStratification> = undefined;
    return (desc) => {
      if (!_data) { //in the cache
        _data = AppContext.getInstance().getAPIJSON('/dataset/' + desc.id).then((data) => {
          const idType = IDTypeManager.getInstance().resolveIdType(desc.idtype);
          const rowIds = ParseRangeUtils.parseRangeLike(data.rowIds);
          // Prime the id-type mapping cache so later id<->name lookups are local.
          idType.fillMapCache(rowIds.dim(0).asList(data.rows.length), data.rows);
          return {
            rowIds,
            rows: data.rows,
            range: createRangeFromGroups(desc.name, data.groups)
          };
        }, (error) => {
          // Bug fix: previously a rejected promise stayed cached forever, so
          // every later call failed too. Evict the cache to allow a retry.
          _data = undefined;
          throw error;
        });
      }
      return _data;
    };
  }

  /**
   * Creates a loader backed by data already in memory; the resolved result
   * is built once and cached.
   */
  static viaDataLoader(rows: string[], rowIds: number[], range: CompositeRange1D): IStratificationLoader {
    let _data: Promise<ILoadedStratification> = undefined;
    return () => {
      if (!_data) { //in the cache
        _data = Promise.resolve({
          rowIds: Range.list(rowIds),
          rows,
          range
        });
      }
      return _data;
    };
  }
}
|
package io.ph.bot.exception;
import java.net.URL;
/**
 * Thrown when a requested remote file exceeds the allowed download size.
 */
public class FileTooLargeException extends Exception {
    private static final long serialVersionUID = 1077370676532367238L;

    /** URL that was being fetched; final — set once at construction. */
    private final URL urlRequested;

    /**
     * @param url the URL whose content was too large to download
     */
    public FileTooLargeException(URL url) {
        super();
        this.urlRequested = url;
    }

    /**
     * @return the URL that triggered this exception
     */
    public URL getUrlRequested() {
        return this.urlRequested;
    }
}
|
#!/bin/bash
#SBATCH -n 1
# Single-task Slurm job: run the scaled random-mutation simulation with
# positional arguments 200 and 400, timing the whole run.
time python mu_sims_random_scaled_200.py 200 400
-- Pair up rows of table1 and table2 that share the same common_column value
-- and return two columns from each side. INNER JOIN drops rows from either
-- table that have no match in the other.
SELECT table1.col1, table1.col2, table2.col3, table2.col4
FROM table1
INNER JOIN table2
ON table1.common_column=table2.common_column
<reponame>jnschbrt/data-prepper
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package com.amazon.dataprepper.model.log;
import com.amazon.dataprepper.model.event.DefaultEventMetadata;
import com.amazon.dataprepper.model.event.EventMetadata;
import com.amazon.dataprepper.model.event.JacksonEvent;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Tests that JacksonLog's builder always produces events typed "LOG",
 * regardless of caller-supplied metadata.
 */
public class JacksonLogTest {

    @Test
    public void testBuilder_usesLogEventType() {
        final Log builtLog = JacksonLog.builder().build();

        assertThat(builtLog, notNullValue());
        assertThat(builtLog.getMetadata().getEventType(), equalTo("LOG"));
    }

    @Test
    public void testBuilder_usesLogEventType_withUserProvidedEventType() {
        // Even when a caller supplies its own event type, the builder pins "LOG".
        final Log builtLog = JacksonLog.builder()
                .withEventType("test")
                .getThis()
                .build();

        assertThat(builtLog, notNullValue());
        assertThat(builtLog.getMetadata().getEventType(), equalTo("LOG"));
    }

    @Test
    public void testBuilder_withNonLogMetadata_throwsIllegalArgumentException() {
        final EventMetadata nonLogMetadata = DefaultEventMetadata.builder()
                .withEventType("foobar")
                .build();
        final JacksonEvent.Builder<JacksonLog.Builder> logBuilder = JacksonLog.builder()
                .withEventMetadata(nonLogMetadata);

        assertThat(logBuilder, notNullValue());
        assertThrows(IllegalArgumentException.class, logBuilder::build);
    }
}
|
def maxValue(matrix):
    """Return the largest element of a 2-D iterable of comparable values.

    Returns 0 when the matrix contains no elements at all (empty matrix or
    all rows empty), preserving the original function's behavior for that case.
    """
    max_value = None
    for row in matrix:
        for element in row:
            # Bug fix: the old code seeded max_value with 0, so a matrix of
            # all-negative numbers incorrectly returned 0 instead of its max.
            if max_value is None or element > max_value:
                max_value = element
    return 0 if max_value is None else max_value
<filename>tests/recorder/009_recording_scroll.t.js
// Siesta test suite: verifies the ExtJS recorder captures genuine scroll
// activity but ignores scrolls triggered programmatically as a side effect.
describe('Recording scroll activity', function (t) {
    var recorder;

    // Fresh recorder per test; stop the previous one so handlers don't stack.
    t.beforeEach(function () {
        recorder && recorder.stop();

        recorder = new Siesta.Recorder.ExtJS({
            window          : window,
            ignoreSynthetic : false,
            recordScroll    : true
        })
    })

    t.it('scrolling as the first action', function (t) {
        // Outer div is scrollable (200px viewport over a 4000px child).
        document.body.innerHTML =
            '<div id="scrollDiv" style="height:200px;background:red;overflow: scroll">' +
                '<div style="height:4000px;background:red">tall div</div>' +
            '</div>'

        t.chain(
            // test has failed sporadically once, could be because "scrollTop" assignment
            // been done synchronously with "innerHTML" assignment
            { waitFor : 1 },

            function (next) {
                recorder.start();

                // Manually scroll the div; the recorder should pick this up.
                t.query('#scrollDiv')[ 0 ].scrollTop = 100;

                next()
            },
            {
                waitFor : function () {
                    return recorder.getRecordedActions().length > 0;
                }
            },
            function () {
                var actions      = recorder.getRecordedActions();
                var scrollParams = actions[ 0 ].value;

                // Exactly one recorded action: a scrollTo on #scrollDiv with a
                // numeric [x, y] pair where x stays 0 and y is positive.
                t.expect(actions.length).toBe(1)

                t.is(actions[ 0 ].action, 'scrollTo', "Correct name for action")
                t.expect(actions[0].getTarget().target).toBe('#scrollDiv')

                t.expect(typeof scrollParams[ 0 ]).toBe('number')

                t.expect(scrollParams[ 0 ]).toBe(0);
                t.expect(scrollParams[ 1 ]).toBeGreaterThan(0);

                // Reset page scroll so later tests start clean.
                document.body.scrollTop = 0;
            }
        )
    });

    t.it('Should ignore scrolling happening on elements as a side effect', function (t) {
        document.body.innerHTML =
            '<button>Foo</button>' +
            '<div id="outer" style="height:100px;width:200px;overflow:auto">' +
                '<div id="inner" style="width:4000px;background:red">tall div</div>' +
            '</div>'

        // Clicking the button scrolls #outer programmatically — this scroll is
        // a side effect of the click and must NOT be recorded as its own action.
        var button = document.body.querySelector('button');

        button.addEventListener('click', function () {
            document.getElementById('outer').scrollLeft = 50;
        });

        recorder.start();

        t.chain(
            { click : 'button' },

            // Give the recorder time to (wrongly) emit a scroll action if it would.
            { waitFor : 1000 },

            function () {
                var actions = recorder.getRecordedActions();

                t.expect(actions.length).toBe(1)

                t.expect(actions[ 0 ].action).toBe('click') // no "scroll" should be recorded
            }
        )
    });
});
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---
# Reset the log directory for this run.
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---
# Clear previous outputs except summary-info and JSON artifacts, then reset
# the fifo and work directories used by the loss pipeline below.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +

rm -R -f fifo/*
rm -R -f work/*
mkdir work/kat/

# Build the financial-structure files needed by fmpy (-a2 allocation rule).
fmpy -a2 --create-financial-structure-files
mkdir work/il_S1_summaryleccalc

# Named pipes wiring the insured-loss computation for process 15.
mkfifo fifo/il_P15

mkfifo fifo/il_S1_summary_P15
mkfifo fifo/il_S1_summary_P15.idx

# --- Do insured loss computes ---
# tee copies the summary stream (and its index) into the leccalc work dir;
# their PIDs are captured so we can wait for the copies to finish.
tee < fifo/il_S1_summary_P15 work/il_S1_summaryleccalc/P15.bin > /dev/null & pid1=$!
tee < fifo/il_S1_summary_P15.idx work/il_S1_summaryleccalc/P15.idx > /dev/null & pid2=$!
summarycalc -m -f  -1 fifo/il_S1_summary_P15 < fifo/il_P15 &

# Event range 15/20: ground-up losses -> financial module -> il_P15 fifo.
eve 15 20 | getmodel | gulcalc -S100 -L100 -r -a0 -i - | fmpy -a2 > fifo/il_P15  &

# NOTE(review): only the tee PIDs are waited on; summarycalc and the eve
# pipeline are left to drain through the fifos — presumably intentional in
# this generated script, but worth confirming.
wait $pid1 $pid2

# --- Do insured loss kats ---
|
import { Component, createMemo } from 'solid-js';
import { DeepReadonly } from 'solid-js/store';
import { MissionGiveItem } from '../../../types';
import { MissionAcceptedItemsInput } from '../MissionAcceptedItemsInput';
import { QuestFoundInRaidOnly } from '../QuestFoundInRaidOnly';
import { QuestNumberInput } from '../QuestNumberInput';
import { QuestStringInput } from '../QuestStringInput';
import { MissionUpdator } from './types';
type Props = {
  questId: string;
  index: number;
  mission: DeepReadonly<MissionGiveItem>;
  updateMission: MissionUpdator<MissionGiveItem>;
  // Bug fix: the component renders `props.children` below, but this member was
  // missing from Props, which fails type-checking under solid-js Component<Props>.
  children?: any;
};

/**
 * Edit form for a "give item" mission: message text, item count,
 * found-in-raid flag and the list of accepted items.
 */
export const MissionGiveItemForm: Component<Props> = props => {
  // Stable id prefix for this (quest, mission index) pair's form controls.
  const uniqId = createMemo(() => {
    return `${props.questId}_give_item_${props.index}`;
  });

  // Count defaults to 0 when the mission does not define one.
  const missionCount = createMemo(() => {
    return props.mission.count ?? 0;
  });

  return (
    <>
      {props.children}
      <QuestStringInput
        formIndex={-1}
        updateQuestString={fn => props.updateMission(m => ({ ...m, message: fn(m.message) }))}
        uniqId={uniqId()}
        questString={props.mission.message}
        fieldName="message"
      />
      <QuestNumberInput
        fieldName="count"
        value={missionCount()}
        setValue={c => props.updateMission(m => ({ ...m, count: c }))}
      />
      <QuestFoundInRaidOnly mission={props.mission} updateMission={props.updateMission} />
      <MissionAcceptedItemsInput
        updateItems={fn => {
          props.updateMission(m => ({ ...m, accepted_items: fn(m.accepted_items) }));
        }}
        items={props.mission.accepted_items}
        uniqId={uniqId()}
      />
    </>
  );
};
|
# rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: linked_accounts
#
# id :integer not null, primary key
# disabled_reason :string
# encrypted_token :string
# encrypted_token_iv :string
# session_data :text
# share_from :boolean default(FALSE), not null
# share_to :boolean default(FALSE), not null
# sync_to :boolean default(FALSE), not null
# type :string not null
# created_at :datetime not null
# updated_at :datetime not null
# external_user_id :string not null
# user_id :integer not null, indexed
#
# Indexes
#
# index_linked_accounts_on_user_id (user_id)
#
# Foreign Keys
#
# fk_rails_166e103170 (user_id => users.id)
#
# rubocop:enable Metrics/LineLength
# Factory for a MyAnimeList-linked account owned by an associated user.
FactoryBot.define do
  factory :linked_account do
    # Implicit association shorthand — equivalent to `association :user`.
    user
    external_user_id { 'toyhammered' }
    token { '<PASSWORD>' }
    type { 'LinkedAccount::MyAnimeList' }
  end
end
|
const model = require('./model'); // Import the MongoDB model
/**
 * Checks whether a username/password pair matches a MEMB_INFO document.
 *
 * @param {string} username
 * @param {string} password
 * @returns {Promise<boolean>} true only when a matching document exists;
 *   false for no match AND for lookup errors (errors are logged, not thrown).
 */
async function validateCredentials(username, password) {
    try {
        // SECURITY NOTE(review): this queries the stored password directly,
        // i.e. passwords appear to be stored/compared in plaintext. They
        // should be hashed (e.g. bcrypt) and verified with a compare function.
        const user = await model.MEMB_INFO.findOne({
            username: username,
            password: password
        });
        // A found document means the credentials are valid.
        return Boolean(user);
    } catch (error) {
        console.error('Error validating credentials:', error);
        return false; // Preserve original policy: treat lookup errors as invalid.
    }
}
package simple
import (
"reflect"
"testing"
)
// Test_fromCipherString verifies that a "CSv1.<data>.<nonce>.<salt>" string
// is split and base64-decoded into its three byte slices.
func Test_fromCipherString(t *testing.T) {
	type args struct {
		cipherString string
	}
	cases := []struct {
		name           string
		args           args
		wantCipherData []byte
		wantNonce      []byte
		wantSalt       []byte
		wantErr        bool
	}{
		{
			name:           "From Cipher String",
			args:           args{cipherString: "CSv1.AE.AI.AM"},
			wantCipherData: []byte{0x01},
			wantNonce:      []byte{0x02},
			wantSalt:       []byte{0x03},
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			cipherData, nonce, salt, err := fromCipherString(tc.args.cipherString)
			if (err != nil) != tc.wantErr {
				t.Errorf("fromCipherString() error = %v, wantErr %v", err, tc.wantErr)
				return
			}
			if !reflect.DeepEqual(cipherData, tc.wantCipherData) {
				t.Errorf("fromCipherString() gotCipherData = %v, want %v", cipherData, tc.wantCipherData)
			}
			if !reflect.DeepEqual(nonce, tc.wantNonce) {
				t.Errorf("fromCipherString() gotNonce = %v, want %v", nonce, tc.wantNonce)
			}
			if !reflect.DeepEqual(salt, tc.wantSalt) {
				t.Errorf("fromCipherString() gotSalt = %v, want %v", salt, tc.wantSalt)
			}
		})
	}
}
// Test_decodeStringWithNoPadding verifies unpadded base64 decoding.
func Test_decodeStringWithNoPadding(t *testing.T) {
	type args struct {
		s string
	}
	cases := []struct {
		name    string
		args    args
		want    []byte
		wantErr bool
	}{
		{
			name: "Decode String",
			args: args{s: "AE"},
			want: []byte{0x01},
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			decoded, err := decodeStringWithNoPadding(tc.args.s)
			if (err != nil) != tc.wantErr {
				t.Errorf("decodeStringWithNoPadding() error = %v, wantErr %v", err, tc.wantErr)
				return
			}
			if !reflect.DeepEqual(decoded, tc.want) {
				t.Errorf("decodeStringWithNoPadding() = %v, want %v", decoded, tc.want)
			}
		})
	}
}
// Test_encodeStringWithNoPadding verifies unpadded base64 encoding.
func Test_encodeStringWithNoPadding(t *testing.T) {
	type args struct {
		s []byte
	}
	cases := []struct {
		name string
		args args
		want string
	}{
		{
			name: "Encode String",
			args: args{s: []byte{0x01}},
			want: "AE",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			encoded := encodeStringWithNoPadding(tc.args.s)
			if encoded != tc.want {
				t.Errorf("encodeStringWithNoPadding() = %v, want %v", encoded, tc.want)
			}
		})
	}
}
// Test_toCipherString verifies assembly of the "CSv1.<data>.<nonce>.<salt>" form.
func Test_toCipherString(t *testing.T) {
	type args struct {
		cipherData []byte
		nonce      []byte
		salt       []byte
	}
	cases := []struct {
		name string
		args args
		want string
	}{
		{
			name: "To Cipher String",
			args: args{
				cipherData: []byte{0x01},
				nonce:      []byte{0x02},
				salt:       []byte{0x03},
			},
			want: "CSv1.AE.AI.AM",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			result := toCipherString(tc.args.cipherData, tc.args.nonce, tc.args.salt)
			if result != tc.want {
				t.Errorf("toCipherString() = %v, want %v", result, tc.want)
			}
		})
	}
}
// Test_isCipherStringValid checks the cipher-string format validator against
// a minimal string and a realistic sample.
func Test_isCipherStringValid(t *testing.T) {
	type args struct {
		input string
	}
	cases := []struct {
		name string
		args args
		want bool
	}{
		{
			name: "Minimum",
			args: args{input: "CSv1.AE.AI.AM"},
			want: true,
		},
		{
			name: "Real World Sample",
			args: args{input: "CSv1.443MMQSEWDPHEYKVS42FWJN633PS4EQIOFXDGMJOM2ON4ACJ.CIG44UL5BXWJU6JSW2BQ.KIORDLXAIJAT7NCTJHWYCE273Q"},
			want: true,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			result := isCipherStringValid(tc.args.input)
			if result != tc.want {
				t.Errorf("isCipherStringValid() = %v, want %v", result, tc.want)
			}
		})
	}
}
|
<reponame>rGunti/BudgetPi<gh_stars>0
from wx import Button, EVT_BUTTON, StaticText, Font, FONTFAMILY_DEFAULT, FONTSTYLE_NORMAL, FONTWEIGHT_NORMAL, ListBox, \
NOT_FOUND, Dialog, BORDER_NONE, BoxSizer, ID_OK
from piwindows.base import BaseWindow, BaseWindowPanel
from piwindows.const import Colour, get_colour
from storage.base import GlobalStorage
class EntryListDialog(Dialog):
    """Borderless 320x240 modal dialog hosting the entry-list panel."""

    def __init__(self, parent):
        Dialog.__init__(self, parent,
                        title=u"Entries",
                        size=(320, 240),
                        style=BORDER_NONE)
        # Place the panel inside a sizer so it fills the dialog client area.
        panel = EntryListWindowPanel(self)
        self._pnl = panel
        sizer = BoxSizer()
        sizer.Clear()
        sizer.Add(panel)
        self._sizer = sizer
        self.SetSizer(sizer)
class EntryListWindowPanel(BaseWindowPanel):
    # Panel showing the stored entries in a list, with Back and Delete buttons.
    # Widget creation order matters for layout; positions are absolute.

    def __init__(self, parent):
        BaseWindowPanel.__init__(self,
                                 parent,
                                 bg_color=Colour.BLACK,
                                 fg_color=Colour.WHITE)
        # Centered title at the top of the panel.
        self._title_label = StaticText(self,
                                       pos=(110, 10),
                                       size=(100, 30),
                                       label=u"Entries")
        self._title_label.SetFont(Font(20, FONTFAMILY_DEFAULT, FONTSTYLE_NORMAL, FONTWEIGHT_NORMAL))
        # Back button (top-left): closes the dialog.
        self._back_button = Button(self,
                                   -1,
                                   "< Back",
                                   pos=(10, 10),
                                   size=(70, 30))
        self._back_button.SetBackgroundColour(get_colour(0x333333))
        self._back_button.SetForegroundColour(Colour.WHITE)
        # Delete button (top-right): removes the selected entry.
        self._delete_button = Button(self,
                                     -1,
                                     "Del",
                                     pos=(240, 10),
                                     size=(70, 30))
        self._delete_button.SetBackgroundColour(Colour.RED)
        self._delete_button.SetForegroundColour(Colour.WHITE)
        # List box filled with string representations of the stored items.
        self._list_control = ListBox(self,
                                     pos=(10, 50),
                                     size=(295, 170))
        self._list_control.SetBackgroundColour(Colour.BLACK)
        self._list_control.SetForegroundColour(Colour.WHITE)
        # _items mirrors the list-box contents index-for-index; keep in sync.
        self._items = GlobalStorage.get_storage().get_items()
        self._list_control.SetItems(GlobalStorage.get_storage().get_string_list(self._items))
        self.Bind(EVT_BUTTON, self._back_button_click, self._back_button)
        self.Bind(EVT_BUTTON, self._delete_button_click, self._delete_button)

    def _back_button_click(self, e):
        # Close the owning dialog.
        self.GetParent().EndModal(ID_OK)

    def _delete_button_click(self, e):
        # Delete the selected item from storage, then refresh both the cached
        # item list and the list box so indices stay aligned.
        sel = self._list_control.GetSelection()
        if sel == NOT_FOUND:
            return
        else:
            item = self._items[sel]
            GlobalStorage.get_storage().delete_item(item.Id)
            self._items = GlobalStorage.get_storage().get_items()
            self._list_control.SetItems(GlobalStorage.get_storage().get_string_list(self._items))
|
package com.doodl6.demo.thread.concurrent;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
/**
 * Demonstrates CountDownLatch: the main thread blocks until three worker
 * threads have each counted the latch down.
 */
public class CountDownLatchTest {

    public static void main(String[] args) throws InterruptedException {
        // Run the task on multiple threads; the main thread resumes only
        // after all of them have finished.
        CountDownLatch countDownLatch = new CountDownLatch(3);
        Thread threadA = new Thread(new MyRunnable(countDownLatch), "threadA");
        Thread threadB = new Thread(new MyRunnable(countDownLatch), "threadB");
        Thread threadC = new Thread(new MyRunnable(countDownLatch), "threadC");
        threadA.start();
        threadB.start();
        threadC.start();
        countDownLatch.await();
        System.out.println("子线程已全部执行完,继续执行主线程");
    }

    private static class MyRunnable implements Runnable {

        private final CountDownLatch countDownLatch;

        public MyRunnable(CountDownLatch countDownLatch) {
            this.countDownLatch = countDownLatch;
        }

        @Override
        public void run() {
            // Sleep for a random interval to simulate work.
            try {
                Thread.sleep(new Random().nextInt(3000));
            } catch (InterruptedException e) {
                // Bug fix: restore the interrupt flag so the interruption is
                // not silently swallowed; callers can observe it.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
            System.out.println(Thread.currentThread().getName() + " is finished");
            countDownLatch.countDown();
        }
    }
}
|
import React from "react"
import { Link } from "gatsby"
import SocialLinks from "../constants/socialLinks"
import { StaticImage } from "gatsby-plugin-image"
// Landing-page hero: headline, contact link, social links and a portrait image.
function Hero() {
  return (
    <header className="hero">
      <div className="section-center hero-center">
        <article className="hero-info">
          <div>
            <div className="underline"></div>
            <h1>Fullstack</h1>
            <h1>Developer</h1>
            <Link to="/contact" className="btn">
              contact
            </Link>
            <SocialLinks />
          </div>
        </article>
        <StaticImage
          src="../assets/images/resume.png"
          alt="portfolio"
          className="hero-img"
          placeholder="blurred"
        />
      </div>
    </header>
  )
}

export default Hero
|
import { initialCommunicationField } from 'shared/helpers/redux';
import * as NS from '../namespace';
// Initial redux state for this feature: all async communications idle and
// every modal closed with empty payload fields.
export const initial: NS.IReduxState = {
  communication: {
    // Request-status slots for the three async operations of this module.
    loadDepositAddress: initialCommunicationField,
    withdrawCoins: initialCommunicationField,
    withdrawCoinsVerify: initialCommunicationField,
  },
  ui: {
    modals: {
      // Deposit modal: carries the currency and deposit address once opened.
      depositCoins: {
        isOpen: false,
        currencyCode: null,
        address: null,
      },
      // Withdraw modal: only needs the currency being withdrawn.
      withdrawCoins: {
        isOpen: false,
        currencyCode: null,
      },
      // Simplex (fiat on-ramp) modal state.
      simplex: {
        isOpen: false,
        address: null,
        currency: '',
      }
    },
  },
};
|
package br.usp.poli.lta.nlpdep.execute.NLP.dependency;
import br.usp.poli.lta.nlpdep.execute.NLP.StructuredPushdownAutomatonNLP;
import br.usp.poli.lta.nlpdep.execute.NLP.output.NLPOutputToken;
import br.usp.poli.lta.nlpdep.execute.NLP.output.Node;
import br.usp.poli.lta.nlpdep.mwirth2ape.structure.Stack;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.apache.commons.io.FilenameUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.Scanner;
/**
 * Walks a parsed NLP tree in preorder and emits, as JSON, one "dependency
 * pattern" skeleton per non-terminal node (its constituents with empty
 * head/depRel slots to be filled in later).
 */
public class DepPatternsTree {
    // Bug fix: the logger was created with DepParseTree.class, so log lines
    // from this class were attributed to DepParseTree.
    private static final Logger logger = LoggerFactory.getLogger(DepPatternsTree.class);

    private Stack<String> stack = new Stack<>();
    private StructuredPushdownAutomatonNLP spaNLP;

    private JsonObject depJsnObj = new JsonObject();      // General output data
    private JsonArray depPtrnsJsnArray = new JsonArray(); // Dependency relations
    private JsonObject depSglPtrnJsnObj;                  // One dependency relation
    private JsonArray depPtrnCnstntsJsnArray;             // Constituents of one relation
    private JsonObject depSglCnstntJsnObj;                // Single constituent

    public DepPatternsTree(StructuredPushdownAutomatonNLP spaNLP) {
        this.spaNLP = spaNLP;
    }

    /**
     * Builds the full pattern JSON for the tree currently on top of this
     * thread's dependency stack.
     *
     * @return the JSON object with file metadata and the "DepPatterns" array,
     *         or {@code null} when the traversal reports failure
     */
    public JsonObject parsePreorderFromLeaf() {
        boolean result = false;
        long threadId = Thread.currentThread().getId();
        Path path = Paths.get(spaNLP.getAppProperties().getProperty("inputFileName"));
        // Output file name: <input base name>_<thread name>_<timestamp>.json
        depJsnObj.addProperty("File", FilenameUtils.removeExtension(path.getFileName().getName(0).toString()) + "_" +
                Thread.currentThread().getName() + "_" +
                LocalDateTime.now().format(DateTimeFormatter.ofPattern(("yyyyMMdd-HHmmss"))) + ".json");
        depJsnObj.addProperty("ThreadName", Thread.currentThread().getName());
        depJsnObj.addProperty("Date", LocalDateTime.now().format(DateTimeFormatter.ofPattern(("yyyy-MM-dd HH:mm:ss"))));
        // (A commented-out interactive null-check for the top-of-stack node was
        // removed here as dead code.)
        Node<NLPOutputToken> rootNode =
                ((DepStackElementNterm) (this.spaNLP.getDepStackList().getDepStackFromThreadID(threadId).top())).getNode();
        result = parsePreorder(rootNode);
        depJsnObj.add("DepPatterns", depPtrnsJsnArray);
        if (result) {
            return depJsnObj;
        } else {
            return null;
        }
    }

    /**
     * Preorder traversal: for each non-terminal node, emits a relation object
     * listing its children as constituents; then recurses into the children.
     *
     * @return false as soon as any recursive call fails, true otherwise
     */
    final boolean parsePreorder(Node<NLPOutputToken> node) {
        boolean result = true;
        if (node.getData().getType().equals("nterm")) {
            NLPOutputToken currentNlpOutputToken = node.getData();
            logger.debug("DepGetPattern processando nterm {}",
                    currentNlpOutputToken.getValue());
            depSglPtrnJsnObj = new JsonObject();
            depSglPtrnJsnObj.addProperty("_comment", "");
            depSglPtrnJsnObj.addProperty("value", currentNlpOutputToken.getValue());
            depSglPtrnJsnObj.addProperty("headDirection", "");
            depPtrnCnstntsJsnArray = new JsonArray();
            int counter = 1;
            // Each direct child becomes a constituent with empty dependency slots.
            for (Node<NLPOutputToken> currentChild : node.getChildren()) {
                depSglCnstntJsnObj = new JsonObject();
                depSglCnstntJsnObj.addProperty("id", counter);
                depSglCnstntJsnObj.addProperty("value", currentChild.getData().getValue());
                depSglCnstntJsnObj.addProperty("type", currentChild.getData().getType());
                depSglCnstntJsnObj.addProperty("head", "");
                depSglCnstntJsnObj.addProperty("depRel", "");
                JsonArray leftDeps = new JsonArray();
                JsonArray rightDeps = new JsonArray();
                depSglCnstntJsnObj.add("leftDeps", leftDeps);
                depSglCnstntJsnObj.add("rightDeps", rightDeps);
                depPtrnCnstntsJsnArray.add(depSglCnstntJsnObj);
                counter++;
            }
            // A single constituent is trivially the main one.
            if (depPtrnCnstntsJsnArray.size() == 1) {
                depSglPtrnJsnObj.addProperty("mainConstituent", 1);
            } else {
                depSglPtrnJsnObj.addProperty("mainConstituent", "");
            }
            depSglPtrnJsnObj.add("depConstituents", depPtrnCnstntsJsnArray);
        } else {
            logger.debug("DepGetPattern não é nterm! É {}", node.getData().getType());
        }
        // NOTE(review): this add is unconditional, so for a non-nterm node the
        // PREVIOUS relation object (or null on the first call) is appended
        // again. Preserved as-is — confirm whether it should be inside the if.
        depPtrnsJsnArray.add(depSglPtrnJsnObj);
        for (Node<NLPOutputToken> currentChild : node.getChildren()) {
            if (!parsePreorder(currentChild)) {
                result = false;
                break;
            }
        }
        return result;
    }
}
|
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <CL/cl.h>
#define MAX_PLATFORMS 32
#define MAX_DEVICES   32
#define MAX_BINARIES  32

/* Trivial kernel source whose compiled binaries are round-tripped below. */
char kernel[] = "__kernel void k() {\n return;\n}";

/*
 * Builds a program from source, extracts its device binaries, and checks
 * clCreateProgramWithBinary: one positive round-trip plus two negative tests
 * (NULL device, duplicate device). All error paths funnel through
 * FREE_AND_EXIT, which releases whatever was allocated.
 */
int
main(void){
  cl_int err;
  cl_platform_id platforms[MAX_PLATFORMS];
  cl_uint nplatforms;
  cl_device_id devices[MAX_DEVICES + 1]; // + 1 for duplicate test
  cl_device_id device_id0;
  cl_uint num_devices;
  cl_uint i;
  size_t num_binaries;
  const unsigned char **binaries = NULL;
  size_t *binary_sizes = NULL;
  size_t num_bytes_copied;
  cl_int binary_statuses[MAX_BINARIES];
  cl_int binary_statuses2[MAX_BINARIES];
  cl_program program = NULL;
  cl_program program_with_binary = NULL;

  err = clGetPlatformIDs(MAX_PLATFORMS, platforms, &nplatforms);
  if (err != CL_SUCCESS && !nplatforms)
    return EXIT_FAILURE;

  err = clGetDeviceIDs(platforms[0], CL_DEVICE_TYPE_ALL, MAX_DEVICES,
                       devices, &num_devices);
  if (err != CL_SUCCESS)
    return EXIT_FAILURE;

  cl_context context = clCreateContext(NULL, num_devices, devices, NULL, NULL, &err);
  if (err != CL_SUCCESS)
    return EXIT_FAILURE;

  size_t kernel_size = strlen(kernel);
  char* kernel_buffer = kernel;

  program = clCreateProgramWithSource(context, 1, (const char**)&kernel_buffer,
                                      &kernel_size, &err);
  if (err != CL_SUCCESS)
    return EXIT_FAILURE;

  err = clBuildProgram(program, num_devices, devices, NULL, NULL, NULL);
  if (err != CL_SUCCESS)
    return EXIT_FAILURE;

  /* CL_PROGRAM_BINARY_SIZES returns one size_t per device binary. */
  err = clGetProgramInfo(program, CL_PROGRAM_BINARY_SIZES, 0, 0, &num_binaries);
  if (err != CL_SUCCESS)
    goto FREE_AND_EXIT;

  num_binaries = num_binaries/sizeof(size_t);
  binary_sizes = (size_t*)malloc(num_binaries * sizeof(size_t));
  binaries = (const unsigned char**)calloc(num_binaries, sizeof(unsigned char*));

  err = clGetProgramInfo(program, CL_PROGRAM_BINARY_SIZES,
                         num_binaries*sizeof(size_t), binary_sizes ,
                         &num_bytes_copied);
  if (err != CL_SUCCESS)
    goto FREE_AND_EXIT;

  for (i = 0; i < num_binaries; ++i)
    binaries[i] = (const unsigned char*) malloc(binary_sizes[i] *
                                                sizeof(const unsigned char));

  err = clGetProgramInfo(program, CL_PROGRAM_BINARIES,
                         num_binaries*sizeof(char*), binaries, &num_bytes_copied);
  if (err != CL_SUCCESS)
    goto FREE_AND_EXIT;

  cl_int num = num_binaries < num_devices ? num_binaries : num_devices;
  if (num == 0)
    {
      err = !CL_SUCCESS;
      goto FREE_AND_EXIT;
    }

  /* Positive test: recreate the program from the extracted binaries. */
  program_with_binary = clCreateProgramWithBinary(context, num, devices, binary_sizes,
                                                  binaries, binary_statuses, &err);
  if (err != CL_SUCCESS)
    goto FREE_AND_EXIT;

  clReleaseProgram(program_with_binary);
  /* Bug fix: NULL the handle after releasing it. The status-check loop just
   * below can goto FREE_AND_EXIT, where a non-NULL handle would be released
   * a second time (double release of a dangling cl_program). */
  program_with_binary = NULL;
  for (i = 0; i < num; i++)
    {
      if (binary_statuses[i] != CL_SUCCESS)
        {
          err = !CL_SUCCESS;
          goto FREE_AND_EXIT;
        }
    }

  // negative test1: invalid device
  device_id0 = devices[0];
  devices[0] = NULL; // invalid device
  program_with_binary = clCreateProgramWithBinary(context, num, devices, binary_sizes,
                                                  binaries, binary_statuses, &err);

  if (err != CL_INVALID_DEVICE || program_with_binary != NULL)
    {
      err = !CL_SUCCESS;
      goto FREE_AND_EXIT;
    }
  err = CL_SUCCESS;

  devices[0] = device_id0;
  for (i = 0; i < num_binaries; ++i) free((void*)binaries[i]);
  free(binary_sizes);
  free(binaries);

  // negative test2: duplicate device
  num_binaries = 2;
  devices[1] = devices[0]; // duplicate
  binary_sizes = (size_t*)malloc(num_binaries * sizeof(size_t));
  binaries = (const unsigned char**)calloc(num_binaries, sizeof(unsigned char*));

  err = clGetProgramInfo(program, CL_PROGRAM_BINARY_SIZES, 1*sizeof(size_t),
                         binary_sizes , &num_bytes_copied);
  if (err != CL_SUCCESS)
    goto FREE_AND_EXIT;

  binary_sizes[1] = binary_sizes[0];
  binaries[0] = (const unsigned char*) malloc(binary_sizes[0] *
                                              sizeof(const unsigned char));
  binaries[1] = (const unsigned char*) malloc(binary_sizes[1] *
                                              sizeof(const unsigned char));

  err = clGetProgramInfo(program, CL_PROGRAM_BINARIES, 1 * sizeof(char*),
                         binaries, &num_bytes_copied);
  if (err != CL_SUCCESS)
    goto FREE_AND_EXIT;

  memcpy((void*)binaries[1], (void*)binaries[0], binary_sizes[0]);
  program_with_binary = clCreateProgramWithBinary(context, 2, devices, binary_sizes,
                                                  binaries, binary_statuses2, &err);
  if (err != CL_INVALID_DEVICE || program_with_binary != NULL)
    {
      err = !CL_SUCCESS;
      goto FREE_AND_EXIT;
    }
  err = CL_SUCCESS;

FREE_AND_EXIT:
  // Free resources
  for (i = 0; i < num_binaries; ++i)
    if (binaries)
      if(binaries[i])
        free((void*)binaries[i]);

  if (binary_sizes)
    free(binary_sizes);
  if (binaries)
    free(binaries);
  if (program)
    clReleaseProgram(program);
  if (program_with_binary)
    clReleaseProgram(program_with_binary);
  return err == CL_SUCCESS ? EXIT_SUCCESS : EXIT_FAILURE;
}
|
<reponame>EIDSS/EIDSS-Legacy<filename>EIDSS v6.1/Android/app/src/main/java/com/bv/eidss/web/VectorASCampaign.java
package com.bv.eidss.web;
import com.bv.eidss.model.ASCampaign;
import java.util.ArrayList;
/**
 * Typed list of {@code ASCampaign} elements; legacy "Vector"-named wrapper
 * kept as its own class so serializers can reference it by name.
 */
@SuppressWarnings("serial")
public class VectorASCampaign extends ArrayList<ASCampaign> {
    /** Creates an empty campaign list. */
    public VectorASCampaign(){}
}
|
/**
* Programmatically invoke APIs to populate database
*
*/
var request = require("request")
var async = require("async")
var fs = require("fs");
var path = require("path");
var csvjson = require('csvjson');
var _ = require('lodash');
/**
 * POSTs `payload` to `<baseUrl>/add` and reports the outcome via `callback`.
 * Relies on module-level globals `baseUrl` and `dryRun` (defined elsewhere
 * in this script). In dry-run mode no request is made.
 *
 * @param nIter    iteration number, used only for logging
 * @param payload  JSON string to send as the request body
 * @param callback node-style callback(err, result)
 */
var invoke_add = function (nIter, payload, callback) {
    var url = baseUrl + "/add"
    var headerVars = {
        "Content-Type": "application/json",
        "Authorization": "Bearer ",
        "x-authenticated-user-token": ""
    }
    if (dryRun) {
        console.log("#" + nIter + " DryRun: Will invoke " + url + " with payload " + payload)
        callback(null, nIter)
    } else {
        request(url, {
            method: "POST",
            body: payload,
            headers: headerVars
        }, function (err, response, body) {
            if (err) {
                // Bug fix: handle the transport error BEFORE parsing. The old
                // code ran JSON.parse(body) first, which threw on a failed
                // request where body is undefined.
                console.error(err)
                console.log(" error for " + payload)
                callback(err)
                return
            }
            var apiResponse
            try {
                apiResponse = JSON.parse(body)
            } catch (parseErr) {
                // Malformed response body — report it instead of crashing.
                console.error(parseErr)
                callback(parseErr)
                return
            }
            // NOTE(review): comparing the parsed object to "" is always true
            // for objects, so the success branch below looks unreachable.
            // Preserved as-is — confirm the intended response schema.
            var responseErr = apiResponse
            if (responseErr != "") {
                callback(responseErr, null)
            } else {
                console.log(" success for " + payload, " " + apiResponse.result)
                callback(null, apiResponse.result)
            }
        })
    }
}
// Appends `value` to `targetArray`, then signals completion through `done`
// (async.js-style helper used by the batch tasks below).
var addToArr = function (targetArray, value, done) {
    targetArray.push(value)
    done()
}
/**
 * Builds one add-API task per CSV row and appends it to `tasks`.
 *
 * @param tasks          array the prepared task closures are pushed onto
 * @param entityType     key under request{} that holds the entity attributes
 * @param static_payload base payload template (deep-copied per row)
 * @param arrDynamicData array of row objects parsed from the CSV
 * @param someEntity     unused in this body -- kept for call compatibility
 *
 * NOTE(review): a bracketed list field with only a single value (no comma)
 * falls through without building a location object or deleting
 * "locationids" -- confirm whether single-location rows are expected.
 */
var populate_add_tasks = function (tasks, entityType, static_payload, arrDynamicData, someEntity) {
    var allPayloads = []
    for (var itr = 0; itr < arrDynamicData.length; itr++) {
        // Deep-copy the template and the row so per-row mutation can't leak.
        var completePayload = JSON.parse(JSON.stringify(static_payload))
        var oneCSVRow = JSON.parse(JSON.stringify(arrDynamicData[itr]))
        //console.log("PAYLOAD Complete", JSON.stringify(static_payload))
        //console.log("one row = " + JSON.stringify(oneCSVRow))
        // Merge the CSV columns over the template's entity attributes.
        var attrsMerged = Object.assign(completePayload["request"][entityType], oneCSVRow)
        completePayload["request"][entityType] = attrsMerged
        //console.log(itr + " - payload = " + JSON.stringify(completePayload))
        /*
        id,channel,createdby,createddate,
        datetime,description,email,externalid,
        hashtagid,homeurl,imgurl,isapproved,
        isdefault,isrootorg,isssoenabled,keys,
        locationid,locationids,noofmembers,orgcode,
        orgname,orgtype,orgtypeid,parentorgid,
        preferredlanguage,provider,rootorgid,slug,
        status
        */
        // Have externalid, location, orgname
        // Drop every CSV column the registry should not receive; what
        // remains is (at least) externalid, locationids and orgname.
        var dataPortion = completePayload["request"][entityType]
        var toDeleteCols = ["id", "channel", "createdby", "createddate", "datetime", "description",
            "email", "hashtagid", "homeurl", "imgurl",
            "isapproved", "isdefault", "isrootorg", "isssoenabled",
            "keys", "locationid", "noofmembers", "orgcode",
            "orgtype", "orgtypeid", "parentorgid", "preferredlanguage",
            "provider", "rootorgid", "slug", "status"]
        toDeleteCols.forEach(field => {
            delete dataPortion[field]
        })
        // Expand bracketed list fields (e.g. locationids="['a','b']") into a
        // structured location object by looking each id up in g_locationObj.
        for (var field in dataPortion) {
            var fieldVal = dataPortion[field]
            if (fieldVal.indexOf("[") != -1) {
                var myArr = new Array()
                var individualItems = fieldVal.replace(/\[|\]/g, "")
                //console.log("Expect [] to be removed " + JSON.stringify(individualItems) + " flag = " + individualItems.indexOf(","));
                if (individualItems.indexOf(",") != -1) {
                    console.log("Array contains multiple values")
                    // More than one item
                    // For every locationIds, construct and lookup locationId
                    var locationObj = {
                        "block": "",
                        "blockId": "",
                        "district": "",
                        "districtId": "",
                        "state": "",
                        "stateId": ""
                    }
                    var arrItems = individualItems.split(",")
                    arrItems.forEach(element => {
                        // Strip quotes/whitespace so the id matches the map key.
                        var elementWoQuote = element.replace(/\'/g, "")
                        elementWoQuote = elementWoQuote.trim()
                        myArr.push(element);
                        var thisLoc = g_locationObj[elementWoQuote]
                        if (thisLoc === undefined) {
                            console.log("element not found " + elementWoQuote)
                        } else {
                            // Slot the location into the field matching its type.
                            var name = thisLoc["name"]
                            var type = thisLoc["type"]
                            console.log("type is " + type)
                            if (type === 'district') {
                                locationObj["district"] = name
                                locationObj["districtId"] = elementWoQuote
                            } else if (type === 'block') {
                                locationObj["block"] = name
                                locationObj["blockId"] = elementWoQuote
                            } else if (type === 'state') {
                                locationObj["state"] = name
                                locationObj["stateId"] = elementWoQuote
                            }
                        }
                    });
                    dataPortion["location"] = locationObj
                    delete dataPortion["locationids"]
                    //console.log("Adding location object" + JSON.stringify(completePayload))
                }
            }
        }
        // If there are field specific code, set here.
        // console.log(completePayload)
        // Any extra column to delete from the csv goes here
        //delete dataPortion.ParentCode
        allPayloads.push(completePayload)
    }
    console.log("Lengths of tasks = " + arrDynamicData.length + " and " + allPayloads.length)
    //console.log(JSON.stringify(allPayloads))
    // Wrap each payload in a task closure. Errors are logged but NOT
    // propagated, so one bad row doesn't abort the whole run.
    async.forEachOf(allPayloads, function (onePayload, nIter, callback) {
        tasks.push(
            (cb) => invoke_add(nIter, JSON.stringify(onePayload), function (err, data) {
                var returnData = JSON.stringify(err)
                if (err != null) {
                    console.log("Return data = " + returnData + " for payload " + JSON.stringify(onePayload));
                }
                // Do not cascade the error - fail for certain rows, but don't stop processing.
                cb(null, data)
            })
        )
        callback()
    })
}
/**
 * Runs the prepared task functions one at a time (async.series -- despite
 * the PARALLEL_LIMIT constant, execution is currently serial) and reports
 * overall success or failure through `cb`.
 */
var execute_tasks = function (tasks, fileName, cb) {
    //async.parallelLimit(tasks, PARALLEL_LIMIT, function (err, callback) {
    async.series(tasks, function (err, results) {
        if (err) {
            console.error(err)
            console.log("One or more errors occurred.")
            cb(err)
            return
        }
        console.log("Executed tasks")
        cb(null)
    })
}
// csvjson parsing options.
var options = {
    delimiter: ',', // optional
    quote: '"' // optional
};
// Reads the named CSV file (relative to this script's directory) and
// converts it into an array of row objects via csvjson.
var csvToJson = function (csvFileName) {
    var raw = fs.readFileSync(path.join(__dirname, csvFileName), { encoding: 'utf8' });
    return csvjson.toObject(raw, options);
}
// Global lookup: location id -> { type, name, code }.
var g_locationObj = {}
// Loads the production location CSV into g_locationObj so rows can resolve
// their location ids later.
var createLocationMap = function () {
    csvToJson("prod_location_data_full.csv").forEach(function (row) {
        g_locationObj[row["id"]] = {
            "type": row["type"],
            "name": row["name"],
            "code": row["code"]
        }
    })
}
// This is the default payload
var addApiPayload = {
    "id": "open-saber.registry.create",
    "request": {
    }
}
// The subject that we have schematized
var entityType = "School"
// Seed an empty attribute object for the entity; CSV rows are merged into it.
addApiPayload.request[entityType] = {}
// The URL where the registry is running
var baseUrl = "http://localhost:9080"
// Whether you want to run in dryRun mode
// true - API will not be invoked.
// false - API will be invoked.
var dryRun = false
// Only referenced from the commented-out parallelLimit call in execute_tasks.
var PARALLEL_LIMIT = 1;
// Appears unused in this part of the file -- TODO confirm before removing.
var dataEntities = {}
// Builds one add-task per row of the schools CSV, then executes them.
// cb: node-style completion callback.
function populate(cb) {
    var tasks = [];
    var rows = csvToJson('odisha_schools.csv')
    populate_add_tasks(tasks, entityType, addApiPayload, rows)
    console.log("Total number of students = " + tasks.length)
    execute_tasks(tasks, "data.json", cb)
}
createLocationMap()
// NOTE(review): this timer does not delay populate() below -- populate runs
// immediately. createLocationMap() is synchronous (readFileSync), so no
// waiting is actually required; the timer only logs.
setTimeout(function() {
    console.log('Waited and continuing now')
}, 3000);
populate(function (err, result) {
    if (err) {
        // Log before returning -- in the original this statement was placed
        // after the return and was unreachable.
        console.log("Errorrrrr==>", err);
        return (err);
    }
    console.log("Finished successfully");
    return result;
})
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
# Prepend the Xilinx Vitis/Vivado tool directories to PATH (define it
# outright if the environment has none).
if [ -z "$PATH" ]; then
  PATH=/afs/slac.stanford.edu/g/reseng/xilinx/2020.1/Vitis/2020.1/bin:/afs/slac.stanford.edu/g/reseng/xilinx/2020.1/Vivado/2020.1/ids_lite/ISE/bin/lin64:/afs/slac.stanford.edu/g/reseng/xilinx/2020.1/Vivado/2020.1/bin
else
  PATH=/afs/slac.stanford.edu/g/reseng/xilinx/2020.1/Vitis/2020.1/bin:/afs/slac.stanford.edu/g/reseng/xilinx/2020.1/Vivado/2020.1/ids_lite/ISE/bin/lin64:/afs/slac.stanford.edu/g/reseng/xilinx/2020.1/Vivado/2020.1/bin:$PATH
fi
export PATH

# Same dance for LD_LIBRARY_PATH (left empty; tools resolve their own libs).
if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH

# Run from the synthesis run directory; all output goes to runme.log.
HD_PWD='/u1/ddoering/localGitAtrdsrv304/dune-wib-firmware/wib_sim/wib_zu6cg/wib_zu6cg.runs/bd_c443_bs_switch_1_0_synth_1'
cd "$HD_PWD"

HD_LOG=runme.log
/bin/touch $HD_LOG

ISEStep="./ISEWrap.sh"
# EAStep: runs one tool step through the ISEWrap.sh wrapper, appending all
# output to the run log, and aborts the script when the step fails.
EAStep()
{
     $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
     RETVAL=$?
     if [ $RETVAL -ne 0 ]
     then
       # Exit with the failing step's status. The original used a bare
       # `exit`, which returns the status of the preceding `[` test (0)
       # and would mask the failure from callers of this script.
       exit $RETVAL
     fi
}
EAStep vivado -log bd_c443_bs_switch_1_0.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source bd_c443_bs_switch_1_0.tcl
|
#ifndef FACE3D_H
#define FACE3D_H

#include <vector>

#include "Vector3D.h"
// Triangle face holding three vertex indices (f1, f2, f3).
struct Face3D
{
    int f1,f2,f3;

    // Default constructor: leaves the indices uninitialized, matching the
    // original behavior (callers are expected to set them afterwards).
    Face3D()
    {
    }

    Face3D(int a, int b, int c)
    {
        f1=a;
        f2=b;
        f3=c;
    }

    // Builds a face from a 3-element float vector (e.g. parsed file data);
    // any other size yields the degenerate face (0,0,0).
    Face3D(const std::vector <float> &face)
    {
        if(face.size() == 3)
        {
            // Explicit casts: the source data is float but indices are int
            // (the original relied on implicit narrowing).
            f1 = static_cast<int>(face[0]);
            f2 = static_cast<int>(face[1]);
            f3 = static_cast<int>(face[2]);
        }
        else
        {
            f1=0;
            f2=0;
            f3=0;
        }
    } // (stray semicolon after this body removed)

    // Accessors are const: they only read the indices.
    int getF1() const
    {
        return f1;
    }
    int getF2() const
    {
        return f2;
    }
    int getF3() const
    {
        return f3;
    }

    void setF1(int a)
    {
        f1=a;
    }
    void setF2(int b)
    {
        f2=b;
    }
    void setF3(int c)
    {
        f3=c;
    }

    // Normal of the triangle (v1,v2,v3): cross of its two edge vectors.
    // NOTE(review): not normalized -- magnitude is twice the triangle area;
    // confirm callers expect an unnormalized normal.
    inline Vector3D FaceNormal(const Vector3D& vec1, const Vector3D& vec2,const Vector3D& vec3)
    {
        Vector3D n;
        return n.Cross(vec2 - vec1,vec3 - vec1);
    }
};
#endif // FACE3D_H
|
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const app = express();
const fs = require('fs');

// Use body parser middleware
app.use(bodyParser.json());
// Use cors middleware
app.use(cors());

// Appears unused in this part of the file -- TODO confirm before removing.
let data;
// In-memory currency balances, loaded asynchronously from userData.json.
let balance;
// Load the persisted balances at startup. Until this async read completes,
// /balance and /exchange will see `balance` as undefined.
fs.readFile('userData.json', 'utf8', (err, jsonString) => {
  if (err) {
    console.log("File read failed:", err)
    return
  }
  balance = JSON.parse(jsonString);
});

// Persists the current balances back to disk. In the original this ran once
// at module load -- before the async read above had populated `balance` --
// so it wrote "undefined" into the file. It now runs after each exchange.
function persistBalance() {
  fs.writeFile('userData.json', JSON.stringify(balance), err => {
    if (err) {
      console.log('Error writing file', err)
    } else {
      console.log('Successfully wrote file')
    }
  });
}

// get balance
app.get('/balance', (req, res) => {
  res.send(balance);
});

// Exchange currency: move `amount` units from one bucket to another.
// NOTE(review): no exchange rate is applied -- the same numeric amount is
// credited to the target currency; confirm this is intended.
app.post('/exchange', (req, res) => {
  let fromCurrency = req.body.from;
  let toCurrency = req.body.to;
  let amount = req.body.amount;
  if (balance[fromCurrency] >= amount) {
    balance[fromCurrency] -= amount;
    balance[toCurrency] += amount;
    persistBalance();
    res.send({message: 'Exchange successful!'});
  } else {
    res.send({message: 'Insufficient funds!'});
  }
});

// Start the server
app.listen(3000, () => {
  console.log(`The server is running on port 3000`);
});
<filename>vendor/github.com/terraform-providers/terraform-provider-azuread/azuread/helpers/graph/object_resource.go
package graph
import (
"fmt"
"strings"
"github.com/hashicorp/go-uuid"
)
// ObjectSubResourceId identifies a sub-resource of an AAD object as
// {objectId}/{type}/{subId}. objectId and subId are UUIDs; only Type is
// exported because the ids are set via the constructor/parser.
type ObjectSubResourceId struct {
	objectId string
	subId    string
	Type     string
}
// String renders the id in its canonical "{objectId}/{type}/{subId}" form.
func (id ObjectSubResourceId) String() string {
	return fmt.Sprintf("%s/%s/%s", id.objectId, id.Type, id.subId)
}
// ParseObjectSubResourceId parses an ID of the form {objectId}/{type}/{subId},
// validating that both the object ID and sub ID are UUIDs and that the type
// segment equals expectedType.
func ParseObjectSubResourceId(idString, expectedType string) (ObjectSubResourceId, error) {
	parts := strings.Split(idString, "/")
	if len(parts) != 3 {
		// Original message described the wrong shape ({objectId}/{keyId}).
		return ObjectSubResourceId{}, fmt.Errorf("Object Resource ID should be in the format {objectId}/{type}/{subId} - but got %q", idString)
	}

	id := ObjectSubResourceId{
		objectId: parts[0],
		Type:     parts[1],
		subId:    parts[2],
	}

	if _, err := uuid.ParseUUID(id.objectId); err != nil {
		return ObjectSubResourceId{}, fmt.Errorf("Object ID isn't a valid UUID (%q): %+v", id.objectId, err)
	}

	if id.Type == "" {
		return ObjectSubResourceId{}, fmt.Errorf("Type in {objectID}/{type}/{subID} should not be blank")
	}

	if id.Type != expectedType {
		// Report the actual type segment -- the original printed parts[2]
		// (the sub ID) here, producing a misleading error.
		return ObjectSubResourceId{}, fmt.Errorf("Type in {objectID}/{type}/{subID} was expected to be %s, got %s", expectedType, id.Type)
	}

	if _, err := uuid.ParseUUID(id.subId); err != nil {
		return ObjectSubResourceId{}, fmt.Errorf("Object Sub Resource ID isn't a valid UUID (%q): %+v", id.subId, err)
	}

	return id, nil
}
// ObjectSubResourceIdFrom assembles an ObjectSubResourceId from its parts
// without any validation (use ParseObjectSubResourceId for validated input).
func ObjectSubResourceIdFrom(objectId, typeId, subId string) ObjectSubResourceId {
	var id ObjectSubResourceId
	id.objectId = objectId
	id.Type = typeId
	id.subId = subId
	return id
}
|
<reponame>maven-nar/cpptasks-parallel
/*
Licensed to the Ant-Contrib Project under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The Ant-Contrib Project licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.maven_nar.taskdocs;
import com.sun.javadoc.*;
import org.xml.sax.*;
import org.xml.sax.helpers.AttributesImpl;
import org.xml.sax.helpers.DefaultHandler;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.Source;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.File;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
/**
 * This doclet writes an XML representation of the
 * Ant related Javadoc through an XSLT transform that creates xdoc files.
 *
 */
public final class TaskDoclet {
    /**
     * Process Javadoc content.
     * @param root root of javadoc content.
     * @return true if successful
     * @throws Exception IO exceptions and the like.
     */
    public static boolean start(RootDoc root) throws Exception {
        SAXTransformerFactory tf = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
        Source typeStyle = new StreamSource(new File("src/taskdocs/resources/com/github/maven_nar/taskdocs/element.xslt"));
        //
        //   replace with tf.newTransformerHandler() if you want to see raw generated XML.
        TransformerHandler typeHandler = tf.newTransformerHandler(typeStyle);
        Map referencedTypes = new HashMap();
        Map documentedTypes = new HashMap();
        // First pass: document every public, concrete Ant task/type.
        ClassDoc[] classes = root.classes();
        for (int i = 0; i < classes.length; ++i) {
            ClassDoc clazz = classes[i];
            if (clazz.isPublic() && !clazz.isAbstract()) {
                if (isTask(clazz) || isType(clazz)) {
                    writeClass(typeHandler, clazz, referencedTypes);
                    documentedTypes.put(clazz.qualifiedTypeName(), clazz);
                }
            }
        }
        // Second pass: document referenced types that were not covered above
        // (skipping anything from Ant itself).
        // NOTE(review): additionalTypes collects further references but is
        // never processed -- types referenced only by this second pass stay
        // undocumented; confirm whether that is intended.
        Map additionalTypes = new HashMap();
        for (Iterator iter = referencedTypes.keySet().iterator(); iter.hasNext();) {
            String referencedName = (String) iter.next();
            if (documentedTypes.get(referencedName) == null) {
                ClassDoc referencedClass = root.classNamed(referencedName);
                if (referencedClass != null) {
                    if (!referencedClass.qualifiedTypeName().startsWith("org.apache.tools.ant")) {
                        writeClass(typeHandler, referencedClass, additionalTypes);
                        documentedTypes.put(referencedClass.qualifiedTypeName(), referencedClass);
                    }
                }
            }
        }
        return true;
    }

    /**
     * Determine if class is an Ant task.
     * @param clazz class to test.
     * @return true if class is an Ant task.
     */
    private static boolean isTask(final ClassDoc clazz) {
        if (clazz == null) return false;
        if ("org.apache.tools.ant.Task".equals(clazz.qualifiedTypeName())) {
            // (leftover System.out.print("true") debug output removed)
            return true;
        }
        return isTask(clazz.superclass());
    }

    /**
     * Determine if class is an Ant type.
     * @param clazz class to test.
     * @return true if class is an Ant type.
     */
    private static boolean isType(final ClassDoc clazz) {
        if (clazz == null) return false;
        if ("org.apache.tools.ant.types.DataType".equals(clazz.qualifiedTypeName())) {
            return true;
        }
        return isType(clazz.superclass());
    }

    /**
     * Namespace URI for class description elements.
     */
    private static final String NS_URI = "http://ant-contrib.sf.net/taskdocs";

    /**
     * Namespace URI for XHTML elements.
     */
    private static final String XHTML_URI = "http://www.w3.org/1999/xhtml";

    /**
     * Write a Java type.
     * @param tf content handler.
     * @param type documented type.
     * @throws Exception if IO or other exception.
     */
    private static void writeType(final TransformerHandler tf, final Type type) throws Exception {
        AttributesImpl attributes = new AttributesImpl();
        attributes.addAttribute(null, "name", "name", "CDATA", type.simpleTypeName());
        attributes.addAttribute(null, "qualifiedTypeName", "qualifiedTypeName", "CDATA", type.qualifiedTypeName());
        tf.startElement(NS_URI, "type", "type", attributes);
        ClassDoc typeDoc = type.asClassDoc();
        if (typeDoc != null && typeDoc.commentText() != null && typeDoc.commentText().length() > 0) {
            writeDescription(tf, typeDoc.commentText());
        } else {
            // No Javadoc available: fall back to the bare type name.
            tf.characters(type.typeName().toCharArray(), 0, type.typeName().length());
        }
        tf.endElement(NS_URI, "type", "type");
    }

    /**
     * Write an Ant task or type attribute (aka property).
     * @param tf content handler.
     * @param method set method for property.
     * @throws Exception if IO or other exception.
     */
    private static void writeAttribute(final TransformerHandler tf, final MethodDoc method) throws Exception {
        AttributesImpl attributes = new AttributesImpl();
        // Attribute name is the setter name minus "set", lowercased.
        attributes.addAttribute(null, "name", "name", "CDATA", method.name().substring(3).toLowerCase(Locale.US));
        tf.startElement(NS_URI, "attribute", "attribute", attributes);
        writeType(tf, method.parameters()[0].type());
        attributes.clear();
        tf.startElement(NS_URI, "comment", "comment", attributes);
        writeDescription(tf, method.commentText());
        tf.endElement(NS_URI, "comment", "comment");
        tf.endElement(NS_URI, "attribute", "attribute");
    }

    /**
     * Write an Ant nested element.
     * @param tf content handler.
     * @param method method to add element to task or type.
     * @param name name of nested element.
     * @param type type of nested element.
     * @param referencedTypes map of types referenced in documentation.
     * @throws Exception if IO or other exception.
     */
    private static void writeChild(final TransformerHandler tf,
                                   final MethodDoc method,
                                   final String name,
                                   final Type type,
                                   final Map referencedTypes) throws Exception {
        AttributesImpl attributes = new AttributesImpl();
        attributes.addAttribute(null, "name", "name", "CDATA", name.toLowerCase(Locale.US));
        tf.startElement(NS_URI, "child", "child", attributes);
        attributes.clear();
        tf.startElement(NS_URI, "comment", "comment", attributes);
        writeDescription(tf, method.commentText());
        tf.endElement(NS_URI, "comment", "comment");
        writeType(tf, type);
        tf.endElement(NS_URI, "child", "child");
        referencedTypes.put(type.qualifiedTypeName(), type);
    }

    /**
     * Redirects parsed XHTML comment into output stream.
     * Drops start and end document and body element.
     */
    private static class RedirectHandler extends DefaultHandler {
        /**
         * output handler.
         */
        private final ContentHandler tf;

        /**
         * Create new instance.
         * @param tf output handler, may not be null.
         */
        public RedirectHandler(final TransformerHandler tf) {
            if (tf == null) { throw new IllegalArgumentException("tf"); }
            this.tf = tf;
        }

        /** {@inheritDoc} */
        public void characters(final char[] ch, final int start, final int length) throws SAXException {
            tf.characters(ch, start, length);
        }

        /** {@inheritDoc} */
        public void endDocument() {
        }

        /** {@inheritDoc} */
        public void endElement(final String namespaceURI,
                               final String localName,
                               final String qName) throws SAXException {
            if (!"body".equals(localName)) {
                tf.endElement(namespaceURI, localName, qName);
            }
        }

        /** {@inheritDoc} */
        public void endPrefixMapping(final String prefix) throws SAXException {
            tf.endPrefixMapping(prefix);
        }

        /** {@inheritDoc} */
        public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
            tf.ignorableWhitespace(ch, start, length);
        }

        /** {@inheritDoc} */
        public void processingInstruction(final String target, final String data) throws SAXException {
            tf.processingInstruction(target, data);
        }

        /** {@inheritDoc} */
        public void setDocumentLocator(final Locator locator) {
            tf.setDocumentLocator(locator);
        }

        /** {@inheritDoc} */
        public void skippedEntity(String name) throws SAXException {
            tf.skippedEntity(name);
        }

        /** {@inheritDoc} */
        public void startDocument() {
        }

        /** {@inheritDoc} */
        public void startElement(final String namespaceURI,
                                 final String localName,
                                 final String qName,
                                 final Attributes atts) throws SAXException {
            if (!"body".equals(localName)) {
                tf.startElement(namespaceURI, localName, qName, atts);
            }
        }

        /** {@inheritDoc} */
        public void startPrefixMapping(String prefix, String uri) throws SAXException {
            tf.startPrefixMapping(prefix, uri);
        }
    }

    /**
     * Writes description.
     * @param tf destination.
     * @param description description, may contain XHTML elements.
     * @throws SAXException if IO or other exception.
     */
    private static void writeDescription(final TransformerHandler tf,
                                         final String description) throws SAXException {
        if (description.indexOf('<') == -1) {
            tf.characters(description.toCharArray(), 0, description.length());
        } else {
            //
            //   attempt to fabricate an XHTML fragment
            //
            StringBuffer buf = new StringBuffer(description);
            buf.insert(0, "<body xmlns='" + XHTML_URI + "'>");
            buf.append("</body>");
            try {
                SAXParserFactory sf = SAXParserFactory.newInstance();
                sf.setNamespaceAware(true);
                SAXParser parser = sf.newSAXParser();
                parser.parse(new InputSource(new StringReader(buf.toString())), new RedirectHandler(tf));
            } catch (Exception ex) {
                // Malformed XHTML: emit the parser error text in place of
                // the description rather than aborting the run.
                tf.characters(ex.toString().toCharArray(), 0, ex.toString().length());
            }
        }
    }

    /**
     * Write all Ant attributes in this class and superclasses.
     * @param tf destination.
     * @param clazz class documentation.
     * @param processed map of processed methods.
     * @param referencedTypes map of referenced types.
     * @throws Exception if IO or other exception.
     */
    private static void writeAttributes(final TransformerHandler tf,
                                        final ClassDoc clazz,
                                        final Map processed,
                                        final Map referencedTypes) throws Exception {
        MethodDoc[] methods = clazz.methods();
        for (int i = 0; i < methods.length; i++) {
            MethodDoc method = methods[i];
            if (processed.get(method.name()) == null) {
                if (method.name().startsWith("set") && method.isPublic() && method.parameters().length == 1) {
                    writeAttribute(tf, method);
                    referencedTypes.put(method.parameters()[0].typeName(), method.parameters()[0].type());
                }
                processed.put(method.name(), method);
            }
        }
        if (clazz.superclass() != null) {
            writeAttributes(tf, clazz.superclass(), processed, referencedTypes);
        }
    }

    /**
     * Write all Ant nested elements in this class and superclasses.
     * @param tf destination.
     * @param clazz class documentation.
     * @param processed map of processed methods.
     * @param referencedTypes map of referenced types.
     * @throws Exception if IO or other exception.
     */
    private static final void writeChildren(final TransformerHandler tf,
                                            final ClassDoc clazz,
                                            final Map processed,
                                            final Map referencedTypes) throws Exception {
        MethodDoc[] methods = clazz.methods();
        for (int i = 0; i < methods.length; i++) {
            MethodDoc method = methods[i];
            if (processed.get(method.name()) == null) {
                // Recognize the three Ant nested-element conventions:
                // addConfiguredX(T), addX(T) and createX().
                if (method.name().startsWith("addConfigured") && method.isPublic() && method.parameters().length == 1) {
                    writeChild(tf, method, method.name().substring(13), method.parameters()[0].type(), referencedTypes);
                } else if (method.name().startsWith("add") && method.isPublic() && method.parameters().length == 1) {
                    writeChild(tf, method, method.name().substring(3), method.parameters()[0].type(), referencedTypes);
                } else if (method.isPublic() && method.parameters().length == 0 && method.name().startsWith("create")) {
                    writeChild(tf, method, method.name().substring(6), method.returnType(), referencedTypes);
                }
                processed.put(method.name(), method);
            }
        }
        if (clazz.superclass() != null) {
            writeChildren(tf, clazz.superclass(), processed, referencedTypes);
        }
    }

    /**
     * Write Ant documentation for this class.
     * @param tf destination.
     * @param clazz class documentation.
     * @param referencedTypes map of referenced types.
     * @throws Exception if IO or other exception.
     */
    private static void writeClass(final TransformerHandler tf,
                                   final ClassDoc clazz,
                                   final Map referencedTypes) throws Exception {
        StreamResult result = new StreamResult(new File("src/site/xdoc/antdocs/" + clazz.name() + ".xml"));
        tf.setResult(result);
        AttributesImpl attributes = new AttributesImpl();
        attributes.addAttribute(null, "name", "name", "CDATA", clazz.name());
        StringBuffer firstSentence = new StringBuffer();
        Tag[] tags = clazz.firstSentenceTags();
        for (int i = 0; i < tags.length; i++) {
            firstSentence.append(tags[i].text());
        }
        if (firstSentence.length() > 0) {
            attributes.addAttribute(null, "firstSentence", "firstSentence", "CDATA", firstSentence.toString());
        }
        tf.startDocument();
        tf.startElement(NS_URI, "class", "class", attributes);
        attributes.clear();
        tf.startElement(NS_URI, "comment", "comment", attributes);
        writeDescription(tf, clazz.commentText());
        tf.endElement(NS_URI, "comment", "comment");
        tf.startElement(NS_URI, "attributes", "attributes", attributes);
        // Seed the processed-map with framework setters that should never be
        // documented as user-facing attributes.
        Map methods = new HashMap();
        methods.put("setProject", "setProject");
        methods.put("setRuntimeConfigurableWrapper", "setRuntimeConfigurableWrapper");
        writeAttributes(tf, clazz, methods, referencedTypes);
        tf.endElement(NS_URI, "attributes", "attributes");
        tf.startElement(NS_URI, "children", "children", attributes);
        Map children = new HashMap();
        writeChildren(tf, clazz, children, referencedTypes);
        tf.endElement(NS_URI, "children", "children");
        tf.endElement(NS_URI, "class", "class");
        tf.endDocument();
    }
}
|
/**
 * Groups client event timestamps by event type.
 *
 * @param array $clientEvents List of events, each carrying 'type' and 'timestamp'.
 * @return array Map of event type => list of timestamps in ascending order.
 */
function generateEventSummary(array $clientEvents): array {
    $summary = [];

    // Bucket each timestamp under its event type (auto-vivifies the list).
    foreach ($clientEvents as $event) {
        $summary[$event['type']][] = $event['timestamp'];
    }

    // Order each bucket chronologically.
    foreach ($summary as &$timestamps) {
        sort($timestamps);
    }
    unset($timestamps);

    return $summary;
}
#!/bin/bash
# Space-separated list of dotfiles (without the leading dot) that are managed.
CONFIG_FILES="aliases exports functions gitconfig gitignore_global git-prompt nanorc zshrc"
# Where existing dotfiles are moved before new links are installed.
CONFIG_FILES_BACKUP_FOLDER="${HOME}/dotfiles_backup"
# Source directory of the managed dotfiles.
# NOTE(review): RESOURCES_DIRECTORY must be exported by the caller -- confirm.
CONFIG_FILES_DIR="${RESOURCES_DIRECTORY}/dotfiles"
# Moves (or unlinks) the user's existing dotfiles into a timestamped backup
# folder so new links can be installed safely.
function backup_old_dotfiles(){
    bckfld="${CONFIG_FILES_BACKUP_FOLDER}/$(date +%s)"
    mkdir -p "${bckfld}"
    for file in ${CONFIG_FILES}; do
        # Test the actual dotfile in $HOME -- the original tested the bare
        # name "$file", which refers to the current working directory and
        # never matched the symlink it meant to check.
        if [ -L "${HOME}/.${file}" ]; then
            # no need to backup if its a link. just unlink it
            unlink "${HOME}/.${file}"
        elif [ -e "${HOME}/.${file}" ]; then
            mv "${HOME}/.${file}" "${bckfld}"
        fi
        # If the dotfile doesn't exist at all there is nothing to back up
        # (the original's unconditional mv errored in that case).
    done
}
# Links each managed dotfile from the resources directory into $HOME.
# NOTE(review): relies on a `link` helper defined elsewhere -- confirm it
# creates the intended (sym)link.
function link_dotfiles(){
    for f in ${CONFIG_FILES}; do
        link "${CONFIG_FILES_DIR}/${f}" "${HOME}/.${f}"
    done
}
# Entry point: backs up the current dotfiles, then links the managed ones.
# NOTE(review): `run` appears to be a logging helper defined elsewhere --
# confirm it only announces the step and does not execute its argument.
function setup_dotfiles(){
    run "backup current dotfiles"
    backup_old_dotfiles
    run "linking new dotfiles"
    link_dotfiles
}
|
package bootstrap
import (
"errors"
"fmt"
"html/template"
"io/ioutil"
"log"
"github.com/beego/beego/v2/server/web"
)
// templateFunctions maps template function names to their implementations;
// each entry is registered with beego by SetUpTemplateFunction.
var templateFunctions = map[string]interface{}{
	"render_file": renderFile,
	"render_icon": renderIcon,
	"args":        args,
}
// SetUpTemplateFunction registers the additional template functions with the
// beego web framework; the process exits if any registration fails.
func SetUpTemplateFunction() {
	for name, fn := range templateFunctions {
		if err := web.AddFuncMap(name, fn); err != nil {
			// log.Fatal with two adjacent string operands prints them with
			// no separator; Fatalf keeps the message readable and includes
			// which function failed.
			log.Fatalf("Failed to add template function %s: %s", name, err.Error())
		}
	}
}
// renderFile reads the file at path and returns its raw contents as HTML.
// NOTE(review): a read failure aborts the whole process (log.Fatal) --
// confirm that is acceptable at render time.
func renderFile(path string) template.HTML {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		log.Fatal(err)
	}
	text := string(data)
	return web.Str2html(text)
}
// renderIcon renders an inline SVG <use> reference for the named icon.
// The optional first element of options is appended to the svg class list.
func renderIcon(iconName string, options ...string) template.HTML {
	iconTemplate := `<svg class="icon %s" viewBox="0 0 16 16">
		<use xlink:href="#%s" />
	</svg>`
	// Only the first option is used; further options are ignored.
	var classList string
	if len(options) > 0 {
		classList = options[0]
	}
	htmlString := fmt.Sprintf(iconTemplate, classList, iconName)
	return web.Str2html(htmlString)
}
// args builds a string-keyed map from an alternating key/value list, e.g.
// args("a", 1, "b", 2). It errors when the value count is odd or when any
// key position does not hold a string.
func args(values ...interface{}) (map[string]interface{}, error) {
	if len(values)%2 != 0 {
		return nil, errors.New("invalid args call")
	}
	result := make(map[string]interface{}, len(values)/2)
	for i := 0; i+1 < len(values); i += 2 {
		key, ok := values[i].(string)
		if !ok {
			return nil, errors.New("args keys must be strings")
		}
		result[key] = values[i+1]
	}
	return result, nil
}
|
// 5339. Call center (BOJ problem)
// 2020.10.11
// Implementation: prints a fixed ASCII-art picture.
#include<iostream>
using namespace std;
int main()
{
    // Each line reproduces one row of the required output exactly;
    // backslashes are escaped in the string literals.
    cout << " /~\\" << endl;
    cout << " ( oo|" << endl;
    cout << " _\\=/_" << endl;
    cout << " / _ \\" << endl;
    cout << " //|/.\\|\\\\" << endl;
    cout << " || \\ / ||" << endl;
    cout << "============" << endl;
    cout << "| |" << endl;
    cout << "| |" << endl;
    cout << "| |" << endl;
    return 0;
}
|
import {
Table,
Column,
CreatedAt,
UpdatedAt,
Model,
PrimaryKey,
AutoIncrement,
AllowNull,
Unique,
BelongsToMany
} from "sequelize-typescript";
import User from "./User";
import UserQueue from "./UserQueue";
import Whatsapp from "./Whatsapp";
import WhatsappQueue from "./WhatsappQueue";
// Sequelize model for a ticket queue.
@Table
class Queue extends Model<Queue> {
  // Surrogate auto-incremented primary key.
  @PrimaryKey
  @AutoIncrement
  @Column
  id: number;

  // Queue name; required and unique.
  @AllowNull(false)
  @Unique
  @Column
  name: string;

  // Queue color; required and unique -- presumably used for display; confirm.
  @AllowNull(false)
  @Unique
  @Column
  color: string;

  // Optional greeting message for this queue.
  @Column
  greetingMessage: string;

  @CreatedAt
  createdAt: Date;

  @UpdatedAt
  updatedAt: Date;

  // Many-to-many: WhatsApp connections associated through WhatsappQueue.
  @BelongsToMany(() => Whatsapp, () => WhatsappQueue)
  whatsapps: Array<Whatsapp & { WhatsappQueue: WhatsappQueue }>;

  // Many-to-many: users associated through UserQueue.
  @BelongsToMany(() => User, () => UserQueue)
  users: Array<User & { UserQueue: UserQueue }>;
}

export default Queue;
|
<filename>src/Utilities/ToggleVisibility.js
// Shows the #active-list element only when at least one of the given items
// belongs to the active list; hides it otherwise.
export function toggleListVisibility(listOfItems) {
    // Look the element up once -- the original re-queried the DOM on every
    // loop iteration and kept re-adding the class for each matching task.
    const activeList = document.getElementById('active-list');
    activeList.classList.remove('visible');
    if (listOfItems.some(task => task.set === 'active-list')) {
        activeList.classList.add('visible');
    }
}
|
#!/usr/bin/env bats
DOCKER_COMPOSE_FILE="${BATS_TEST_DIRNAME}/php-7.0_ini_short_open_tag_off.yml"
# Prints the container name of the compose "php" service.
container() {
    # Direct pipeline: the original wrapped this in a useless echo-of-subshell
    # and left the compose-file variable unquoted.
    docker-compose -f "${DOCKER_COMPOSE_FILE}" ps php | grep php | awk '{ print $1 }'
}
# bats setup: start the compose stack before each test.
# NOTE(review): the fixed 20s sleep is a crude readiness wait -- confirm the
# container needs that long, or replace with a health-check poll.
setup() {
    docker-compose -f "${DOCKER_COMPOSE_FILE}" up -d
    sleep 20
}
# bats teardown: stop and remove the compose containers after each test.
teardown() {
    docker-compose -f "${DOCKER_COMPOSE_FILE}" kill
    docker-compose -f "${DOCKER_COMPOSE_FILE}" rm --force
}
# Verifies the phpfarm image ships short_open_tag.ini with the flag Off.
@test "php-7.0: ini: short_open_tag: off" {
    run docker exec "$(container)" /bin/su - root -mc "cat /usr/local/src/phpfarm/inst/current/etc/conf.d/short_open_tag.ini | grep 'short_open_tag'"
    [ "${status}" -eq 0 ]
    [[ "${output}" == *"Off"* ]]
}
|
#!/bin/bash
# LFS temporary-tools build steps for Coreutils (configure, build, test, install).
# Prepare Coreutils for compilation:
./configure --prefix=/tools --enable-install-program=hostname
# The meaning of the configure options:
#--enable-install-program=hostname
#    This enables the hostname binary to be built and installed – it is disabled by default but is required by the Perl test suite.
# Compile the package:
make
# Compilation is now complete. As discussed earlier, running the test suite is not mandatory for the temporary tools here in this chapter. To run the Coreutils test suite anyway, issue the following command:
make RUN_EXPENSIVE_TESTS=yes check
# The RUN_EXPENSIVE_TESTS=yes parameter tells the test suite to run several additional tests that are considered relatively expensive (in terms of CPU power and memory usage) on some platforms, but generally are not a problem on Linux.
# Install the package:
make install
|
<filename>modules/django-cms/chuck_module.py
# Chuck modules that must be installed before this one.
depends = ['django-1.3', 'pil', 'html5lib']
# Human-readable description shown by django-chuck for this module.
description = """
Adds Django CMS to your project.
Please note that as of yet Django CMS is not compatible with Django 1.4 and therefore
needs the django-1.3 module.
Another requirement of Django CMS is django-mptt, but because of the CMS not
being compatible with the latest django-mptt version, it is not listed as a chuck module
requirement. django-mptt will be included as a normal pip requirement (requirements/requirements.txt)
with an explicit call for the latest version that is known to be working with Django CMS.
For further information, visit;
http://docs.django-cms.org
"""
# Make this spec directory and the adjacent lib/ directory requirable.
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'quarantino'
require 'rspec'
require 'rspec/autorun'
require 'webmock/rspec'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[File.join(File.expand_path('../support', __FILE__), '**/*.rb')].each {|f| require f}
# Dummy API key so specs never talk to a real backend (webmock blocks HTTP).
ENV['QUARANTINO_API_KEY'] = 'TEST-API-KEY'
RSpec.configure do |config|
  # NOTE(review): `color_enabled` is the pre-RSpec-3 option name -- confirm
  # the pinned rspec version still supports it (newer versions use `color`).
  config.color_enabled = true
end
|
# Builds the release APKs and copies the plugin APKs into the host app's
# assets as .jar files.
./gradlew clean assembleRelease
echo "build successful"
# -f: don't fail when a previous build's jars are absent (e.g. first run) --
# the original `rm` aborted with an error in that case.
rm -f ./app/src/main/assets/plugins/plugina.jar
rm -f ./app/src/main/assets/plugins/usercenter.jar
echo "delete last build apk successful"
cp ./plugina/build/outputs/apk/plugina-release.apk ./app/src/main/assets/plugins/plugina.jar
cp ./usercenter/build/outputs/apk/usercenter-release.apk ./app/src/main/assets/plugins/usercenter.jar
echo "all successful"
source $stdenv/setup

# Keep libXv and $out in RPATH -- the default patchelf pass would strip them.
dontPatchELF=1 # must keep libXv, $out in RPATH

# The NVIDIA .run installer is a shell stub followed by an embedded xz'd
# tarball; the stub's own `skip=` line gives the first line of the archive.
unpackFile() {
    skip=$(sed 's/^skip=//; t; d' $src)
    tail -n +$skip $src | xz -d | tar xvf -
    sourceRoot=.
}
# Builds the NVIDIA kernel module(s) against the target kernel, unless only
# the userspace libraries are requested ($libsOnly).
buildPhase() {
    if test -z "$libsOnly"; then
        # Create the module.
        echo "Building linux driver against kernel: $kernel";
        cd kernel
        # Derive the kernel version and source/build trees from the kernel
        # package layout.
        kernelVersion=$(cd $kernel/lib/modules && ls)
        sysSrc=$(echo $kernel/lib/modules/$kernelVersion/source)
        sysOut=$(echo $kernel/lib/modules/$kernelVersion/build)
        unset src # used by the nv makefile
        make SYSSRC=$sysSrc SYSOUT=$sysOut module
        # nvidia no longer provides uvm kernel module for 32-bit archs
        # http://www.nvidia.com/download/driverResults.aspx/79722/en-us
        if [[ "$system" = "x86_64-linux" ]]; then
            cd uvm
            make SYSSRC=$sysSrc SYSOUT=$sysOut module
            cd ..
        fi
        cd ..
    fi
}
# Installs userspace libraries, X drivers, kernel modules, headers, docs and
# tools into $out, then fixes up RPATHs and symlink chains.
installPhase() {
    # Install libGL and friends.
    mkdir -p "$out/lib/vendors"
    cp -p nvidia.icd $out/lib/vendors/
    cp -prd *.so.* tls "$out/lib/"
    rm "$out"/lib/lib{glx,nvidia-wfb}.so.* # handled separately

    if test -z "$libsOnly"; then
        # Install the X drivers.
        mkdir -p $out/lib/xorg/modules
        cp -p libnvidia-wfb.* $out/lib/xorg/modules/
        mkdir -p $out/lib/xorg/modules/drivers
        cp -p nvidia_drv.so $out/lib/xorg/modules/drivers
        mkdir -p $out/lib/xorg/modules/extensions
        cp -p libglx.so.* $out/lib/xorg/modules/extensions
        # Install the kernel module.
        mkdir -p $out/lib/modules/$kernelVersion/misc
        for i in $(find ./kernel -name '*.ko'); do
            nuke-refs $i
            cp $i $out/lib/modules/$kernelVersion/misc/
        done
    fi

    # add backfix for vglrun and GLX version for recent nvidia driver
    # the .0 version of GLX is missing for these
    ln -s $out/lib/libGLX_nvidia.so* $out/lib/libGLX_nvidia.so.0

    # All libs except GUI-only are in $out now, so fixup them.
    for libname in `find "$out/lib/" -name '*.so.*'`
    do
        # I'm lazy to differentiate needed libs per-library, as the closure is the same.
        # Unfortunately --shrink-rpath would strip too much.
        patchelf --set-rpath "$out/lib:$allLibPath" "$libname"
        # Create the unversioned and .1 symlink aliases for each library.
        libname_short=`echo -n "$libname" | sed 's/so\..*/so/'`
        # nvidia's EGL stack seems to expect libGLESv2.so.2 to be available
        if [ $(basename "$libname_short") == "libGLESv2.so" ]; then
            ln -srnf "$libname" "$libname_short.2" || true
        fi
        ln -srnf "$libname" "$libname_short" || true
        ln -srnf "$libname" "$libname_short.1" || true
    done
    #patchelf --set-rpath $out/lib:$glPath $out/lib/libGL.so.*.*
    #patchelf --set-rpath $out/lib:$glPath $out/lib/libvdpau_nvidia.so.*.*
    #patchelf --set-rpath $cudaPath $out/lib/libcuda.so.*.*
    #patchelf --set-rpath $openclPath $out/lib/libnvidia-opencl.so.*.*

    if test -z "$libsOnly"; then
        # Install headers and /share files etc.
        mkdir -p $out/include/nvidia
        cp -p *.h $out/include/nvidia
        mkdir -p $out/share/man/man1
        cp -p *.1.gz $out/share/man/man1
        rm $out/share/man/man1/nvidia-xconfig.1.gz
        mkdir -p $out/share/applications
        cp -p *.desktop $out/share/applications
        mkdir -p $out/share/pixmaps
        cp -p nvidia-settings.png $out/share/pixmaps
        # Patch the `nvidia-settings.desktop' file.
        substituteInPlace $out/share/applications/nvidia-settings.desktop \
            --replace '__UTILS_PATH__' $out/bin \
            --replace '__PIXMAP_PATH__' $out/share/pixmaps
        # Install the programs.
        mkdir -p $out/bin
        for i in nvidia-settings nvidia-smi; do
            cp $i $out/bin/$i
            patchelf --interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \
                --set-rpath $out/lib:$programPath:$glPath $out/bin/$i
        done
        patchelf --set-rpath $glPath:$gtkPath $out/lib/libnvidia-gtk2.so.*.*
        # Test a bit.
        $out/bin/nvidia-settings --version
    else
        rm $out/lib/libnvidia-gtk2.* || true
    fi
    # For simplicity and dependency reduction, don't support the gtk3 interface.
    rm $out/lib/libnvidia-gtk3.* || true
    # we distribute these separately in `libvdpau`
    rm "$out"/lib/libvdpau{.*,_trace.*} || true
}
# Run the standard nixpkgs phase driver (uses the functions defined above).
genericBuild
|
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.vgg16 import preprocess_input, decode_predictions
import numpy as np
def classify_image(model, img_path, top_n=5, target_size=(224, 224)):
    """Classify an image file with an ImageNet-trained Keras model.

    Args:
        model: Keras model whose ``predict`` output is decodable by
            ``decode_predictions`` (e.g. VGG16 with ImageNet weights).
        img_path: Path to the image file on disk.
        top_n: Number of top predictions to return.
        target_size: ``(height, width)`` the image is resized to; must match
            the model's expected input resolution (224x224 for VGG16).

    Returns:
        List of ``(class_name, probability)`` tuples ordered from most to
        least likely, probabilities rounded to 2 decimals as plain floats.
    """
    img = image.load_img(img_path, target_size=target_size)
    img_array = image.img_to_array(img)
    # Add the batch dimension: (H, W, C) -> (1, H, W, C).
    img_array = np.expand_dims(img_array, axis=0)
    # VGG-style preprocessing (channel reordering + mean subtraction).
    img_array = preprocess_input(img_array)
    predictions = model.predict(img_array)
    # [0]: decode_predictions returns one list per batch item; batch size is 1.
    decoded_predictions = decode_predictions(predictions, top=top_n)[0]
    # Entries are (wordnet_id, class_name, prob); drop the id and cast the
    # numpy scalar to a plain float so the result is JSON-serializable.
    return [(class_name, round(float(prob), 2))
            for (_, class_name, prob) in decoded_predictions]
package cn.stylefeng.guns.modular.business.controller;
import cn.stylefeng.guns.core.log.LogObjectHolder;
import cn.stylefeng.guns.modular.business.service.ICreditinfoService;
import cn.stylefeng.guns.modular.system.model.Creditinfo;
import cn.stylefeng.roses.core.base.controller.BaseController;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import java.util.HashMap;
import java.util.List;
/**
 * Credit-information controller: serves the credit-info landing page and a
 * conditional search over credit records.
 *
 * @author fengshuonan
 * @Date 2020-01-16 14:09:47
 */
@Controller
@RequestMapping("/top")
public class TopController extends BaseController {

    /** View prefix shared by every handler in this controller. */
    private static final String PREFIX = "/business/top/";

    @Autowired
    private ICreditinfoService creditinfoService;

    /**
     * Renders the credit-info landing page.
     */
    @RequestMapping("")
    public String index() {
        return PREFIX + "index.html";
    }

    /**
     * Searches credit records matching {@code condition} and renders the list
     * view. {@code displayFlg} is "1" only when at least one record was found.
     */
    @RequestMapping(value = "/list")
    public String list(@RequestParam String condition, Model model) {
        HashMap<String, Object> mapParam = new HashMap<>();
        mapParam.put("condition", condition);
        List<Creditinfo> creditinfos = creditinfoService.selectCustomerList(mapParam);
        // "1" = show the result table, "0" = show the empty-result hint.
        String displayFlg = (creditinfos != null && !creditinfos.isEmpty()) ? "1" : "0";
        model.addAttribute("conValue", condition);
        model.addAttribute("creditinfos", creditinfos);
        model.addAttribute("displayFlg", displayFlg);
        return PREFIX + "creditList.html";
    }

    /**
     * Renders the credit list page directly, without running a search.
     */
    @RequestMapping("/list1")
    public String list1() {
        return PREFIX + "creditList.html";
    }

    /**
     * Renders the detail page for a single credit record identified by
     * {@code id}; {@code condition} is echoed back for the search box.
     */
    @RequestMapping("/detail")
    public String creditinfoDetail(@RequestParam String condition, @RequestParam String id, Model model) {
        Creditinfo creditinfo = creditinfoService.selectById(id);
        model.addAttribute("conValue", condition);
        model.addAttribute("creditinfo", creditinfo);
        return PREFIX + "creditDetail.html";
    }
}
|
<gh_stars>10-100
import os
from ssl import SSLError
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from compressor.storage import CompressorFileStorage
from configuration import aws
def get_fs(fs_type, *args):
    """Instantiate the storage backend named by ``fs_type`` ('local' or 's3'),
    retrying transient SSL failures up to four times before giving up."""
    backends = {
        'local': LocalDiskFS,
        's3': SimpleStorageServiceFS,
    }

    def build():
        return backends[fs_type](*args)

    fs = None
    attempts = 4
    for attempt in range(1, attempts + 1):
        try:
            fs = build()
        except SSLError as e:
            # Only re-raise once the final attempt has failed.
            if attempt == attempts:
                raise e
        else:
            break
    return fs
class CanvasFileStorage(CompressorFileStorage):
    # Serve compressed static assets from the public S3 bucket instead of
    # the local static root.
    def url(self, path):
        return "//canvas-dynamic-assets.s3.amazonaws.com/static/" + path
class LocalDiskFS(object):
    """Filesystem-backed storage rooted at a directory."""

    def __init__(self, root):
        # Directory under which all files are stored.
        self.root = root

    def save(self, filename, filedata):
        """Write ``filedata`` (bytes) to root/filename, world-readable."""
        filepath = os.path.join(self.root, filename)
        # "with" guarantees the handle is closed even if write() raises;
        # open() replaces the file() builtin, which Python 3 removed.
        with open(filepath, 'wb') as output:
            output.write(filedata)
        # 0o644 (portable octal spelling): world readable for nginx
        os.chmod(filepath, 0o644)

    def read(self, filename):
        """Return the raw bytes stored at root/filename."""
        filepath = os.path.join(self.root, filename)
        # The original leaked the handle (file(...).read() relied on GC).
        with open(filepath, 'rb') as f:
            return f.read()
class SimpleStorageServiceFS(object):
    # Storage backend reading/writing objects in an S3 bucket via boto.
    def __init__(self, bucket, prefix=""):
        # `aws` holds the credential tuple unpacked into S3Connection
        # (presumably access key + secret key — verify in `configuration`).
        conn = S3Connection(*aws)
        self.bucket = conn.get_bucket(bucket)
        self.prefix = prefix

    def read(self, filename):
        # Fetch the object at prefix+filename and return its contents.
        key = Key(self.bucket)
        key.key = self.prefix + filename
        return key.get_contents_as_string()

    def save(self, filename, filedata):
        # Choose a Content-Type from the file extension; anything not listed
        # is served as image/jpeg (assumes most uploads are photos — verify).
        _, ext = os.path.splitext(filename)
        content_type = ({
            '.gif': 'image/gif',
            '.png': 'image/png',
            '.json': 'application/json',
            '.gz': 'application/x-gzip',
        }).get(ext.lower(), 'image/jpeg')
        key = Key(self.bucket)
        key.key = self.prefix + filename
        key.set_contents_from_string(filedata, headers={'Content-Type': content_type})
|
<gh_stars>100-1000
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#ifndef _WINPCAP_ETH_LWIP_ADAPTER_H_
#define _WINPCAP_ETH_LWIP_ADAPTER_H_ 1

// Moved inside the include guard so repeated inclusion does no extra work
// (it previously sat above the guard).
#include <tinyhal.h>

//          1         2         3
//01234567890123456789012345678901234567
//{00000000-0000-0000-0000-000000000000}
const size_t GuidStringLen = 39; //38 chars + 1 for terminating \0

// Per-interface configuration: the GUID string of the host WinPcap adapter
// this lwIP ethernet driver should bind to.
struct WINPCAP_ETH_LWIP_DRIVER_CONFIG
{
    char adapterGuid[GuidStringLen];
};

#ifndef NETWORK_INTERFACE_COUNT
#define NETWORK_INTERFACE_COUNT 1
#endif

// Device-level configuration: one driver config per network interface.
struct WINPCAP_ETH_LWIP_DEVICE_CONFIG
{
    WINPCAP_ETH_LWIP_DRIVER_CONFIG DeviceConfigs[ NETWORK_INTERFACE_COUNT ];

    static LPCSTR GetDriverName( )
    {
        return "WINPCAP_ETH_LWIP";
    }
};

// Entry points implemented by the WinPcap-backed lwIP ethernet driver.
struct WINPCAP_ETH_LWIP_Driver
{
    static int Open( WINPCAP_ETH_LWIP_DRIVER_CONFIG* config, int index );
    static BOOL Close( int index );
    static BOOL Bind( void );
};

#endif // _WINPCAP_ETH_LWIP_ADAPTER_H_
|
<filename>commands/moderation/ban.js
const Util = require('../../util/MitUtil.js');
module.exports = {
name: 'ban',
description: 'Banea a un usuario especifico u.u',
aliases: ['bar', 'outlaw'],
usage: '<usuario>',
cooldown: 2,
args: -1,
catergory: 'Moderación',
async execute(client, message, args) {
try {
let BanMember = message.mentions.members.first() || message.guild.members.cache.get(args[0]);
const usericon = BanMember.user.displayAvatarURL({ format: 'png', dynamic: true, size: 1024 });
if (!BanMember) {
return message.reply("asegurate que es un usuario valido >.<");
}
if (!BanMember.bannable) {
return message.reply("no puedo banear a ese usuario por que tiene un rol mas alto que el mio o caresco de permisos >.<");
}
if (!message.member.hasPermission('BAN_MEMBERS')) {
return message.reply("careces de los permisos requeridos: **Banear miembros** u.u")
}
let reason = args.slice(1).join(' ');
if (!reason) reason = "No se ha dado una razón";
BanMember.ban({ reason: reason }).catch(error => message.channel.send(`Perdon ${message.author}, no pude banear al usuario u.u`));
return message.channel.send({
embed: {
title: "Usuario baneado",
description: `**${BanMember.user.tag}** ha sido baneado`,
fields: [{
name: '• Razón',
value: reason,
inline: false,
},
{
name: '• Admin responsable',
value: `${message.author}`,
inline: false,
}],
color: "RANDOM",
thumbnail: usericon,
timestamp: new Date()
}
});
} catch (err) {
console.log(err);
return message.reply(`asegurate de que estas utilizando bien el comando`);
}
}
};
|
package controller
import (
cniresources "github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/apis/k8s.cni.cncf.io/v1"
resources "github.com/tliron/knap/resources/knap.github.com/v1alpha1"
meta "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// processDaemonSets attaches the given NetworkAttachmentDefinition to every
// DaemonSet in the definition's namespace whose pod template references the
// network but is not yet annotated with the attachment definition.
// It returns the first list/update error encountered, nil otherwise.
func (self *Controller) processDaemonSets(network *resources.Network, networkAttachmentDefinition *cniresources.NetworkAttachmentDefinition) error {
	if daemonSets, err := self.Kubernetes.AppsV1().DaemonSets(networkAttachmentDefinition.Namespace).List(self.Context, meta.ListOptions{}); err == nil {
		for _, daemonSet := range daemonSets.Items {
			object := &daemonSet.Spec.Template.ObjectMeta
			if ObjectHasNetwork(object, network.Name) {
				self.Log.Infof("processing daemon set %s/%s for network %q", daemonSet.Namespace, daemonSet.Name, network.Name)
				if !ObjectHasNetworkAttachmentDefinition(object, networkAttachmentDefinition.Name) {
					// Mutate a deep copy, not the listed item, then push the
					// annotated copy back to the API server.
					daemonSet_ := daemonSet.DeepCopy()
					object = &daemonSet_.Spec.Template.ObjectMeta
					AddNetworkAttachmentDefinitionToObject(object, networkAttachmentDefinition.Name)
					if _, err := self.Kubernetes.AppsV1().DaemonSets(networkAttachmentDefinition.Namespace).Update(self.Context, daemonSet_, meta.UpdateOptions{}); err == nil {
						self.Log.Infof("attached daemon set %s/%s to network attachment definition %q", daemonSet.Namespace, daemonSet.Name, networkAttachmentDefinition.Name)
					} else {
						return err
					}
				}
			}
		}
		return nil
	} else {
		return err
	}
}
|
#! /bin/bash
# SLURM job: generate FFTW wisdom files on one exclusive 28-core mpp2 node.
#SBATCH -o fftw_plan_019.txt
#SBATCH -J fftw_plan_019
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=28
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=02:00:00

#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin OpenMP threads to cores, compact placement.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=28

# Load the toolchain expected by the SWEET build (python, intel 16, intel MPI 5.1).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5

cd /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2015_12_27_error_plot_fd
cd ../../../
. local_software/env_vars.sh

# NOTE(review): OMP_NUM_THREADS is exported as 28 above but overridden to 19
# here (matching the T19 wisdom name) — confirm the mismatch is intentional.
mpiexec.hydra -genv OMP_NUM_THREADS 19 -envall -ppn 1 ./fftw_gen_wisdoms_all.sh 19 FFTW_WISDOM_nofreq_T19
|
//
// main.m
// test-runtime-3
//
// Created by Nat! on 04.03.15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#ifndef __MULLE_OBJC__
# define __MULLE_OBJC_NO_TPS__
# define __MULLE_OBJC_FCS__
#endif
#include <mulle-objc-runtime/mulle-objc-runtime.h>
#include <mulle-objc-runtime/mulle-objc-dotdump.h>
#include <stdio.h>
#define ___Object_classid MULLE_OBJC_CLASSID( 0x58e64dae)
#define ___conforms_to_protocol__methodid MULLE_OBJC_METHODID( 0x3d1e9472)
#define ___init__methodid MULLE_OBJC_INIT_METHODID
// x=B; echo "#define ___"$x"__protocolid MULLE_OBJC_PROTOCOLID( 0x"`./build/mulle-objc-uniqueid $x`")"
#define ___E__protocolid MULLE_OBJC_PROTOCOLID( 0x00bf080c)
#define ___D__protocolid MULLE_OBJC_PROTOCOLID( 0x10bf213c)
#define ___A__protocolid MULLE_OBJC_PROTOCOLID( 0x40bf6ccc)
#define ___C__protocolid MULLE_OBJC_PROTOCOLID( 0x60bf9f2c)
#define ___B__protocolid MULLE_OBJC_PROTOCOLID( 0x70bfb85c)
#define ___F__protocolid MULLE_OBJC_PROTOCOLID( 0x30bf539c)
#define ___G__protocolid MULLE_OBJC_PROTOCOLID( 0x20bf3a6c)
/* This example just checks that protocols work
@protocol A
@end
@protocol B < A >
@end
@protocol C
@end
@protocol D < B, C>
@end
@protocol E
@end
@protocol F
@end
@interface Object < D, E >
- (void *) init;
- (BOOL) conformsToProtocol:(Protocol *) aProtocol
@end
@implementation Object
- (void *) init
{
return( self);
}
@end
int main( int argc, const char * argv[])
{
Object *obj;
obj = [[Object alloc] init];
if( [obj conformsToProtocol:@protocol( A)])
printf( "A\n");
if( [obj conformsToProtocol:@protocol( B)])
printf( "B\n");
if( [obj conformsToProtocol:@protocol( C)])
printf( "C\n");
if( [obj conformsToProtocol:@protocol( D)])
printf( "D\n");
if( [obj conformsToProtocol:@protocol( E)])
printf( "E\n");
if( [obj conformsToProtocol:@protocol( F)])
printf( "F\n");
if( [obj conformsToProtocol:@protocol( G)])
printf( "G\n");
[obj release];
return 0;
}
*/
// @interface Object
struct Object;
// - (void *) init: trivial designated initializer — the instance needs no
// setup, so it is handed back unchanged. (Signature follows the runtime's
// IMP calling convention: self, selector id, raw parameter block.)
static void *Object_init( struct Object *self, mulle_objc_methodid_t _cmd, void *_params)
{
   return self;
}
// - (BOOL) conformsToProtocol: — unpacks the protocol id from the raw
// parameter block and asks the instance's infraclass about conformance.
static int Object_conforms_to_protocol( struct Object *self, mulle_objc_methodid_t _cmd, void *_params)
{
   mulle_objc_protocolid_t protocolid;

   // _params points at a block whose first field is the protocolid; the
   // anonymous-struct cast re-creates that layout to read it (matches the
   // compound literals used at the call sites in main()).
   protocolid = ((struct { mulle_objc_protocolid_t protocolid; } *) _params)->protocolid;
   return( _mulle_objc_infraclass_conformsto_protocolid( _mulle_objc_object_get_infraclass( (void *) self), protocolid));
}
// Class-method list: declared but left empty — Object has no class methods.
static struct _mulle_objc_methodlist Object_class_methodlist;

//
// for a reason I have forgotten, I couldn't use this in the regular header
// with the empty array...
//
// Same leading layout as _mulle_objc_methodlist, but with a flexible array
// member so the entries can be statically initialized below.
struct _gnu_mulle_objc_methodlist
{
   unsigned int n_methods; // must be #0 and same as struct _mulle_objc_ivarlist
   void *owner;
   struct _mulle_objc_method methods[];
};

// Instance methods: -conformsToProtocol: and -init. Each entry is
// { { methodid, type signature, name, bits }, implementation }.
static struct _gnu_mulle_objc_methodlist Object_instance_methodlist =
{
   2,
   NULL,
   {
      {
         {
            ___conforms_to_protocol__methodid,
            "@:*i",
            "conformsToProtocol:",
            0
         },
         (mulle_objc_implementation_t) Object_conforms_to_protocol
      },
      {
         {
            ___init__methodid,
            "@:",
            "init",
            0
         },
         (mulle_objc_implementation_t) Object_init
      },
   }
};

// can't be enum, because must be void * to stay compatible with legacy
// runtimes
// enum is wrong if sizeof( int) ! sizeof( mulle_objc_protocolid_t)
struct _gnu_mulle_objc_protocollist
{
   unsigned int n_protocols;
   struct _mulle_objc_protocol protocols[];
};

// Protocols adopted by Object: D and E directly, A/B/C via D (see the
// commented ObjC source above); F and G are intentionally absent.
static struct _gnu_mulle_objc_protocollist Object_protocollist =
{
   5,
   {
      // keep sorted by protocolid
      { ___E__protocolid, "E" },
      { ___D__protocolid, "D" },
      { ___A__protocolid, "A" },
      { ___C__protocolid, "C" },
      { ___B__protocolid, "B" },
   }
};

// Load record for Object handed to the runtime at load time.
// NOTE(review): the bare 0 / -1 / 4 fields follow the loadclass ABI
// (superclass ids, hashes, instance size, ...) — confirm against the
// runtime's struct _mulle_objc_loadclass before editing them.
static struct _mulle_objc_loadclass Object_loadclass =
{
   ___Object_classid,
   "Object",
   0,
   0,
   NULL,
   0,
   -1,
   4,
   NULL,
   &Object_class_methodlist,
   (struct _mulle_objc_methodlist *) &Object_instance_methodlist,
   NULL,
   (struct _mulle_objc_protocollist *) &Object_protocollist
};

// Exactly one class to load.
struct _mulle_objc_loadclasslist class_list =
{
   1,
   &Object_loadclass
};

// Feature bits advertised in the load-info header, derived from the
// __MULLE_OBJC_NO_TPS__ / __MULLE_OBJC_NO_FCS__ switches set at the top.
#ifdef __MULLE_OBJC_NO_TPS__
# define TPS_BIT 0x4
#else
# define TPS_BIT 0
#endif

#ifdef __MULLE_OBJC_NO_FCS__
# define FCS_BIT 0x8
#else
# define FCS_BIT 0
#endif

#define UNIVERSE_ID 0x7c5f7f6b
#define UNIVERSE_NAME "toy universe"

// The non-default ("toy") universe this test registers its class into.
static struct _mulle_objc_loaduniverse universe_info =
{
   UNIVERSE_ID,
   UNIVERSE_NAME
};

// Complete load-info enqueued by __load below.
static struct _mulle_objc_loadinfo load_info =
{
   {
      MULLE_OBJC_RUNTIME_LOAD_VERSION,
      MULLE_OBJC_RUNTIME_VERSION,
      0,
      0,
      TPS_BIT | FCS_BIT
   },
   &universe_info,
   &class_list
};
// Constructor that enqueues this file's load-info with the runtime exactly
// once. The has_loaded guard is needed where the constructor can fire twice
// (Windows without mulle-clang, and the manual call from main()).
MULLE_C_CONSTRUCTOR( __load)
static void __load()
{
   static int has_loaded;

   fprintf( stderr, "--> __load\n");
   // windows w/o mulle-clang
   if( has_loaded)
      return;
   has_loaded = 1;
   mulle_objc_loadinfo_enqueue_nofail( &load_info);
}
//
// Universe bootstrap hook called by the runtime: returns the global universe
// for (universeid, universename), initializing ("banging") it on first use.
//
struct _mulle_objc_universe *
__register_mulle_objc_universe( mulle_objc_universeid_t universeid,
                                char *universename)
{
   struct _mulle_objc_universe *universe;

   universe = __mulle_objc_global_get_universe( universeid, universename);
   if( ! _mulle_objc_universe_is_initialized( universe))
   {
      _mulle_objc_universe_bang( universe, 0, NULL, NULL);
      // test classes may have been compiled with a different ivar hash
      universe->config.ignore_ivarhash_mismatch = 1;
   }
   // fix: terminate the trace line with '\n' (matches the "--> __load" trace;
   // the original left stderr mid-line)
   fprintf( stderr, "__register_mulle_objc_universe done\n");
   return( universe);
}
// Exercises the hand-rolled class: allocates and inits an Object, dumps the
// universe graph as dot files, then prints one letter per protocol the class
// conforms to (A..E expected; F and G are not adopted — see the tables above).
int main( int argc, const char * argv[])
{
   struct _mulle_objc_infraclass *cls;
   struct _mulle_objc_object *obj;
   struct _mulle_objc_universe *universe;

   // windows...
#if ! defined( __clang__) && ! defined( __GNUC__)
   __load();
#endif

   // obj = [[Object alloc] init];
   cls = mulle_objc_global_lookup_infraclass_nofail( UNIVERSE_ID, ___Object_classid);
   obj = mulle_objc_infraclass_alloc_instance( cls);
   obj = mulle_objc_object_call( obj, ___init__methodid, NULL);

   // if( [obj conformsToProtocol:@protocol( A)])
   // printf( "A\n");
   universe = mulle_objc_global_get_universe( UNIVERSE_ID);
   mulle_objc_universe_dotdump_to_directory( universe, ".");

   // Each call packs the protocolid into an on-stack compound literal —
   // the exact layout Object_conforms_to_protocol reads from _params.
   if( mulle_objc_object_call( obj, ___conforms_to_protocol__methodid, &(struct { mulle_objc_protocolid_t a; }){ .a = ___A__protocolid } ))
      printf( "A\n");
   if( mulle_objc_object_call( obj, ___conforms_to_protocol__methodid, &(struct { mulle_objc_protocolid_t a; }){ .a = ___B__protocolid } ))
      printf( "B\n");
   if( mulle_objc_object_call( obj, ___conforms_to_protocol__methodid, &(struct { mulle_objc_protocolid_t a; }){ .a = ___C__protocolid } ))
      printf( "C\n");
   if( mulle_objc_object_call( obj, ___conforms_to_protocol__methodid, &(struct { mulle_objc_protocolid_t a; }){ .a = ___D__protocolid } ))
      printf( "D\n");
   if( mulle_objc_object_call( obj, ___conforms_to_protocol__methodid, &(struct { mulle_objc_protocolid_t a; }){ .a = ___E__protocolid } ))
      printf( "E\n");
   if( mulle_objc_object_call( obj, ___conforms_to_protocol__methodid, &(struct { mulle_objc_protocolid_t a; }){ .a = ___F__protocolid } ))
      printf( "F\n");
   if( mulle_objc_object_call( obj, ___conforms_to_protocol__methodid, &(struct { mulle_objc_protocolid_t a; }){ .a = ___G__protocolid } ))
      printf( "G\n");

   // [obj print];
   // [obj release];
   mulle_objc_instance_free( obj);
   _mulle_objc_universe_release( universe); // since its not default
   return 0;
}
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.payapi
import java.time.LocalDateTime
import generators.AmlsReferenceNumberGenerator
import models.payapi.TaxTypes.`other`
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.Json
/**
 * JSON serialisation tests for Payment: the optional description must be
 * present in the output only when supplied.
 */
class PaymentSpec extends PlaySpec with MockitoSugar with AmlsReferenceNumberGenerator {

  val id = "biuh98huiu"
  val ref = "ref"
  val amountInPence = 100
  val commissionInPence = 20
  val totalInPence = 120
  val name = "providerName"
  val providerRef = "providerRef"
  val now = LocalDateTime.now()

  // Shared builders: both cases differ only in the optional description.
  private def mkPayment(desc: Option[String]) =
    Payment(id, other, ref, desc, amountInPence, PaymentStatuses.Successful)

  private val baseJson = Json.obj(
    "id" -> id,
    "taxType" -> "other",
    "reference" -> ref,
    "amountInPence" -> amountInPence,
    "status" -> "Successful"
  )

  "Payment" must {
    "serialise to JSON with no description" in {
      Json.toJson(mkPayment(None)) must be(baseJson)
    }

    "serialise to JSON with a description" in {
      Json.toJson(mkPayment(Some("Desc"))) must be(
        baseJson + ("description" -> Json.toJson("Desc"))
      )
    }
  }
}
|
<reponame>ZacharyBabbitt/office-ui-fabric-react
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
import * as Axe from 'axe-core';
import { CheckMessageTransformer } from './check-message-transformer';
import { CustomRuleConfigurations } from './custom-rule-configurations';
import { DocumentUtils } from './document-utils';
import { HelpUrlGetter } from './help-url-getter';
import { MessageDecorator } from './message-decorator';
import { ResultDecorator } from './result-decorator';
import { SarifConverter } from './sarif-converter';
import { SarifLog } from './sarif/sarifLog';
import { wcagLinkData } from './wcag';
import { rulesWCAGConfiguration } from './wcag-mappings';
export function axeToSarif(axeResults: Axe.AxeResults): SarifLog {
const messageDecorator = new MessageDecorator(CustomRuleConfigurations, new CheckMessageTransformer());
const helpUrlGetter = new HelpUrlGetter(CustomRuleConfigurations);
const resultDecorator = new ResultDecorator(new DocumentUtils(), messageDecorator, ruleId => helpUrlGetter.getHelpUrl(ruleId));
resultDecorator.setWCAGConfiguration(rulesWCAGConfiguration);
const sarifConverter = new SarifConverter(wcagLinkData);
// AxeResults -> ScannerResults
const scannerResults = resultDecorator.decorateResults(axeResults);
// ScannerResults -> ISarifLog
// TODO - ScannerOptions w/ scanName, testCaseId, scanId
return sarifConverter.convert(scannerResults, {});
}
|
@API.func("unsigned long long HPyLong_AsUnsignedLongLong(HPyContext *ctx, HPy h)",
          error_value=API.cast("unsigned long long", -1))
def HPyLong_AsUnsignedLongLong(space, handles, ctx, h):
    # Convert the Python int behind handle `h` to an unsigned long long.
    # NOTE(review): every failure path returns the error sentinel without
    # setting a Python exception — the CPython/HPy counterpart raises
    # TypeError/OverflowError here; confirm against the HPy spec.
    w_long = handles.deref(h)
    if not space.isinstance_w(w_long, space.w_long):
        return API.cast("unsigned long long", -1)  # Return error value for non-long object
    try:
        # assumes r_long_w covers the full unsigned-long-long range —
        # TODO confirm; values above LONG_MAX may already raise here.
        val = space.r_long_w(w_long)
        if val < 0:
            return API.cast("unsigned long long", -1)  # Return error value for negative value
        return rffi.cast(rffi.ULONGLONG, val)
    except OperationError:
        return API.cast("unsigned long long", -1)  # Return error value for conversion failure
var express = require("express");
var req = require('request');
var async = require('async');
var bodyParser = require('body-parser');
var cookieParser = require('cookie-parser');
var ejs = require('ejs');
var csrf = require('csurf');
var app = express();
app.set('view engine', 'ejs');
// app.use(express.static(__dirname + '/views'));
app.use('/assets', express.static('static'));
app.use(bodyParser.urlencoded({
extended: false
}));
app.use(cookieParser());
app.use(csrf({ cookie: true }));
// Root route: when apitoken/projectid/synthesisid/cteamid are all supplied,
// fan out over the Geodesignhub API (project, bounds, timeline, systems,
// diagrams, boundaries), then fetch each system's detail and render the
// analysis view; otherwise render the empty form with zeroed options.
app.get('/', function(request, response) {
    var opts = {};
    if (request.query.apitoken && request.query.projectid && request.query.synthesisid && request.query.cteamid) {
        var baseurl = 'https://www.geodesignhub.com/api/v1/projects/';
        // var baseurl = 'http://local.test:8000/api/v1/projects/';
        var apikey = request.query.apitoken;
        var cred = "Token " + apikey;
        var projectid = request.query.projectid;
        var cteamid = request.query.cteamid;
        var synthesisid = request.query.synthesisid;
        var synprojectsurl = baseurl + projectid + '/cteams/' + cteamid + '/' + synthesisid + '/';
        var timelineurl = baseurl + projectid + '/cteams/' + cteamid + '/' + synthesisid + '/timeline/';
        var systemsurl = baseurl + projectid + '/systems/';
        var boundsurl = baseurl + projectid + '/bounds/';
        var boundaryurl = baseurl + projectid + '/boundaries/';
        var syndiagramsurl = baseurl + projectid + '/cteams/' + cteamid + '/' + synthesisid + '/diagrams/';
        var projecturl = baseurl + projectid + '/';
        var URLS = [synprojectsurl, boundsurl, timelineurl, systemsurl, projecturl, syndiagramsurl, boundaryurl];

        // Shared iteratee: GET the url with the auth header and JSON-parse
        // the body (this exact callback was duplicated in both async.map calls).
        var getJSON = function(url, done) {
            req({
                url: url,
                headers: {
                    "Authorization": cred,
                    "Content-Type": "application/json"
                }
            }, function(err, resp, body) {
                if (err || resp.statusCode !== 200) {
                    return done(err || new Error());
                }
                return done(null, JSON.parse(body));
            });
        };

        async.map(URLS, getJSON, function(err, results) {
            if (err) return response.sendStatus(500);
            // results[3] is the systems list; build each system's detail URL.
            var sURls = [];
            var systems = results[3];
            // "var x" fixes the implicit global the original created
            // (a ReferenceError under strict mode).
            for (var x = 0; x < systems.length; x++) {
                var curSys = systems[x];
                var systemdetailurl = baseurl + projectid + '/systems/' + curSys['id'] + '/';
                sURls.push(systemdetailurl);
            }
            async.map(sURls, getJSON, function(err, sysdetails) {
                if (err) return response.sendStatus(500);
                var timeline = results[2]['timeline'];
                opts = {
                    "csrfToken": request.csrfToken(),
                    "apitoken": request.query.apitoken,
                    "projectid": request.query.projectid,
                    "status": 1,
                    "design": JSON.stringify(results[0]),
                    "bounds": JSON.stringify(results[1]),
                    "systems": JSON.stringify(results[3]),
                    "timeline": JSON.stringify(timeline),
                    "projectdetails": JSON.stringify(results[4]),
                    "syndiagrams": JSON.stringify(results[5]),
                    "boundaries": JSON.stringify(results[6].geojson),
                    "systemdetail": JSON.stringify(sysdetails),
                };
                response.render('investmentanalysis', opts);
            });
        });
    } else {
        opts = { 'csrfToken': request.csrfToken(), 'boundaries': '0', 'systemdetail': '0', 'apitoken': '0', 'projectid': '0', 'cteamid': '0', "diagramdetail": '0', 'systems': '0', 'synthesisid': '0', "projectdetails": '0' };
        response.render('investmentanalysis', opts);
    }
});

// Bind to the environment-provided port (Heroku-style) or 5001 locally.
app.listen(process.env.PORT || 5001);
package json
import java.time.LocalDate
import java.util.UUID
import json.CarAdvertFormat._
import model.CarAdvert
import model.FuelTypes._
import org.scalatest.{FlatSpec, Matchers}
import play.api.libs.json.{JsSuccess, Json}
/**
 * Round-trip JSON (de)serialisation tests for CarAdvert.
 */
class CarAdvertFormatSpec extends FlatSpec with Matchers {

  val uuid = UUID.fromString("75fb4ade-f5c7-4da3-b88a-4d6b7d8c42a8")

  // A used car carries mileage + first registration and serialises isnew=false.
  val advertUsed = CarAdvert(uuid, "advert1", GASOLINE, 1234, 5678, LocalDate.of(2016, 10, 11))
  val jsonAdvertUsed = """{"id":"75fb4ade-f5c7-4da3-b88a-4d6b7d8c42a8","title":"advert1","fuel":"GASOLINE","price":1234,"isnew":false,"mileage":5678,"firstRegistration":"2016-10-11"}"""

  // A new car omits the optional members and serialises isnew=true.
  val advertNew = CarAdvert(uuid, "advert1", GASOLINE, 1234)
  val jsonAdvertNew = """{"id":"75fb4ade-f5c7-4da3-b88a-4d6b7d8c42a8","title":"advert1","fuel":"GASOLINE","price":1234,"isnew":true}"""

  "CarAdvertFormat" should "convert object to json" in {
    Json.toJson(advertUsed).toString should equal(jsonAdvertUsed)
  }

  it should "omit none members from json" in {
    // (removed an unused local `uuid` that shadowed the class-level value)
    Json.toJson(advertNew).toString should equal(jsonAdvertNew)
  }

  it should "parse used car advert with all properties set" in {
    val advert = Json.parse(jsonAdvertUsed).validate[CarAdvert]
    advert should equal (JsSuccess(advertUsed))
  }

  it should "parse car advert json with not existing properties" in {
    val advert = Json.parse(jsonAdvertNew).validate[CarAdvert]
    advert should equal (JsSuccess(advertNew))
  }
}
|
/**
 * Simple mutable bean describing a student (name, age, grade).
 */
public class Student {

    private String name;
    private int age;
    private double grade;

    /** No-arg constructor (bean-style frameworks require it). */
    public Student() {
    }

    /**
     * Creates a fully initialised student.
     *
     * @param name  the student's name
     * @param age   the student's age in years
     * @param grade the student's grade point value
     */
    public Student(String name, int age, double grade) {
        this.name = name;
        this.age = age;
        this.grade = grade;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public double getGrade() {
        return grade;
    }

    public void setGrade(double grade) {
        this.grade = grade;
    }

    /** Readable representation for logs and debugging (was missing). */
    @Override
    public String toString() {
        return "Student{name='" + name + "', age=" + age + ", grade=" + grade + "}";
    }
}
package com.atjl.dbservice.api.domain;
import com.atjl.dbservice.api.validator.DftRawDataDuplicateChecker;
import com.atjl.dbservice.api.RawDataDuplicateChecker;
import com.atjl.dbservice.api.RawDataValidator;
import com.atjl.dbservice.api.TgtDataNeedUpdateChecker;
import com.atjl.common.domain.KeyValue;
import com.atjl.dbservice.util.DataFieldUtil;
import com.atjl.util.character.StringCheckUtil;
import com.atjl.util.collection.CollectionSortUtil;
import com.atjl.util.collection.CollectionUtil;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@ApiModel("数据表间拷贝配置")
public class DataCpConfig extends DataBaseConfig {
    // ----------------------------------------------------------- config fields

    // Whether the target table is truncated before the copy starts.
    @ApiModelProperty(value = "是否清空目标表")
    private boolean clearTgtTable = false;

    // First-pass validator for raw rows (null/format/length checks only);
    // must not carry Spring dependencies.
    @ApiModelProperty(value = "原始数据 初步校验 校验器,不可有spring依赖,只校验 是否为空,格式,长度等简单数据")
    private RawDataValidator rawDataValidator;

    // Decides from raw + target data whether a target row needs updating.
    @ApiModelProperty(value = "原始数据+目标数据 校验,不满足的不更新,原表数据注入 所有字段,目标表 数据注入 主键+普通字段+指定的附加字段")
    private TgtDataNeedUpdateChecker tgtDataUpdateCheck;

    // Picks which duplicate raw row within one page wins.
    @ApiModelProperty(value = "一个分页内的原始数据重复,保留哪条校验器")
    private RawDataDuplicateChecker rawDataDuplicateCheck = new DftRawDataDuplicateChecker();

    // source column -> target column mappings (primary key / normal / json).
    @ApiModelProperty(value = "原表-目标表 主键字段映射关系")
    private Map<String, String> pkFieldMapping;

    @ApiModelProperty(value = "原表-目标表 基础字段映射关系")
    private Map<String, String> fieldMapping;

    @ApiModelProperty(value = "原表-目标表 json字段映射关系")
    private Map<String, String> jsonFieldMapping;

    // Target columns preset to fixed default values.
    @ApiModelProperty(value = "目标表 需要预置 默认值字段")
    private Map<String, String> defaultValues;

    @ApiModelProperty(value = "原始表名")
    private String rawTable;

    // Custom-select mode: caller supplies the select/count sql fragments below.
    @ApiModelProperty(value = "是否自定义查询")
    private boolean customSelect = false;

    @ApiModelProperty(value = "自定义查询sql select 前缀")
    private String customSelectSqlPrefix;

    @ApiModelProperty(value = "自定义查询sql select 后缀")
    private String customSelectSqlSuffix;

    @ApiModelProperty(value = "自定义查询sql count前缀")
    private String customCountSqlPrefix;

    @ApiModelProperty(value = "自定义查询sql count后缀")
    private String customCountSqlSuffix;

    // Extra target columns fetched alongside the keys for update checks.
    @ApiModelProperty(value = "目标表,需要取的附加字段")
    private List<String> tgtTableGetExtFields;

    @ApiModelProperty(value = "目标表json字段名")
    private String jsonField;

    // @ApiModelProperty(value = "需要转换的字段")
    // private Map<String, PropertyCovertor> covertors;

    // ---------------------------------------------------------- derived getters

    // Raw-side primary-key column names (the keys of pkFieldMapping).
    public List<String> getRawPkFieldList() {
        return CollectionUtil.map2list(pkFieldMapping, true);
    }

    @ApiModelProperty(value = "主键域 字符串")
    public String getTgtPkFields() {
        // Target table's own PK first, then the mapped key columns, comma-joined.
        String res;
        List<String> pkField = CollectionUtil.map2list(pkFieldMapping, false);
        res = DataFieldUtil.field2string(pkField);
        res = this.getTgtTablePk() + "," + res;
        return res;
    }

    @ApiModelProperty(value = "目标表查询的字段列表,主键域+需要校验 是否存在 的字符串")
    public String getTgtPksAndUpdCheckColsFields() {
        // Key columns + mapped normal columns + extra fetch columns + target PK.
        List<String> pkField = CollectionUtil.map2list(pkFieldMapping, false);
        List<String> normalField = CollectionUtil.map2list(fieldMapping, false);
        // List<String> noUpdChkFields = CollectionUtil.map2list(noUpdateCheckMapping, false);
        if (!CollectionUtil.isEmpty(normalField)) {
            pkField.addAll(normalField);
        }
        if (!CollectionUtil.isEmpty(tgtTableGetExtFields)) {
            pkField.addAll(tgtTableGetExtFields);
        }
        pkField.add(getTgtTablePk());
        return DataFieldUtil.field2string(pkField);
    }

    // Returns an arbitrary entry of pkFieldMapping (the first one iterated;
    // NOTE(review): HashMap iteration order is unspecified, so "random" here
    // really means "unspecified but stable within one JVM run").
    public Map.Entry<String, String> getPkFieldRandomOne() {
        for (Map.Entry<String, String> entry : pkFieldMapping.entrySet()) {
            return entry;
        }
        return null;
    }

    @ApiModelProperty(value = "目标表 所有字段,经过排序")
    public List<String> getAllTgtSortFields() {
        // Key columns + normal columns + default-value columns + json column, sorted.
        List<String> res = CollectionUtil.map2list(pkFieldMapping, false);
        if (!CollectionUtil.isEmpty(fieldMapping)) {
            List<String> field = CollectionUtil.map2list(fieldMapping, false);
            res.addAll(field);
        }
        if (!CollectionUtil.isEmpty(defaultValues)) {
            List<String> field = CollectionUtil.map2list(defaultValues, true);
            res.addAll(field);
        }
        if (!StringCheckUtil.isEmpty(jsonField)) {
            res.add(jsonField);
        }
        res = CollectionSortUtil.sort(res);
        return res;
    }

    // Same columns as getAllTgtSortFields, wrapped as KeyValue holders.
    public List<KeyValue> getAllTgtSortKV() {
        List<String> keys = getAllTgtSortFields();
        List<KeyValue> res = new ArrayList<>();
        for (String k : keys) {
            res.add(new KeyValue(k));
        }
        return res;
    }

    @ApiModelProperty(value = "原始数据 fields")
    public String getAllRawFieldsStr() {
        // Comma-joined raw-side column list: key + normal + json columns.
        String res = "";
        List<String> pkField = CollectionUtil.map2list(pkFieldMapping, true);
        res = DataFieldUtil.field2string(pkField);
        if (!CollectionUtil.isEmpty(fieldMapping)) {
            List<String> field = CollectionUtil.map2list(fieldMapping, true);
            res = res + "," + DataFieldUtil.field2string(field);
        }
        if (!CollectionUtil.isEmpty(jsonFieldMapping)) {
            List<String> jsonFields = CollectionUtil.map2list(jsonFieldMapping, true);
            res += "," + DataFieldUtil.field2string(jsonFields);
        }
        // if (!CollectionUtil.isEmpty(noUpdateCheckMapping)) {
        // List<String> updFields = CollectionUtil.map2list(noUpdateCheckMapping, true);
        // res += "," + DataFieldUtil.field2string(updFields);
        // }
        return res;
    }
/** @return the map of default values for target-table columns. */
public Map<String, String> getDefaultValues() {
    return defaultValues;
}

public void setDefaultValues(Map<String, String> defaultValues) {
    this.defaultValues = defaultValues;
}

// Accessors below are intentionally disabled; noUpdateCheckMapping is currently unused.
// public Map<String, String> getNoUpdateCheckMapping() {
// return noUpdateCheckMapping;
// }
// public void setNoUpdateCheckMapping(Map<String, String> noUpdateCheckMapping) {
// this.noUpdateCheckMapping = noUpdateCheckMapping;
// }
/** @return whether a custom SELECT statement is configured. */
public boolean isCustomSelect() {
    return customSelect;
}

public void setCustomSelect(boolean customSelect) {
    this.customSelect = customSelect;
}

/** @return the custom SELECT SQL prefix fragment. */
public String getCustomSelectSqlPrefix() {
    return customSelectSqlPrefix;
}

public void setCustomSelectSqlPrefix(String customSelectSqlPrefix) {
    this.customSelectSqlPrefix = customSelectSqlPrefix;
}

/** @return the custom SELECT SQL suffix fragment. */
public String getCustomSelectSqlSuffix() {
    return customSelectSqlSuffix;
}

public void setCustomSelectSqlSuffix(String customSelectSqlSuffix) {
    this.customSelectSqlSuffix = customSelectSqlSuffix;
}

/** @return the mapping of JSON fields (source -> target). */
public Map<String, String> getJsonFieldMapping() {
    return jsonFieldMapping;
}

public void setJsonFieldMapping(Map<String, String> jsonFieldMapping) {
    this.jsonFieldMapping = jsonFieldMapping;
}
public Map<String, String> getFieldMapping() {
return fieldMapping;
}
public void setFieldMapping(Map<String, String> fieldMapping) {
this.fieldMapping = fieldMapping;
}
public Map<String, String> getPkFieldMapping() {
return pkFieldMapping;
}
public void setPkFieldMapping(Map<String, String> pkFieldMapping) {
this.pkFieldMapping = pkFieldMapping;
}
public String getRawTable() {
return rawTable;
}
public void setRawTable(String rawTable) {
this.rawTable = rawTable;
}
public String getJsonField() {
return jsonField;
}
public void setJsonField(String jsonField) {
this.jsonField = jsonField;
}
public TgtDataNeedUpdateChecker getTgtDataUpdateCheck() {
return tgtDataUpdateCheck;
}
public void setTgtDataUpdateCheck(TgtDataNeedUpdateChecker tgtDataUpdateCheck) {
this.tgtDataUpdateCheck = tgtDataUpdateCheck;
}
public List<String> getTgtTableGetExtFields() {
return tgtTableGetExtFields;
}
public void setTgtTableGetExtFields(List<String> tgtTableGetExtFields) {
this.tgtTableGetExtFields = tgtTableGetExtFields;
}
public boolean isClearTgtTable() {
return clearTgtTable;
}
public RawDataDuplicateChecker getRawDataDuplicateCheck() {
return rawDataDuplicateCheck;
}
public String getCustomCountSqlPrefix() {
return customCountSqlPrefix;
}
public void setCustomCountSqlPrefix(String customCountSqlPrefix) {
this.customCountSqlPrefix = customCountSqlPrefix;
}
public String getCustomCountSqlSuffix() {
return customCountSqlSuffix;
}
public void setCustomCountSqlSuffix(String customCountSqlSuffix) {
this.customCountSqlSuffix = customCountSqlSuffix;
}
public void setRawDataDuplicateCheck(RawDataDuplicateChecker rawDataDuplicateCheck) {
this.rawDataDuplicateCheck = rawDataDuplicateCheck;
}
public void setClearTgtTable(boolean clearTgtTable) {
this.clearTgtTable = clearTgtTable;
}
}
|
#!/bin/bash
set -u
set -e
# Print usage information (including the Tessera helper's own usage) and
# exit with a failure status.
function usage() {
    echo ""
    echo "Usage:"
    echo " $0 [tessera | tessera-remote | constellation] [--tesseraOptions \"options for Tessera start script\"] [--blockPeriod blockPeriod] [--verbosity verbosity]"
    echo ""
    echo "Where:"
    echo " tessera | tessera-remote | constellation (default = tessera): specifies which privacy implementation to use"
    echo " --tesseraOptions: allows additional options as documented in tessera-start.sh usage which is shown below:"
    echo " --blockPeriod: raftblocktime default is 50 ms"
    echo " --verbosity: verbosity for logging default is 3"
    echo ""
    echo "Note that this script will examine the file qdata/numberOfNodes to"
    echo "determine how many nodes to start up. If the file doesn't exist"
    echo "then 7 nodes will be assumed"
    echo ""
    ./tessera-start.sh --help
    # Exit statuses are 0-255; "exit -1" is non-portable and wraps to 255.
    exit 1
}
# Validate the genesis file and the geth binary on PATH.
# Sets the global NETWORK_ID (read later when starting the nodes).
function performValidation() {
    # Warn the user if chainId is same as Ethereum main net (see https://github.com/jpmorganchase/quorum/issues/487)
    local genesisFile=$1
    # Extract the chainId value; strip CRs so DOS-formatted files still parse.
    NETWORK_ID=$(tr -d '\r' < "$genesisFile" | grep chainId | awk -F " " '{print $2}' | awk -F "," '{print $1}')
    if [ "$NETWORK_ID" -eq 1 ]
    then
        echo " Quorum should not be run with a chainId of 1 (Ethereum mainnet)"
        echo " please set the chainId in the $genesisFile to another value "
        echo " 1337 is the recommend ChainId for Geth private clients."
    fi

    # Check that the correct geth executable is on the path
    set +e   # `which`/grep may legitimately fail; handle it ourselves
    if [ "$(which geth)" == "" ]; then
        echo "ERROR: geth executable not found. Ensure that Quorum geth is on the path."
        exit 1
    else
        GETH_VERSION=$(geth version | grep -i "Quorum Version")
        if [ "$GETH_VERSION" == "" ]; then
            echo "ERROR: you appear to be running with upstream geth. Ensure that Quorum geth is on the PATH (before any other geth version)."
            exit 1
        fi
        echo " Found geth: \"$GETH_VERSION\""
    fi
    set -e
}
# Defaults: Tessera privacy manager, 50 ms raft block time, verbosity 3.
privacyImpl=tessera
tesseraOptions=
blockTime=50
verbosity=3

# Parse the command line; positional keywords select the privacy
# implementation, --options take a value argument.
while (( "$#" )); do
    case "$1" in
        tessera)
            privacyImpl=tessera
            shift
            ;;
        constellation)
            privacyImpl=constellation
            shift
            ;;
        tessera-remote)
            privacyImpl="tessera-remote"
            shift
            ;;
        --tesseraOptions)
            tesseraOptions=$2
            shift 2
            ;;
        --blockPeriod)
            blockTime=$2
            shift 2
            ;;
        --verbosity)
            verbosity=$2
            shift 2
            ;;
        --help)
            shift
            usage
            ;;
        *)
            echo "Error: Unsupported command line parameter $1"
            usage
            ;;
    esac
done
# Perform any necessary validation
performValidation genesis.json

mkdir -p qdata/logs

# Node count defaults to 7 unless qdata/numberOfNodes overrides it.
numNodes=7
if [[ -f qdata/numberOfNodes ]]; then
    numNodes=`cat qdata/numberOfNodes`
fi

# Start the selected privacy manager before launching the geth nodes.
if [ "$privacyImpl" == "tessera" ]; then
    echo "[*] Starting Tessera nodes"
    ./tessera-start.sh ${tesseraOptions}
elif [ "$privacyImpl" == "constellation" ]; then
    echo "[*] Starting Constellation nodes"
    ./constellation-start.sh
elif [ "$privacyImpl" == "tessera-remote" ]; then
    echo "[*] Starting tessera nodes"
    ./tessera-start-remote.sh ${tesseraOptions}
else
    echo "Unsupported privacy implementation: ${privacyImpl}"
    usage
fi

# NETWORK_ID was set globally by performValidation from genesis.json.
echo "[*] Starting $numNodes Ethereum nodes with ChainID and NetworkId of $NETWORK_ID"

QUORUM_GETH_ARGS=${QUORUM_GETH_ARGS:-}
set -v
#check geth version and if it is below 1.9 then dont include allowSecureUnlock
allowSecureUnlock=
chk=`geth help | grep "allow-insecure-unlock" | wc -l`
if (( $chk == 1 )); then
    allowSecureUnlock="--allow-insecure-unlock"
fi
ARGS="--nodiscover --nousb ${allowSecureUnlock} --verbosity ${verbosity} --networkid $NETWORK_ID --raft --raftblocktime ${blockTime} --rpc --rpccorsdomain=* --rpcvhosts=* --rpcaddr 0.0.0.0 --rpcapi admin,eth,debug,miner,net,shh,txpool,personal,web3,quorum,raft,quorumPermission,quorumExtension --ws --wsaddr=localhost --wsorigins=* --wsapi admin,eth,debug,miner,net,shh,txpool,personal,web3,quorum,raft,quorumPermission,quorumExtension --emitcheckpoints --unlock 0 --password passwords.txt $QUORUM_GETH_ARGS"
basePort=21000
baseRpcPort=22000
baseWsPort=23000
baseRaftPort=50401

# Launch one geth per node in the background; each node's ports are
# offset from the base ports by its (1-based) index.
for i in `seq 1 ${numNodes}`
do
    port=$(($basePort + ${i} - 1))
    rpcPort=$(($baseRpcPort + ${i} - 1))
    wsPort=$(($baseWsPort + ${i} - 1))
    raftPort=$(($baseRaftPort + ${i} - 1))
    permissioned=
    # The first four nodes are always permissioned; later ones only when
    # STARTPERMISSION is set in the environment.
    if [[ $i -le 4 ]]; then
        permissioned="--permissioned"
    elif ! [[ -z "${STARTPERMISSION+x}" ]] ; then
        permissioned="--permissioned"
    fi
    PRIVATE_CONFIG=qdata/c${i}/tm.ipc nohup geth --datadir qdata/dd${i} ${ARGS} ${permissioned} --raftport ${raftPort} --rpcport ${rpcPort} --wsport ${wsPort} --port ${port} 2>>qdata/logs/${i}.log &
done
set +v

echo
echo "All nodes configured. See 'qdata/logs' for logs, and run e.g. 'geth attach qdata/dd1/geth.ipc' to attach to the first Geth node."
echo "To test sending a private transaction from Node 1 to Node 7, run './runscript.sh private-contract.js'"
exit 0
|
#!/bin/bash
# Clone and build MyGUI at a pinned commit, installing into the prefix "$pfx".
# Fail fast on any error, unset variable, or broken pipe.
set -euo pipefail

# $pfx must be supplied by the caller (install prefix for cmake).
: "${pfx:?pfx (install prefix) must be set}"

# CLONE PHASE
git clone https://github.com/MyGUI/mygui mygui
pushd mygui
git checkout -f 8a05127d
git submodule update --init --recursive
popd

# BUILD PHASE
pushd "mygui"
mkdir -p build
cd build
cmake \
    -DCMAKE_INSTALL_PREFIX="$pfx" \
    -DMYGUI_RENDERSYSTEM=1 \
    -DMYGUI_BUILD_DEMOS=OFF \
    -DMYGUI_BUILD_TOOLS=OFF \
    -DMYGUI_BUILD_PLUGINS=OFF \
    ..
make -j "$(nproc)"
make install
popd
|
// Barrel file: re-export every component so consumers can import them
// from the package root instead of individual module paths.
export * from './Account';
export * from './Address';
export * from './AddressInput';
export * from './Balance';
export * from './Blockie';
export * from './EtherInput';
export * from './Faucet';
export * from './GasGauge';
export * from './PunkBlockie';
export * from './Wallet';
export * from './generic-contract';
|
# Transcode test.mp4 to H.264 video + MP3 audio (all input streams mapped) and
# split the output into 10-second MPEG-TS segments (out0.ts, out1.ts, ...)
# while writing an HLS playlist to playlist.m3u8.
ffmpeg -i test.mp4 -codec:v libx264 -codec:a mp3 -map 0 -f ssegment -segment_format mpegts -segment_list playlist.m3u8 -segment_time 10 out%d.ts
|
/*
* Copyright (C) 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gilecode.yagson.refs.impl;
import com.gilecode.yagson.refs.ReferencePlaceholder;
import java.lang.reflect.Field;
/**
* A special kind of the reference placeholders, used to return references to other
* fields of the object being read, i.e. '@.field' references.
*
* @author <NAME>
*/
class FieldReferencePlaceholder<T> extends ReferencePlaceholder<T> {

    /**
     * The (serialization) field name, is known at creation time.
     */
    private final String referencedFieldName;

    /**
     * The field name, resolved to the actual field during the placeholder processing.
     */
    private Field referencedField;

    FieldReferencePlaceholder(String referencedFieldName) {
        this.referencedFieldName = referencedFieldName;
    }

    /** @return the serialization field name this placeholder points at. */
    String getReferencedFieldName() {
        return referencedFieldName;
    }

    /** @return the resolved reflective field, or {@code null} if not yet resolved. */
    Field getReferencedField() {
        return referencedField;
    }

    /** Stores the reflective field once it has been resolved from the name. */
    void setReferencedField(Field referencedField) {
        this.referencedField = referencedField;
    }

    /** @return whether a non-null reflective field has been set. */
    boolean isResolved() {
        return referencedField != null;
    }

    @Override
    public String toString() {
        return "FieldReferencePlaceholder{" +
                "referencedFieldName='" + referencedFieldName + '\'' +
                "}";
    }
}
|
# -*- coding: utf-8 -*-
import logging
import os
import re
import requests
from tqdm import tqdm
from lp2jira.config import config, lp
from lp2jira.export import Export
from lp2jira.issue import Issue
from lp2jira.utils import bug_template, json_dump, translate_blueprint_status, clean_id
class Blueprint(Issue):
    """A Launchpad blueprint (specification) exported as a JIRA issue."""

    issue_type = config['mapping']['blueprint_type']

    @classmethod
    def create(cls, spec):
        """Build a Blueprint from a Launchpad specification object.

        The unused ``project`` lookup the old code performed has been removed;
        nothing in this method used it.
        """
        status = translate_blueprint_status(spec)
        # Description combines summary, whiteboard and work items.
        description = f'{spec.summary}\n\n{spec.whiteboard}\n\n{spec.workitems_text}'
        custom_fields = Issue.create_custom_fields(spec)
        # TODO: issue type can't be hardcoded
        return cls(issue_id=spec.name, status=status, owner=clean_id(spec.owner_link), title=spec.title,
                   desc=description, priority=spec.priority,
                   created=spec.date_created.isoformat(), tags=[],
                   assignee=spec.assignee, custom_fields=custom_fields, affected_versions=[])

    def export(self):
        """Write this blueprint to its export JSON file.

        Returns True both when the file is written and when it already
        existed (the export is treated as a success either way).
        """
        self._export_related_users()
        filename = self.filename(self.issue_id)
        if self.exists(filename):
            logging.debug(f'Blueprint {self.issue_id} already exists, skipping: "(unknown)"')
            return True
        export_bug = bug_template()
        export_bug['projects'][0]['issues'] = [self._dump()]
        export_bug['links'] = []
        with open(filename, 'w') as f:
            json_dump(export_bug, f)
        logging.debug(f'Blueprint {self.issue_id} export success')
        return True
class ExportBlueprint(Export):
    """Export runner specialized for ``Blueprint`` entities."""

    def __init__(self):
        super().__init__(entity=Blueprint)
class ExportBlueprints(ExportBlueprint):
    """Exports every blueprint of the configured Launchpad project."""

    def run(self):
        logging.info('===== Export: Blueprints =====')
        target_project = lp.projects[config['launchpad']['project']]
        specs = target_project.all_specifications

        failures = []
        exported = 0
        for index, spec in enumerate(tqdm(specs, desc='Export blueprints')):
            ok = super().run(spec)
            if ok:
                exported += 1
            else:
                failures.append(f'index: {index}, name: {spec.name}')

        logging.info(f'Exported blueprints: {exported}/{len(specs)}')
        if failures:
            fail_log = '\n'.join(failures)
            logging.info(f'Failed blueprints:\n{fail_log}')
|
<filename>Sensor-Code-2018/Redundant Code/opencvprogram.py
import cv2

# Read frames from stdin and display them until the stream ends or 'q' is
# pressed. The old loop never checked the read result (crashing on EOF with
# a None frame), never exited, and never released the capture device.
camera = cv2.VideoCapture("/dev/stdin")
while True:
    grabbed, frame = camera.read()
    if not grabbed:  # stream exhausted or read failed
        break
    cv2.imshow("Frame", frame)
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        break
camera.release()
cv2.destroyAllWindows()
|
package de.unistuttgart.ims.coref.annotator.uima;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.uima.UIMAException;
import org.apache.uima.cas.impl.XmiCasDeserializer;
import org.apache.uima.fit.factory.JCasFactory;
import org.apache.uima.jcas.JCas;
import org.xml.sax.SAXException;
public class UimaUtil {
public static JCas readJCas(String filename)
throws UIMAException, FileNotFoundException, SAXException, IOException {
if (filename.endsWith(".gz")) {
try (InputStream is = new GZIPInputStream(new FileInputStream(filename))) {
return readJCas(is);
}
} else if (filename.endsWith(".zip")) {
try (InputStream is = new ZipArchiveInputStream(new FileInputStream(filename))) {
return readJCas(is);
}
} else {
try (InputStream is = new FileInputStream(filename)) {
return readJCas(is);
}
}
}
public static JCas readJCas(InputStream is) throws UIMAException, SAXException, IOException {
JCas jcas = JCasFactory.createJCas();
XmiCasDeserializer.deserialize(is, jcas.getCas(), true);
return jcas;
}
}
|
<gh_stars>10-100
/**
* @file Mat.h
* @brief C++11 class template for Matrices, ready for Matrix multiplication,
* and the taking of the transpose
* @author <NAME>, <<EMAIL>>
* @date 20170727
* */
#ifndef __MAT_H__
#define __MAT_H__
#include <array>    // std::array
#include <iostream> // std::cout, std::endl (print_all)
#include <numeric>  // std::inner_product
#include <vector>   // std::vector
/**
 * @brief Dense M x P matrix storing its data redundantly as rows, columns,
 * and a flat row-major entry vector, supporting multiplication and transpose.
 *
 * Fixes over the previous version:
 *  - the row-vector constructors called Entries_.resize(M*P) and then
 *    push_back'ed M*P more values, producing 2*M*P entries whose first half
 *    were default-initialized; reserve() is used instead;
 *  - duplicate constructor bodies are collapsed via C++11 delegating
 *    constructors;
 *  - loop counters are unsigned to match the stored dimensions.
 */
template<typename Type>
class Mat
{
  public :

    /////////////////////////////////////////////
    // Constructors - initializers of the class
    /////////////////////////////////////////////

    /**
     * @brief Construct an M x P matrix of default-initialized entries.
     * @param Size_Dims {M, P} = {number of rows, number of columns}
     */
    Mat(std::array<unsigned int,2> Size_Dims)
      : Size_Dims_(Size_Dims)
    {
      const unsigned int M = Size_Dims[0]; // number of rows
      const unsigned int P = Size_Dims[1]; // number of columns

      // set definitively the size dimensions of our Rows, Columns, and Entries
      Rows_.resize(M);
      for (unsigned int i = 0; i < M; i++) {
        Rows_[i].resize(P);
      }
      Columns_.resize(P);
      for (unsigned int j = 0; j < P; j++) {
        Columns_[j].resize(M);
      }
      Entries_.resize(M * P);
    } // END of constructor

    /// Same as above, with the dimensions given separately.
    Mat(unsigned int first_dim, unsigned int second_dim)
      : Mat(std::array<unsigned int,2>{{first_dim, second_dim}}) {}

    /**
     * @brief Construct from a flat entry vector in row-major ordering.
     * @param Size_Dims {M, P}
     * @param Entries   the M*P entries, row-major
     */
    Mat(std::array<unsigned int,2> Size_Dims,
        std::vector<Type> & Entries)
      : Size_Dims_(Size_Dims), Entries_(Entries)
    {
      const unsigned int M = Size_Dims[0]; // M, "number of rows"
      const unsigned int P = Size_Dims[1]; // P, "number of columns"

      // M empty rows / P empty columns, filled below by push_back
      Rows_.resize(M);
      Columns_.resize(P);

      for (unsigned int i = 0; i < M; i++) {
        Rows_[i].reserve(P);
        for (unsigned int j = 0; j < P; j++) {
          // idx_global is the index of entry (i,j) when the matrix is
          // "laid out flat" as a 1-dim. array in row-major ordering
          const unsigned int idx_global = j + P * i;
          const Type entry_input = Entries[idx_global];
          Rows_[i].push_back(entry_input);
          Columns_[j].push_back(entry_input);
        }
      }
    } // END of constructor

    /**
     * @brief Construct from separate dimensions and a flat row-major
     * entry vector; delegates to the std::array overload.
     */
    Mat(unsigned int first_dim, unsigned int second_dim,
        std::vector<Type> & Entries)
      : Mat(std::array<unsigned int,2>{{first_dim, second_dim}}, Entries) {}

    /**
     * @brief Construct from a vector of row vectors.
     * @param Size_Dims {M, P}
     * @param Rows      M rows of P entries each
     */
    Mat(std::array<unsigned int,2> Size_Dims,
        std::vector<std::vector<Type>> & Rows)
      : Size_Dims_(Size_Dims), Rows_(Rows)
    {
      const unsigned int M = Size_Dims[0]; // M, "number of rows"
      const unsigned int P = Size_Dims[1]; // P, "number of columns"

      Columns_.resize(P);
      // BUGFIX: reserve, not resize — resize followed by push_back produced
      // 2*M*P entries with a default-initialized first half.
      Entries_.reserve(M * P);

      // fill up Entries, assuming "row-major ordering" for entries
      for (const auto & row : Rows_) {
        for (const auto & entry : row) {
          Entries_.push_back(entry);
        }
      }
      // fill up Columns from Rows
      for (unsigned int i = 0; i < M; i++) {
        for (unsigned int j = 0; j < P; j++) {
          Columns_[j].push_back(Rows[i][j]);
        }
      }
    } // END of constructor

    /// Row-vector constructor with separate dimensions; delegates.
    Mat(unsigned int first_dim, unsigned int second_dim,
        std::vector<std::vector<Type>> & Rows)
      : Mat(std::array<unsigned int,2>{{first_dim, second_dim}}, Rows) {}

    // getter functions

    /// @return the size dimensions {M, P} of the matrix
    std::array<unsigned int,2> get_size_dims() {
      return Size_Dims_;
    }

    /// @return M, the number of rows
    unsigned int get_first_dim() {
      return Size_Dims_[0];
    }

    /// @return P, the number of columns
    unsigned int get_second_dim() {
      return Size_Dims_[1];
    }

    /// @return the (i,j)th entry, i.e. A_{ij}
    Type get_entry(const int i, const int j) {
      return (Rows_[i])[j];
    }

    /// @return the ith row, i.e. A_{i*}
    std::vector<Type> get_row(const int i) {
      return Rows_[i];
    }

    /// @return the jth column, i.e. A_{*j}
    std::vector<Type> get_column(const int j) {
      return Columns_[j];
    }

    /// @return all the columns of A
    std::vector<std::vector<Type>> get_all_columns() {
      return Columns_;
    }

    /// Pretty-print the matrix, one row per line.
    void print_all() {
      for (const auto & row : Rows_) {
        for (const auto & entry : row) {
          std::cout << entry << " ";
        }
        std::cout << std::endl;
      }
    }

    //////////////////////////
    // Matrix Multiplication
    //////////////////////////

    /**
     * @brief C = A * B where A is this (M x P) matrix and rhs is P x N.
     * @return the M x N product matrix
     */
    Mat<Type> operator*(const Mat<Type>& rhs) {
      const unsigned int M = Size_Dims_[0];
      const unsigned int N = rhs.Size_Dims_[1]; // columns of B

      std::vector<std::vector<Type>> C_Rows;
      C_Rows.reserve(M);

      const auto & B_Columns = rhs.Columns_;
      for (unsigned int i = 0; i < M; i++) {
        std::vector<Type> C_Row; // a row of the resulting matrix C
        C_Row.reserve(N);
        const auto & A_i = Rows_[i]; // A_{i*}
        for (const auto & col : B_Columns) {
          // C_{ij} = sum_k A_{ik} * B_{kj}
          const Type product = std::inner_product(A_i.begin(),
                                                  A_i.end(),
                                                  col.begin(),
                                                  static_cast<Type>(0));
          C_Row.push_back(product);
        }
        C_Rows.push_back(C_Row);
      }
      Mat<Type> C(M, N, C_Rows);
      return C;
    } // END of Matrix Multiplication

    //////////////
    // Transpose
    //////////////

    /// @return A^T — the rows of the transpose are the columns of A.
    Mat<Type> T() {
      const unsigned int first_dim = Size_Dims_[1];
      const unsigned int second_dim = Size_Dims_[0];
      auto new_Rows = Columns_;
      Mat<Type> Atranspose(first_dim, second_dim, new_Rows);
      return Atranspose;
    }

    ~Mat() {};

  private :
    // size dimensions of the matrix, MxP
    // Size_Dims is (M,P) or number of rows x number of columns;
    std::array<unsigned int,2> Size_Dims_;

    // A_{i*}, i = 1,2,...M or Size_Dims[0]
    std::vector<std::vector<Type>> Rows_;
    // A_{*j}, j = 1,2,...P or Size_Dims[1]
    std::vector<std::vector<Type>> Columns_;
    // entries of the matrix, in ROW-MAJOR ORDERING
    std::vector<Type> Entries_;
};
#endif // __MAT_H__
|
<filename>examples/parallel_for/polygon_overlay/polyover.cpp
/*
Copyright (c) 2005-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Polygon overlay
//
#include <cstdlib>
#include <cstring>
#include <cassert>
#include <iostream>
#include <algorithm>
#include "oneapi/tbb/tick_count.h"
#include "oneapi/tbb/blocked_range.h"
#include "oneapi/tbb/parallel_for.h"
#include "oneapi/tbb/spin_mutex.h"
#include "oneapi/tbb/global_control.h"
#include "common/utility/get_default_num_threads.hpp"
#include "polyover.hpp"
#include "polymain.hpp"
#include "pover_video.hpp"
/*!
* @brief intersects a polygon with a map, adding any results to output map
*
* @param[out] resultMap output map (must be allocated)
* @param[in] polygon to be intersected
* @param[in] map intersected against
* @param[in] lock to use when adding output polygons to result map
*
*/
void OverlayOnePolygonWithMap(Polygon_map_t *resultMap,
                              RPolygon *myPoly,
                              Polygon_map_t *map2,
                              oneapi::tbb::spin_mutex *rMutex) {
    int r1, g1, b1, r2, g2, b2;
    int myr = 0;
    int myg = 0;
    int myb = 0;
    // Remaining uncovered area of myPoly; once it reaches 0 every part of
    // the polygon has been matched and the inner loop can stop early.
    int p1Area = myPoly->area();
    // Index 0 of the map holds the map-size polygon, so start at 1.
    for (unsigned int j = 1; (j < map2->size()) && (p1Area > 0); j++) {
        RPolygon *p2 = &((*map2)[j]);
        RPolygon *pnew;
        int newxMin, newxMax, newyMin, newyMax;
        myPoly->getColor(&r1, &g1, &b1);
        if (PolygonsOverlap(myPoly, p2, newxMin, newyMin, newxMax, newyMax)) {
            // Output polygon color is the sum of the two input colors.
            p2->getColor(&r2, &g2, &b2);
            myr = r1 + r2;
            myg = g1 + g2;
            myb = b1 + b2;
            // Deduct the intersection area (inclusive bounds, hence +1).
            p1Area -= (newxMax - newxMin + 1) * (newyMax - newyMin + 1);
            if (rMutex) {
                // Parallel callers pass a mutex; hold it only for the append.
                oneapi::tbb::spin_mutex::scoped_lock lock(*rMutex);
                resultMap->push_back(RPolygon(newxMin, newyMin, newxMax, newyMax, myr, myg, myb));
            }
            else {
                // Serial caller (rMutex == nullptr): no locking needed.
                resultMap->push_back(RPolygon(newxMin, newyMin, newxMax, newyMax, myr, myg, myb));
            }
        }
    }
}
/*!
* @brief Serial version of polygon overlay
* @param[out] output map
* @param[in] first map (map that individual polygons are taken from)
* @param[in] second map (map passed to OverlayOnePolygonWithMap)
*/
// Serial reference implementation: intersect every polygon of map1 with map2.
void SerialOverlayMaps(Polygon_map_t **resultMap, Polygon_map_t *map1, Polygon_map_t *map2) {
    std::cout << "SerialOverlayMaps called"
              << "\n";
    *resultMap = new Polygon_map_t;

    // The first polygon of a map encodes the overall map dimensions.
    RPolygon *sizePoly = &((*map1)[0]);
    int mapxSize, mapySize, ignore1, ignore2;
    sizePoly->get(&ignore1, &ignore2, &mapxSize, &mapySize);
    (*resultMap)->reserve(mapxSize * mapySize); // can't be any bigger than this

    // Re-emit the map size as the first polygon of the result.
    (*resultMap)->push_back(RPolygon(0, 0, mapxSize, mapySize));

    // Intersect each real polygon (index >= 1) against the whole second map.
    for (unsigned int idx = 1; idx < map1->size(); ++idx) {
        OverlayOnePolygonWithMap(*resultMap, &((*map1)[idx]), map2, nullptr);
    }
}
/*!
* @class ApplyOverlay
* @brief Simple version of parallel overlay (make parallel on polygons in map1)
*/
/*!
 * @class ApplyOverlay
 * @brief Simple version of parallel overlay (make parallel on polygons in map1)
 */
class ApplyOverlay {
    Polygon_map_t *m_map1, *m_map2, *m_resultMap;
    oneapi::tbb::spin_mutex *m_rMutex;

public:
    /*!
     * @brief functor to apply
     * @param[in] r range of polygons to intersect from map1
     */
    void operator()(const oneapi::tbb::blocked_range<int> &r) const {
        PRINT_DEBUG("From " << r.begin() << " to " << r.end());
        for (int i = r.begin(); i != r.end(); i++) {
            RPolygon *myPoly = &((*m_map1)[i]);
            OverlayOnePolygonWithMap(m_resultMap, myPoly, m_map2, m_rMutex);
        }
    }

    // Members are initialized in declaration order (m_map1, m_map2,
    // m_resultMap, m_rMutex); the previous list was out of order, which
    // triggers -Wreorder and is misleading about initialization order.
    ApplyOverlay(Polygon_map_t *resultMap,
                 Polygon_map_t *map1,
                 Polygon_map_t *map2,
                 oneapi::tbb::spin_mutex *rmutex)
        : m_map1(map1),
          m_map2(map2),
          m_resultMap(resultMap),
          m_rMutex(rmutex) {}
};
/*!
* @brief apply the parallel algorithm
* @param[out] result_map generated map
* @param[in] polymap1 first map to be applied (algorithm is parallel on this map)
* @param[in] polymap2 second map.
*/
void NaiveParallelOverlay(Polygon_map_t *&result_map,
                          Polygon_map_t &polymap1,
                          Polygon_map_t &polymap2) {
    // -----------------------------------
    // Resolve the thread-count sweep: if unset (or equal to the default),
    // run a single iteration with the default number of threads.
    bool automatic_threadcount = false;

    if (gThreadsLow == THREADS_UNSET || gThreadsLow == utility::get_default_num_threads()) {
        gThreadsLow = gThreadsHigh = utility::get_default_num_threads();
        automatic_threadcount = true;
    }
    result_map = new Polygon_map_t;

    // The first polygon of a map encodes the map dimensions.
    RPolygon *p0 = &(polymap1[0]);
    int mapxSize, mapySize, ignore1, ignore2;
    p0->get(&ignore1, &ignore2, &mapxSize, &mapySize);
    result_map->reserve(mapxSize * mapySize); // can't be any bigger than this
    // push the map size as the first polygon,

    // Shared mutex protecting result_map appends across worker threads.
    oneapi::tbb::spin_mutex *resultMutex = new oneapi::tbb::spin_mutex();
    int grain_size = gGrainSize;

    // Benchmark once per requested thread count; the result map is rebuilt
    // and cleared on each iteration.
    for (int nthreads = gThreadsLow; nthreads <= gThreadsHigh; nthreads++) {
        oneapi::tbb::global_control c(oneapi::tbb::global_control::max_allowed_parallelism,
                                      nthreads);
        if (gIsGraphicalVersion) {
            RPolygon *xp =
                new RPolygon(0, 0, gMapXSize - 1, gMapYSize - 1, 0, 0, 0); // Clear the output space
            delete xp;
        }
        // put size polygon in result map
        result_map->push_back(RPolygon(0, 0, mapxSize, mapySize));

        // Time only the parallel_for itself; range starts at 1 to skip the
        // map-size polygon.
        oneapi::tbb::tick_count t0 = oneapi::tbb::tick_count::now();
        oneapi::tbb::parallel_for(
            oneapi::tbb::blocked_range<int>(1, (int)(polymap1.size()), grain_size),
            ApplyOverlay(result_map, &polymap1, &polymap2, resultMutex));
        oneapi::tbb::tick_count t1 = oneapi::tbb::tick_count::now();

        double naiveParallelTime = (t1 - t0).seconds() * 1000;
        std::cout << "Naive parallel with spin lock and ";
        if (automatic_threadcount)
            std::cout << "automatic";
        else
            std::cout << nthreads;
        std::cout << ((nthreads == 1) ? " thread" : " threads");
        std::cout << " took " << naiveParallelTime << " msec : speedup over serial "
                  << (gSerialTime / naiveParallelTime) << "\n";
        if (gCsvFile.is_open()) {
            gCsvFile << "," << naiveParallelTime;
        }
#if _DEBUG
        // Debug builds verify the result against the serial reference map.
        CheckPolygonMap(result_map);
        ComparePolygonMaps(result_map, gResultMap);
#endif
        result_map->clear();
    }
    delete resultMutex;
    if (gCsvFile.is_open()) {
        gCsvFile << "\n";
    }
    // -----------------------------------
}
// Partition a flagged polygon map at x == median. A polygon that straddles
// the median line is placed in BOTH halves, flagged as a duplicate on the
// right so intersections are not counted twice.
template <typename T>
void split_at(Flagged_map_t &in_map,
              Flagged_map_t &left_out,
              Flagged_map_t &right_out,
              const T median) {
    // Pessimistic pre-sizing: every polygon could land in either half.
    left_out.reserve(in_map.size());
    right_out.reserve(in_map.size());

    for (auto &flagged : in_map) {
        RPolygon *poly = flagged.p();
        if (poly->xmax() < median) {
            // wholly left of the split line
            left_out.push_back(flagged);
        }
        else if (poly->xmin() >= median) {
            // wholly right of the split line
            right_out.push_back(flagged);
        }
        else {
            // straddles the split: goes to both sides, duplicate-flagged right
            left_out.push_back(flagged);
            right_out.push_back(RPolygon_flagged(poly, true));
        }
    }
}
// range that splits the maps as well as the range. the flagged_map_t are
// vectors of pointers, and each range owns its maps (has to free them on destruction.)
// Range that splits the maps as well as the numeric range. Each range owns
// flagged views (vectors of pointers) onto the two input maps; splitting the
// range also partitions both maps at the split point via split_at().
template <typename T>
class blocked_range_with_maps {
    typedef oneapi::tbb::blocked_range<T> my_range_type;

private:
    my_range_type my_range;
    Flagged_map_t my_map1;
    Flagged_map_t my_map2;

public:
    // Build the initial (whole-domain) range; wraps every polygon of both
    // maps (skipping index 0, the map-size polygon) as non-duplicate.
    blocked_range_with_maps(T begin,
                            T end,
                            typename my_range_type::size_type my_grainsize,
                            Polygon_map_t *p1,
                            Polygon_map_t *p2)
        : my_range(begin, end, my_grainsize) {
        my_map1.reserve(p1->size());
        my_map2.reserve(p2->size());
        for (int i = 1; i < p1->size(); ++i) {
            my_map1.push_back(RPolygon_flagged(&((*p1)[i]), false));
        }
        for (int i = 1; i < p2->size(); ++i) {
            my_map2.push_back(RPolygon_flagged(&(p2->at(i)), false));
        }
    }

    // copy-constructor required for deep copy of flagged maps. One copy is done at the start of the
    // parallel for.
    blocked_range_with_maps(const blocked_range_with_maps &other)
        : my_range(other.my_range),
          my_map1(other.my_map1),
          my_map2(other.my_map2) {}

    bool empty() const {
        return my_range.empty();
    }

    bool is_divisible() const {
        return my_range.is_divisible();
    }

#if _DEBUG
    // Debug invariant: every polygon held by this range overlaps the
    // range's x-interval.
    void check_my_map() {
        assert(my_range.begin() <= my_range.end());
        for (Flagged_map_t::iterator i = my_map1.begin(); i != my_map1.end(); ++i) {
            RPolygon *rp = i->p();
            assert(rp->xmax() >= my_range.begin());
            assert(rp->xmin() < my_range.end());
        }
        for (Flagged_map_t::iterator i = my_map2.begin(); i != my_map2.end(); ++i) {
            RPolygon *rp = i->p();
            assert(rp->xmax() >= my_range.begin());
            assert(rp->xmin() < my_range.end());
        }
    }

    // Debug helper: dump a flagged map, marking duplicated polygons.
    void dump_map(Flagged_map_t &mapx) {
        std::cout << " ** MAP **\n";
        for (Flagged_map_t::iterator i = mapx.begin(); i != mapx.end(); ++i) {
            std::cout << *(i->p());
            if (i->isDuplicate()) {
                std::cout << " -- is_duplicate";
            }
            std::cout << "\n";
        }
        std::cout << "\n";
    }
#endif

    // Splitting constructor used by parallel_for: this new range takes the
    // upper half [median, high) and lhs_r keeps [low, median); both flagged
    // maps are re-partitioned at the median.
    blocked_range_with_maps(blocked_range_with_maps &lhs_r, oneapi::tbb::split)
        : my_range(my_range_type(lhs_r.my_range, oneapi::tbb::split())) {
        // lhs_r.my_range makes my_range from [median, high) and rhs_r.my_range from [low, median)
        Flagged_map_t original_map1 = lhs_r.my_map1;
        Flagged_map_t original_map2 = lhs_r.my_map2;
        lhs_r.my_map1.clear();
        lhs_r.my_map2.clear();
        split_at(original_map1, lhs_r.my_map1, my_map1, my_range.begin());
        split_at(original_map2, lhs_r.my_map2, my_map2, my_range.begin());
#if _DEBUG
        this->check_my_map();
        lhs_r.check_my_map();
#endif
    }

    const my_range_type &range() const {
        return my_range;
    }

    Flagged_map_t &map1() {
        return my_map1;
    }

    Flagged_map_t &map2() {
        return my_map2;
    }
};
/*!
* @class ApplySplitOverlay
* @brief parallel by columnar strip
*/
/*!
 * @class ApplySplitOverlay
 * @brief parallel by columnar strip
 */
class ApplySplitOverlay {
    Polygon_map_t *m_map1, *m_map2, *m_resultMap;
    oneapi::tbb::spin_mutex *m_rMutex;

public:
    /*!
     * @brief functor for columnar parallel version
     * @param[in] r range of map to be operated on
     */
    void operator()(/*const*/ blocked_range_with_maps<int> &r) const {
#ifdef _DEBUG
        // if we are debugging, serialize the method. That way we can
        // see what is happening in each strip without the interleaving
        // confusing things.
        oneapi::tbb::spin_mutex::scoped_lock lock(*m_rMutex);
        std::cout << std::unitbuf << "From " << r.range().begin() << " to " << r.range().end() - 1
                  << "\n";
#endif
        // get yMapSize
        int r1, g1, b1, r2, g2, b2;
        int myr = -1;
        int myg = -1;
        int myb = -1;
        int i1, i2, i3, yMapSize;
        (*m_map1)[0].get(&i1, &i2, &i3, &yMapSize);

        Flagged_map_t &fmap1 = r.map1();
        Flagged_map_t &fmap2 = r.map2();

        // When intersecting polygons from fmap1 and fmap2, if BOTH are flagged
        // as duplicate, don't add the result to the output map. We can still
        // intersect them, because we are keeping track of how much of the polygon
        // is left over from intersecting, and quitting when the polygon is
        // used up.
        for (unsigned int i = 0; i < fmap1.size(); i++) {
            RPolygon *p1 = fmap1[i].p();
            bool is_dup = fmap1[i].isDuplicate();
            int parea = p1->area();
            p1->getColor(&r1, &g1, &b1);
            for (unsigned int j = 0; (j < fmap2.size()) && (parea > 0); j++) {
                int xl, yl, xh, yh;
                RPolygon *p2 = fmap2[j].p();
                if (PolygonsOverlap(p1, p2, xl, yl, xh, yh)) {
                    if (!(is_dup && fmap2[j].isDuplicate())) {
                        p2->getColor(&r2, &g2, &b2);
                        myr = r1 + r2;
                        myg = g1 + g2;
                        myb = b1 + b2;
#ifdef _DEBUG
#else
                        // Release build: lock only while appending the result
                        // (debug builds already hold the method-wide lock).
                        oneapi::tbb::spin_mutex::scoped_lock lock(*m_rMutex);
#endif
                        (*m_resultMap).push_back(RPolygon(xl, yl, xh, yh, myr, myg, myb));
                    }
                    parea -= (xh - xl + 1) * (yh - yl + 1);
                }
            }
        }
    }

    // Members are initialized in declaration order (m_map1, m_map2,
    // m_resultMap, m_rMutex); the previous list was out of order, which
    // triggers -Wreorder and is misleading about initialization order.
    ApplySplitOverlay(Polygon_map_t *resultMap,
                      Polygon_map_t *map1,
                      Polygon_map_t *map2,
                      oneapi::tbb::spin_mutex *rmutex)
        : m_map1(map1),
          m_map2(map2),
          m_resultMap(resultMap),
          m_rMutex(rmutex) {}
};
/*!
* @brief intersects two maps strip-wise
*
* @param[out] resultMap output map (must be allocated)
* @param[in] polymap1 map to be intersected
* @param[in] polymap2 map to be intersected
*/
void SplitParallelOverlay(Polygon_map_t **result_map,
                          Polygon_map_t *polymap1,
                          Polygon_map_t *polymap2) {
    int nthreads;
    bool automatic_threadcount = false;
    double domainSplitParallelTime;
    oneapi::tbb::tick_count t0, t1;
    oneapi::tbb::spin_mutex *resultMutex;
    // THREADS_UNSET (or an explicit request equal to the default) means the
    // user did not choose a thread count; use the library default and say so.
    if (gThreadsLow == THREADS_UNSET || gThreadsLow == utility::get_default_num_threads()) {
        gThreadsLow = gThreadsHigh = utility::get_default_num_threads();
        automatic_threadcount = true;
    }
    *result_map = new Polygon_map_t;
    // Polygon 0 of an input map encodes the map dimensions.
    RPolygon *p0 = &((*polymap1)[0]);
    int mapxSize, mapySize, ignore1, ignore2;
    p0->get(&ignore1, &ignore2, &mapxSize, &mapySize);
    (*result_map)->reserve(mapxSize * mapySize); // can't be any bigger than this
    resultMutex = new oneapi::tbb::spin_mutex();
    int grain_size;
#ifdef _DEBUG
    grain_size = gMapXSize / 4;
#else
    grain_size = gGrainSize;
#endif
    for (nthreads = gThreadsLow; nthreads <= gThreadsHigh; nthreads++) {
        // Limit the worker count for this timing run.
        oneapi::tbb::global_control c(oneapi::tbb::global_control::max_allowed_parallelism,
                                      nthreads);
        if (gIsGraphicalVersion) {
            RPolygon *xp =
                new RPolygon(0, 0, gMapXSize - 1, gMapYSize - 1, 0, 0, 0); // Clear the output space
            delete xp;
        }
        // push the map size as the first polygon,
        (*result_map)->push_back(RPolygon(0, 0, mapxSize, mapySize));
        t0 = oneapi::tbb::tick_count::now();
        oneapi::tbb::parallel_for(
            blocked_range_with_maps<int>(0, (int)(mapxSize + 1), grain_size, polymap1, polymap2),
            ApplySplitOverlay((*result_map), polymap1, polymap2, resultMutex));
        t1 = oneapi::tbb::tick_count::now();
        domainSplitParallelTime = (t1 - t0).seconds() * 1000;
        std::cout << "Splitting parallel with spin lock and ";
        if (automatic_threadcount)
            std::cout << "automatic";
        else
            std::cout << nthreads;
        std::cout << ((nthreads == 1) ? " thread" : " threads");
        std::cout << " took " << domainSplitParallelTime << " msec : speedup over serial "
                  << (gSerialTime / domainSplitParallelTime) << "\n";
        if (gCsvFile.is_open()) {
            gCsvFile << "," << domainSplitParallelTime;
        }
        // Use #ifdef (not "#if _DEBUG") so builds that define _DEBUG with no
        // value still compile; this also matches the checks above.
#ifdef _DEBUG
        CheckPolygonMap(*result_map);
        ComparePolygonMaps(*result_map, gResultMap);
#endif
        (*result_map)->clear();
    }
    delete resultMutex;
    if (gCsvFile.is_open()) {
        gCsvFile << "\n";
    }
}
class ApplySplitOverlayCV {
    // Input maps and the concurrent output vector; pointers are caller-owned.
    Polygon_map_t *m_map1, *m_map2;
    concurrent_Polygon_map_t *m_resultMap;

public:
    /*!
     * @brief functor for columnar parallel version
     * @param[in] r range of map to be operated on
     */
    void operator()(blocked_range_with_maps<int> &r) const {
        // get yMapSize
        int r1, g1, b1, r2, g2, b2;
        int myr = -1;
        int myg = -1;
        int myb = -1;
        int i1, i2, i3, yMapSize;
        (*m_map1)[0].get(&i1, &i2, &i3, &yMapSize);
        Flagged_map_t &fmap1 = r.map1();
        Flagged_map_t &fmap2 = r.map2();
        // When intersecting polygons from fmap1 and fmap2, if BOTH are flagged
        // as duplicate, don't add the result to the output map. We can still
        // intersect them, because we are keeping track of how much of the polygon
        // is left over from intersecting, and quitting when the polygon is
        // used up.
        for (unsigned int i = 0; i < fmap1.size(); i++) {
            RPolygon *p1 = fmap1[i].p();
            bool is_dup = fmap1[i].isDuplicate();
            int parea = p1->area();
            p1->getColor(&r1, &g1, &b1);
            for (unsigned int j = 0; (j < fmap2.size()) && (parea > 0); j++) {
                int xl, yl, xh, yh;
                RPolygon *p2 = fmap2[j].p();
                if (PolygonsOverlap(p1, p2, xl, yl, xh, yh)) {
                    if (!(is_dup && fmap2[j].isDuplicate())) {
                        p2->getColor(&r2, &g2, &b2);
                        myr = r1 + r2;
                        myg = g1 + g2;
                        myb = b1 + b2;
                        // concurrent_vector::push_back is thread-safe, so no
                        // mutex is needed (contrast with ApplySplitOverlay).
                        (*m_resultMap).push_back(RPolygon(xl, yl, xh, yh, myr, myg, myb));
                    }
                    parea -= (xh - xl + 1) * (yh - yl + 1);
                }
            }
        }
    }

    ApplySplitOverlayCV(concurrent_Polygon_map_t *resultMap,
                        Polygon_map_t *map1,
                        Polygon_map_t *map2)
            : m_resultMap(resultMap),
              m_map1(map1),
              m_map2(map2) {}
};
/*!
* @brief intersects two maps strip-wise, accumulating into a concurrent_vector
*
* @param[out] resultMap output map (must be allocated)
* @param[in] polymap1 map to be intersected
* @param[in] polymap2 map to be intersected
*/
void SplitParallelOverlayCV(concurrent_Polygon_map_t **result_map,
                            Polygon_map_t *polymap1,
                            Polygon_map_t *polymap2) {
    int nthreads;
    bool automatic_threadcount = false;
    double domainSplitParallelTime;
    oneapi::tbb::tick_count t0, t1;
    // THREADS_UNSET (or an explicit request equal to the default) means the
    // user did not choose a thread count; use the library default and say so.
    if (gThreadsLow == THREADS_UNSET || gThreadsLow == utility::get_default_num_threads()) {
        gThreadsLow = gThreadsHigh = utility::get_default_num_threads();
        automatic_threadcount = true;
    }
    *result_map = new concurrent_Polygon_map_t;
    // Polygon 0 of an input map encodes the map dimensions.
    RPolygon *p0 = &((*polymap1)[0]);
    int mapxSize, mapySize, ignore1, ignore2;
    p0->get(&ignore1, &ignore2, &mapxSize, &mapySize);
    // (*result_map)->reserve(mapxSize*mapySize); // can't be any bigger than this
    int grain_size;
#ifdef _DEBUG
    grain_size = gMapXSize / 4;
#else
    grain_size = gGrainSize;
#endif
    for (nthreads = gThreadsLow; nthreads <= gThreadsHigh; nthreads++) {
        // Limit the worker count for this timing run.
        oneapi::tbb::global_control c(oneapi::tbb::global_control::max_allowed_parallelism,
                                      nthreads);
        if (gIsGraphicalVersion) {
            RPolygon *xp =
                new RPolygon(0, 0, gMapXSize - 1, gMapYSize - 1, 0, 0, 0); // Clear the output space
            delete xp;
        }
        // push the map size as the first polygon,
        (*result_map)->push_back(RPolygon(0, 0, mapxSize, mapySize));
        t0 = oneapi::tbb::tick_count::now();
        oneapi::tbb::parallel_for(
            blocked_range_with_maps<int>(0, (int)(mapxSize + 1), grain_size, polymap1, polymap2),
            ApplySplitOverlayCV((*result_map), polymap1, polymap2));
        t1 = oneapi::tbb::tick_count::now();
        domainSplitParallelTime = (t1 - t0).seconds() * 1000;
        std::cout << "Splitting parallel with concurrent_vector and ";
        if (automatic_threadcount)
            std::cout << "automatic";
        else
            std::cout << nthreads;
        std::cout << ((nthreads == 1) ? " thread" : " threads");
        std::cout << " took " << domainSplitParallelTime << " msec : speedup over serial "
                  << (gSerialTime / domainSplitParallelTime) << "\n";
        if (gCsvFile.is_open()) {
            gCsvFile << "," << domainSplitParallelTime;
        }
        // Use #ifdef (not "#if _DEBUG") so builds that define _DEBUG with no
        // value still compile; this also matches the check above.
#ifdef _DEBUG
        {
            // Copy the concurrent vector into a plain map for verification.
            Polygon_map_t s_result_map;
            for (concurrent_Polygon_map_t::const_iterator i = (*result_map)->begin();
                 i != (*result_map)->end();
                 ++i) {
                s_result_map.push_back(*i);
            }
            CheckPolygonMap(&s_result_map);
            ComparePolygonMaps(&s_result_map, gResultMap);
        }
#endif
        (*result_map)->clear();
    }
    if (gCsvFile.is_open()) {
        gCsvFile << "\n";
    }
}
// ------------------------------------------------------
class ApplySplitOverlayETS {
    // Input maps and the per-thread output storage; pointers are caller-owned.
    Polygon_map_t *m_map1, *m_map2;
    ETS_Polygon_map_t *m_resultMap;

public:
    /*!
     * @brief functor for columnar parallel version
     * @param[in] r range of map to be operated on
     */
    void operator()(blocked_range_with_maps<int> &r) const {
        // get yMapSize
        int r1, g1, b1, r2, g2, b2;
        int myr = -1;
        int myg = -1;
        int myb = -1;
        int i1, i2, i3, yMapSize;
        (*m_map1)[0].get(&i1, &i2, &i3, &yMapSize);
        Flagged_map_t &fmap1 = r.map1();
        Flagged_map_t &fmap2 = r.map2();
        // When intersecting polygons from fmap1 and fmap2, if BOTH are flagged
        // as duplicate, don't add the result to the output map. We can still
        // intersect them, because we are keeping track of how much of the polygon
        // is left over from intersecting, and quitting when the polygon is
        // used up.
        for (unsigned int i = 0; i < fmap1.size(); i++) {
            RPolygon *p1 = fmap1[i].p();
            bool is_dup = fmap1[i].isDuplicate();
            int parea = p1->area();
            p1->getColor(&r1, &g1, &b1);
            for (unsigned int j = 0; (j < fmap2.size()) && (parea > 0); j++) {
                int xl, yl, xh, yh;
                RPolygon *p2 = fmap2[j].p();
                if (PolygonsOverlap(p1, p2, xl, yl, xh, yh)) {
                    if (!(is_dup && fmap2[j].isDuplicate())) {
                        p2->getColor(&r2, &g2, &b2);
                        myr = r1 + r2;
                        myg = g1 + g2;
                        myb = b1 + b2;
                        // Append to this thread's private vector; the caller
                        // merges per-thread results later (via flatten2d).
                        (*m_resultMap).local().push_back(RPolygon(xl, yl, xh, yh, myr, myg, myb));
                    }
                    parea -= (xh - xl + 1) * (yh - yl + 1);
                }
            }
        }
    }

    ApplySplitOverlayETS(ETS_Polygon_map_t *resultMap, Polygon_map_t *map1, Polygon_map_t *map2)
            : m_resultMap(resultMap),
              m_map1(map1),
              m_map2(map2) {}
};
/*!
* @brief intersects two maps strip-wise, accumulating into an ets variable
*
* @param[out] resultMap output map (must be allocated)
* @param[in] polymap1 map to be intersected
* @param[in] polymap2 map to be intersected
*/
void SplitParallelOverlayETS(ETS_Polygon_map_t **result_map,
                             Polygon_map_t *polymap1,
                             Polygon_map_t *polymap2) {
    int nthreads;
    bool automatic_threadcount = false;
    double domainSplitParallelTime;
    oneapi::tbb::tick_count t0, t1;
    // THREADS_UNSET (or an explicit request equal to the default) means the
    // user did not choose a thread count; use the library default and say so.
    if (gThreadsLow == THREADS_UNSET || gThreadsLow == utility::get_default_num_threads()) {
        gThreadsLow = gThreadsHigh = utility::get_default_num_threads();
        automatic_threadcount = true;
    }
    *result_map = new ETS_Polygon_map_t;
    // Polygon 0 of an input map encodes the map dimensions.
    RPolygon *p0 = &((*polymap1)[0]);
    int mapxSize, mapySize, ignore1, ignore2;
    p0->get(&ignore1, &ignore2, &mapxSize, &mapySize);
    // (*result_map)->reserve(mapxSize*mapySize); // can't be any bigger than this
    int grain_size;
#ifdef _DEBUG
    grain_size = gMapXSize / 4;
#else
    grain_size = gGrainSize;
#endif
    for (nthreads = gThreadsLow; nthreads <= gThreadsHigh; nthreads++) {
        // Limit the worker count for this timing run.
        oneapi::tbb::global_control c(oneapi::tbb::global_control::max_allowed_parallelism,
                                      nthreads);
        if (gIsGraphicalVersion) {
            RPolygon *xp =
                new RPolygon(0, 0, gMapXSize - 1, gMapYSize - 1, 0, 0, 0); // Clear the output space
            delete xp;
        }
        // push the map size as the first polygon,
        // This polygon needs to be first, so we can push it at the start of a combine.
        // (*result_map)->local.push_back(RPolygon(0,0,mapxSize, mapySize));
        t0 = oneapi::tbb::tick_count::now();
        oneapi::tbb::parallel_for(
            blocked_range_with_maps<int>(0, (int)(mapxSize + 1), grain_size, polymap1, polymap2),
            ApplySplitOverlayETS((*result_map), polymap1, polymap2));
        t1 = oneapi::tbb::tick_count::now();
        domainSplitParallelTime = (t1 - t0).seconds() * 1000;
        std::cout << "Splitting parallel with ETS and ";
        if (automatic_threadcount)
            std::cout << "automatic";
        else
            std::cout << nthreads;
        std::cout << ((nthreads == 1) ? " thread" : " threads");
        std::cout << " took " << domainSplitParallelTime << " msec : speedup over serial "
                  << (gSerialTime / domainSplitParallelTime) << "\n";
        if (gCsvFile.is_open()) {
            gCsvFile << "," << domainSplitParallelTime;
        }
        // Use #ifdef (not "#if _DEBUG") so builds that define _DEBUG with no
        // value still compile; this also matches the check above.
#ifdef _DEBUG
        {
            // Flatten the per-thread vectors into one serial map for checking.
            Polygon_map_t s_result_map;
            oneapi::tbb::flattened2d<ETS_Polygon_map_t> psv = flatten2d(**result_map);
            s_result_map.push_back(RPolygon(0, 0, mapxSize, mapySize));
            for (oneapi::tbb::flattened2d<ETS_Polygon_map_t>::const_iterator ci = psv.begin();
                 ci != psv.end();
                 ++ci) {
                s_result_map.push_back(*ci);
            }
            CheckPolygonMap(&s_result_map);
            ComparePolygonMaps(&s_result_map, gResultMap);
        }
#endif
        (*result_map)->clear();
    }
    if (gCsvFile.is_open()) {
        gCsvFile << "\n";
    }
}
|
def dissatisfaction(farmland):
    """Return the spread between the highest and lowest yield values found
    anywhere in the 2-D grid `farmland`."""
    highest = float('-inf')
    lowest = float('inf')
    for plots in farmland:
        row_high = max(plots)
        row_low = min(plots)
        if row_high > highest:
            highest = row_high
        if row_low < lowest:
            lowest = row_low
    return highest - lowest
<reponame>kerikh/domain_porfolio<filename>db/migrate/20191121172041_create_domain_names.rb
# Creates the domain_names table holding one row per tracked domain:
# the domain string itself and the date it expires.
class CreateDomainNames < ActiveRecord::Migration[6.0]
  def change
    create_table :domain_names do |t|
      t.string :domain_name
      t.date :expiry_date
    end
  end
end
|
import base64
def caesar_cipher_base64(s_b64: str, shift: int) -> str:
    """Decode `s_b64`, Caesar-shift every character of the decoded text that
    belongs to the base64 alphabet by `shift` positions (wrapping mod 64,
    leaving other characters untouched), and return the re-encoded result."""
    alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
    # Precompute the rotation once; characters outside the alphabet map to
    # themselves via dict.get's default.
    rotation = {ch: alphabet[(pos + shift) % 64] for pos, ch in enumerate(alphabet)}
    decoded = base64.b64decode(s_b64).decode('utf-8')
    shifted = ''.join(rotation.get(ch, ch) for ch in decoded)
    return base64.b64encode(shifted.encode('utf-8')).decode('utf-8')
import React from 'react';
import { Feather } from '@expo/vector-icons';
import { useNavigation } from '@react-navigation/native';
import type { NativeStackNavigationProp } from '@react-navigation/native-stack';
import { Skeleton } from 'moti/skeleton';
import * as Atoms from '@components/atoms';
import type { Routes } from '@routes/app.routes';
import type { ArticleProps } from '@types';
import { formatRelativeDate } from '@utils/helpers';
type NavigationParam = NativeStackNavigationProp<Routes, 'LaunchDetail'>;
/**
 * Card for a single news article: thumbnail, title, relative publish time
 * and source site. Tapping the card navigates to the article detail screen.
 */
export function Article({
  article,
  onDailyFeed,
}: {
  article: ArticleProps;
  // When true, adds extra bottom margin (used inside the daily feed list).
  onDailyFeed?: boolean;
}) {
  // NOTE(review): NavigationParam is typed for the 'LaunchDetail' route but
  // we navigate to 'ArticleDetail' below — confirm the intended route name.
  const { navigate } = useNavigation<NavigationParam>();
  // Keeps the skeleton placeholder visible until the remote image loads.
  const [hasLoadedImage, setHasLoadedImage] = React.useState(false);

  return (
    <Atoms.Pressable onPress={() => navigate('ArticleDetail', { article })}>
      <Atoms.Row
        sx={{
          bg: 'secondary',
          mt: '16px',
          mb: onDailyFeed ? '10px' : 0,
          borderRadius: 8,
          overflow: 'hidden',
        }}
      >
        {/* Thumbnail with loading skeleton */}
        <Skeleton show={!hasLoadedImage} radius={0}>
          <Atoms.Image
            source={{
              uri: article.imageUrl,
            }}
            sx={{
              height: 120,
              width: 100,
              borderTopLeftRadius: 8,
              borderBottomLeftRadius: 8,
            }}
            accessibilityLabel={`Published image of the article: ${article.title}`}
            onLoadEnd={() => setHasLoadedImage(true)}
          />
        </Skeleton>
        <Atoms.Box
          sx={{
            flex: 1,
            justifyContent: 'space-between',
            p: '16px',
          }}
        >
          <Atoms.Text
            variant="text-sm"
            sx={{
              color: 'white',
              fontWeight: 'bold',
              fontSize: 11,
            }}
            numberOfLines={3}
          >
            {article.title}
          </Atoms.Text>
          {/* Footer: publish time (left) and news source (right) */}
          <Atoms.Row
            sx={{
              width: '100%',
              alignItems: 'center',
              justifyContent: 'space-between',
            }}
          >
            <Atoms.Row sx={{ flex: 1, alignItems: 'center' }}>
              <Feather name="clock" color="white" />
              <Atoms.Text
                variant="text-xs"
                sx={{
                  color: 'white',
                  fontWeight: 500,
                  fontSize: 8,
                  ml: '6px',
                }}
              >
                {formatRelativeDate(article.publishedAt)}
              </Atoms.Text>
            </Atoms.Row>
            <Atoms.Text
              variant="text-xs"
              sx={{
                flex: 1,
                color: 'white',
                fontWeight: 500,
                fontSize: 8,
                textAlign: 'right',
              }}
            >
              {article.newsSite}
            </Atoms.Text>
          </Atoms.Row>
        </Atoms.Box>
      </Atoms.Row>
    </Atoms.Pressable>
  );
}
|
#pragma once
#include "Object.h"
#include "impl/ShadedPyramid.h"
#include "impl/ShadedSheet.h"
#include "impl/DynamicSphere.h"
|
from bg_atlasapi.list_atlases import show_atlases
from bg_atlasapi.update_atlases import update_atlas, install_atlas
from bg_atlasapi.config import cli_modify_config
import click
@click.command()
@click.argument("command")
@click.option("-s", "--show", is_flag=True)
@click.option("-a", "--atlas_name")
@click.option("-f", "--force", is_flag=True)
@click.option("-k", "--key")
@click.option("-v", "--value")
def bg_cli(
    command, atlas_name=None, force=False, show=False, key=None, value=None
):
    """
    Command line dispatcher. Given a command line call to `brainglobe`
    it calls the correct function, depending on which `command` was passed.

    Arguments:
    ----------
    command: str. Name of the command:
        - list: list available atlases
        - install: install new atlas
        - update: update an installed atlas
        - config: modify config
    show: bool. If True when using `list` shows the local path of installed atlases
        and when using 'config' it prints the modify config results.
    atlas_name: str. Used with `update` and `install`, name of the atlas to install
    force: bool, used with `update`. If True it forces the update
    """
    if command == "list":  # list atlases
        return show_atlases(show_local_path=show)
    elif command == "install":  # install atlas
        if atlas_name is None:
            # -a is mandatory for install; fail with a clear message.
            raise ValueError(
                'No atlas name passed with command "install". Use the "-a" '
                "argument to pass an atlas name"
            )
        return install_atlas(atlas_name=atlas_name)
    elif command == "update":  # update installed atlas
        if atlas_name is None:
            raise ValueError(
                'No atlas name passed with command "update". Use the "-a" '
                "argument to pass an atlas name"
            )
        return update_atlas(atlas_name, force=force)
    elif command == "config":  # update config
        return cli_modify_config(key=key, value=value, show=show)
    else:  # command not recognized
        raise ValueError(
            f'Invalid command {command}. Use "brainglobe -h" for more info.'
        )
|
<reponame>MrDavv/wexindenglu
/**
* Created by fengtaotao on 2017/7/26.
*/
'use strict';
const mongoose = require('mongoose');
const db=require('./db');
const modelName='RoomCardRecord';
// Record of a change to a user's room-card count.
// NOTE(review): field semantics below are inferred from names (pre/cur/after
// numbers around a modification) — confirm against the code that writes them.
const schema=new mongoose.Schema({
    aboutUserId: String,
    modifyType: String,
    preNumber: Number,
    curNumber: Number,
    afterNumber: Number,
    description: String,
    createTime: Date
});
// Records are queried per user, so index on aboutUserId.
schema.index( {aboutUserId : 1} );
module.exports= {
    load:function(){
        let model = mongoose.model(modelName, schema); // bind the model to its collection in the database
        console.log(`模块${modelName}被注册`);
    }
};
#include "mview.h"
#include "LoaderEXR.h"
#include <iostream>
#include <fstream>
#include <Eigen/Dense>
#include <opencv2/core.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/imgcodecs.hpp>
static std::pair<GrayImage, RgbImage> ReadImageFromFile(std::string);
static float ColourToGray(Eigen::Vector3f rgb);
static RgbImage convertOpenCVToRgb(cv::Mat rgbMat);
static const std::string data_directory = "data/output640x480/";
// Path of the file listing the dataset's camera parameters.
std::string dataset_file() {
    return data_directory + "parameter.txt";
}
// Reads one camera per line of `parameter`: "<line>.dat" holds a 3x4
// extrinsics block (read column by column), "<line>.jpg" the image and
// "<line>.exr" the ground truth.
std::vector<CameraParameter> read_dataset(std::istream& parameter) {
    std::vector<CameraParameter> output;
    std::string line;
    for(int i = 0; std::getline(parameter, line); i++) {
        CameraParameter camera;
        std::ifstream matrix_file(line+".dat");
        camera.extrinsics.setIdentity();
        // Distinct row/col names: the original inner loop reused `i`,
        // shadowing the outer line counter.
        for(int col = 0; col < 4; col++)
            for(int row = 0; row < 3; row++)
                matrix_file >> camera.extrinsics(row, col);
        // Normalize the rotation columns (presumably to clean up rounding in
        // the text file — confirm against the dataset generator).
        for(int col = 0; col < 3; col++)
            camera.extrinsics.col(col).normalize();
        // camera.extrinsics = camera.extrinsics.inverse().eval();
        camera.filename = line + ".jpg";
        camera.ground_truth = line + ".exr";
        output.push_back(camera);
    }
    return output;
}
// Loads one view: gray and RGB pixels, ground-truth EXR, and the camera
// extrinsics/intrinsics.
Image read_image(CameraParameter cameraParameter) {
    Image image;
    std::tie(image.gray_pixels, image.rgb_pixels) = ReadImageFromFile(cameraParameter.filename);
    image.extrinsics = cameraParameter.extrinsics;
    // Ground truth is read as a single channel at the image's resolution.
    image.ground_truth = GrayImage(image.gray_pixels.rows(), image.gray_pixels.cols());
    read_openexr(cameraParameter.ground_truth, image.ground_truth.data(),
                 image.gray_pixels.cols(), image.gray_pixels.rows(), 1);
    /*
    INTRINSIC:
    float fx = IMAGE_WIDTH*4.1/4.54, fy = IMAGE_HEIGHT*4.1/3.42,
        cx = IMAGE_WIDTH/2.0, cy = IMAGE_HEIGHT/2.0;
    */
    const float image_width = image.gray_pixels.cols();
    const float image_height = image.gray_pixels.rows();
    std::cout << image_width << "x" << image_height << "\n";
    // Build pinhole intrinsics from the constants above, with the principal
    // point at the image centre.
    image.intrinsics.setIdentity();
    image.intrinsics(0, 0) = image_width*4.1/4.54;
    image.intrinsics(1, 1) = image_height*4.1/3.42;
    image.intrinsics(0, 2) = image_width/2.;
    image.intrinsics(1, 2) = image_height/2.;
    std::cout << image.intrinsics << "\n";
    return image;
}
// Loads an image from disk and returns {grayscale, RGB}, both with float
// channels scaled to [0, 1].
std::pair<GrayImage, RgbImage> ReadImageFromFile(std::string filename) {
    std::cout << filename << std::endl;
    cv::Mat rgb_mat = cv::imread(filename, cv::IMREAD_COLOR);
    // Convert 8-bit channels to float and rescale from [0, 255] to [0, 1].
    rgb_mat.assignTo(rgb_mat, CV_32F);
    rgb_mat /= 255.;
    RgbImage rgb_target = convertOpenCVToRgb(rgb_mat);
    GrayImage gray_target = rgb_target.unaryExpr([](auto pixel) { return ColourToGray(pixel); });
    return {gray_target, rgb_target};
}
float ColourToGray(Eigen::Vector3f rgb) {
    // Undo display gamma, then combine channels with Rec. 709 luma weights.
    static constexpr float GAMMA = 2.2;
    const Eigen::Vector3f linear_rgb = Eigen::Array3f(rgb).pow(GAMMA);
    const Eigen::Vector3f luma_weights { .2126, .7152, .0722 };
    return linear_rgb.dot(luma_weights);
}
// Converts an OpenCV matrix (BGR channel order) into the Eigen-based
// RgbImage layout of per-pixel 3-vectors in RGB order.
RgbImage convertOpenCVToRgb(const cv::Mat imageMat){
    cv::Mat bgr[3];
    cv::split(imageMat, bgr);
    int width = imageMat.cols;
    int height = imageMat.rows;
    GrayImage r(height, width), g(height, width), b(height, width);
    // OpenCV stores channels as B,G,R — reorder while copying to Eigen.
    cv::cv2eigen(bgr[2], r);
    cv::cv2eigen(bgr[1], g);
    cv::cv2eigen(bgr[0], b);
    std::cout << "converted\n";
    // Pack the three planes into one image of per-pixel vectors.
    RgbImage rgbImage (g.rows(), g.cols());
    for(int row=0;row<g.rows();row++){
        for(int col=0;col<g.cols();col++){
            rgbImage(row,col)<<r(row,col),g(row,col),b(row,col);
        }
    }
    return rgbImage;
}
|
<reponame>kaushlakers/punycode.js
'use strict';
const fs = require('fs');
const path = require('path');

// Pattern marking the CommonJS export in the upstream source file.
const exportPattern = /module\.exports = punycode;/;
const source = fs.readFileSync(path.resolve(__dirname, '../punycode.js'), 'utf-8');

// Refuse to publish if the upstream export statement moved or changed.
if (!exportPattern.test(source)) {
	throw new Error('The underlying library has changed. Please update the prepublish script.');
}

// Swap the CommonJS export for an ES-module default export and write the
// ES6 flavour of the library next to the original.
const es6Source = source.replace(exportPattern, 'export default punycode;');
fs.writeFileSync(path.resolve(__dirname, '../punycode.es6.js'), es6Source);
|
#!/usr/bin/env bash
set -euo pipefail
IFS=$'\n\t'

# Run from the repository root (this script lives one level below it).
ROOT="${BASH_SOURCE[0]%/*}/.."
cd "$ROOT"

# Skip on pull request builds
if [[ -n "${CIRCLE_PR_NUMBER:-}" ]]; then
  exit
fi

# Fail fast when the Azure upload credentials are missing.
: "${AZURE_CONTAINER:?AZURE_CONTAINER environment variable is not set}"
: "${AZURE_STORAGE_ACCOUNT:?AZURE_STORAGE_ACCOUNT environment variable is not set}"
: "${AZURE_STORAGE_KEY:?AZURE_STORAGE_KEY environment variable is not set}"

# Tag builds publish under the tag name, master publishes as "canary",
# anything else is not published.
VERSION=
if [[ -n "${CIRCLE_TAG:-}" ]]; then
  VERSION="${CIRCLE_TAG}"
elif [[ "${CIRCLE_BRANCH:-}" == "master" ]]; then
  VERSION="canary"
else
  exit 1
fi

# NOTE(bacongobbler): azure-cli needs a newer version of libffi/libssl. See https://github.com/Azure/azure-cli/issues/3720#issuecomment-350335381
echo "Installing Azure components"
apt-get update && apt-get install -yq python-pip libffi-dev libssl-dev
easy_install pyOpenSSL
pip install --disable-pip-version-check --no-cache-dir azure-cli~=2.0

echo "Building binaries"
make build-cross
VERSION="${VERSION}" make dist checksum
if [[ -n "${CIRCLE_TAG:-}" ]]; then
  VERSION="latest" make dist checksum
fi

echo "Pushing binaries to Azure Blob Storage"
# Quote the patterns: unquoted, the shell would glob-expand them against the
# current directory before az ever sees them.
az storage blob upload-batch --source _dist/ --destination "${AZURE_CONTAINER}" --pattern '*.tar.gz*'
az storage blob upload-batch --source _dist/ --destination "${AZURE_CONTAINER}" --pattern '*.zip*'
|
import os
def generate_plot(data, centre_cor, attributes, naming_varN, naming_simdir):
    """Prepare settings for a plot of `data` centred at `centre_cor`.

    Reads FONT_SIZE and MARKER_SIZE from the environment (defaults 12 and 5),
    unpacks the centre coordinates and the plot attributes. The actual
    plotting and file-naming steps remain as commented examples.
    """
    # os.getenv accepts only (key, default) and returns a string (or the
    # default); convert explicitly. The original `default=`/`type=` keyword
    # arguments raised TypeError.
    font_size = float(os.getenv('FONT_SIZE', 12))
    marker_size = int(os.getenv('MARKER_SIZE', 5))
    # Extract x and y coordinates from centre_cor tuple
    x_zero, y_zero = centre_cor
    # Extract attributes from the attributes dictionary
    attr1 = attributes.get('attr1', 'default_attr1')
    attr2 = attributes.get('attr2', 'default_attr2')
    # Create a plot using the provided data
    # Example: plt.scatter(data[:, 0], data[:, 1])
    # Set font size and marker size for the plot
    # Example: plt.rcParams.update({'font.size': font_size, 'lines.markersize': marker_size})
    # Use the extracted attributes in the plot title
    # Example: plt.title(f'Plot with {attr1} and {attr2}')
    # Utilize the naming_varN and naming_simdir objects for file naming conventions
    # Example: file_name = naming_varN.generate_name(attr1) + '_' + naming_simdir.generate_name(attr2) + '.png'
    # Return the file name for the generated plot
    # return file_name
<gh_stars>1-10
# Usage:
# python3 src/bayesian_pipeline.py --sample GA000000 --admixtures configs/admixtures.csv --window-len 100 data/QuechuaCandelaria_3.GA002786.txt
import time
import argparse
import pandas as pd
import numpy as np
from pathlib import Path
from collections import Counter, namedtuple
Record = namedtuple('Record', ('chrom', 'start', 'end', 'confidence', 'prediction'))
class ModelConfig:
    # Run-wide settings: population list, window size, input/output paths,
    # and the per-sample admixture prior.
    def __init__(self, args, populations):
        self.populations = sorted(populations)
        self.n_pops = len(self.populations)
        self.mode = 'bayes'
        self.window_len = args.window_len
        self._init_paths(args.file, args.output, args.sample)
        self._set_admixtures(args.admixtures)
        # Wall-clock reference for the elapsed-time report in main().
        self.start_time = time.monotonic()

    def _init_paths(self, file, output, sample):
        # Derive the sample name and all output file paths from the input file.
        self.file_path = Path(file)
        self.filename = self.file_path.stem
        if sample:
            self.sample = sample
        else:
            # Without --sample, the input is expected to be "<group>.<sample>.txt".
            self.group, self.sample = self.filename.split('.')
        self.base_path = Path(output) if output else Path(file).parent
        self.base_path.mkdir(exist_ok=True, parents=True)
        self.input_file = str(self.file_path)
        self.snp_file = f'{self.base_path}/{self.sample}_{self.mode}_snp_prob.tsv'
        self.prediction_file = f'{self.base_path}/{self.sample}_{self.mode}_{self.window_len}_predictions.csv'
        self.results_file = f'{self.base_path}/{self.sample}_{self.mode}_{self.window_len}_result.csv'
        self.stats_file = f'{self.base_path}/{self.sample}_{self.mode}_{self.window_len}_stats.csv'

    def _set_admixtures(self, admixtures_file):
        # Load this sample's admixture prior; fall back to a uniform prior.
        if admixtures_file:
            self.admixtures_file = admixtures_file
            df = pd.read_csv(self.admixtures_file, sep=',', index_col=0)
            self.admixtures = df.loc[self.sample, :]
        else:
            self.admixtures_file = None
            self.admixtures = pd.Series([1/self.n_pops] * self.n_pops, index=self.populations)

    @property
    def header(self):
        # Column names of the input allele-frequency table.
        return (f"CHROM POS AF_{self.sample} AF_" + " AF_".join(self.populations)).split()
def run_bayes(config, alpha=0.0001):
    """Write per-SNP population probabilities to config.snp_file.

    The sample's allele frequency is thresholded into three cases
    (> 0.99, < 0.01, heterozygous otherwise) and the corresponding
    Hardy-Weinberg term (p^2, (1-p)^2 or 2p(1-p)) is evaluated for each
    reference population's frequency, smoothed by `alpha` and normalized
    across populations.
    """
    n_pops = config.n_pops
    sample_frequency = 'AF_' + config.sample
    df = pd.read_csv(config.input_file, sep=' ', skiprows=1, names=config.header)
    # Non-numeric sample frequencies (missing calls) are coerced to 0.
    df[sample_frequency] = pd.to_numeric(df[sample_frequency], errors='coerce').fillna(0)
    print(df.info())
    n_snp = len(df)
    with open(config.snp_file, 'w') as f_out:
        f_out.write('CHROM\tPOS\t' + '\t'.join(config.populations) + '\n')
        for i, row in df.iterrows():
            snp_id = f"{int(row['CHROM'])}\t{int(row['POS'])}"
            snp = row[sample_frequency]
            # Columns 3.. hold the reference populations' allele frequencies.
            if snp > 0.99:
                p = (row[3:].values ** 2 + alpha) / (1 + n_pops * alpha)
            elif snp < 0.01:
                p = ((1 - row[3:].values) ** 2 + alpha) / (1 + n_pops * alpha)
            else:
                p = (2 * row[3:].values * (1 - row[3:].values) + alpha) / (1 + n_pops * alpha)
            f_out.write(f'{snp_id}\t' + '\t'.join(map(lambda x: f'{x:.6f}', p / np.sum(p))) + '\n')
            if i % 100000 == 0:
                print(f'Processed {i} / {n_snp}.')
    print(f"Probabilities for each SNP are available at {config.snp_file}")
def solve_region_smoothed(df, populations, pop_prob, prev, chrom, alpha=100):
    """Like solve_region, but biases the prior towards the previous window's
    prediction `prev` with smoothing weight `alpha`.

    Returns a ``chrom,start,end,confidence,prediction`` CSV record and the
    winning population.
    """
    window_start = df.iloc[0, 1]
    window_end = df.iloc[-1, 1]

    def prior(pop):
        # The previous window's winner gets an alpha-boosted prior.
        if pop == prev:
            return (pop_prob[pop] + alpha) / (1 + alpha)
        return pop_prob[pop] / (1 + alpha)

    scores = pd.Series(
        [np.log(df[pop].values).sum() + np.log(prior(pop)) for pop in populations],
        index=populations,
    )
    # Confidence is the ratio of the two best log-scores (ascending sort puts
    # the runner-up first).
    lower, higher = sorted(scores.nlargest(2))
    winner = scores.idxmax()
    return f'{chrom},{window_start},{window_end},{lower/higher:.5f},{winner}', winner
def solve_region(df, populations, pop_prob, prev, chrom):
    """Pick the most likely population for one window of SNP probabilities.

    Returns a ``chrom,start,end,confidence,prediction`` CSV record and the
    winning population; `prev` is unused here (kept for interface symmetry
    with solve_region_smoothed).
    """
    window_start = df.iloc[0, 1]
    window_end = df.iloc[-1, 1]
    scores = pd.Series(
        [np.log(df[pop].values).sum() + np.log(pop_prob[pop]) for pop in populations],
        index=populations,
    )
    # Confidence is the ratio of the two best log-scores (ascending sort puts
    # the runner-up first).
    lower, higher = sorted(scores.nlargest(2))
    winner = scores.idxmax()
    return f'{chrom},{window_start},{window_end},{lower/higher:.5f},{winner}', winner
def process_probabilities(config):
    """Scan config.snp_file in chunks of config.window_len SNPs and write one
    ancestry prediction per window to config.prediction_file.

    A chunk whose first and last rows disagree on chromosome is split at the
    boundary and solved per chromosome.
    """
    result = []
    q = 0
    prev = None
    pop_prob = config.admixtures
    for df in pd.read_csv(config.snp_file, sep='\t', chunksize=config.window_len, dtype={"CHROM": int, "POS": int}):
        chrom_start = df.iloc[0, 0]
        chrom_end = df.iloc[-1, 0]
        if chrom_start == chrom_end:
            rec, prev = solve_region(df, config.populations, pop_prob, prev, chrom_start)
            result.append(rec)
        else:
            # Window crosses a chromosome boundary: solve each part separately.
            for chrom in (chrom_start, chrom_end):
                tdf = df[df['CHROM'] == chrom]
                rec, prev = solve_region(tdf, config.populations, pop_prob, prev, chrom)
                result.append(rec)
        q += 1
        if q % 1000 == 0:
            print(q, 'windows processed.')
    with open(config.prediction_file, 'w') as f_out:
        f_out.write('\n'.join(result))
    print(pop_prob)
def calculate_stats(config):
    """Compare the fraction of windows predicted per population against the
    admixture prior and write both columns to config.stats_file."""
    with open(config.prediction_file) as f:
        lines = f.readlines()
    # Field 4 of each prediction record is the predicted population.
    counts = Counter([line.strip().split(',')[4] for line in lines])
    res = [(pop, counts[pop] / len(lines)) for pop in config.populations]
    series = pd.Series([x[1] for x in res], index=[x[0] for x in res])
    # series.to_csv(config.stats_file, header=False)
    df = pd.DataFrame({'Predicted': series, 'Prior': config.admixtures})
    df.to_csv(config.stats_file, sep='\t')
    print('Total error: ', np.sum((df['Predicted'] - df['Prior']) ** 2))
    print(f"Overall stats are available at {config.stats_file}")
def merge_windows(config):
    """Collapse consecutive window predictions that share a chromosome and
    population into single regions, combining their confidence values by
    multiplication, and write the merged regions to config.results_file."""
    res = []
    with open(config.prediction_file, 'r') as f_in:
        line = f_in.readline()
        prev_record = Record(*line.strip().split(','))
        for line in f_in.readlines():
            record = Record(*line.strip().split(','))
            if prev_record.chrom == record.chrom \
                    and prev_record.prediction == record.prediction:
                # Same region continues: extend the span, combine confidences.
                conf = round(float(prev_record.confidence) * float(record.confidence), 5)
                prev_record = Record(record.chrom, prev_record.start, record.end, conf, record.prediction)
            else:
                res.append(f'{prev_record.chrom},{prev_record.start},{prev_record.end},'
                           f'{prev_record.confidence},{prev_record.prediction}')
                prev_record = record
        # Flush the final open region.
        res.append(f'{prev_record.chrom},{prev_record.start},{prev_record.end},'
                   f'{prev_record.confidence},{prev_record.prediction}')
    with open(config.results_file, 'w') as f_out:
        f_out.write('\n'.join(res))
def main(config):
    """Run the full pipeline: per-SNP probabilities (reused if already on
    disk), windowed predictions, summary stats, and merged regions."""
    # The per-SNP pass is the slow step, so skip it when its output exists.
    if not Path(config.snp_file).exists():
        run_bayes(config)
    process_probabilities(config)
    calculate_stats(config)
    merge_windows(config)
    print(f'Finished in {time.monotonic() - config.start_time} sec.')
if __name__ == '__main__':
    # Command-line entry point: parse arguments, build the config, run.
    parser = argparse.ArgumentParser()
    parser.add_argument("file", help="Input filename")
    parser.add_argument("-o", "--output", default=None,
                        help="Output directory. Will be created automatically. "
                             "If already exists, some files may be modified")
    parser.add_argument("--sample", type=str, default=None,
                        help="Sample name. If not specified will be inferred form input filename")
    parser.add_argument("--window-len", help="Window length to use.", type=int, default=250)
    parser.add_argument("--admixtures", help="Csv file with admixture vectors for sample.",
                        type=str, default=None)
    args = parser.parse_args()
    # Earlier reference-panel population set, kept for reference:
    # populations = [
    #     'Mediterranean', 'NativeAmerican', 'NorthEastAsian', 'NorthernEuropean',
    #     'Oceanian', 'SouthAfrican', 'SouthEastAsian', 'SouthWestAsian', 'SubsaharanAfrican'
    # ]
    population_list = [
        'Amazonian', 'Andamanese', 'Austronesian', 'BrazilianYanomami', 'Dravidian', 'EastAsian', 'EastIndian',
        'Eskimo', 'Malaysian', 'NearEastern', 'NorthernEuropean', 'Papuan', 'PapuanBaining', 'PlillippinoNegrito',
        'SouthAmerican_Chaco', 'SouthAmerican_Quechua', 'SubSaharanAfrican', 'Yeniseyan'
    ]
    model_config = ModelConfig(args, population_list)
    main(model_config)
|
<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = '<NAME> (張道博)'
__copyright__ = 'Copyright (c) 2014, University of Hawaii Smart Energy Project'
__license__ = 'https://raw.github' \
'.com/Hawaii-Smart-Energy-Project/Maui-Smart-Grid/master/BSD' \
'-LICENSE.txt'
import inspect
class MSGPythonUtil(object):
    """
    Utility methods related to the Python language.

    Usage:

    from msg_python_util import MSGPythonUtil
    pythonUtil = MSGPythonUtil()
    """

    def __init__(self):
        """
        Constructor.
        """
        pass

    def callerName(self, skip = 2):
        """
        Get a name of a caller in the format module.class.method.

        `skip` tells how many stack levels to ignore: skip=1 is the direct
        caller, skip=2 the caller's caller, and so on. Returns '' when the
        requested level exceeds the stack height.

        Source: http://code.activestate.com/recipes/578352-get-full-caller
        -name-packagemodulefunction/
        """
        frames = inspect.stack()
        target = 0 + skip
        if len(frames) < target + 1:
            return ''
        caller_frame = frames[target][0]

        parts = []
        mod = inspect.getmodule(caller_frame)
        # `mod` can be None when the frame executes directly in a console.
        # TODO(techtonik): consider using __main__
        if mod:
            parts.append(mod.__name__)
        # An instance method exposes `self`; use it to recover the class name.
        # (A static method call is indistinguishable from a plain function
        # call, so no class can be detected for it.)
        if 'self' in caller_frame.f_locals:
            parts.append(caller_frame.f_locals['self'].__class__.__name__)
        code_name = caller_frame.f_code.co_name
        if code_name != '<module>':  # top level usually
            parts.append(code_name)  # function or a method
        del caller_frame
        return ".".join(parts)
|
<gh_stars>0
/**
* Copyright (c) 2021 OpenLens Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import type { Cluster } from "./cluster";
import type { KubernetesObject } from "@kubernetes/client-node";
import { exec } from "child_process";
import fs from "fs";
import * as yaml from "js-yaml";
import path from "path";
import * as tempy from "tempy";
import logger from "./logger";
import { appEventBus } from "../common/event-bus";
import { cloneJsonObject } from "../common/utils";
export class ResourceApplier {
  constructor(protected cluster: Cluster) {
  }

  /**
   * Sanitizes the given resource and applies it to the cluster with
   * `kubectl apply -o json`.
   *
   * NOTE(review): the declared return type is Promise<string>, but the
   * promise actually resolves with the JSON-parsed kubectl output
   * (see kubectlApply); kept as-is for caller compatibility.
   */
  async apply(resource: KubernetesObject | any): Promise<string> {
    resource = this.sanitizeObject(resource);
    appEventBus.emit({name: "resource", action: "apply"});

    return await this.kubectlApply(yaml.safeDump(resource));
  }

  /**
   * Writes the YAML content to a temp file and runs `kubectl apply` on it.
   * The temp file is removed whether the command succeeds or fails.
   */
  protected async kubectlApply(content: string): Promise<string> {
    const { kubeCtl } = this.cluster;
    const kubectlPath = await kubeCtl.getPath();
    const proxyKubeconfigPath = await this.cluster.getProxyKubeconfigPath();

    return new Promise<string>((resolve, reject) => {
      const fileName = tempy.file({ name: "resource.yaml" });

      fs.writeFileSync(fileName, content);
      const cmd = `"${kubectlPath}" apply --kubeconfig "${proxyKubeconfigPath}" -o json -f "${fileName}"`;

      logger.debug(`shooting manifests with: ${cmd}`);

      // Propagate the cluster's HTTPS proxy preference to kubectl.
      const execEnv: NodeJS.ProcessEnv = Object.assign({}, process.env);
      const httpsProxy = this.cluster.preferences?.httpsProxy;

      if (httpsProxy) {
        execEnv["HTTPS_PROXY"] = httpsProxy;
      }

      exec(cmd, { env: execEnv }, (error, stdout, stderr) => {
        // Always clean up the temp manifest, success or failure.
        fs.unlinkSync(fileName);

        // FIX: reject only when kubectl actually failed (non-zero exit or
        // spawn error). The previous check rejected whenever stderr was
        // non-empty, which turned harmless kubectl warnings into apply
        // failures.
        if (error) {
          reject(stderr || error);

          return;
        }
        resolve(JSON.parse(stdout));
      });
    });
  }

  /**
   * Applies multiple resource manifests in a single kubectl invocation.
   */
  public async kubectlApplyAll(resources: string[], extraArgs = ["-o", "json"]): Promise<string> {
    return this.kubectlCmdAll("apply", resources, extraArgs);
  }

  /**
   * Deletes multiple resource manifests in a single kubectl invocation.
   */
  public async kubectlDeleteAll(resources: string[], extraArgs?: string[]): Promise<string> {
    return this.kubectlCmdAll("delete", resources, extraArgs);
  }

  /**
   * Dumps each manifest into a temp directory and runs the given kubectl
   * sub-command (`apply` / `delete`) against the whole directory.
   *
   * NOTE(review): the temp directory is never removed afterwards — consider
   * cleaning it up once the command completes.
   */
  protected async kubectlCmdAll(subCmd: string, resources: string[], args: string[] = []): Promise<string> {
    const { kubeCtl } = this.cluster;
    const kubectlPath = await kubeCtl.getPath();
    const proxyKubeconfigPath = await this.cluster.getProxyKubeconfigPath();

    return new Promise((resolve, reject) => {
      const tmpDir = tempy.directory();

      // Dump each resource into tmpDir
      resources.forEach((resource, index) => {
        fs.writeFileSync(path.join(tmpDir, `${index}.yaml`), resource);
      });
      args.push("-f", `"${tmpDir}"`);
      const cmd = `"${kubectlPath}" ${subCmd} --kubeconfig "${proxyKubeconfigPath}" ${args.join(" ")}`;

      logger.info(`[RESOURCE-APPLIER] running cmd ${cmd}`);
      exec(cmd, (error, stdout) => {
        if (error) {
          logger.error(`[RESOURCE-APPLIER] cmd errored: ${error}`);

          // kubectl error text ends with `<file>.yaml": <reason>` — surface
          // just the reason when it can be extracted.
          const splitError = error.toString().split(`.yaml": `);

          if (splitError[1]) {
            reject(splitError[1]);
          } else {
            reject(error);
          }

          return;
        }
        resolve(stdout);
      });
    });
  }

  /**
   * Returns a deep copy of the resource with server-managed fields removed
   * (status, resourceVersion, last-applied-configuration annotation) so
   * `kubectl apply` does not conflict on them.
   */
  protected sanitizeObject(resource: KubernetesObject | any) {
    resource = cloneJsonObject(resource);
    delete resource.status;
    delete resource.metadata?.resourceVersion;
    const annotations = resource.metadata?.annotations;

    if (annotations) {
      delete annotations["kubectl.kubernetes.io/last-applied-configuration"];
    }

    return resource;
  }
}
|
<reponame>Duroktar/godsmack
import type { IApplication, IApplicationBuilder, IApplicationCreationService, IContainer, MergeDefaultProviders } from '../interfaces';
import { TerminalInk } from '../services/tui/TerminalInk';
import { Application } from './Application';
import { ApplicationCreationService } from './ApplicationCreationService';
import { Container } from './Container';
import { DockerService } from './Docker';
import { SettingsService } from './Settings';
/**
* The default implementation of the IApplicationBuilder used
* to create and configure new Applications.
*
* @class ApplicationBuilder
*/
export class ApplicationBuilder implements IApplicationBuilder {
  /**
   * Creates a new Application and registers the framework's default
   * providers on its container.
   *
   * @static
   * @returns An Application instance
   */
  static Create<T extends IContainer<any>>(
    service: IApplicationCreationService<T>,
  ): IApplication<MergeDefaultProviders<T>> {
    // https://www.youtube.com/watch?v=oHg5SJYRHA0
    const app = new Application(new Container())
    const registry = app.container

    // Core framework services, plus the app, the creation service and the
    // container itself registered as pre-built singleton instances.
    registry.addSingleton(SettingsService)
    registry.addSingletonInstance(Application, app)
    registry.addSingletonInstance(ApplicationCreationService, service)
    registry.addSingletonInstance(Container, app.container)
    registry.addSingleton(DockerService)
    registry.addSingleton(TerminalInk)

    return app
  }
}
|
#!/bin/bash
# Train an SGC model on the Alibaba "buy" dataset via main.py.
# Input files: user graph edges, user field features, purchase labels.
edge_path='./input/ali_data/user_edge.csv'
field_path='./input/ali_data/user_field.npy'
target_path='./input/ali_data/user_buy.csv'
code_path='./main.py'
gpus=0
# Model configuration: plain SGC graph layer with 8 propagation hops; all
# optional modules (refining, bi-interaction, NFM, aggregation) disabled.
gnn_units='none'
gnn_hops=8
graph_layer='sgc'
graph_refining='none'
grn_units='none'
bi_interaction='none'
nfm_units='none'
aggr_style='none'
# sh sh/ali_buy/SGC.sh
# Optimisation hyper-parameters.
learning_rate=0.1
weight_decay=0.0
dropout=0.5
printf "\n#### learning_rate=$learning_rate, weight_decay=$weight_decay, dropout=$dropout ####\n"
CUDA_VISIBLE_DEVICES=$gpus python $code_path --seed 42 --epochs 9999 --weight-balanced True \
--learning-rate $learning_rate --weight-decay $weight_decay --dropout $dropout \
--graph-refining $graph_refining --aggr-pooling mean --grn-units $grn_units \
--bi-interaction $bi_interaction --nfm-units $nfm_units \
--graph-layer $graph_layer --gnn-hops $gnn_hops --gnn-units $gnn_units \
--aggr-style $aggr_style \
--edge-path $edge_path --field-path $field_path --target-path $target_path |
<filename>opener_test.go<gh_stars>0
package main
import (
"errors"
"fmt"
"io"
"math/rand"
"net"
"path/filepath"
"testing"
)
// TestOpenerOptionsValidate checks that OpenerOptions.Validate accepts the
// unix and tcp networks and rejects anything else with a descriptive error.
func TestOpenerOptionsValidate(t *testing.T) {
	tt := []struct {
		test        string
		o           *OpenerOptions
		expectedErr string
	}{
		{
			"unix domain socket can be used",
			&OpenerOptions{
				Network: "unix",
				Address: filepath.Join("/", "tmp", fmt.Sprintf("%03d", rand.Intn(1000)), "copier.sock"),
			},
			"",
		},
		{
			"tcp can be used",
			&OpenerOptions{
				Network: "tcp",
				Address: "127.0.0.1:8888",
			},
			"",
		},
		{
			"udp cannot be used",
			&OpenerOptions{
				Network: "udp",
				Address: "127.0.0.1:8888",
			},
			"allowed network are: unix,tcp",
		},
	}
	for _, tc := range tt {
		t.Run(tc.test, func(t *testing.T) {
			err := tc.o.Validate()
			if err == nil {
				if tc.expectedErr != "" {
					// FIX: the old message printed the (nil) actual error as
					// the value; report the expected error that was missing.
					t.Errorf("expect err %q, but actual nil", tc.expectedErr)
				}
			} else {
				if tc.expectedErr != err.Error() {
					t.Errorf("expect err %q, but actual %q", tc.expectedErr, err)
				}
			}
		})
	}
}
// TestHandleConnection drives handleConnection over a real loopback TCP
// connection: the client sends "ping\n" and the test asserts what, if
// anything, the server writes back, depending on whether the injected
// openURL hook succeeds or fails.
func TestHandleConnection(t *testing.T) {
	tt := []struct {
		test        string
		openURLFunc func(string) (string, error)
		data        string
		err         error
	}{
		{
			// When openURL succeeds the server sends nothing, so the
			// client's Read returns io.EOF once the connection closes.
			"Say nothing when successful",
			func(line string) (string, error) {
				return "pong\n", nil
			},
			"",
			io.EOF,
		},
		{
			// When openURL fails the server echoes its output back so the
			// failure can be inspected by the client.
			"Sending back the logs when failure",
			func(line string) (string, error) {
				return "pong\n", errors.New("exit status 1")
			},
			"pong\n",
			nil,
		},
	}
	// One shared listener for both sub-tests; port 0 picks a free port.
	ln, _ := net.Listen("tcp", "127.0.0.1:0")
	defer ln.Close()
	for _, tc := range tt {
		t.Run(tc.test, func(t *testing.T) {
			// handleConnection reads the package-level openURL hook.
			openURL = tc.openURLFunc
			go func() {
				conn, _ := ln.Accept()
				go handleConnection(conn, io.Discard)
			}()
			client, err := net.Dial("tcp", ln.Addr().String())
			if err != nil {
				t.Fatal(err)
			}
			defer client.Close()
			if _, err := client.Write([]byte("ping\n")); err != nil {
				t.Fatal(err)
			}
			buf := make([]byte, 1024)
			n, err := client.Read(buf)
			data := string(buf[:n])
			if tc.data != data {
				t.Errorf("expect %q, but actual %q", tc.data, data)
			}
			if tc.err != err {
				t.Errorf("expect %v, but actual %v", tc.err, err)
			}
		})
	}
}
|
package com.share.system.data.dao;
import com.share.system.data.entity.ResourceInfo;
import com.share.system.data.entity.TimeQuery;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface ResourceInfoDao {
    /**
     * Inserts a new resource record.
     *
     * @param resourceInfo the resource to insert
     * @return the number of affected rows
     */
    Integer insertResourceInfo(@Param("resourceInfo") ResourceInfo resourceInfo);

    /**
     * Deletes a resource by its id.
     *
     * @param id id of the resource to delete
     * @return the number of affected rows
     */
    Integer deleteResourceInfo(Integer id);

    /**
     * Queries resources matching the non-null fields of the given example
     * (multi-condition query).
     *
     * @param resourceInfo example object carrying the filter conditions
     * @return the matching resources
     */
    List<ResourceInfo> selectByResourceInfo(@Param("resourceInfo") ResourceInfo resourceInfo);

    /**
     * Updates an existing resource record.
     *
     * @param resourceInfo the resource with updated values
     * @return the number of affected rows
     */
    Integer updateResourceInfo(@Param("resourceInfo") ResourceInfo resourceInfo);

    /**
     * Returns all resources.
     */
    List<ResourceInfo> selectAll();

    /**
     * Finds a single resource by its id.
     */
    ResourceInfo selectByid(Integer id);

    /**
     * Queries resources by lease time range.
     */
    List<ResourceInfo> selectByLeaseTime(@Param("timeQuery")TimeQuery timeQuery);

    /**
     * Queries resources by appointment time range.
     */
    List<ResourceInfo> selectByAppointmentTime(@Param("timeQuery")TimeQuery timeQuery);

    /**
     * Queries resources by overdue/return time range.
     */
    List<ResourceInfo> selectByBackTime(@Param("timeQuery")TimeQuery timeQuery);

    /**
     * Marks a resource as returned.
     *
     * @param resourceInfo the resource being returned
     * @return the number of affected rows
     */
    Integer backResource(@Param("resourceInfo") ResourceInfo resourceInfo);
}
|
#!/bin/bash
###########################################
# (c) 2016-2018 FORTH-ICS
# Author: Polyvios Pratikakis
# Email: polyvios@ics.forth.gr
###########################################
# List-crawler loop: runs addlists.py roughly once an hour for as long as
# the $DATADIR/runcrawler flag file exists.  Expects CRAWLERDIR, CRAWLERBIN
# and DATADIR to be set in the environment.
cd $CRAWLERDIR
# Record our PID so the crawler can be stopped/monitored externally.
echo $$ > $DATADIR/crawl-lists2.pid
# The runcrawler flag file acts as the on/off switch for the loop.
if [ -f $DATADIR/runcrawler ]; then
true;
else
echo "Crawler finished"
rm $DATADIR/crawl-lists2.pid
exit
fi
date
$CRAWLERBIN/addlists.py --id --all --memberships
sleep 1h
# Re-source this same script: effectively an hourly loop via tail-recursion.
. $CRAWLERDIR/scripts/crawl-lists2.sh
|
import unittest
from script import xmlScript as xml
class xml_test_1(unittest.TestCase):
    """Round-trip tests for xmlScript's createXml / saveOrder / getOrder."""

    def test_SaveOneElement(self):
        # An order stored as [time, customer, product] comes back with the
        # customer id (as a string) at index 1 of the first saved row.
        sheet_name = 'ocr'
        order = ['00:55', 10017, '3 pc black 1 fuse r (1213)']
        xml.createXml(sheet_name)
        xml.saveOrder(order)
        saved = xml.getOrder()
        self.assertEqual('10017', saved[0][1])

    def test_SaveOneElementFail(self):
        # Negative counterpart: a different customer id must not match.
        sheet_name = 'ocr'
        order = ['00:55', 10017, '3 pc black 1 fuse r (1213)']
        xml.createXml(sheet_name)
        xml.saveOrder(order)
        saved = xml.getOrder()
        self.assertNotEqual('10018', saved[0][1])

    def test_SaveListElement(self):
        # With a list-valued product the layout is [customer, time, [product]]
        # and the product string is split into quantity and description.
        sheet_name = 'ocr'
        order = [10017, '00:55', ['3 pc black 1 fuse r (1213)']]
        xml.createXml(sheet_name)
        xml.saveOrder(order)
        saved = xml.getOrder()
        print(saved[0][3])
        self.assertEqual('10017', saved[0][2])
        self.assertEqual('00:55', saved[0][1])
        self.assertEqual('3', saved[0][3])
        self.assertEqual('Black 1 fuse R (1213)', saved[0][4])

    def test_SaveListElementFail(self):
        # Negative counterpart of the list-element round-trip.
        sheet_name = 'ocr'
        order = [10017, '00:55', ['3 pc black 1 fuse r (1213)']]
        xml.createXml(sheet_name)
        xml.saveOrder(order)
        saved = xml.getOrder()
        self.assertNotEqual('10018', saved[0][2])
        self.assertNotEqual('00:50', saved[0][1])
        self.assertNotEqual('2', saved[0][3])
        self.assertNotEqual('Black 1 fuse L (1213)', saved[0][4])
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
<reponame>HarshPatel5940/DataHandlingProject
import csv
from DHP.context import cprint
from DHP.paths import user_file_path
def write_user(id11, name, password):
    """Append one user record (id, name, password) to the users CSV file."""
    with open(user_file_path, "a", newline="") as csv_file:
        cprint("Data Is OK", "green")
        writer = csv.writer(csv_file)
        writer.writerow([id11, name, password])
|
<reponame>richardmarston/cim4j
package cim4j;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import cim4j.SeasonDayTypeSchedule;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
import cim4j.ConformLoadGroup;
/*
A curve of load versus time (X-axis) showing the active power values (Y1-axis) and reactive power (Y2-axis) for each unit of the period covered. This curve represents a typical pattern of load over the time period for a given day type and season.
*/
// NOTE(review): this class follows the generator pattern used throughout
// cim4j — attributes are stored in arrays indexed by enum ordinal, and
// unknown attribute names are delegated up the superclass chain.
public class ConformLoadSchedule extends SeasonDayTypeSchedule
{
    // Class-valued (association) attributes, one slot per enum constant.
    private BaseClass[] ConformLoadSchedule_class_attributes;
    // Primitive-valued attributes, one slot per enum constant.
    private BaseClass[] ConformLoadSchedule_primitive_attributes;
    // RDF identifier of this element; used by the short toString form.
    private java.lang.String rdfid;

    public void setRdfid(java.lang.String id) {
        rdfid = id;
    }

    // Internal factory contract for building primitive attribute wrappers
    // from their string representation.
    private abstract interface PrimitiveBuilder {
        public abstract BaseClass construct(java.lang.String value);
    };

    // ConformLoadSchedule declares no primitive attributes of its own, so
    // this enum contains only the LAST_ENUM sentinel (its builder is unused).
    private enum ConformLoadSchedule_primitive_builder implements PrimitiveBuilder {
        LAST_ENUM() {
            public BaseClass construct (java.lang.String value) {
                return new cim4j.Integer("0");
            }
        };
    }

    // Class-valued attributes: the owning ConformLoadGroup association.
    private enum ConformLoadSchedule_class_attributes_enum {
        ConformLoadGroup,
        LAST_ENUM;
    }

    public ConformLoadSchedule() {
        ConformLoadSchedule_primitive_attributes = new BaseClass[ConformLoadSchedule_primitive_builder.values().length];
        ConformLoadSchedule_class_attributes = new BaseClass[ConformLoadSchedule_class_attributes_enum.values().length];
    }

    // Stores a class-valued attribute in its ordinal slot.
    public void updateAttributeInArray(ConformLoadSchedule_class_attributes_enum attrEnum, BaseClass value) {
        try {
            ConformLoadSchedule_class_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Stores a primitive-valued attribute in its ordinal slot.
    public void updateAttributeInArray(ConformLoadSchedule_primitive_builder attrEnum, BaseClass value) {
        try {
            ConformLoadSchedule_primitive_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Sets a class-valued attribute by name; names not declared on this
    // class are delegated to the superclass.
    public void setAttribute(java.lang.String attrName, BaseClass value) {
        try {
            ConformLoadSchedule_class_attributes_enum attrEnum = ConformLoadSchedule_class_attributes_enum.valueOf(attrName);
            updateAttributeInArray(attrEnum, value);
            System.out.println("Updated ConformLoadSchedule, setting " + attrName);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    /* If the attribute is a String, it is a primitive and we will make it into a BaseClass */
    public void setAttribute(java.lang.String attrName, java.lang.String value) {
        try {
            ConformLoadSchedule_primitive_builder attrEnum = ConformLoadSchedule_primitive_builder.valueOf(attrName);
            updateAttributeInArray(attrEnum, attrEnum.construct(value));
            System.out.println("Updated ConformLoadSchedule, setting " + attrName + " to: " + value);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    // Renders this element: a full attribute dump (including superclass
    // attributes) when topClass is true, otherwise a one-line RDFID ref.
    public java.lang.String toString(boolean topClass) {
        java.lang.String result = "";
        java.lang.String indent = "";
        if (topClass) {
            for (ConformLoadSchedule_primitive_builder attrEnum: ConformLoadSchedule_primitive_builder.values()) {
                BaseClass bc = ConformLoadSchedule_primitive_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += " ConformLoadSchedule." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            for (ConformLoadSchedule_class_attributes_enum attrEnum: ConformLoadSchedule_class_attributes_enum.values()) {
                BaseClass bc = ConformLoadSchedule_class_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += " ConformLoadSchedule." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            result += super.toString(true);
        }
        else {
            result += "(ConformLoadSchedule) RDFID: " + rdfid;
        }
        return result;
    }

    public final java.lang.String debugName = "ConformLoadSchedule";

    public java.lang.String debugString()
    {
        return debugName;
    }

    // Scalar-value setter hook; this class holds no scalar value of its own.
    public void setValue(java.lang.String s) {
        System.out.println(debugString() + " is not sure what to do with " + s);
    }

    // Factory method used by the deserializer to create fresh instances.
    public BaseClass construct() {
        return new ConformLoadSchedule();
    }
};
|
package handlers
import (
"encoding/json"
"net/http"
"reflect"
"github.com/gorilla/mux"
corev2 "github.com/sensu/sensu-go/api/core/v2"
"github.com/sensu/sensu-go/backend/apid/actions"
"github.com/sensu/sensu-go/backend/store"
)
// CreateResource creates the resource given in the request body but only if
// it does not already exist.
//
// Flow: decode the JSON body into a fresh instance of the handler's resource
// type, validate its metadata against the URL variables, then store it.
// A (nil, nil) return means the resource was created successfully.
func (h Handlers) CreateResource(r *http.Request) (interface{}, error) {
	// Allocate a new value of the concrete type behind h.Resource.
	payload := reflect.New(reflect.TypeOf(h.Resource).Elem())
	if err := json.NewDecoder(r.Body).Decode(payload.Interface()); err != nil {
		return nil, actions.NewError(actions.InvalidArgument, err)
	}
	// Ensure the object's metadata is consistent with the "id" URL variable.
	if err := CheckMeta(payload.Interface(), mux.Vars(r), "id"); err != nil {
		return nil, actions.NewError(actions.InvalidArgument, err)
	}
	resource, ok := payload.Interface().(corev2.Resource)
	if !ok {
		return nil, actions.NewErrorf(actions.InvalidArgument)
	}
	if err := h.Store.CreateResource(r.Context(), resource); err != nil {
		// Map store-level failures onto API error codes.
		switch err := err.(type) {
		case *store.ErrAlreadyExists:
			return nil, actions.NewErrorf(actions.AlreadyExistsErr)
		case *store.ErrNotValid:
			return nil, actions.NewErrorf(actions.InvalidArgument)
		default:
			return nil, actions.NewError(actions.InternalErr, err)
		}
	}
	return nil, nil
}
|
<gh_stars>0
// Doxygen-generated search index: maps the search term "json11" to the
// json11 namespace documentation page. Do not edit by hand.
var searchData=
[
  ['json11',['json11',['../namespacejson11.html',1,'']]]
];
|
<gh_stars>100-1000
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.hammer.validation.issues;
import org.eclipse.swt.graphics.Image;
import com.archimatetool.model.IArchimateConcept;
import com.archimatetool.model.IDiagramModelArchimateComponent;
/**
* Issue Type
*
* @author <NAME>
*/
public abstract class AbstractIssueType implements IIssue {

    // Backing fields; empty-string defaults avoid null checks in getters.
    private String fName = ""; //$NON-NLS-1$
    private String fDescription = ""; //$NON-NLS-1$
    private String fExplanation = ""; //$NON-NLS-1$

    // The model object this issue refers to.
    private Object fObject;

    protected AbstractIssueType() {
    }

    /**
     * @param name The name of the Issue
     * @param description The description of the Issue
     * @param explanation The explanation of the Issue
     * @param obj The object in question
     */
    protected AbstractIssueType(String name, String description, String explanation, Object obj) {
        setName(name);
        setDescription(description);
        setExplanation(explanation);
        setObject(obj);
    }

    @Override
    public void setName(String name) {
        fName = name;
    }

    @Override
    public String getName() {
        return fName;
    }

    @Override
    public void setDescription(String description) {
        fDescription = description;
    }

    @Override
    public String getDescription() {
        return fDescription;
    }

    @Override
    public void setExplanation(String explanation) {
        fExplanation = explanation;
    }

    @Override
    public String getExplanation() {
        return fExplanation;
    }

    @Override
    public void setObject(Object obj) {
        fObject = obj;
    }

    @Override
    public Object getObject() {
        return fObject;
    }

    /**
     * Default: no image; subclasses may override.
     */
    @Override
    public Image getImage() {
        return null;
    }

    @Override
    public String getHelpHintTitle() {
        return getName();
    }

    @Override
    public String getHelpHintContent() {
        return getExplanation();
    }

    @Override
    @SuppressWarnings({"rawtypes", "unchecked"})
    public Object getAdapter(Class adapter) {
        // These are to update the Properties View...
        if(adapter == null) {
            return null;
        }

        Object object = getObject();
        if(object == null) {
            return null;
        }

        // Return the object
        if(adapter.isInstance(object)) {
            return object;
        }

        // Archimate concept inside of diagram component
        if(object instanceof IDiagramModelArchimateComponent) {
            IArchimateConcept concept = ((IDiagramModelArchimateComponent)object).getArchimateConcept();
            if(concept != null && adapter.isAssignableFrom(concept.getClass())) {
                return concept;
            }
        }

        return null;
    }
}
|
<gh_stars>0
require "rake_migration/migration"
require 'rake_migration/version'
require 'rake_migration/task_migration'
require "rake_migration/railtie" if defined?(Rails)
# Top-level namespace for the rake_migration gem.
module RakeMigration
  # Base error class for all gem-specific failures.
  class Error < StandardError
  end
end
|
<filename>pkg/orchestration/wiring/wiringutil/oap/types.go
package oap
import (
"encoding/json"
"github.com/atlassian/voyager"
)
// EnvVarPrefix is a prefix applied to environment variable names.
// NOTE(review): exact usage not visible in this file — confirm at call sites.
type EnvVarPrefix string

// ResourceType names a kind of provisioned resource.
type ResourceType string

// CfnTemplate holds a CloudFormation template reference/body.
// NOTE(review): inferred from the "Cfn" name — confirm at call sites.
type CfnTemplate string

// ServiceInstanceSpec describes a service instance: the owning service,
// the resource to provision, and its environment settings.
type ServiceInstanceSpec struct {
	ServiceName voyager.ServiceName `json:"serviceName"`
	Resource    RPSResource         `json:"resource"`
	Environment ServiceEnvironment  `json:"environment"`
}

// ServiceEnvironment carries per-service environment configuration:
// alerting endpoints, tags, security group and the primary VPC settings.
type ServiceEnvironment struct {
	NotificationEmail            string                 `json:"notificationEmail,omitempty"`
	LowPriorityPagerdutyEndpoint string                 `json:"lowPriorityPagerdutyEndpoint,omitempty"`
	PagerdutyEndpoint            string                 `json:"pagerdutyEndpoint,omitempty"`
	Tags                         map[voyager.Tag]string `json:"tags,omitempty"`
	ServiceSecurityGroup         string                 `json:"serviceSecurityGroup,omitempty"`
	PrimaryVpcEnvironment        *VPCEnvironment        `json:"primaryVpcEnvironment,omitempty"`
	Fallback                     *bool                  `json:"fallback,omitempty"`
}

// VPCEnvironment describes the networking environment of a VPC: identifiers,
// DNS zones, security groups, subnets, zones and region.
type VPCEnvironment struct {
	VPCID                 string   `json:"vpcId,omitempty"`
	PrivateDNSZone        string   `json:"privateDnsZone,omitempty"`
	PrivatePaasDNSZone    string   `json:"privatePaasDnsZone,omitempty"`
	ServiceSecurityGroup  string   `json:"serviceSecurityGroup,omitempty"`
	InstanceSecurityGroup string   `json:"instanceSecurityGroup,omitempty"`
	JumpboxSecurityGroup  string   `json:"jumpboxSecurityGroup,omitempty"`
	SSLCertificateID      string   `json:"sslCertificateId,omitempty"`
	Label                 string   `json:"label,omitempty"`
	AppSubnets            []string `json:"appSubnets,omitempty"`
	Zones                 []string `json:"zones,omitempty"`
	Region                string   `json:"region,omitempty"`
	EMRSubnet             string   `json:"emrSubnet,omitempty"`
}

// RPSResource identifies a resource by type and name; Attributes and Alarms
// are passed through as raw JSON for downstream interpretation.
type RPSResource struct {
	Type       string          `json:"type"`
	Name       string          `json:"name"`
	Attributes json.RawMessage `json:"attributes,omitempty"`
	Alarms     json.RawMessage `json:"alarms,omitempty"`
}
|
#!/bin/bash
#
# This script generates from driver files fake C sources to be passed
# through a C preprocessor to get the actual Java sources. It expects
# as arguments the name of the driver and the name of the file to be
# generated.
#
# The types we specialise to (these are actual Java types, so references appear here as Object).
# Parallel lookup tables for type specialisation.  All arrays are indexed by
# the same type code: 0..7 are the Java primitives, 8 is Object, 9 Reference.
# The types we specialise to (these are actual Java types, so references appear here as Object).
TYPE=(boolean byte short int long char float double Object Object)
# The capitalized types used to build class and method names (now references appear as Reference).
TYPE_CAP=(Boolean Byte Short Int Long Char Float Double Object Reference)
# Much like $TYPE_CAP, by the place occupied by Reference is now occupied by Object.
TYPE_CAP2=(Boolean Byte Short Int Long Char Float Double Object Object)
# Much like $TYPE_CAP, but object type get the empty string.
TYPE_STD=(Boolean Byte Short Int Long Char Float Double "" "")
# The upper case types used to build class and method names.
TYPE_UC=(BOOLEAN BYTE SHORT INT LONG CHAR FLOAT DOUBLE OBJECT REFERENCE)
# The downcased types used to build method names.
TYPE_LC=(boolean byte short int long char float double object reference)
# Much like $TYPE_LC, by the place occupied by reference is now occupied by object.
TYPE_LC2=(boolean byte short int long char float double object object)
# The corresponding classes (in few cases, there are differences with $TYPE_CAP).
CLASS=(Boolean Byte Short Integer Long Character Float Double Object Reference)
export LC_ALL=C
shopt -s extglob
# Derive the class name from the target file name, e.g.
# "AbstractInt2ObjectMap.java" -> abstract=Abstract, class=Int2ObjectMap.
file=${2##*/}
name=${file%.*}
class=${name#Abstract}
if [[ "$class" == "$name" ]]; then
abstract=
else
abstract=Abstract
fi
class=${class#Striped}
# Now we rip off the types.
rem=${class##[A-Z]+([a-z])}
keylen=$(( ${#class} - ${#rem} ))
root=$rem
KEY_TYPE_CAP=${class:0:$keylen}
VALUE_TYPE_CAP=Object # Just for filling holes
# A "2" after the key type (e.g. Int2Object...) marks a two-type container,
# so a value type follows the digit.
if [[ "${rem:0:1}" == "2" ]]; then
isFunction=true
rem=${rem:1}
rem2=${rem##[A-Z]+([a-z])}
valuelen=$(( ${#rem} - ${#rem2} ))
VALUE_TYPE_CAP=${rem:0:$valuelen}
root=$rem2
else
isFunction=false
fi
# Map the capitalised type names back to the numeric type codes k (key) and
# v (value) that index the lookup tables above.
for((k=0; k<${#TYPE_CAP[*]}; k++)); do
if [[ ${TYPE_CAP[$k]} == $KEY_TYPE_CAP ]]; then break; fi;
done
for((v=0; v<${#TYPE_CAP[*]}; v++)); do
if [[ ${TYPE_CAP[$v]} == $VALUE_TYPE_CAP ]]; then break; fi;
done
# Linked variants get extra macros that pack the prev/next 32-bit pointers
# of a doubly-linked structure into a single 64-bit field.
if [[ $root == *Linked* ]]; then
Linked=Linked
echo -e \
"#define SET_PREV( f64, p32 ) SET_UPPER( f64, p32 )\n"\
"#define SET_NEXT( f64, n32 ) SET_LOWER( f64, n32 )\n"\
"#define COPY_PREV( f64, p64 ) SET_UPPER64( f64, p64 )\n"\
"#define COPY_NEXT( f64, n64 ) SET_LOWER64( f64, n64 )\n"\
"#define GET_PREV( f64 ) GET_UPPER( f64 )\n"\
"#define GET_NEXT( f64 ) GET_LOWER( f64 )\n"\
"#define SET_UPPER_LOWER( f64, up32, low32 ) f64 = ( ( up32 & 0xFFFFFFFFL ) << 32 ) | ( low32 & 0xFFFFFFFFL )\n"\
"#define SET_UPPER( f64, up32 ) f64 ^= ( ( f64 ^ ( ( up32 & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L )\n"\
"#define SET_LOWER( f64, low32 ) f64 ^= ( ( f64 ^ ( low32 & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL )\n"\
"#define SET_UPPER64( f64, up64 ) f64 ^= ( ( f64 ^ ( up64 & 0xFFFFFFFF00000000L ) ) & 0xFFFFFFFF00000000L )\n"\
"#define SET_LOWER64( f64, low64 ) f64 ^= ( ( f64 ^ ( low64 & 0xFFFFFFFFL ) ) & 0xFFFFFFFFL )\n"\
"#define GET_UPPER( f64 ) (int) ( f64 >>> 32 )\n"\
"#define GET_LOWER( f64 ) (int) f64\n"
fi
# Custom variants (user-supplied hash strategy) just set a marker variable.
if [[ $root == *Custom* ]]; then Custom=Custom; fi
echo -e \
\
\
"/* Generic definitions */\n"\
\
\
"${Linked:+#define Linked}\n"\
"${Custom:+#define Custom}\n"\
"#define PACKAGE it.unimi.dsi.fastutil.${TYPE_LC2[$k]}s\n"\
"#define VALUE_PACKAGE it.unimi.dsi.fastutil.${TYPE_LC2[$v]}s\n"\
\
\
"/* Assertions (useful to generate conditional code) */\n"\
\
\
$(if [[ "${CLASS[$k]}" != "" ]]; then\
echo "#unassert keyclass\\n#assert keyclass(${CLASS[$k]})\\n#unassert keys\\n";\
if [[ "${CLASS[$k]}" != "Object" && "${CLASS[$k]}" != "Reference" ]]; then\
echo "#assert keys(primitive)\\n";\
else\
echo "#assert keys(reference)\\n";\
fi;\
fi)\
$(if [[ "${CLASS[$v]}" != "" ]]; then\
echo "#unassert valueclass\\n#assert valueclass(${CLASS[$v]})\\n#unassert values\\n";\
if [[ "${CLASS[$v]}" != "Object" && "${CLASS[$v]}" != "Reference" ]]; then\
echo "#assert values(primitive)\\n";\
else\
echo "#assert values(reference)\\n";\
fi;\
fi)\
\
\
"/* Current type and class (and size, if applicable) */\n"\
\
\
"#define KEY_TYPE ${TYPE[$k]}\n"\
"#define VALUE_TYPE ${TYPE[$v]}\n"\
"#define KEY_CLASS ${CLASS[$k]}\n"\
"#define VALUE_CLASS ${CLASS[$v]}\n"\
\
\
"#if #keyclass(Object) || #keyclass(Reference)\n"\
"#define KEY_GENERIC_CLASS K\n"\
"#define KEY_GENERIC_TYPE K\n"\
"#define KEY_GENERIC <K>\n"\
"#define KEY_GENERIC_WILDCARD <?>\n"\
"#define KEY_EXTENDS_GENERIC <? extends K>\n"\
"#define KEY_SUPER_GENERIC <? super K>\n"\
"#define KEY_GENERIC_CAST (K)\n"\
"#define KEY_GENERIC_ARRAY_CAST (K[])\n"\
"#define KEY_GENERIC_BIG_ARRAY_CAST (K[][])\n"\
"#else\n"\
"#define KEY_GENERIC_CLASS KEY_CLASS\n"\
"#define KEY_GENERIC_TYPE KEY_TYPE\n"\
"#define KEY_GENERIC\n"\
"#define KEY_GENERIC_WILDCARD\n"\
"#define KEY_EXTENDS_GENERIC\n"\
"#define KEY_SUPER_GENERIC\n"\
"#define KEY_GENERIC_CAST\n"\
"#define KEY_GENERIC_ARRAY_CAST\n"\
"#define KEY_GENERIC_BIG_ARRAY_CAST\n"\
"#endif\n"\
\
"#if #valueclass(Object) || #valueclass(Reference)\n"\
"#define VALUE_GENERIC_CLASS V\n"\
"#define VALUE_GENERIC_TYPE V\n"\
"#define VALUE_GENERIC <V>\n"\
"#define VALUE_EXTENDS_GENERIC <? extends V>\n"\
"#define VALUE_GENERIC_CAST (V)\n"\
"#define VALUE_GENERIC_ARRAY_CAST (V[])\n"\
"#else\n"\
"#define VALUE_GENERIC_CLASS VALUE_CLASS\n"\
"#define VALUE_GENERIC_TYPE VALUE_TYPE\n"\
"#define VALUE_GENERIC\n"\
"#define VALUE_EXTENDS_GENERIC\n"\
"#define VALUE_GENERIC_CAST\n"\
"#define VALUE_GENERIC_ARRAY_CAST\n"\
"#endif\n"\
\
"#if #keyclass(Object) || #keyclass(Reference)\n"\
"#if #valueclass(Object) || #valueclass(Reference)\n"\
"#define KEY_VALUE_GENERIC <K,V>\n"\
"#define KEY_VALUE_EXTENDS_GENERIC <? extends K, ? extends V>\n"\
"#else\n"\
"#define KEY_VALUE_GENERIC <K>\n"\
"#define KEY_VALUE_EXTENDS_GENERIC <? extends K>\n"\
"#endif\n"\
"#else\n"\
"#if #valueclass(Object) || #valueclass(Reference)\n"\
"#define KEY_VALUE_GENERIC <V>\n"\
"#define KEY_VALUE_EXTENDS_GENERIC <? extends V>\n"\
"#else\n"\
"#define KEY_VALUE_GENERIC\n"\
"#define KEY_VALUE_EXTENDS_GENERIC\n"\
"#endif\n"\
"#endif\n"\
\
\
"/* Value methods */\n"\
\
\
"#define KEY_VALUE ${TYPE[$k]}Value\n"\
"#define VALUE_VALUE ${TYPE[$v]}Value\n"\
\
\
"/* Interfaces (keys) */\n"\
\
\
"#define COLLECTION ${TYPE_CAP[$k]}Collection\n\n"\
"#define SET ${TYPE_CAP[$k]}Set\n\n"\
"#define HASH ${TYPE_CAP[$k]}Hash\n\n"\
"#define SORTED_SET ${TYPE_CAP[$k]}SortedSet\n\n"\
"#define STD_SORTED_SET ${TYPE_STD[$k]}SortedSet\n\n"\
"#define FUNCTION ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\
"#define MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n"\
"#define SORTED_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n"\
"#if #keyclass(Object) || #keyclass(Reference)\n"\
"#define STD_SORTED_MAP SortedMap\n\n"\
"#define STRATEGY Strategy\n\n"\
"#else\n"\
"#define STD_SORTED_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n\n"\
"#define STRATEGY PACKAGE.${TYPE_CAP[$k]}Hash.Strategy\n\n"\
"#endif\n"\
"#define LIST ${TYPE_CAP[$k]}List\n\n"\
"#define BIG_LIST ${TYPE_CAP[$k]}BigList\n\n"\
"#define STACK ${TYPE_STD[$k]}Stack\n\n"\
"#define PRIORITY_QUEUE ${TYPE_STD[$k]}PriorityQueue\n\n"\
"#define INDIRECT_PRIORITY_QUEUE ${TYPE_STD[$k]}IndirectPriorityQueue\n\n"\
"#define INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_STD[$k]}IndirectDoublePriorityQueue\n\n"\
"#define KEY_ITERATOR ${TYPE_CAP2[$k]}Iterator\n\n"\
"#define KEY_ITERABLE ${TYPE_CAP2[$k]}Iterable\n\n"\
"#define KEY_BIDI_ITERATOR ${TYPE_CAP2[$k]}BidirectionalIterator\n\n"\
"#define KEY_LIST_ITERATOR ${TYPE_CAP2[$k]}ListIterator\n\n"\
"#define KEY_BIG_LIST_ITERATOR ${TYPE_CAP2[$k]}BigListIterator\n\n"\
"#define STD_KEY_ITERATOR ${TYPE_STD[$k]}Iterator\n\n"\
"#define KEY_COMPARATOR ${TYPE_STD[$k]}Comparator\n\n"\
\
\
"/* Interfaces (values) */\n"\
\
\
"#define VALUE_COLLECTION ${TYPE_CAP[$v]}Collection\n\n"\
"#define VALUE_ARRAY_SET ${TYPE_CAP[$v]}ArraySet\n\n"\
"#define VALUE_ITERATOR ${TYPE_CAP2[$v]}Iterator\n\n"\
"#define VALUE_LIST_ITERATOR ${TYPE_CAP2[$v]}ListIterator\n\n"\
\
\
"/* Abstract implementations (keys) */\n"\
\
\
"#define ABSTRACT_COLLECTION Abstract${TYPE_CAP[$k]}Collection\n\n"\
"#define ABSTRACT_SET Abstract${TYPE_CAP[$k]}Set\n\n"\
"#define ABSTRACT_SORTED_SET Abstract${TYPE_CAP[$k]}SortedSet\n"\
"#define ABSTRACT_FUNCTION Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\
"#define ABSTRACT_MAP Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n"\
"#define ABSTRACT_FUNCTION Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n"\
"#define ABSTRACT_SORTED_MAP Abstract${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMap\n"\
"#define ABSTRACT_LIST Abstract${TYPE_CAP[$k]}List\n\n"\
"#define ABSTRACT_BIG_LIST Abstract${TYPE_CAP[$k]}BigList\n\n"\
"#define SUBLIST ${TYPE_CAP[$k]}SubList\n\n"\
"#define ABSTRACT_PRIORITY_QUEUE Abstract${TYPE_STD[$k]}PriorityQueue\n\n"\
"#define ABSTRACT_STACK Abstract${TYPE_STD[$k]}Stack\n\n"\
"#define KEY_ABSTRACT_ITERATOR Abstract${TYPE_CAP2[$k]}Iterator\n\n"\
"#define KEY_ABSTRACT_BIDI_ITERATOR Abstract${TYPE_CAP2[$k]}BidirectionalIterator\n\n"\
"#define KEY_ABSTRACT_LIST_ITERATOR Abstract${TYPE_CAP2[$k]}ListIterator\n\n"\
"#define KEY_ABSTRACT_BIG_LIST_ITERATOR Abstract${TYPE_CAP2[$k]}BigListIterator\n\n"\
"#if #keyclass(Object)\n"\
"#define KEY_ABSTRACT_COMPARATOR Comparator\n\n"\
"#else\n"\
"#define KEY_ABSTRACT_COMPARATOR Abstract${TYPE_CAP[$k]}Comparator\n\n"\
"#endif\n"\
\
\
"/* Abstract implementations (values) */\n"\
\
\
"#define VALUE_ABSTRACT_COLLECTION Abstract${TYPE_CAP[$v]}Collection\n\n"\
"#define VALUE_ABSTRACT_ITERATOR Abstract${TYPE_CAP2[$v]}Iterator\n\n"\
"#define VALUE_ABSTRACT_BIDI_ITERATOR Abstract${TYPE_CAP2[$v]}BidirectionalIterator\n\n"\
\
\
"/* Static containers (keys) */\n"\
\
\
"#define COLLECTIONS ${TYPE_CAP[$k]}Collections\n\n"\
"#define SETS ${TYPE_CAP[$k]}Sets\n\n"\
"#define SORTED_SETS ${TYPE_CAP[$k]}SortedSets\n\n"\
"#define LISTS ${TYPE_CAP[$k]}Lists\n\n"\
"#define BIG_LISTS ${TYPE_CAP[$k]}BigLists\n\n"\
"#define MAPS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Maps\n"\
"#define FUNCTIONS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Functions\n"\
"#define SORTED_MAPS ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}SortedMaps\n"\
"#define PRIORITY_QUEUES ${TYPE_CAP2[$k]}PriorityQueues\n\n"\
"#define HEAPS ${TYPE_CAP2[$k]}Heaps\n\n"\
"#define SEMI_INDIRECT_HEAPS ${TYPE_CAP2[$k]}SemiIndirectHeaps\n\n"\
"#define INDIRECT_HEAPS ${TYPE_CAP2[$k]}IndirectHeaps\n\n"\
"#define ARRAYS ${TYPE_CAP2[$k]}Arrays\n\n"\
"#define BIG_ARRAYS ${TYPE_CAP2[$k]}BigArrays\n\n"\
"#define ITERATORS ${TYPE_CAP2[$k]}Iterators\n\n"\
"#define BIG_LIST_ITERATORS ${TYPE_CAP2[$k]}BigListIterators\n\n"\
"#define COMPARATORS ${TYPE_CAP2[$k]}Comparators\n\n"\
\
\
"/* Static containers (values) */\n"\
\
\
"#define VALUE_COLLECTIONS ${TYPE_CAP[$v]}Collections\n\n"\
"#define VALUE_SETS ${TYPE_CAP[$v]}Sets\n\n"\
"#define VALUE_ARRAYS ${TYPE_CAP2[$v]}Arrays\n\n"\
\
\
"/* Implementations */\n"\
\
\
"#define OPEN_HASH_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}HashSet\n\n"\
"#define OPEN_HASH_BIG_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}HashBigSet\n\n"\
"#define OPEN_DOUBLE_HASH_SET ${TYPE_CAP[$k]}${Linked}Open${Custom}DoubleHashSet\n\n"\
"#define OPEN_HASH_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}${Linked}Open${Custom}HashMap\n\n"\
"#define STRIPED_OPEN_HASH_MAP Striped${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Open${Custom}HashMap\n\n"\
"#define OPEN_DOUBLE_HASH_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}${Linked}Open${Custom}DoubleHashMap\n\n"\
"#define ARRAY_SET ${TYPE_CAP[$k]}ArraySet\n\n"\
"#define ARRAY_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}ArrayMap\n\n"\
"#define LINKED_OPEN_HASH_SET ${TYPE_CAP[$k]}LinkedOpenHashSet\n\n"\
"#define AVL_TREE_SET ${TYPE_CAP[$k]}AVLTreeSet\n\n"\
"#define RB_TREE_SET ${TYPE_CAP[$k]}RBTreeSet\n\n"\
"#define AVL_TREE_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}AVLTreeMap\n\n"\
"#define RB_TREE_MAP ${TYPE_CAP[$k]}2${TYPE_CAP[$v]}RBTreeMap\n\n"\
"#define ARRAY_LIST ${TYPE_CAP[$k]}ArrayList\n\n"\
"#define BIG_ARRAY_BIG_LIST ${TYPE_CAP[$k]}BigArrayBigList\n\n"\
"#define ARRAY_FRONT_CODED_LIST ${TYPE_CAP[$k]}ArrayFrontCodedList\n\n"\
"#define HEAP_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapPriorityQueue\n\n"\
"#define HEAP_SEMI_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapSemiIndirectPriorityQueue\n\n"\
"#define HEAP_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapIndirectPriorityQueue\n\n"\
"#define HEAP_SESQUI_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapSesquiIndirectDoublePriorityQueue\n\n"\
"#define HEAP_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}HeapIndirectDoublePriorityQueue\n\n"\
"#define ARRAY_FIFO_QUEUE ${TYPE_CAP2[$k]}ArrayFIFOQueue\n\n"\
"#define ARRAY_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayPriorityQueue\n\n"\
"#define ARRAY_INDIRECT_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayIndirectPriorityQueue\n\n"\
"#define ARRAY_INDIRECT_DOUBLE_PRIORITY_QUEUE ${TYPE_CAP2[$k]}ArrayIndirectDoublePriorityQueue\n\n"\
\
\
"/* Synchronized wrappers */\n"\
\
\
"#define SYNCHRONIZED_COLLECTION Synchronized${TYPE_CAP[$k]}Collection\n\n"\
"#define SYNCHRONIZED_SET Synchronized${TYPE_CAP[$k]}Set\n\n"\
"#define SYNCHRONIZED_SORTED_SET Synchronized${TYPE_CAP[$k]}SortedSet\n\n"\
"#define SYNCHRONIZED_FUNCTION Synchronized${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n\n"\
"#define SYNCHRONIZED_MAP Synchronized${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n\n"\
"#define SYNCHRONIZED_LIST Synchronized${TYPE_CAP[$k]}List\n\n"\
\
\
"/* Unmodifiable wrappers */\n"\
\
\
"#define UNMODIFIABLE_COLLECTION Unmodifiable${TYPE_CAP[$k]}Collection\n\n"\
"#define UNMODIFIABLE_SET Unmodifiable${TYPE_CAP[$k]}Set\n\n"\
"#define UNMODIFIABLE_SORTED_SET Unmodifiable${TYPE_CAP[$k]}SortedSet\n\n"\
"#define UNMODIFIABLE_FUNCTION Unmodifiable${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Function\n\n"\
"#define UNMODIFIABLE_MAP Unmodifiable${TYPE_CAP[$k]}2${TYPE_CAP[$v]}Map\n\n"\
"#define UNMODIFIABLE_LIST Unmodifiable${TYPE_CAP[$k]}List\n\n"\
"#define UNMODIFIABLE_KEY_ITERATOR Unmodifiable${TYPE_CAP[$k]}Iterator\n\n"\
"#define UNMODIFIABLE_KEY_BIDI_ITERATOR Unmodifiable${TYPE_CAP[$k]}BidirectionalIterator\n\n"\
"#define UNMODIFIABLE_KEY_LIST_ITERATOR Unmodifiable${TYPE_CAP[$k]}ListIterator\n\n"\
\
\
"/* Other wrappers */\n"\
\
\
"#define KEY_READER_WRAPPER ${TYPE_CAP[$k]}ReaderWrapper\n\n"\
"#define KEY_DATA_INPUT_WRAPPER ${TYPE_CAP[$k]}DataInputWrapper\n\n"\
\
\
"/* Methods (keys) */\n"\
\
\
"#define NEXT_KEY next${TYPE_STD[$k]}\n"\
"#define PREV_KEY previous${TYPE_STD[$k]}\n"\
"#define FIRST_KEY first${TYPE_STD[$k]}Key\n"\
"#define LAST_KEY last${TYPE_STD[$k]}Key\n"\
"#define GET_KEY get${TYPE_STD[$k]}\n"\
"#define REMOVE_KEY remove${TYPE_STD[$k]}\n"\
"#define READ_KEY read${TYPE_CAP2[$k]}\n"\
"#define WRITE_KEY write${TYPE_CAP2[$k]}\n"\
"#define DEQUEUE dequeue${TYPE_STD[$k]}\n"\
"#define DEQUEUE_LAST dequeueLast${TYPE_STD[$k]}\n"\
"#define SUBLIST_METHOD ${TYPE_LC[$k]}SubList\n"\
"#define SINGLETON_METHOD ${TYPE_LC[$k]}Singleton\n\n"\
"#define FIRST first${TYPE_STD[$k]}\n"\
"#define LAST last${TYPE_STD[$k]}\n"\
"#define TOP top${TYPE_STD[$k]}\n"\
"#define PEEK peek${TYPE_STD[$k]}\n"\
"#define POP pop${TYPE_STD[$k]}\n"\
"#define KEY_ITERATOR_METHOD ${TYPE_LC2[$k]}Iterator\n\n"\
"#define KEY_LIST_ITERATOR_METHOD ${TYPE_LC2[$k]}ListIterator\n\n"\
"#define KEY_EMPTY_ITERATOR_METHOD empty${TYPE_CAP2[$k]}Iterator\n\n"\
"#define AS_KEY_ITERATOR as${TYPE_CAP2[$k]}Iterator\n\n"\
"#define TO_KEY_ARRAY to${TYPE_STD[$k]}Array\n"\
"#define ENTRY_GET_KEY get${TYPE_STD[$k]}Key\n"\
"#define REMOVE_FIRST_KEY removeFirst${TYPE_STD[$k]}\n"\
"#define REMOVE_LAST_KEY removeLast${TYPE_STD[$k]}\n"\
"#define PARSE_KEY parse${TYPE_STD[$k]}\n"\
"#define LOAD_KEYS load${TYPE_STD[$k]}s\n"\
"#define LOAD_KEYS_BIG load${TYPE_STD[$k]}sBig\n"\
"#define STORE_KEYS store${TYPE_STD[$k]}s\n"\
\
\
"/* Methods (values) */\n"\
\
\
"#define NEXT_VALUE next${TYPE_STD[$v]}\n"\
"#define PREV_VALUE previous${TYPE_STD[$v]}\n"\
"#define READ_VALUE read${TYPE_CAP2[$v]}\n"\
"#define WRITE_VALUE write${TYPE_CAP2[$v]}\n"\
"#define VALUE_ITERATOR_METHOD ${TYPE_LC2[$v]}Iterator\n\n"\
"#define ENTRY_GET_VALUE get${TYPE_STD[$v]}Value\n"\
"#define REMOVE_FIRST_VALUE removeFirst${TYPE_STD[$v]}\n"\
"#define REMOVE_LAST_VALUE removeLast${TYPE_STD[$v]}\n"\
\
\
"/* Methods (keys/values) */\n"\
\
\
"#define ENTRYSET ${TYPE_LC[$k]}2${TYPE_CAP[$v]}EntrySet\n"\
\
\
"/* Methods that have special names depending on keys (but the special names depend on values) */\n"\
\
\
"#if #keyclass(Object) || #keyclass(Reference)\n"\
"#define GET_VALUE get${TYPE_STD[$v]}\n"\
"#define REMOVE_VALUE remove${TYPE_STD[$v]}\n"\
"#else\n"\
"#define GET_VALUE get\n"\
"#define REMOVE_VALUE remove\n"\
"#endif\n"\
\
\
\
"/* Equality */\n"\
\
\
\
"#ifdef Custom\n"\
"#define KEY_EQUALS(x,y) ( strategy.equals( (x), " KEY_GENERIC_CAST "(y) ) )\n"\
"#else\n"\
"#if #keyclass(Object)\n"\
"#define KEY_EQUALS(x,y) ( (x) == null ? (y) == null : (x).equals(y) )\n"\
"#define KEY_EQUALS_NOT_NULL(x,y) ( (x).equals(y) )\n"\
"#else\n"\
"#define KEY_EQUALS(x,y) ( (x) == (y) )\n"\
"#define KEY_EQUALS_NOT_NULL(x,y) ( (x) == (y) )\n"\
"#endif\n"\
"#endif\n\n"\
\
"#if #valueclass(Object)\n"\
"#define VALUE_EQUALS(x,y) ( (x) == null ? (y) == null : (x).equals(y) )\n"\
"#else\n"\
"#define VALUE_EQUALS(x,y) ( (x) == (y) )\n"\
"#endif\n\n"\
\
\
\
"/* Object/Reference-only definitions (keys) */\n"\
\
\
"#if #keyclass(Object) || #keyclass(Reference)\n"\
\
"#define REMOVE remove\n"\
\
"#define KEY_OBJ2TYPE(x) (x)\n"\
"#define KEY_CLASS2TYPE(x) (x)\n"\
"#define KEY2OBJ(x) (x)\n"\
\
"#if #keyclass(Object)\n"\
"#ifdef Custom\n"\
"#define KEY2JAVAHASH(x) ( strategy.hashCode(" KEY_GENERIC_CAST "(x)) )\n"\
"#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(" KEY_GENERIC_CAST "(x)) ) )\n"\
"#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)strategy.hashCode(" KEY_GENERIC_CAST "(x)) ) )\n"\
"#else\n"\
"#define KEY2JAVAHASH(x) ( (x) == null ? 0 : (x).hashCode() )\n"\
"#define KEY2INTHASH(x) ( (x) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (x).hashCode() ) )\n"\
"#define KEY2LONGHASH(x) ( (x) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(x).hashCode() ) )\n"\
"#endif\n"\
"#else\n"\
"#define KEY2JAVAHASH(x) ( (x) == null ? 0 : System.identityHashCode(x) )\n"\
"#define KEY2INTHASH(x) ( (x) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(x) ) )\n"\
"#define KEY2LONGHASH(x) ( (x) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)System.identityHashCode(x) ) )\n"\
"#endif\n"\
\
"#define KEY_CMP(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) )\n"\
"#define KEY_CMP_EQ(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) == 0 )\n"\
"#define KEY_LESS(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) < 0 )\n"\
"#define KEY_LESSEQ(x,y) ( ((Comparable<KEY_GENERIC_CLASS>)(x)).compareTo(y) <= 0 )\n"\
\
"#define KEY_NULL (null)\n"\
\
\
"#else\n"\
\
\
"/* Primitive-type-only definitions (keys) */\n"\
\
\
"#define REMOVE rem\n"\
\
"#define KEY_CLASS2TYPE(x) ((x).KEY_VALUE())\n"\
"#define KEY_OBJ2TYPE(x) (KEY_CLASS2TYPE((KEY_CLASS)(x)))\n"\
"#define KEY2OBJ(x) (KEY_CLASS.valueOf(x))\n"\
\
"#if #keyclass(Boolean)\n"\
"#define KEY_CMP_EQ(x,y) ( (x) == (y) )\n"\
"#define KEY_NULL (false)\n"\
"#define KEY_CMP(x,y) ( !(x) && (y) ? -1 : ( (x) == (y) ? 0 : 1 ) )\n"\
"#define KEY_LESS(x,y) ( !(x) && (y) )\n"\
"#define KEY_LESSEQ(x,y) ( !(x) || (y) )\n"\
"#else\n"\
"#define KEY_NULL ((KEY_TYPE)0)\n"\
"#if #keyclass(Float) || #keyclass(Double)\n"\
"#define KEY_CMP_EQ(x,y) ( KEY_CLASS.compare((x),(y)) == 0 )\n"\
"#define KEY_CMP(x,y) ( KEY_CLASS.compare((x),(y)) )\n"\
"#define KEY_LESS(x,y) ( KEY_CLASS.compare((x),(y)) < 0 )\n"\
"#define KEY_LESSEQ(x,y) ( KEY_CLASS.compare((x),(y)) <= 0 )\n"\
"#else\n"\
"#define KEY_CMP_EQ(x,y) ( (x) == (y) )\n"\
"#define KEY_CMP(x,y) ( (x) < (y) ? -1 : ( (x) == (y) ? 0 : 1 ) )\n"\
"#define KEY_LESS(x,y) ( (x) < (y) )\n"\
"#define KEY_LESSEQ(x,y) ( (x) <= (y) )\n"\
"#endif\n"\
\
"#if #keyclass(Float)\n"\
"#define KEY2LEXINT(x) fixFloat(x)\n"\
"#elif #keyclass(Double)\n"\
"#define KEY2LEXINT(x) fixDouble(x)\n"\
"#else\n"\
"#define KEY2LEXINT(x) (x)\n"\
"#endif\n"\
\
"#endif\n"\
\
"#ifdef Custom\n"\
"#define KEY2JAVAHASH(x) ( strategy.hashCode(x) )\n"\
"#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(x) ) )\n"\
"#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)strategy.hashCode(x) ) )\n"\
"#else\n"\
\
"#if #keyclass(Float)\n"\
"#define KEY2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.float2int(x)\n"\
"#define KEY2INTHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3( it.unimi.dsi.fastutil.HashCommon.float2int(x) )\n"\
"#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)it.unimi.dsi.fastutil.HashCommon.float2int(x) )\n"\
"#elif #keyclass(Double)\n"\
"#define KEY2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.double2int(x)\n"\
"#define KEY2INTHASH(x) (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(x))\n"\
"#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(x))\n"\
"#elif #keyclass(Long)\n"\
"#define KEY2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.long2int(x)\n"\
"#define KEY2INTHASH(x) (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(x)\n"\
"#define KEY2LONGHASH(x) it.unimi.dsi.fastutil.HashCommon.murmurHash3(x)\n"\
"#elif #keyclass(Boolean)\n"\
"#define KEY2JAVAHASH(x) ((x) ? 1231 : 1237)\n"\
"#define KEY2INTHASH(x) ((x) ? 0xfab5368 : 0xcba05e7b)\n"\
"#define KEY2LONGHASH(x) ((x) ? 0x74a19fc8b6428188L : 0xbaeca2031a4fd9ecL)\n"\
"#else\n"\
"#define KEY2JAVAHASH(x) (x)\n"\
"#define KEY2INTHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (x) ) )\n"\
"#define KEY2LONGHASH(x) ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(x) ) )\n"\
"#endif\n"\
"#endif\n"\
\
"#endif\n"\
\
\
\
"/* Object/Reference-only definitions (values) */\n"\
\
\
"#if #valueclass(Object) || #valueclass(Reference)\n"\
"#define VALUE_OBJ2TYPE(x) (x)\n"\
"#define VALUE_CLASS2TYPE(x) (x)\n"\
"#define VALUE2OBJ(x) (x)\n"\
\
"#if #valueclass(Object)\n"\
"#define VALUE2JAVAHASH(x) ( (x) == null ? 0 : (x).hashCode() )\n"\
"#else\n"\
"#define VALUE2JAVAHASH(x) ( (x) == null ? 0 : System.identityHashCode(x) )\n"\
"#endif\n"\
\
"#define VALUE_NULL (null)\n"\
"#define OBJECT_DEFAULT_RETURN_VALUE (this.defRetValue)\n"\
\
"#else\n"\
\
\
"/* Primitive-type-only definitions (values) */\n"\
\
\
"#define VALUE_CLASS2TYPE(x) ((x).VALUE_VALUE())\n"\
"#define VALUE_OBJ2TYPE(x) (VALUE_CLASS2TYPE((VALUE_CLASS)(x)))\n"\
"#define VALUE2OBJ(x) (VALUE_CLASS.valueOf(x))\n"\
\
"#if #valueclass(Float) || #valueclass(Double) || #valueclass(Long)\n"\
"#define VALUE_NULL (0)\n"\
"#define VALUE2JAVAHASH(x) it.unimi.dsi.fastutil.HashCommon.${TYPE[$v]}2int(x)\n"\
"#elif #valueclass(Boolean)\n"\
"#define VALUE_NULL (false)\n"\
"#define VALUE2JAVAHASH(x) (x ? 1231 : 1237)\n"\
"#else\n"\
"#if #valueclass(Integer)\n"\
"#define VALUE_NULL (0)\n"\
"#else\n"\
"#define VALUE_NULL ((VALUE_TYPE)0)\n"\
"#endif\n"\
"#define VALUE2JAVAHASH(x) (x)\n"\
"#endif\n"\
\
"#define OBJECT_DEFAULT_RETURN_VALUE (null)\n"\
\
"#endif\n"\
\
"#include \"$1\"\n"
|
<filename>packages/api/src/models/index.ts<gh_stars>1-10
// Barrel file: re-exports every model in this directory so consumers can
// import from 'models' instead of individual files.
export * from './event.model';
|
"""Packaging configuration for the gwcosmo distribution."""
import sys
from setuptools import setup


def readme():
    """Return the contents of README.md (long-description helper)."""
    with open('README.md') as fh:
        return fh.read()


# Build-time requirements. The optional extras are only pulled in when the
# matching command actually appears on the command line.
setup_requires = ['setuptools >= 30.3.0']
if not {'pytest', 'test', 'ptr'}.isdisjoint(sys.argv):
    setup_requires.append('pytest-runner')
if 'build_sphinx' in sys.argv:
    setup_requires += ['recommonmark', 'sphinx']

setup(
    name='gwcosmo',
    version='0.1.0',
    description='A package to estimate cosmological parameters using gravitational-wave observations',
    url='https://git.ligo.org/cbc-cosmo/gwcosmo',
    author='Cosmology R&D Group',
    author_email='<EMAIL>',
    license='GNU',
    packages=['gwcosmo', 'gwcosmo.likelihood', 'gwcosmo.prior', 'gwcosmo.utilities'],
    package_dir={'gwcosmo': 'gwcosmo'},
    scripts=[
        'bin/gwcosmo_single_posterior',
        'bin/gwcosmo_combined_posterior',
        'bin/gwcosmo_compute_pdet',
    ],
    include_package_data=True,
    install_requires=[
        'numpy>=1.9',
        'matplotlib>=2.0',
        'pandas',
        'scipy',
        'tqdm',
    ],
    setup_requires=setup_requires,
    zip_safe=False,
)
|
<filename>src/ui/__tests__/components/CodeHeader.test.tsx
import { h } from 'preact';
import { deep } from 'preact-render-spy';
import { CodeHeader } from '@authenticator/ui/components';
describe('CodeHeader Test', (): void => {
  test('it renders OTP header', (): void => {
    const backBtnMock = jest.fn();
    const component = deep(
      <CodeHeader goBack={backBtnMock} lastMessageAddress='<EMAIL>' />
    );
    // OTP variant: the subtitle tells the user where the code was sent.
    expect(component.find('.subtitle').text()).toBe(
      'Please enter the 6 digit code sent to'
    );
    // Clicking the back control must invoke the supplied callback.
    component.find('.code-header__back').simulate('click');
    expect(backBtnMock).toHaveBeenCalled();
  });

  test('it renders TOTP header', (): void => {
    // An empty lastMessageAddress selects the TOTP (generator) variant.
    const component = deep(
      <CodeHeader goBack={jest.fn()} lastMessageAddress='' />
    );
    expect(component.find('.subtitle').text()).toBe(
      'Generate a new 6 digit verification code.'
    );
  });
});
|
# Copyright (c) [2022] Huawei Technologies Co., Ltd. All rights reserved.
# This program is licensed under Mulan PSL v2.
# You can use it according to the terms and conditions of the Mulan PSL v2.
# http://license.coscl.org.cn/MulanPSL2
# THIS PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
####################################
# @Author :
# @email :
# @Date :
# @License : Mulan PSL v2
#####################################
from flask_restful import Api
from .routes import Group, User
def init_api(api: Api):
    """Register the group-related REST resources on the given API object."""
    # (resource class, URL rules, endpoint name) — registered in order.
    resources = (
        (Group, ('/api/v1/groups', '/api/v1/groups/<int:group_id>'), 'group'),
        # (OrgGroups, ('/api/v1/org/<int:org_id>/groups/all',), None),
        (User, ('/api/v1/groups/<int:group_id>/users',), 'group_user'),
    )
    for resource, urls, endpoint in resources:
        api.add_resource(resource, *urls, endpoint=endpoint)
|
import Layout from '../../../components/ResidentLayout';
import Panel from '../../../components/Panel';
import Head from 'next/head';
import { NextPage } from 'next';
import { EvidenceApiGateway } from 'src/gateways/evidence-api';
import { withAuth } from 'src/helpers/authed-server-side-props';
import { TeamHelper } from '../../../services/team-helper';
import { Team } from 'src/domain/team';
import { Constants } from '../../../helpers/Constants';
// Props resolved server-side by getServerSideProps and handed to the page.
type ConfirmationProps = {
  residentReferenceId: string; // reference number shown to the resident
  team: Team; // the team that requested the evidence
  feedbackUrl: string; // external feedback-form URL (from env)
};
/**
 * Confirmation page shown to a resident after their documents have been
 * uploaded: displays their reference number, the requesting team's SLA
 * message, and a link to the feedback form.
 */
const Confirmation: NextPage<ConfirmationProps> = ({
  residentReferenceId,
  team,
  feedbackUrl,
}) => {
  return (
    <Layout feedbackUrl={feedbackUrl}>
      <Head>
        <title>
          Confirmation | Document Evidence Service | Hackney Council
        </title>
      </Head>
      <div className="govuk-grid-row">
        <div className="govuk-grid-column-two-thirds">
          <Panel>
            <h1 className="lbh-heading-h1">We've received your documents</h1>
            <p className="lbh-body">
              Your reference number: {residentReferenceId}
            </p>
          </Panel>
          {/* <p className="lbh-body">We have sent you a confirmation email.</p> */}
          <h2 className="lbh-heading-h2">What happens next</h2>
          <p className="lbh-body">{team.slaMessage}</p>
          <p className="lbh-body">
            We’ve sent your evidence to the service that requested it. They will
            be in touch about the next steps.
          </p>
          <p className="lbh-body">
            <a href={`${feedbackUrl}`} className="govuk-link lbh-link">
              What did you think of this service?
            </a>{' '}
            (takes 30 seconds)
          </p>
        </div>
      </div>
    </Layout>
  );
};
// Resolves the page props from the evidence request identified by the
// `requestId` query parameter. Wrapped in withAuth so only authenticated
// requests reach this handler.
export const getServerSideProps = withAuth(async (ctx) => {
  const requestId = ctx.query.requestId as string;
  const evidenceApiGateway = new EvidenceApiGateway();
  const evidenceRequest = await evidenceApiGateway.getEvidenceRequest(
    Constants.DUMMY_EMAIL,
    requestId
  );
  const team = TeamHelper.getTeamByName(
    TeamHelper.getTeamsJson(),
    evidenceRequest.team
  );
  return {
    props: {
      residentReferenceId: evidenceRequest.resident.referenceId,
      team,
      feedbackUrl: process.env.FEEDBACK_FORM_RESIDENT_URL as string,
    },
  };
});
export default Confirmation;
|
const { getConnection } = require('../lib/redisConnection');
const redis = getConnection();
const userService = require('../services/user.service');
const SOCKET_ID_IN_ROOM = 'socketIdInRoom-';
const USER = 'user-';
const ONLINE_USER = 'online-user-';
const USERS_IN_ROOM = 'usersInRoom-';
module.exports = [
{
name: 'online',
controller: async (socket, { userId }) => {
await redis.set(`${ONLINE_USER}${socket.id}`, userId);
socket.join(userId);
},
},
{
name: 'joinRoom',
controller: async (socket, { roomId, userId }) => {
const userObject = await userService.getUserById(userId);
await Promise.all([
redis.set(`${SOCKET_ID_IN_ROOM}${socket.id}`, roomId),
redis.set(`${USER}${socket.id}`, JSON.stringify(userObject)),
redis.hSet(`${USERS_IN_ROOM}${roomId}`, userId, socket.id),
]);
socket.join(roomId);
},
},
{
name: 'roomSendMessage',
controller: async (socket, { msg, receiverId }) => {
const [roomId, userObject] = await Promise.all([
redis.get(`${SOCKET_ID_IN_ROOM}${socket.id}`),
redis.get(`${USER}${socket.id}`),
]);
const newMessage = msg;
newMessage.senderId = JSON.parse(userObject);
if (roomId) socket.to(roomId).emit('roomNewMessage', newMessage);
const totalUsers = await redis.hGetAll(`${USERS_IN_ROOM}${roomId}`);
if (Object.keys(totalUsers).length === 1) {
socket.to(receiverId).emit('roomOpened');
}
},
},
{
name: 'sendFriendRequest',
controller: async (socket, { receiverId }) => {
if (receiverId) {
socket.to(receiverId).emit('friendRequest');
}
},
},
{
name: 'sendFriendAcceptRequest',
controller: async (socket, { receiverId }) => {
if (receiverId) {
socket.to(receiverId).emit('friendAcceptRequest');
}
},
},
{
name: 'sendRoomDeleteMessage',
controller: async (socket, { roomId, messageId }) => {
if (roomId) {
socket.to(roomId).emit('roomDeleteMessage', { messageId, roomId });
}
},
},
{
name: 'roomSendEditMessage',
controller: async (socket, message) => {
if (message) {
socket.to(message.roomId).emit('roomEditMessage', message);
}
},
},
{
name: 'leaveRoom',
controller: async (socket, roomId) => {
redis.del(`${SOCKET_ID_IN_ROOM}${socket.id}`);
socket.leave(roomId);
},
},
{
name: 'logOut',
controller: async (socket, userId) => {
redis.del(`${ONLINE_USER}${socket.id}`);
redis.del(`${SOCKET_ID_IN_ROOM}${socket.id}`);
socket.leave(userId);
},
},
];
|
//===--- Pattern.cpp --------------------------------------------*- C++ -*-===//
// Part of the Sora project, licensed under the MIT license.
// See LICENSE.txt in the project root for license information.
//
// Copyright (c) 2019 <NAME>
//===----------------------------------------------------------------------===//
#include "Sora/AST/Pattern.hpp"
#include "ASTNodeLoc.hpp"
#include "Sora/AST/ASTContext.hpp"
#include "Sora/AST/Decl.hpp"
#include "Sora/AST/TypeRepr.hpp"
#include <type_traits>
using namespace sora;
/// Check that all patterns are trivially destructible. This is needed
/// because, as they are allocated in the ASTContext's arenas, their destructors
/// are never called.
#define PATTERN(ID, PARENT) \
static_assert(std::is_trivially_destructible<ID##Pattern>::value, \
#ID "Pattern is not trivially destructible.");
#include "Sora/AST/PatternNodes.def"
void *Pattern::operator new(size_t size, ASTContext &ctxt, unsigned align) {
  // Patterns live in the ASTContext's permanent arena; they are never
  // individually freed and their destructors never run.
  return ctxt.allocate(size, align, ArenaKind::Permanent);
}
/// Returns this pattern's type by dispatching on the pattern kind to the
/// derived class's getType(). The static_assert in the macro guarantees at
/// compile time that every concrete pattern class provides an override.
Type Pattern::getType() const {
  switch (getKind()) {
  default:
    llvm_unreachable("unknown Pattern kind");
#define PATTERN(ID, PARENT) \
  static_assert(detail::isOverriden<Pattern>(&ID##Pattern::getType), \
                "Must override getType!"); \
  case PatternKind::ID: \
    return cast<ID##Pattern>(this)->getType();
#include "Sora/AST/PatternNodes.def"
  }
}
/// Strips any number of nested ParenPatterns and returns the first
/// non-paren pattern underneath (or `this` if not a paren).
Pattern *Pattern::ignoreParens() {
  Pattern *pattern = this;
  while (auto *paren = dyn_cast<ParenPattern>(pattern))
    pattern = paren->getSubPattern();
  return pattern;
}
bool Pattern::isRefutable() const {
bool foundRefutablePattern = false;
const_cast<Pattern *>(this)->forEachNode([&](Pattern *pattern) {
if (isa<RefutablePattern>(pattern))
foundRefutablePattern = true;
});
return foundRefutablePattern;
}
/// Invokes \p fn on every VarDecl bound by this pattern, walking the
/// pattern tree depth-first. Null sub-patterns are skipped.
void Pattern::forEachVarDecl(llvm::function_ref<void(VarDecl *)> fn) const {
  using Kind = PatternKind;
  switch (getKind()) {
  case Kind::Var:
    // A VarPattern directly owns exactly one VarDecl.
    fn(cast<VarPattern>(this)->getVarDecl());
    break;
  case Kind::Discard:
    // A discard pattern binds nothing.
    break;
  case Kind::Mut:
    if (Pattern *sub = cast<MutPattern>(this)->getSubPattern())
      sub->forEachVarDecl(fn);
    break;
  case Kind::Paren:
    if (Pattern *sub = cast<ParenPattern>(this)->getSubPattern())
      sub->forEachVarDecl(fn);
    break;
  case Kind::Tuple: {
    // Recurse into every non-null tuple element.
    const TuplePattern *tuple = cast<TuplePattern>(this);
    for (Pattern *elem : tuple->getElements())
      if (elem)
        elem->forEachVarDecl(fn);
    break;
  }
  case Kind::Typed:
    if (Pattern *sub = cast<TypedPattern>(this)->getSubPattern())
      sub->forEachVarDecl(fn);
    break;
  case Kind::MaybeValue:
    if (Pattern *sub = cast<MaybeValuePattern>(this)->getSubPattern())
      sub->forEachVarDecl(fn);
    break;
  }
}
/// Invokes \p fn on this pattern and then on every sub-pattern, pre-order.
/// Null sub-patterns are skipped.
void Pattern::forEachNode(llvm::function_ref<void(Pattern *)> fn) {
  using Kind = PatternKind;
  // Visit the current node first (pre-order traversal).
  fn(this);
  switch (getKind()) {
  case Kind::Var:
  case Kind::Discard:
    // Leaf patterns: nothing further to visit.
    break;
  case Kind::Mut:
    if (Pattern *sub = cast<MutPattern>(this)->getSubPattern())
      sub->forEachNode(fn);
    break;
  case Kind::Paren:
    if (Pattern *sub = cast<ParenPattern>(this)->getSubPattern())
      sub->forEachNode(fn);
    break;
  case Kind::Tuple: {
    const TuplePattern *tuple = cast<TuplePattern>(this);
    for (Pattern *elem : tuple->getElements())
      if (elem)
        elem->forEachNode(fn);
    break;
  }
  case Kind::Typed:
    if (Pattern *sub = cast<TypedPattern>(this)->getSubPattern())
      sub->forEachNode(fn);
    break;
  case Kind::MaybeValue:
    if (Pattern *sub = cast<MaybeValuePattern>(this)->getSubPattern())
      sub->forEachNode(fn);
    break;
  }
}
bool Pattern::hasVarPattern() const {
using Kind = PatternKind;
switch (getKind()) {
case Kind::Var:
return true;
case Kind::Discard:
return false;
case Kind::Mut:
return cast<MutPattern>(this)->getSubPattern()->hasVarPattern();
case Kind::Paren:
return cast<ParenPattern>(this)->getSubPattern()->hasVarPattern();
case Kind::Tuple:
for (Pattern *pattern : cast<TuplePattern>(this)->getElements())
if (pattern->hasVarPattern())
return true;
return false;
case Kind::Typed:
return cast<TypedPattern>(this)->getSubPattern()->hasVarPattern();
case Kind::MaybeValue:
return cast<MaybeValuePattern>(this)->getSubPattern()->hasVarPattern();
}
}
//===--- Source-location dispatch -----------------------------------------===//
// Each getter forwards to the concrete pattern class through ASTNodeLoc,
// which selects the location hook the derived class provides.

SourceLoc Pattern::getBegLoc() const {
  switch (getKind()) {
#define PATTERN(ID, PARENT) \
  case PatternKind::ID: \
    return ASTNodeLoc<Pattern, ID##Pattern>::getBegLoc(cast<ID##Pattern>(this));
#include "Sora/AST/PatternNodes.def"
  }
  llvm_unreachable("unknown PatternKind");
}
SourceLoc Pattern::getEndLoc() const {
  switch (getKind()) {
#define PATTERN(ID, PARENT) \
  case PatternKind::ID: \
    return ASTNodeLoc<Pattern, ID##Pattern>::getEndLoc(cast<ID##Pattern>(this));
#include "Sora/AST/PatternNodes.def"
  }
  llvm_unreachable("unknown PatternKind");
}
SourceLoc Pattern::getLoc() const {
  switch (getKind()) {
#define PATTERN(ID, PARENT) \
  case PatternKind::ID: \
    return ASTNodeLoc<Pattern, ID##Pattern>::getLoc(cast<ID##Pattern>(this));
#include "Sora/AST/PatternNodes.def"
  }
  llvm_unreachable("unknown PatternKind");
}
SourceRange Pattern::getSourceRange() const {
  switch (getKind()) {
#define PATTERN(ID, PARENT) \
  case PatternKind::ID: \
    return ASTNodeLoc<Pattern, ID##Pattern>::getSourceRange( \
        cast<ID##Pattern>(this));
#include "Sora/AST/PatternNodes.def"
  }
  llvm_unreachable("unknown PatternKind");
}
/// A VarPattern's identifier and source locations are those of the VarDecl
/// it introduces (the pattern spans exactly the identifier).
Identifier VarPattern::getIdentifier() const {
  return varDecl->getIdentifier();
}
SourceLoc VarPattern::getBegLoc() const { return varDecl->getIdentifierLoc(); }
SourceLoc VarPattern::getEndLoc() const { return varDecl->getIdentifierLoc(); }
/// Allocates and constructs a TuplePattern holding \p patterns.
/// The element array is stored inline after the object (trailing objects),
/// so the allocation size must account for it.
TuplePattern *TuplePattern::create(ASTContext &ctxt, SourceLoc lParenLoc,
                                   ArrayRef<Pattern *> patterns,
                                   SourceLoc rParenLoc) {
  // Need manual memory allocation here because of trailing objects.
  auto size = totalSizeToAlloc<Pattern *>(patterns.size());
  void *mem = ctxt.allocate(size, alignof(TuplePattern));
  return new (mem) TuplePattern(lParenLoc, patterns, rParenLoc);
}
|
# Placeholder credential: replace the value below with a real API key before
# sourcing this file. Do not commit the real key.
export API_KEY='xxxxx TO DO xxxxx'
|
import React from "react";
const SongTable = props => {
const { songs, removeSong, editRow } = props;
return (
<table className= "table">
<thead>
<tr className="playlist">
<th>Artist</th>
<th>Title</th>
<th>Actions</th>
</tr>
</thead>
<tbody className="playlist">
{songs.length > 0 ? (
songs.map(song => (
<tr key={song.id}>
<td>{song.artist}</td>
<td>{song.title}</td>
<td>
<button className="fav-button" onClick={() => editRow(song)}>Edit</button>
<button className="fav-button" onClick={() => removeSong(song.id)}>Delete</button>
</td>
</tr>
))
) : (
<tr>
<td colSpan={3}>No Songs</td>
</tr>
)}
</tbody>
</table>
);
};
export default SongTable; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.