text
stringlengths 1
1.05M
|
|---|
<filename>coms/responses.py
import asyncio, discord
try:
from _command import Command
except:
from coms._command import Command
import pickle
class Com(Command):
    """Command that DMs the caller the full list of configured responses."""

    def __init__(self):
        self.usage = "!responses"
        self.description = "Direct Messages you with a list of all the responses!"
        self.keys = ["!responses", "what are the responses", ".responses"]
        self.permissions = ["*"]

    async def command(self, client, message, rawtext):
        """Load the pickled response map and DM it to the message author.

        Args:
            client: the Discord client instance.
            message: the triggering message (author is DMed, channel is acked).
            rawtext: raw message text (unused here).
        """
        # Context manager guarantees the file handle is closed even if
        # pickle.load raises.
        with open("./assets/responses.pkl", "rb") as pkl:
            responses = pickle.load(pkl)
        # Build the DM body in one pass instead of repeated += concatenation.
        lines = ["***Responses:***"]
        lines.extend("**" + key + ":**\t\t" + value for key, value in responses.items())
        text = "\n".join(lines) + "\n"
        await self.send(client, message.author, message=text)
        await self.send(client, message.channel, message="I sent the responses :))")
if __name__ == "__main__":
    # Smoke test: instantiate the command and print its help text.
    cmd = Com()
    print(cmd.help())
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.azkfw.biz.graphics.chart;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.imageio.ImageIO;
import org.azkfw.biz.graphics.chart.entity.PolarAreaChart;
import org.azkfw.biz.graphics.chart.entity.PolarAreaChartAxis;
import org.azkfw.biz.graphics.chart.entity.PolarAreaChartData;
import org.azkfw.biz.graphics.chart.entity.PolarAreaChartDataPoint;
import org.azkfw.biz.graphics.chart.entity.PolarAreaChartSubAxis;
import org.azkfw.biz.graphics.entity.Margin;
/**
 * Graphics class that renders a polar area chart (a.k.a. coxcomb or rose
 * chart), where each data point is drawn as a circular sector whose radius
 * encodes its value.
 *
 * @since 1.1.0
 * @version 1.1.0 2014/06/13
 * @author Kawakicchi
 */
public class PolarAreaChartGraphics extends AbstractCircleChartGraphics {

	/** The chart definition (margin, axes, data series, colors) to render. */
	private PolarAreaChart chart;

	/**
	 * Sets the chart to render.
	 *
	 * @param aChart the chart definition
	 */
	public void setChart(final PolarAreaChart aChart) {
		chart = aChart;
	}

	@Override
	protected void drawCircleChart(final Graphics2D g) {
		int width = (int) getWidth();
		int height = (int) getHeight();

		Margin margin = chart.getMargin();
		PolarAreaChartAxis axis = chart.getAxis();
		PolarAreaChartSubAxis subAxis = chart.getSubAxis();

		// Drawing geometry: chart center, plot origin/size, and the
		// pixels-per-value scale derived from the axis maximum.
		double middleX = getWidth() / 2.f;
		double middleY = getHeight() / 2.f;
		double graphX = margin.getLeft();
		double graphY = margin.getTop();
		double graphWidth = getWidth() - (margin.getLeft() + margin.getRight());
		double graphHeight = getHeight() - (margin.getTop() + margin.getBottom());
		double scaleWidth = (graphWidth / 2.f) / axis.getMaximumValue();
		double scaleHeight = (graphHeight / 2.f) / axis.getMaximumValue();

		if (null != chart.getBackgroundColor()) {
			g.setColor(chart.getBackgroundColor());
			// Redundant (int) casts removed: all arguments are already int.
			g.fillRect(0, 0, width, height);
		}

		if (null != subAxis) {
			drawSubAxis(axis, subAxis, graphX, graphY, graphWidth, graphHeight, g);
		}
		drawAxis1(axis, graphX, graphY, graphWidth, graphHeight, g);

		// //////////////////////////////////////////////////////////////////
		List<PolarAreaChartData> datas = chart.getDatas();
		for (PolarAreaChartData data : datas) {
			List<PolarAreaChartDataPoint> points = data.getPoints();
			// NOTE(review): integer division; when points.size() does not
			// divide 360 the final sector leaves a small gap - confirm this
			// is acceptable.
			int angle = 360 / points.size();
			g.setStroke(new BasicStroke(1.5f));
			for (int i = 0; i < points.size(); i++) {
				PolarAreaChartDataPoint point = points.get(i);
				double value = point.getValue();
				if (0 == value) {
					// Zero-valued sectors are simply not drawn.
					continue;
				}
				double sWidth = value * scaleWidth;
				double sHeight = value * scaleHeight;
				// Point-level colors override the series-level defaults.
				Color fillColor = (null != point.getFillColor()) ? point.getFillColor() : data.getFillColor();
				Color strokeColor = (null != point.getStrokeColor()) ? point.getStrokeColor() : data.getStrokeColor();
				if (null != fillColor) {
					g.setColor(fillColor);
					g.fillArc((int) (middleX - sWidth) + 1, (int) (middleY - sHeight) + 1, (int) (sWidth * 2.f), (int) (sHeight * 2.f), i * angle,
							angle);
				}
				if (null != strokeColor) {
					g.setColor(strokeColor);
					g.drawArc((int) (middleX - sWidth) + 1, (int) (middleY - sHeight) + 1, (int) (sWidth * 2.f), (int) (sHeight * 2.f), i * angle,
							angle);
				}
			}
		}

		// //////////////////////////////////////////////////////////////////
		drawAxis2(axis, graphX, graphY, graphWidth, graphHeight, g);
	}

	/**
	 * Manual smoke test: renders a sample chart and, when a file path is
	 * given as the first argument, writes it out as a PNG.
	 *
	 * @param args optional output file path
	 */
	public static void main(final String[] args) {
		PolarAreaChart chart = new PolarAreaChart();
		chart.setMargin(new Margin(25.f, 25.f, 25.f, 25.f));
		chart.setBackgroundColor(Color.white);

		PolarAreaChartAxis axis = new PolarAreaChartAxis();
		axis.setMaximumValue(2.0);
		axis.setScale(1.0);
		axis.setScaleStrokeColor(Color.darkGray);
		axis.setSubScale(0.5);

		PolarAreaChartSubAxis subAxis = new PolarAreaChartSubAxis();
		subAxis.setAngle(30);

		List<PolarAreaChartData> datas = new ArrayList<PolarAreaChartData>();
		PolarAreaChartData data1 = new PolarAreaChartData();
		PolarAreaChartData data2 = new PolarAreaChartData();
		data1.setStrokeColor(Color.red);
		data2.setStrokeColor(Color.blue);
		data2.setFillColor(new Color(0, 0, 255, 64));
		List<PolarAreaChartDataPoint> points1 = new ArrayList<PolarAreaChartDataPoint>();
		List<PolarAreaChartDataPoint> points2 = new ArrayList<PolarAreaChartDataPoint>();
		for (int i = 0; i < 12; i++) {
			PolarAreaChartDataPoint point1 = new PolarAreaChartDataPoint(0.1 * i);
			points1.add(point1);
			PolarAreaChartDataPoint point2 = new PolarAreaChartDataPoint(1.5 - 0.1 * i);
			points2.add(point2);
			if (i == 5) {
				// Highlight a single sector of the second series in green.
				point2.setFillColor(new Color(0, 255, 0, 64));
				point2.setStrokeColor(new Color(0, 255, 0, 200));
			}
		}
		data1.setPoints(points1);
		data2.setPoints(points2);
		datas.add(data1);
		datas.add(data2);
		chart.setAxis(axis);
		chart.setSubAxis(subAxis);
		chart.setDatas(datas);

		PolarAreaChartGraphics g = new PolarAreaChartGraphics();
		// Stray empty statement (";") after this call removed.
		g.setSize(800, 800);
		g.setChart(chart);
		BufferedImage image = g.draw();
		if (1 <= args.length) {
			try {
				File file = new File(args[0]);
				ImageIO.write(image, "png", file);
			} catch (IOException ex) {
				ex.printStackTrace();
			}
		}
	}
}
|
require 'marky_markov'

# Build a persistent Markov dictionary seeded from a set of poem files.
markov = MarkyMarkov::Dictionary.new('dictionary')

source_files = %w[
  there_is_a_way
  the_breeze_at_dawn
  not_intrigued_with_evening
  moving_water
  out_beyond_ideas
  there_is_a_community_of_spirit
]
source_files.each { |name| markov.parse_file "#{name}.txt" }

# Emit a 100-word sample and five generated sentences, then persist the
# dictionary so future runs can reuse it.
puts markov.generate_n_words 100
puts markov.generate_n_sentences 5
markov.save_dictionary!
|
namespace TT.Tests.Builders.RPClassifiedAds
{
    /// <summary>
    /// Fluent test-data builder producing <see cref="RPClassifiedAd"/> instances.
    /// </summary>
    public class RPClassifiedAdBuilder : Builder<RPClassifiedAd, int>
    {
        private string title;
        private string description;
        private decimal price;
        private string category;

        public RPClassifiedAdBuilder()
        {
            // Intentionally empty: all state is supplied via the With* methods.
        }

        /// <summary>Sets the ad title.</summary>
        public RPClassifiedAdBuilder WithTitle(string title)
        {
            this.title = title;
            return this;
        }

        /// <summary>Sets the ad description.</summary>
        public RPClassifiedAdBuilder WithDescription(string description)
        {
            this.description = description;
            return this;
        }

        /// <summary>Sets the asking price.</summary>
        public RPClassifiedAdBuilder WithPrice(decimal price)
        {
            this.price = price;
            return this;
        }

        /// <summary>Sets the listing category.</summary>
        public RPClassifiedAdBuilder WithCategory(string category)
        {
            this.category = category;
            return this;
        }

        /// <summary>Materializes the configured <see cref="RPClassifiedAd"/>.</summary>
        public RPClassifiedAd Build()
        {
            return new RPClassifiedAd
            {
                Title = title,
                Description = description,
                Price = price,
                Category = category
            };
        }
    }
}
|
<reponame>LuanFaria/Conversor_de_Moedas
from PyQt5.QtWidgets import QApplication, QMainWindow
import sys
from interface import *
from Coletor import *
import datetime
class Converter(QMainWindow, Ui_MainWindow):
    """Main window of the currency converter.

    Converts between Dollar, Euro, Iene and Real using exchange rates
    expressed as Real (BRL) per unit of each currency, so any pair can be
    converted with ``value * rate(source) / rate(target)``.
    """

    def __init__(self):
        super().__init__(None)
        super().setupUi(self)
        # Fix the window size.
        self.setFixedSize(313, 164)
        # Wire widgets to handlers and populate the currency selectors.
        self.atualizar.clicked.connect(self.atualiza)
        self.data.setText(str(datetime.datetime.now().date().today()))
        self.dinheiro_1.addItems(['Dollar', 'Euro', 'Iene', 'Real'])
        self.dinheiro_2.addItems(['Dollar', 'Euro', 'Iene', 'Real'])
        self.convert.clicked.connect(self.calcular_conversor)
        # The output field is not user-editable.
        self.valor_2.setDisabled(True)
        # CSS styling (strings kept exactly as before).
        # NOTE(review): the trailing comma after "20px" below looks like a
        # stylesheet typo - confirm whether the font-size rule is applied.
        self.valor_2.setStyleSheet(
            '* {color:#000}'
        )
        self.convert.setStyleSheet(
            '* {background: #32CD32; color:#000}'
        )
        self.setStyleSheet(
            '* {background: #E0FFFF; color:#000; font-size: 20px,}'
        )
        # Fallback exchange rates (BRL per unit) used until "atualizar"
        # fetches live values.
        self._coletaD = 5.26
        self._coletaR = 1
        self._coletaI = 0.046
        self._coletaE = 6.14

    def atualiza(self):
        """Refresh the exchange rates from the online collectors."""
        self._coletaD = coletar_dollar()
        self._coletaR = coletar_real()
        self._coletaI = coletar_iene()
        self._coletaE = coletar_euro()

    def _taxas(self):
        """Return the BRL-per-unit rate for each supported currency."""
        return {
            'Dollar': self._coletaD,
            'Euro': self._coletaE,
            'Iene': self._coletaI,
            'Real': self._coletaR,
        }

    def calcular_conversor(self):
        """Convert the amount in valor_1 between the selected currencies.

        Replaces the previous 16-branch if/elif chain with a single rate
        table: ``value * rate(source) / rate(target)``.  Results in Real are
        rounded to 2 decimal places, all others to 4, matching the original
        per-pair behavior.  This also fixes the Real->Real branch, which
        previously multiplied by the Real rate instead of being an identity
        conversion like every other same-currency pair.
        """
        dinheiro_input = self.dinheiro_1.currentText()
        dinheiro_resultado = self.dinheiro_2.currentText()
        try:
            valor = float(self.valor_1.text())
            if dinheiro_input == dinheiro_resultado:
                calcular = valor
            else:
                taxas = self._taxas()
                calcular = valor * taxas[dinheiro_input] / taxas[dinheiro_resultado]
            casas = 2 if dinheiro_resultado == 'Real' else 4
            calcular = round(calcular, casas)
            self.valor_1.setText(str(self.valor_1.text()))
            self.valor_2.setText(str(calcular).replace('.', ','))
        except Exception:
            # Invalid or empty input: reset the input field to a sane default.
            self.valor_1.setText(str(1))
if __name__ == '__main__':
    # Bootstrap the Qt application and show the converter window.
    app = QApplication(sys.argv)
    janela = Converter()
    janela.show()
    app.exec_()
|
#!/bin/bash
# Run a bayesmark benchmark for a single optimizer, then aggregate and
# analyze the results.
#
# Usage: ./run.sh <code-dir> <n-repeat>
#   code-dir  directory containing the optimizer under test
#   n-repeat  number of repeated trials per problem
set -ex
set -o pipefail

# Default number of steps and batch size for the challenge
N_STEP=16
N_BATCH=8
# For a fast experiment:
# N_STEP=15
# N_BATCH=1

# Input args (expansions are quoted throughout so paths containing spaces
# don't break word splitting)
CODE_DIR=$1
N_REPEAT=$2

# Where output goes
DB_ROOT=./output
DBID=run_$(date +"%Y%m%d_%H%M%S")

# Setup vars
OPT=$(basename "$CODE_DIR")
OPT_ROOT=$(dirname "$CODE_DIR")

# Check that bayesmark is installed in this environment
which bayesmark-init
which bayesmark-launch
which bayesmark-exp
which bayesmark-agg
which bayesmark-anal

# Ensure output folder exists
mkdir -p "$DB_ROOT"

# Copy the baseline file in, we can skip this but we must include RandomSearch in the -o list
! test -d "$DB_ROOT/$DBID/" # Check the folder does not yet exist
bayesmark-init -dir "$DB_ROOT" -b "$DBID"
cp "./input/baseline-$N_STEP-$N_BATCH.json" "$DB_ROOT/$DBID/derived/baseline.json"

# By default, runs on all models (-c), data (-d), metrics (-m)
bayesmark-launch -dir "$DB_ROOT" -b "$DBID" -n "$N_STEP" -r "$N_REPEAT" -p "$N_BATCH" -o "$OPT" --opt-root "$OPT_ROOT" -v -c SVM DT -d boston wine
# To run on all problems use instead (slower):
# bayesmark-launch -dir $DB_ROOT -b $DBID -n $N_STEP -r $N_REPEAT -p $N_BATCH -o $OPT --opt-root $OPT_ROOT -v

# Now aggregate the results
bayesmark-agg -dir "$DB_ROOT" -b "$DBID"
# And analyze the scores
bayesmark-anal -dir "$DB_ROOT" -b "$DBID" -v
|
<gh_stars>1-10
package org.tom_v_squad.soiwenttoaconcert.controllers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.Errors;
import org.springframework.web.bind.annotation.*;
import org.tom_v_squad.soiwenttoaconcert.data.ArtistRepository;
import org.tom_v_squad.soiwenttoaconcert.data.EventRepository;
import org.tom_v_squad.soiwenttoaconcert.data.UserRepository;
import org.tom_v_squad.soiwenttoaconcert.data.VenueRepository;
import org.tom_v_squad.soiwenttoaconcert.models.Artist;
import org.tom_v_squad.soiwenttoaconcert.models.EventData;
import org.tom_v_squad.soiwenttoaconcert.models.Venue;
import javax.validation.Valid;
import java.util.Optional;
/**
 * MVC controller for CRUD operations on {@link Artist} entities.
 * All handler methods resolve views under templates/artist/.
 */
@Controller
@RequestMapping("artist")
public class ArtistController {

    // Injected but not referenced by any handler below - TODO confirm still needed.
    @Autowired
    private EventRepository eventRepository;

    // Injected but not referenced by any handler below - TODO confirm still needed.
    @Autowired
    private UserRepository userRepository;

    // Primary repository backing all artist CRUD operations in this controller.
    @Autowired
    private ArtistRepository artistRepository;

    // Injected but not referenced by any handler below - TODO confirm still needed.
    @Autowired
    private VenueRepository venueRepository;

    /**
     * GET /artist - lists all artists.
     */
    @RequestMapping("")
    public String displayArtists(Model model) {
        model.addAttribute("title", "Artists");
        model.addAttribute("artists", artistRepository.findAll());
        return "artist/index";
    }

    /**
     * GET /artist/create - shows the empty "new artist" form.
     */
    @GetMapping("create")
    public String displayAddArtist(Model model) {
        // Attribute name defaults to "artist" (derived from the type name).
        model.addAttribute(new Artist());
        return "artist/create";
    }

    /**
     * POST /artist/create - validates and persists a new artist.
     * Re-renders the form when validation fails.
     */
    @PostMapping("create")
    public String processAddArtistForm(@ModelAttribute @Valid Artist newArtist,
    Errors errors, Model model) {
        if (errors.hasErrors()) {
            return "artist/create";
        }
        artistRepository.save(newArtist);
        return "redirect:/artist";
    }

    /**
     * GET /artist/delete?artistId=... - shows the delete-confirmation page.
     * An unknown id renders the same view with an error title instead.
     */
    @GetMapping("delete")
    public String displayDeleteArtistForm(@RequestParam Integer artistId, Model model) {
        Optional<Artist> result = artistRepository.findById(artistId);
        // NOTE(review): debug print to stdout - consider a logger or removal.
        System.out.println(artistId);
        if (!result.isEmpty()) {
            Artist artist = result.get();
            model.addAttribute("title","Delete Artist");
            model.addAttribute("artist", artist);
            return "artist/delete";
        }else{
            model.addAttribute("title", "Invalid Event ID: " + artistId);
        }
        return "artist/delete";
    }

    /**
     * POST /artist/delete - deletes the artist when the id is present and
     * known, then redirects relative to the current mapping.
     */
    @PostMapping("delete")
    public String processDeleteEventsForm(@RequestParam(required = false) Integer artistId) {
        if (artistId != null) {
            Optional<Artist> result = artistRepository.findById(artistId);
            if (result.isPresent()) {
                artistRepository.delete(result.get());
            }
        }
        return "redirect:";
    }

    /**
     * GET /artist/edit?artistId=... - shows the edit form for an existing
     * artist, or an error title when the id is unknown.
     */
    @GetMapping("edit")
    public String displayEditArtistForm(@RequestParam Integer artistId, Model model) {
        Optional <Artist> result = artistRepository.findById(artistId);
        if (result.isEmpty()) {
            model.addAttribute("title", "Invalid Event ID: " + artistId);
        } else {
            Artist artist = result.get();
            model.addAttribute("artist", artist);
        }
        return "artist/edit";
    }

    /**
     * POST /artist/edit - persists the edited artist.
     * NOTE(review): this saves the submitted artist as a new row and then
     * deletes the original id, so the artist's database id changes on every
     * edit - confirm this replace-style update is intentional.
     */
    @PostMapping("edit")
    public String processEditArtistForm(@RequestParam Integer artistId, @ModelAttribute Artist newArtist, Errors errors, Model model) {
        if(errors.hasErrors()) {
            model.addAttribute("title", "Create Event");
            return "artist/create";
        }
        artistRepository.save(newArtist);
        artistRepository.deleteById(artistId);
        return "redirect:";
    }
}
|
<filename>src/test/java/io/shadowstack/candidates/InvocationReplayerTest.java<gh_stars>0
package io.shadowstack.candidates;
import io.shadowstack.*;
import io.shadowstack.incumbents.InvocationRecorder;
import io.shadowstack.incumbents.InvocationSink;
import io.shadowstack.invocations.Invocation;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import io.shadowstack.exceptions.InvocationReplayerException;
import io.shadowstack.filters.ObjectFilter;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static io.shadowstack.Fluently.*;
import static io.shadowstack.shoehorn.Fluently.reference;
import static org.junit.jupiter.api.Assertions.*;
@Slf4j
public class InvocationReplayerTest extends BaseTest {

    /**
     * End-to-end record-then-replay test:
     * 1. records a successful ("happy") call and a throwing ("sad") call
     *    through recording proxies,
     * 2. replays each by its captured context id and asserts identical
     *    behavior,
     * 3. verifies that InvocationReplayer rejects incomplete configurations
     *    at proxy-construction time.
     */
    @Test
    public void testReplay() throws TimeoutException, InterruptedException, InvocationReplayerException {
        // Grab the running test method's name for log correlation.
        String name = new Object() {}.getClass().getEnclosingMethod().getName();
        log.info(name + " starting.");
        // Context ids captured from the recording sink, keyed by outcome.
        final Queue<String> happyContextIds = new LinkedList<>();
        final Queue<String> sadContextIds = new LinkedList<>();
        final Method happyMethod = reference(Bar.class).from(b -> b.doSomethingShadowed(null));
        final Method sadMethod = reference(Bar.class).from(b -> b.doSomethingBad(null));
        // Destination callback sorts recorded invocations into the happy/sad
        // queues by target method name, then resumes the awaiting test thread.
        InMemoryInvocationDestination invocationDestination = new InMemoryInvocationDestination(recordings -> {
            if(recordings != null && !recordings.isEmpty()) {
                Invocation invocation = recordings.get(0);
                String guid = invocation.getInvocationContext().getContextId();
                String methodName = invocation.getInvocationKey().getTargetMethodName();
                if(methodName.equals(happyMethod.getName())) {
                    happyContextIds.offer(guid);
                    log.info(name + ": got HAPPY context GUID " + guid + " for method " + methodName + " :D");
                } else if(methodName.equals(sadMethod.getName())) {
                    sadContextIds.offer(guid);
                    log.info(name + ": got SAD context GUID " + guid + " for method " + methodName + " :'(");
                }
            }
            resume();
            return true;
        });
        // Scrub noisy/secret fields from both argument and result objects.
        ObjectFilter filter = filter(noise().from(Foo.class),
        secrets().from(Foo.class),
        noise().from(Baz.class),
        secrets().from(Baz.class));
        // --- Record then replay the happy path ---
        Bar proxy = record(bar)
        .filteringWith(filter)
        .sendingTo(new InvocationSink(invocationDestination).withBatchSize(1))
        .buildProxy(Bar.class);
        assertEquals(result, proxy.doSomethingShadowed(foo));
        await(1L, TimeUnit.SECONDS); // TODO: Why? Does the Flux need to be explicitly flushed?
        proxy = replay(Bar.class)
        .filteringWith(filter)
        .retrievingFrom(invocationDestination)
        .forContextId(happyContextIds.poll())
        .buildProxy();
        assertEquals(result, proxy.doSomethingShadowed(foo));
        // --- Record then replay the sad (throwing) path ---
        final Bar finalProxyRecord = record(bar)
        .filteringWith(filter)
        .sendingTo(new InvocationSink(invocationDestination).withBatchSize(1))
        .buildProxy(Bar.class);
        assertThrows(NotImplementedException.class, () -> finalProxyRecord.doSomethingBad(foo));
        await(1L, TimeUnit.SECONDS); // TODO: Why? Does the Flux need to be explicitly flushed?
        final Bar finalProxyReplay = replay(Bar.class)
        .filteringWith(filter)
        .retrievingFrom(invocationDestination)
        .forContextId(sadContextIds.poll())
        .buildProxy();
        assertThrows(NotImplementedException.class, () -> finalProxyReplay.doSomethingBad(foo));
        // Test bad constructions
        // Missing target class: buildProxy() must fail.
        InvocationReplayer replayer = replay(null)
        .filteringWith(filter)
        .retrievingFrom(invocationDestination)
        .forContextId("badf00d");
        final InvocationReplayer finalReplayer1 = replayer;
        assertThrows(InvocationReplayerException.class, () -> finalReplayer1.buildProxy());
        // Missing filter: buildProxy() must fail.
        replayer = replay(Bar.class)
        .filteringWith(null)
        .retrievingFrom(invocationDestination)
        .forContextId("badf00d");
        final InvocationReplayer finalReplayer2 = replayer;
        assertThrows(InvocationReplayerException.class, () -> finalReplayer2.buildProxy());
        // Missing source: buildProxy() must fail.
        replayer = replay(Bar.class)
        .filteringWith(filter)
        .retrievingFrom(null)
        .forContextId("badf00d");
        final InvocationReplayer finalReplayer3 = replayer;
        assertThrows(InvocationReplayerException.class, () -> finalReplayer3.buildProxy());
        // Missing context id: buildProxy() must fail.
        replayer = replay(Bar.class)
        .filteringWith(filter)
        .retrievingFrom(invocationDestination)
        .forContextId(null);
        final InvocationReplayer finalReplayer4 = replayer;
        assertThrows(InvocationReplayerException.class, () -> finalReplayer4.buildProxy());
        log.info(name + " finishing.");
    }
}
|
import { graphql, useStaticQuery } from "gatsby"
// Fetches and normalizes the Prismic "studio" page content: manifesto,
// team members and the day-to-day gallery.
const useStudioPage = () => {
  const { prismicStudio } = useStaticQuery(graphql`
    {
      prismicStudio {
        data {
          manifesto {
            title {
              text
            }
            column_2 {
              html
            }
            column_1 {
              html
            }
          }
          team_members {
            bio {
              html
            }
            bio_2 {
              html
            }
            job_position {
              text
            }
            photo {
              fluid {
                ...GatsbyPrismicImageFluid
              }
            }
            hover_photo {
              fluid {
                ...GatsbyPrismicImageFluid
              }
            }
            name {
              text
            }
          }
          our_day_to_day {
            image {
              fluid {
                ...GatsbyPrismicImageFluid
              }
            }
          }
        }
      }
    }
  `)

  const { data } = prismicStudio
  return {
    manifesto: {
      // Guard every field consistently: Prismic returns null for empty
      // fields, and previously `title` (unlike the columns) would throw
      // instead of yielding undefined.
      title: data?.manifesto[0]?.title?.text,
      column_1: data?.manifesto[0]?.column_1?.html,
      column_2: data?.manifesto[0]?.column_2?.html,
    },
    teamMembers: data.team_members.map(member => ({
      name: member.name?.text,
      jobPosition: member.job_position?.text,
      bio: member.bio?.html || null,
      bio2: member.bio_2?.html || null,
      image: member.photo?.fluid,
      hoverImage: member.hover_photo?.fluid,
    })),
    gallery: data.our_day_to_day.map(image => image.image?.fluid),
  }
}

export default useStudioPage
|
# Provision an Ubuntu host with OpenResty and MaxMind GeoIP support.

# change apt soft source to Chinese mirror site
sudo sed -i 's/archive.ubuntu.com/mirrors.tuna.tsinghua.edu.cn/g' /etc/apt/sources.list

# import openresty GPG key:
wget -qO - https://openresty.org/package/pubkey.gpg | sudo apt-key add -
sudo apt update
sudo apt install -y software-properties-common

# add openresty repo
sudo add-apt-repository -y "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main"
# add git ppa repo
sudo add-apt-repository ppa:git-core/ppa -y
# add maxmind ppa repo (-y added: without it the script blocks on an
# interactive confirmation prompt, unlike the other repository additions)
sudo add-apt-repository -y ppa:maxmind/ppa

# to update the APT index:
sudo apt-get update
# install depends soft
sudo apt-get install -y openresty build-essential libssl-dev git htop libmaxminddb0 libmaxminddb-dev mmdb-bin geoipupdate

# code by https://stackoverflow.com/a/63748803/7001350
# NOTE(review): the MaxMind license key previously embedded in the commented
# URLs below was redacted - never commit real license keys.
# download GeoLite2-City database file
#curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City&license_key=YOUR_LICENSE_KEY&suffix=tar.gz" -o /tmp/GeoLite2-Country.tar.gz \
# && tar -xzvf /tmp/GeoLite2-City.tar.gz -C /tmp/ \
# && mv GeoLite2-City_*/GeoLite2-City.mmdb /tmp/GeoLite2-City.mmdb
# download GeoLite2-Country database file
# curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=YOUR_LICENSE_KEY&suffix=tar.gz" -o /tmp/GeoLite2-Country.tar.gz \
# && tar -xzvf /tmp/GeoLite2-Country.tar.gz -C /tmp/ \
# && mv GeoLite2-Country_*/GeoLite2-Country.mmdb /tmp/GeoLite2-Country.mmdb

# opm install anjia0532/lua-resty-maxminddb lib
sudo opm --install-dir /usr/local/openresty/ get anjia0532/lua-resty-maxminddb
# overwrite nginx config file
sudo cp /vagrant/nginx.conf /usr/local/openresty/nginx/conf/nginx.conf
# test openresty config file
sudo /usr/local/openresty/bin/openresty -t
# reload openresty config
sudo /usr/local/openresty/bin/openresty -s reload
sudo cp /vagrant/* /tmp/
sudo chmod +x /tmp/wrk
|
# Authenticate the Heroku CLI against the Heroku container registry.
heroku container:login
# Build the local Dockerfile and push the image as the "web" process type.
heroku container:push web
# Release the pushed image, making it the live version of the app.
heroku container:release web
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-base/mantle/src/main/java/io/opensphere/mantle/transformer/impl/PolygonRegionCommandWorker.java
package io.opensphere.mantle.transformer.impl;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Polygon;
import io.opensphere.core.TimeManager;
import io.opensphere.core.geometry.ConstrainableGeometry;
import io.opensphere.core.geometry.Geometry;
import io.opensphere.core.geometry.constraint.Constraints;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.plugin.selection.SelectionCommand;
import io.opensphere.mantle.transformer.impl.worker.DataElementTransformerWorkerDataProvider;
/**
 * Abstract worker that, under the provider's geometry-set lock, partitions
 * the provider's geometries into those intersecting a set of polygon regions
 * and those not, then delegates to {@link #process()}.
 */
public abstract class PolygonRegionCommandWorker implements Runnable
{
    /** The selection command being executed. */
    private final SelectionCommand myCommand;

    /** Data-model ids of geometries intersecting the regions. */
    private final Set<Long> myIntersectingSet = New.set();

    /** Data-model ids of geometries not intersecting the regions. */
    private final Set<Long> myNonIntersectingSet = New.set();

    /** The data provider supplying geometries, lock and id mask. */
    private final DataElementTransformerWorkerDataProvider myProvider;

    /** The polygon regions tested against. */
    private final List<Polygon> myRegions;

    /** The time manager used for time-constraint checks (may be null). */
    private final TimeManager myTimeManager;

    /** Whether time constraints participate in the intersection check. */
    private final boolean myUseTimeInIntersectionCheck;

    /**
     * Instantiates a new selection command worker.
     *
     * @param provider the provider
     * @param regions the regions
     * @param command the command
     * @param useTimeInIntersectionCheck the use time in intersection check
     */
    public PolygonRegionCommandWorker(DataElementTransformerWorkerDataProvider provider, List<Polygon> regions,
            SelectionCommand command, boolean useTimeInIntersectionCheck)
    {
        myProvider = provider;
        myRegions = regions;
        myCommand = command;
        myTimeManager = myProvider.getToolbox().getTimeManager();
        myUseTimeInIntersectionCheck = useTimeInIntersectionCheck;
    }

    /**
     * Gets the command.
     *
     * @return the command
     */
    public SelectionCommand getCommand()
    {
        return myCommand;
    }

    /**
     * Gets the intersecting id set.
     *
     * @return the intersecting id set
     */
    public Set<Long> getIntersectingIdSet()
    {
        return myIntersectingSet;
    }

    /**
     * Gets the non intersecting id set.
     *
     * @return the non intersecting id set
     */
    public Set<Long> getNonIntersectingIdSet()
    {
        return myNonIntersectingSet;
    }

    /**
     * Gets the provider.
     *
     * @return the provider
     */
    public DataElementTransformerWorkerDataProvider getProvider()
    {
        return myProvider;
    }

    /**
     * Gets the regions.
     *
     * @return the regions
     */
    public List<Polygon> getRegion()
    {
        return myRegions;
    }

    /**
     * Passes time constraint check.
     *
     * @param g the geometry to check
     * @return true if the geometry has no failing time constraint
     */
    public boolean passesTimeConstraintCheck(Geometry g)
    {
        boolean pass = true;
        if (g instanceof ConstrainableGeometry)
        {
            ConstrainableGeometry cg = (ConstrainableGeometry)g;
            Constraints constraints = cg.getConstraints();
            if (myTimeManager != null && constraints != null && constraints.getTimeConstraint() != null
                    && !constraints.getTimeConstraint().check(myTimeManager.getPrimaryActiveTimeSpans()))
            {
                if (constraints.getTimeConstraint().getKey() == null)
                {
                    // Failed against the primary spans and no key to fall
                    // back to a secondary check.
                    pass = false;
                }
                else
                {
                    // Keyed constraints get a second chance against the
                    // secondary active spans for that key.
                    Collection<? extends TimeSpan> secondary = myTimeManager
                            .getSecondaryActiveTimeSpans(constraints.getTimeConstraint().getKey());
                    if (secondary == null || !constraints.getTimeConstraint().check(secondary))
                    {
                        pass = false;
                    }
                }
            }
        }
        return pass;
    }

    /**
     * Process the intersecting/non-intersecting sets (invoked while the
     * geometry-set lock is held).
     */
    public abstract void process();

    @Override
    public final void run()
    {
        // Hold the geometry-set lock across both partitioning and processing
        // so the sets stay consistent with the provider's geometries.
        ReentrantLock lock = myProvider.getGeometrySetLock();
        lock.lock();
        try
        {
            determineIntersectingSets(myUseTimeInIntersectionCheck);
            process();
        }
        finally
        {
            lock.unlock();
        }
    }

    /**
     * Determine intersecting sets.
     *
     * @param useTimeConstraints true to use time constraints as part of the
     *            intersection check.
     */
    private void determineIntersectingSets(boolean useTimeConstraints)
    {
        GeometryFactory gf = new GeometryFactory();
        // Hoisted out of the loop: the test configuration is invariant and
        // was previously re-allocated for every geometry.
        Geometry.JTSIntersectionTests tests = new Geometry.JTSIntersectionTests(true, true, false);
        for (Geometry g : myProvider.getGeometrySet())
        {
            boolean intersecting = true;
            if (useTimeConstraints)
            {
                intersecting = passesTimeConstraintCheck(g);
            }
            if (intersecting)
            {
                intersecting = g.jtsIntersectionTests(tests, myRegions, gf);
            }
            // Mask the geometry id down to its data-model id before recording.
            if (intersecting)
            {
                myIntersectingSet.add(Long.valueOf(g.getDataModelId() & myProvider.getDataModelIdFromGeometryIdBitMask()));
            }
            else
            {
                myNonIntersectingSet.add(Long.valueOf(g.getDataModelId() & myProvider.getDataModelIdFromGeometryIdBitMask()));
            }
        }
    }
}
|
package com.rey.jsonbatch.playground.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
/**
 * Request-template variant representing a loop construct in a batch
 * definition, carrying the loop counter's init/predicate/update values.
 * Request-level fields that do not apply to loops are excluded from
 * serialization via {@code @JsonIgnoreProperties}.
 */
@JsonIgnoreProperties({"loop", "predicate", "httpMethod", "url", "headers", "body", "transformers", "responses"})
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ExtendedLoopTemplate extends ExtendedRequestTemplate {

    // Initial value/expression for the loop counter - presumably evaluated
    // once before the loop starts; confirm against the batch engine.
    private Object counterInit;
    // Predicate value/expression - presumably looping continues while it
    // holds; confirm against the batch engine.
    private Object counterPredicate;
    // Update value/expression - presumably applied after each iteration;
    // confirm against the batch engine.
    private Object counterUpdate;

    public Object getCounterInit() {
        return counterInit;
    }

    public void setCounterInit(Object counterInit) {
        this.counterInit = counterInit;
    }

    public Object getCounterPredicate() {
        return counterPredicate;
    }

    public void setCounterPredicate(Object counterPredicate) {
        this.counterPredicate = counterPredicate;
    }

    public Object getCounterUpdate() {
        return counterUpdate;
    }

    public void setCounterUpdate(Object counterUpdate) {
        this.counterUpdate = counterUpdate;
    }

    /** Display label: the parent template's label prefixed with "Loop:". */
    @Override
    public String getLabel() {
        return String.format("Loop: %s", getParent().getLabel());
    }
}
|
from typing import List, Tuple
class Point:
    """A 2-D point with value semantics (equality, hashing, repr)."""

    def __init__(self, x: int, y: int):
        self.x = x
        self.y = y

    def __repr__(self) -> str:
        return f"Point({self.x}, {self.y})"

    def __eq__(self, other: object) -> bool:
        # Value equality (previously identity-only), so points can be
        # compared and deduplicated in sets/dicts.
        if not isinstance(other, Point):
            return NotImplemented
        return (self.x, self.y) == (other.x, other.y)

    def __hash__(self) -> int:
        return hash((self.x, self.y))
class Polygon:
    """Simple polygon supporting indexed point-containment queries."""

    def __init__(self, points: List[Tuple[int, int]]):
        self.points = [Point(x, y) for x, y in points]
        self.indexed = False

    def index(self):
        """Build the spatial index used by containment checks.

        The index is an axis-aligned bounding box used for cheap rejection
        of points that cannot possibly be inside the polygon.
        """
        xs = [p.x for p in self.points]
        ys = [p.y for p in self.points]
        self._bbox = (min(xs), min(ys), max(xs), max(ys))
        self.indexed = True

    def __contains__(self, point: Tuple[int, int]) -> bool:
        """Return True if ``point`` lies inside the polygon (ray casting).

        Previously this was an unimplemented placeholder that always
        returned False.
        """
        if not self.indexed:
            raise RuntimeError("Polygon not indexed. Call index() before containment check.")
        x, y = point
        min_x, min_y, max_x, max_y = self._bbox
        # Fast rejection via the bounding-box index.
        if x < min_x or x > max_x or y < min_y or y > max_y:
            return False
        # Even-odd rule: cast a horizontal ray to +x and count edge crossings.
        inside = False
        n = len(self.points)
        for i in range(n):
            p1 = self.points[i]
            p2 = self.points[(i + 1) % n]
            if (p1.y > y) != (p2.y > y):
                x_cross = p1.x + (y - p1.y) * (p2.x - p1.x) / (p2.y - p1.y)
                if x < x_cross:
                    inside = not inside
        return inside
# Example usage: build a 5x5 axis-aligned square, index it, then run a
# containment query on an interior point.
polygon = Polygon([(0, 0), (0, 5), (5, 5), (5, 0)])
polygon.index()
print((2, 2) in polygon)  # Expected output: True or False based on containment check
|
package mysqlconn
import (
"context"
"database/sql"
"fmt"
"github.com/domonda/go-sqldb"
"github.com/domonda/go-sqldb/impl"
)
// New creates a new sqldb.Connection using the passed sqldb.Config
// and github.com/go-sql-driver/mysql as driver implementation.
// The connection is pinged with the passed context
// and only returned when the ping produced no error.
func New(ctx context.Context, config *sqldb.Config) (sqldb.Connection, error) {
	// Reject configs that were written for a different driver.
	if config.Driver != "mysql" {
		return nil, fmt.Errorf(`invalid driver %q, mysqlconn expects "mysql"`, config.Driver)
	}
	config.DefaultIsolationLevel = sql.LevelRepeatableRead // mysql default
	database, err := config.Connect(ctx)
	if err != nil {
		return nil, err
	}
	return impl.Connection(ctx, database, config, argFmt), nil
}
// MustNew creates a new sqldb.Connection using the passed sqldb.Config
// and github.com/go-sql-driver/mysql as driver implementation.
// The connection is pinged with the passed context.
// MustNew behaves like New but panics on any error.
func MustNew(ctx context.Context, config *sqldb.Config) sqldb.Connection {
	connection, err := New(ctx, config)
	if err == nil {
		return connection
	}
	panic(err)
}
|
package de.ids_mannheim.korap.misc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import com.fasterxml.jackson.databind.JsonNode;
import de.ids_mannheim.korap.exceptions.KustvaktException;
import de.ids_mannheim.korap.query.serialize.QuerySerializer;
import de.ids_mannheim.korap.utils.JsonUtils;
import de.ids_mannheim.korap.utils.KoralCollectionQueryBuilder;
/**
* @author hanl
* @date 12/08/2015
*/
public class CollectionQueryBuilderTest {

    // A single field query serializes to a koral:doc node.
    @Test
    public void testsimpleAdd () throws KustvaktException {
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.with("corpusSigle=WPD");
        JsonNode node = JsonUtils.readTree(b.toJSON());
        assertNotNull(node);
        assertEquals("koral:doc", node.at("/collection/@type").asText());
        assertEquals("corpusSigle", node.at("/collection/key").asText());
    }

    // "&" combines two field queries into an operation:and docGroup.
    @Test
    public void testSimpleConjunction () throws KustvaktException {
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.with("corpusSigle=WPD & textClass=freizeit");
        JsonNode node = JsonUtils.readTree(b.toJSON());
        assertNotNull(node);
        assertEquals("koral:docGroup", node.at("/collection/@type").asText());
        assertEquals("operation:and", node.at("/collection/operation").asText());
        assertEquals("corpusSigle", node.at("/collection/operands/0/key")
                .asText());
        assertEquals("textClass", node.at("/collection/operands/1/key")
                .asText());
    }

    // "|" combines two field queries into an operation:or docGroup.
    // Fixed: bare `assert` statements are no-ops unless the JVM runs with
    // -ea, so these checks never actually ran — use assertEquals instead.
    @Test
    public void testSimpleDisjunction () throws KustvaktException {
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.with("corpusSigle=WPD | textClass=freizeit");
        JsonNode node = JsonUtils.readTree(b.toJSON());
        assertNotNull(node);
        assertEquals("operation:or", node.at("/collection/operation").asText());
        assertEquals("corpusSigle",
                node.at("/collection/operands/0/key").asText());
        assertEquals("textClass",
                node.at("/collection/operands/1/key").asText());
    }

    // Parenthesized sub-expressions become nested docGroups.
    // Fixed: bare `assert` replaced with assertEquals (see above).
    @Test
    public void testComplexSubQuery () throws KustvaktException {
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.with("(corpusSigle=WPD) | (textClass=freizeit & corpusSigle=BRZ13)");
        JsonNode node = JsonUtils.readTree(b.toJSON());
        assertNotNull(node);
        assertEquals("operation:or", node.at("/collection/operation").asText());
        assertEquals("corpusSigle",
                node.at("/collection/operands/0/key").asText());
        assertEquals("koral:docGroup",
                node.at("/collection/operands/1/@type").asText());
    }

    // Extending a serialized base query ANDs the new constraint onto it.
    @Test
    public void testAddResourceQueryAfter () throws KustvaktException {
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.with("(textClass=politik & title=\"random title\") | textClass=wissenschaft");
        KoralCollectionQueryBuilder c = new KoralCollectionQueryBuilder();
        c.setBaseQuery(b.toJSON());
        c.with("corpusSigle=WPD");
        JsonNode node = JsonUtils.readTree(c.toJSON());
        assertNotNull(node);
        assertEquals("koral:doc", node.at("/collection/operands/1/@type")
                .asText());
        assertEquals("koral:docGroup", node.at("/collection/operands/0/@type")
                .asText());
        assertEquals(2, node.at("/collection/operands").size());
        assertEquals(2, node.at("/collection/operands/0/operands").size());
        assertEquals(2, node.at("/collection/operands/0/operands/0/operands")
                .size());
        assertEquals("operation:and", node.at("/collection/operation").asText());
        assertEquals("operation:or", node
                .at("/collection/operands/0/operation").asText());
        assertEquals("operation:and",
                node.at("/collection/operands/0/operands/0/operation").asText());
        assertEquals("WPD", node.at("/collection/operands/1/value").asText());
    }

    // Both the base query and the added constraint may be docGroups.
    @Test
    public void testAddComplexResourceQueryAfter () throws KustvaktException {
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.with("(title=\"random title\") | (textClass=wissenschaft)");
        KoralCollectionQueryBuilder c = new KoralCollectionQueryBuilder();
        c.setBaseQuery(b.toJSON());
        c.with("(corpusSigle=BRZ13 | corpusSigle=AZPS)");
        JsonNode node = JsonUtils.readTree(c.toJSON());
        assertNotNull(node);
        assertEquals("koral:docGroup", node.at("/collection/operands/0/@type")
                .asText());
        assertEquals("koral:docGroup", node.at("/collection/operands/1/@type")
                .asText());
        assertEquals("BRZ13", node
                .at("/collection/operands/1/operands/0/value").asText());
        assertEquals("AZPS", node.at("/collection/operands/1/operands/1/value")
                .asText());
        assertEquals("random title",
                node.at("/collection/operands/0/operands/0/value").asText());
        assertEquals("wissenschaft",
                node.at("/collection/operands/0/operands/1/value").asText());
    }

    // Rebase keeps the original query segment while ANDing a new constraint
    // onto the collection segment.
    @Test
    public void testBuildQuery () throws KustvaktException {
        String coll = "corpusSigle=WPD";
        String query = "[base=Haus]";
        QuerySerializer check = new QuerySerializer();
        check.setQuery(query, "poliqarp");
        check.setCollection(coll);
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.setBaseQuery(check.toJSON());
        b.with("textClass=freizeit");
        JsonNode res = (JsonNode) b.rebaseCollection();
        assertNotNull(res);
        assertEquals("koral:docGroup", res.at("/collection/@type").asText());
        assertEquals("operation:and", res.at("/collection/operation").asText());
        assertEquals("koral:doc", res.at("/collection/operands/0/@type")
                .asText());
        assertEquals("freizeit", res.at("/collection/operands/1/value")
                .asText());
        assertEquals("textClass", res.at("/collection/operands/1/key").asText());
        assertEquals("koral:doc", res.at("/collection/operands/1/@type")
                .asText());
        assertEquals("WPD", res.at("/collection/operands/0/value").asText());
        assertEquals("corpusSigle", res.at("/collection/operands/0/key")
                .asText());
        // check also that query is still there
        assertEquals("koral:token", res.at("/query/@type").asText());
        assertEquals("koral:term", res.at("/query/wrap/@type").asText());
        assertEquals("Haus", res.at("/query/wrap/key").asText());
        assertEquals("lemma", res.at("/query/wrap/layer").asText());
    }

    // Rebase on a base that is itself a docGroup.
    // Fixed: assertEquals arguments were (actual, expected); swapped to the
    // JUnit convention (expected, actual) so failure messages read correctly.
    @Test
    public void testBaseQueryBuild () throws KustvaktException {
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.with("(corpusSigle=ADF) | (textClass=freizeit & corpusSigle=WPD)");
        KoralCollectionQueryBuilder c = new KoralCollectionQueryBuilder();
        c.setBaseQuery(b.toJSON());
        c.with("corpusSigle=BRZ13");
        JsonNode base = (JsonNode) c.rebaseCollection();
        assertNotNull(base);
        assertEquals("koral:docGroup", base.at("/collection/@type").asText());
        assertEquals("koral:doc",
                base.at("/collection/operands/1/@type").asText());
        assertEquals("BRZ13", base.at("/collection/operands/1/value").asText());
        assertEquals("koral:docGroup",
                base.at("/collection/operands/0/@type").asText());
        assertEquals(2, base.at("/collection/operands/0/operands").size());
    }

    // mergeWith combines a rebased node into a base builder with AND.
    @Test
    public void testNodeMergeWithBase () throws KustvaktException {
        String coll = "corpusSigle=WPD";
        String query = "[base=Haus]";
        QuerySerializer check = new QuerySerializer();
        check.setQuery(query, "poliqarp");
        check.setCollection(coll);
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.setBaseQuery(check.toJSON());
        KoralCollectionQueryBuilder test = new KoralCollectionQueryBuilder();
        test.with("textClass=wissenschaft | textClass=politik");
        JsonNode node = (JsonNode) test.rebaseCollection();
        node = b.mergeWith(node);
        assertNotNull(node);
        assertEquals("koral:docGroup", node.at("/collection/@type").asText());
        assertEquals("operation:and", node.at("/collection/operation").asText());
        assertEquals(2, node.at("/collection/operands").size());
    }

    // Merge behaviour when the base has no collection segment
    // (assertions currently disabled — kept as in the original).
    @Test
    public void testNodeMergeWithoutBase () throws KustvaktException {
        String query = "[base=Haus]";
        QuerySerializer check = new QuerySerializer();
        check.setQuery(query, "poliqarp");
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.setBaseQuery(check.toJSON());
        KoralCollectionQueryBuilder test = new KoralCollectionQueryBuilder();
        test.with("corpusSigle=WPD");
        // String json = test.toJSON();
        // System.out.println(json);
        //JsonNode node = (JsonNode) test.rebaseCollection(null);
        //node = b.mergeWith(node);
        //assertNotNull(node);
        //assertEquals("koral:doc", node.at("/collection/@type").asText());
        //assertEquals("corpusSigle", node.at("/collection/key").asText());
    }

    // Leading operator without a base query
    // (assertions currently disabled — kept as in the original).
    @Test
    public void testNodeMergeWithoutBaseWrongOperator () throws KustvaktException {
        String query = "[base=Haus]";
        QuerySerializer check = new QuerySerializer();
        check.setQuery(query, "poliqarp");
        KoralCollectionQueryBuilder b = new KoralCollectionQueryBuilder();
        b.setBaseQuery(check.toJSON());
        KoralCollectionQueryBuilder test = new KoralCollectionQueryBuilder();
        // operator is not supposed to be here!
        test.and().with("corpusSigle=WPD");
        // String json = test.toJSON();
        // System.out.println(json);
        //JsonNode node = (JsonNode) test.rebaseCollection(null);
        //node = b.mergeWith(node);
        //assertNotNull(node);
        //assertEquals("koral:doc", node.at("/collection/@type").asText());
        //assertEquals("corpusSigle", node.at("/collection/key").asText());
    }

    // TODO: not implemented yet (kept as a placeholder, as in the original).
    @Test
    public void testStoredCollectionBaseQueryBuild () {
    }

    // or() before with() combines base and new constraints with operation:or.
    @Test
    public void testAddOROperator () throws KustvaktException {
        String coll = "corpusSigle=WPD";
        String query = "[base=Haus]";
        QuerySerializer check = new QuerySerializer();
        check.setQuery(query, "poliqarp");
        check.setCollection(coll);
        KoralCollectionQueryBuilder test = new KoralCollectionQueryBuilder();
        test.setBaseQuery(check.toJSON());
        test.or().with("textClass=wissenschaft | textClass=politik");
        JsonNode node = (JsonNode) test.rebaseCollection();
        assertNotNull(node);
        assertEquals("koral:docGroup", node.at("/collection/@type").asText());
        assertEquals("operation:or", node.at("/collection/operation").asText());
        assertEquals(2, node.at("/collection/operands/1/operands").size());
    }

    // and() before with() combines base and new constraints with operation:and.
    @Test
    public void testAddANDOperator () throws KustvaktException {
        String coll = "corpusSigle=WPD";
        String query = "[base=Haus]";
        QuerySerializer check = new QuerySerializer();
        check.setQuery(query, "poliqarp");
        check.setCollection(coll);
        KoralCollectionQueryBuilder test = new KoralCollectionQueryBuilder();
        test.setBaseQuery(check.toJSON());
        test.and().with("textClass=wissenschaft | textClass=politik");
        JsonNode node = (JsonNode) test.rebaseCollection();
        assertNotNull(node);
        assertEquals("koral:docGroup", node.at("/collection/@type").asText());
        assertEquals("operation:and", node.at("/collection/operation").asText());
        assertEquals(2, node.at("/collection/operands/1/operands").size());
    }

    // Without an explicit operator the default combination is AND.
    @Test
    public void testAddDefaultOperator () throws KustvaktException {
        String coll = "corpusSigle=WPD";
        String query = "[base=Haus]";
        QuerySerializer check = new QuerySerializer();
        check.setQuery(query, "poliqarp");
        check.setCollection(coll);
        KoralCollectionQueryBuilder test = new KoralCollectionQueryBuilder();
        test.setBaseQuery(check.toJSON());
        test.with("textClass=wissenschaft | textClass=politik");
        JsonNode node = (JsonNode) test.rebaseCollection();
        assertNotNull(node);
        assertEquals("koral:docGroup", node.at("/collection/@type").asText());
        assertEquals("operation:and", node.at("/collection/operation").asText());
        assertEquals(2, node.at("/collection/operands/1/operands").size());
    }

    // Merging into a base without a collection segment still yields a group.
    @Test
    public void testBaseCollectionNull () throws KustvaktException {
        // base is missing collection segment
        QuerySerializer s = new QuerySerializer();
        s.setQuery("[base=Haus]", "poliqarp");
        KoralCollectionQueryBuilder total = new KoralCollectionQueryBuilder();
        total.setBaseQuery(s.toJSON());
        KoralCollectionQueryBuilder builder = new KoralCollectionQueryBuilder();
        builder.with("textClass=politik & corpusSigle=WPD");
        JsonNode node = total.and().mergeWith(
                (JsonNode) builder.rebaseCollection());
        assertNotNull(node);
        assertEquals("koral:docGroup", node.at("/collection/@type").asText());
        assertEquals("operation:and", node.at("/collection/operation").asText());
        assertEquals("koral:doc", node.at("/collection/operands/0/@type")
                .asText());
        assertEquals("koral:doc", node.at("/collection/operands/1/@type")
                .asText());
        assertEquals("textClass", node.at("/collection/operands/0/key")
                .asText());
        assertEquals("corpusSigle", node.at("/collection/operands/1/key")
                .asText());
    }

    // Merging an empty builder leaves the base collection unchanged.
    @Test
    public void testMergeCollectionNull () throws KustvaktException {
        // merge json is missing collection segment
        QuerySerializer s = new QuerySerializer();
        s.setQuery("[base=Haus]", "poliqarp");
        s.setCollection("textClass=wissenschaft");
        KoralCollectionQueryBuilder total = new KoralCollectionQueryBuilder();
        total.setBaseQuery(s.toJSON());
        KoralCollectionQueryBuilder builder = new KoralCollectionQueryBuilder();
        JsonNode node = total.and().mergeWith(
                (JsonNode) builder.rebaseCollection());
        assertNotNull(node);
        assertEquals("koral:doc", node.at("/collection/@type").asText());
        assertEquals("textClass", node.at("/collection/key").asText());
    }
}
|
#!/bin/bash
# Pull the cardano relay0 state from the source node, then fan it out to
# every backup host. `set -e` aborts on the first failing command.
set -e

# Start an ssh-agent for this run and load the external key used by all
# of the ssh/rsync invocations below.
eval "$(ssh-agent -s)" && \
ssh-add /home/galuisal/.ssh/id_rsa_ext
ssh-add -l

# Pull the latest relay0 snapshot from the source node into local storage.
rsync --delete -avzhe 'ssh -p 2222' --progress lovelace@192.168.100.47:/home/lovelace/cardano-node/mainnet/rocinante/relay0 /mnt/storage/cardano/

# Push the snapshot to each backup host (replaces the five copy-pasted
# stanzas of the original; same hosts, same commands).
for host in 192.168.100.40 192.168.100.41 192.168.100.42 192.168.100.43 192.168.100.44; do
    ssh -p 2222 "lovelace@${host}" 'mkdir -p /home/lovelace/cardano-node/mainnet/rocinante/backup'
    rsync --delete -avzhe 'ssh -p 2222' --progress /mnt/storage/cardano/relay0/* "lovelace@${host}":/home/lovelace/cardano-node/mainnet/rocinante/backup
done
|
<reponame>mighteejim/manager<filename>src/profile/integrations/layouts/IndexPage.js
import React, { PropTypes } from 'react';
import { connect } from 'react-redux';
import { push } from 'react-router-redux';
import { Tabs } from 'linode-components/tabs';
export function IndexPage(props) {
const tabs = [
{ name: 'Authorized Applications', link: '' },
{ name: 'My Applications', link: '/applications' },
{ name: 'Personal Access Tokens', link: '/tokens' },
].map(t => ({ ...t, link: `/profile/integrations${t.link}` }));
return (
<Tabs
tabs={tabs}
isSubTabs
onClick={(e, tabIndex) => {
e.stopPropagation();
props.dispatch(push(tabs[tabIndex].link));
}}
pathname={location.pathname}
>
{props.children}
</Tabs>
);
}
// Runtime prop validation: the routed child page is required; dispatch is
// injected by connect() below.
IndexPage.propTypes = {
  children: PropTypes.node.isRequired,
  dispatch: PropTypes.func,
};

// connect() with no arguments maps no state — it only injects `dispatch`.
export default connect()(IndexPage);
|
# Run this curl command to invoke the service.
$ curl "http://localhost:9090/sample/path;a=4;b=5/value1;x=10;y=15?bar=value2"
{
"pathParam": "value1",
"queryParam": "value2",
"matrix": {
"path": "a=4, b=5",
"foo": "x=10, y=15"
}
}
|
#!/bin/bash
# Build statically-linked SDL2 (+ mixer, image, ttf) into $HOME/kivy_build,
# then build Kivy manylinux wheels for every CPython 3.x in /opt/python.
set -e -x

# Build prerequisites. Fixed: "Xorg-x11-server-deve" was a typo — the
# devel package is named xorg-x11-server-devel.
yum -y install autoconf automake cmake gcc gcc-c++ git make pkgconfig zlib-devel portmidi portmidi-devel xorg-x11-server-devel mesa-libEGL-devel mtdev-devel mesa-libEGL freetype freetype-devel openjpeg openjpeg-devel libpng libpng-devel libtiff libtiff-devel libwebp libwebp-devel dbus-devel dbus ibus-devel ibus libsamplerate-devel libsamplerate libudev-devel libudev libmodplug-devel libmodplug libvorbis-devel libvorbis flac-devel flac libjpeg-turbo-devel libjpeg-turbo;
mkdir ~/kivy_sources;
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HOME/kivy_build/lib;

# --- SDL2 core (from source; all optional backends built non-shared) ---
cd ~/kivy_sources;
git clone --depth 1 https://github.com/spurious/SDL-mirror.git
cd SDL-mirror;
./configure --prefix="$HOME/kivy_build" --bindir="$HOME/kivy_build/bin" --enable-alsa-shared=no --enable-jack-shared=no --enable-pulseaudio-shared=no --enable-esd-shared=no --enable-arts-shared=no --enable-nas-shared=no --enable-sndio-shared=no --enable-fusionsound-shared=no --enable-libsamplerate-shared=no --enable-wayland-shared=no --enable-mir-shared=no --enable-x11-shared=no --enable-directfb-shared=no --enable-kmsdrm-shared=no;
make;
make install;
make distclean;

# --- SDL2_mixer ---
cd ~/kivy_sources;
wget http://www.libsdl.org/projects/SDL_mixer/release/SDL2_mixer-2.0.2.tar.gz;
tar xzf SDL2_mixer-2.0.2.tar.gz;
cd SDL2_mixer-2.0.2;
PATH="$HOME/kivy_build/bin:$PATH" PKG_CONFIG_PATH="$HOME/kivy_build/lib/pkgconfig" ./configure --prefix="$HOME/kivy_build" --bindir="$HOME/kivy_build/bin" --enable-music-mod-modplug-shared=no --enable-music-mod-mikmod-shared=no --enable-music-midi-fluidsynth-shared=no --enable-music-ogg-shared=no --enable-music-flac-shared=no --enable-music-mp3-smpeg-shared=no --enable-music-mp3-mpg123-shared=no;
PATH="$HOME/kivy_build/bin:$PATH" make;
make install;
make distclean;

# --- SDL2_image ---
cd ~/kivy_sources;
wget http://www.libsdl.org/projects/SDL_image/release/SDL2_image-2.0.3.tar.gz;
tar xzf SDL2_image-2.0.3.tar.gz;
cd SDL2_image-2.0.3;
PATH="$HOME/kivy_build/bin:$PATH" PKG_CONFIG_PATH="$HOME/kivy_build/lib/pkgconfig" ./configure --prefix="$HOME/kivy_build" --bindir="$HOME/kivy_build/bin" --enable-png-shared=no --enable-jpg-shared=no --enable-tif-shared=no --enable-webp-shared=no;
PATH="$HOME/kivy_build/bin:$PATH" make;
make install;
make distclean;

# --- SDL2_ttf ---
cd ~/kivy_sources;
wget http://www.libsdl.org/projects/SDL_ttf/release/SDL2_ttf-2.0.14.tar.gz;
tar xzf SDL2_ttf-2.0.14.tar.gz;
cd SDL2_ttf-2.0.14;
PATH="$HOME/kivy_build/bin:$PATH" PKG_CONFIG_PATH="$HOME/kivy_build/lib/pkgconfig" ./configure --prefix="$HOME/kivy_build" --bindir="$HOME/kivy_build/bin";
PATH="$HOME/kivy_build/bin:$PATH" make;
make install;
make distclean;

# --- Build a wheel per interpreter, then repair to manylinux tags ---
cd /io;
for PYBIN in /opt/python/*3*/bin; do
    "${PYBIN}/pip" install --upgrade setuptools pip;
    "${PYBIN}/pip" install --upgrade cython nose pygments docutils;
    USE_X11=1 USE_SDL2=1 USE_GSTREAMER=0 PKG_CONFIG_PATH="$HOME/kivy_build/lib/pkgconfig" "${PYBIN}/pip" wheel --no-deps . -w wheelhouse/;
done
for name in /io/wheelhouse/*.whl; do
    echo "Fixing $name";
    # Quote the path so wheels with unusual names don't word-split.
    auditwheel repair "$name" -w /io/wheelhouse/;
done
|
-- All contacts whose first name starts with 'M' (case sensitivity depends
-- on the column collation).
SELECT *
FROM contacts
WHERE first_name LIKE 'M%'
|
<filename>dist/common/intent.js
import { palette } from '@dfds-shared/colors';
// Intent enum (compiled TypeScript output): numeric members with a reverse
// string mapping, e.g. Intent.Primary === 0 and Intent[0] === "Primary".
export var Intent;
(function (Intent) {
    Intent[Intent["Primary"] = 0] = "Primary";
    Intent[Intent["Secondary"] = 1] = "Secondary";
    Intent[Intent["Success"] = 2] = "Success";
    Intent[Intent["Warning"] = 3] = "Warning";
    Intent[Intent["Danger"] = 4] = "Danger";
})(Intent || (Intent = {}));
// Map an Intent to its palette color. Defaults to Primary when called with
// no argument (or an explicit undefined), matching the compiled-TS default.
export function getIntentColor() {
    var intent = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : Intent.Primary;
    if (intent === Intent.Secondary) {
        return palette.actionBlue;
    }
    if (intent === Intent.Success) {
        return palette.yesGreen;
    }
    // Warning shares the Danger color (as in the original switch, where
    // Warning fell through to the Danger case).
    if (intent === Intent.Warning || intent === Intent.Danger) {
        return palette.noRed;
    }
    // Primary and any unrecognized value use the brand color.
    return palette.primaryOrange;
}
|
# Create a layer-2 TAP device named tap100.
ip tuntap add tap100 mode tap
# Assign the host-side address 10.0.0.1/24 to it.
ip addr add 10.0.0.1/24 dev tap100
# Bring the interface up.
ip link set dev tap100 up
|
#!/bin/bash
# Hard-reset the current branch to its state on origin, discarding all
# local commits and working-tree changes.
clear
# Name of the currently checked-out branch; the remote branch of the same
# name is the reset target.
BRANCH_TAG=$(git rev-parse --abbrev-ref HEAD)
echo "----------------------------------- RESET $BRANCH_TAG"
# Fetch first so origin/<branch> is current, then reset. The ref is quoted
# so branch names with unusual characters don't word-split.
git fetch origin && \
git reset --hard "origin/$BRANCH_TAG"
echo "----------------------------------- RESET CONCLUIDO"
echo
|
#!/bin/bash
# Run a single dieharder test with a fixed seed so the run is reproducible:
#   -d 204          test selected by number
#   -g 44           generator selected by number
#   -S 2080882235   explicit RNG seed
# NOTE(review): the meaning of test/generator numbers depends on the
# installed dieharder version — confirm with `dieharder -l`.
dieharder -d 204 -g 44 -S 2080882235
|
<filename>core/plugin/activity/activity.rb
# -*- coding: utf-8 -*-
# Notification-management plugin.
miquire :mui, 'tree_view_pretty_scroll'
require "set"
# Display order of activity kinds in the settings pane.
UserConfig[:activity_kind_order] ||= ["retweet", "favorite", "follow", "list_member_added", "list_member_removed", "dm", "system", "error"]
# Maximum number of notifications kept in the activity tab.
UserConfig[:activity_max] ||= 1000
# Activity plugin: collects events (retweets, favourites, follows, list
# changes, DMs, system messages) into a dedicated "Activity" tab with
# per-kind visibility settings.
Plugin.create(:activity) do
  # Tree view listing one row per notification.
  class ActivityView < ::Gtk::CRUD
    include ::Gtk::TreeViewPrettyScroll

    # Column indices of the underlying model.
    ICON = 0
    KIND = 1
    TITLE = 2
    DATE = 3
    PLUGIN = 4
    ID = 5
    SERVICE = 6
    EVENT = 7

    def initialize(plugin)
      type_strict plugin => Plugin
      @plugin = plugin
      super()
      # Rows are only inserted by the plugin; the view offers no editing.
      @creatable = @updatable = @deletable = false
    end

    def column_schemer
      [{:kind => :pixbuf, :type => Gdk::Pixbuf, :label => 'icon'}, # ICON
       {:kind => :text, :type => String, :label => _('種類')},     # KIND
       {:kind => :text, :type => String, :label => _('説明')},     # TITLE
       {:kind => :text, :type => String, :label => _('時刻')},     # DATE
       {:type => Plugin},                                          # PLUGIN
       {:type => Integer},                                         # ID
       {:type => Service},                                         # SERVICE
       {:type => Hash} ].freeze                                    # EVENT
    end

    # Delegate everything else to the owning plugin (e.g. _ for gettext).
    def method_missing(*args, &block)
      @plugin.__send__(*args, &block)
    end
  end

  # Plugin start time; DMs older than this are not re-announced.
  BOOT_TIME = Time.new.freeze

  # Return whether the event should be muted (true means: do not display).
  def mute?(params)
    mute_kind = UserConfig[:activity_mute_kind]
    if mute_kind.is_a? Array
      return true if mute_kind.include? params[:kind].to_s end
    mute_kind_related = UserConfig[:activity_mute_kind_related]
    if mute_kind_related
      return true if mute_kind_related.include?(params[:kind].to_s) and !params[:related] end
    false end

  # True if this combination of IDs has not appeared before.
  # ==== Args
  # [event] event name
  # [ids] IDs
  # ==== Return
  # true if this key is being shown for the first time
  def show_once(event, *ids)
    @show_once ||= Hash.new{ |h, k| h[k] = [] }
    result = []
    ids.each_with_index{ |id, index|
      storage = @show_once[event][index] ||= Set.new
      if storage.include? id
        result << true
      else
        storage << id
        result << false end }
    not result.all? end

  # Periodically trim old notifications down to UserConfig[:activity_max];
  # re-schedules itself every 60 seconds until the model is destroyed.
  def reset_activity(model)
    Reserver.new(60) {
      Delayer.new {
        if not model.destroyed?
          iters = model.to_enum(:each).to_a
          remove_count = iters.size - UserConfig[:activity_max]
          if remove_count > 0
            iters[-remove_count, remove_count].each{ |mpi|
              model.remove(mpi[2]) } end
          reset_activity(model) end } }
  end

  # Settings listener for an opt-in list: checked means the kind IS in the
  # UserConfig array.
  def gen_listener_for_visible_check(uc, kind)
    UserConfig[uc] ||= []
    Plugin::Settings::Listener.new \
      get: ->(){ UserConfig[uc].include?(kind) rescue false },
      set: ->(value) do
        if value
          UserConfig[uc] += [kind]
        else
          UserConfig[uc] -= [kind] end end end

  # Settings listener for an opt-out list: checked means the kind is NOT in
  # the UserConfig array (used for the mute lists).
  def gen_listener_for_invisible_check(uc, kind)
    UserConfig[uc] ||= []
    Plugin::Settings::Listener.new \
      get: ->(){ (not UserConfig[uc].include?(kind)) rescue true },
      set: ->(value) do
        unless value
          UserConfig[uc] += [kind]
        else
          UserConfig[uc] -= [kind] end end end

  # Post an activity update.
  # ==== Args
  # [kind] Symbol event kind
  # [title] title text
  # [args] other options, mainly:
  #   icon :: String|Gdk::Pixbuf icon
  #   date :: Time when the event occurred
  #   service :: Service the related Service object
  #   related :: whether the event concerns the current user
  defdsl :activity do |kind, title, args = {}|
    Plugin.call(:modify_activity,
                { plugin: self,
                  kind: kind,
                  title: title,
                  date: Time.new,
                  description: title }.merge(args)) end

  # Define a new activity kind so it appears in the settings pane.
  # ==== Args
  # [kind] kind key
  # [name] display name
  defdsl :defactivity do |kind, name|
    filter_activity_kind do |data|
      data[kind] = name
      [data] end end

  # --- Build the Activity tab UI: list on top, detail pane below. ---
  activity_view = ActivityView.new(self)
  activity_vscrollbar = ::Gtk::VScrollbar.new(activity_view.vadjustment)
  activity_hscrollbar = ::Gtk::HScrollbar.new(activity_view.hadjustment)
  activity_shell = ::Gtk::Table.new(2, 2)
  activity_description = ::Gtk::IntelligentTextview.new
  activity_status = ::Gtk::Label.new
  activity_container = ::Gtk::VPaned.new
  activity_detail_view = Gtk::ScrolledWindow.new
  reset_activity(activity_view.model)
  activity_detail_view.
    set_policy(Gtk::POLICY_NEVER, Gtk::POLICY_AUTOMATIC).
    set_height_request(88)
  activity_container.
    pack1(activity_shell.
          attach(activity_view, 0, 1, 0, 1, ::Gtk::FILL|::Gtk::SHRINK|::Gtk::EXPAND, ::Gtk::FILL|::Gtk::SHRINK|::Gtk::EXPAND).
          attach(activity_vscrollbar, 1, 2, 0, 1, ::Gtk::FILL, ::Gtk::SHRINK|::Gtk::FILL).
          attach(activity_hscrollbar, 0, 1, 1, 2, ::Gtk::SHRINK|::Gtk::FILL, ::Gtk::FILL),
          true, true).
    pack2(activity_detail_view.add_with_viewport(::Gtk::VBox.new.
                                                 closeup(activity_description).
                                                 closeup(activity_status.right)), true, false)
  tab(:activity, _("アクティビティ")) do
    set_icon Skin.get("activity.png")
    nativewidget ::Gtk::EventBox.new.add(activity_container)
  end
  # Show the selected row's description and timestamp in the detail pane.
  activity_view.ssc("cursor-changed") { |this|
    iter = this.selection.selected
    if iter
      activity_description.rewind(iter[ActivityView::EVENT][:description])
      activity_status.set_text(iter[ActivityView::DATE])
    end
    false
  }

  # Handler for incoming activity updates.
  # params keys: plugin, kind, title, icon, date, service, ...
  on_modify_activity do |params|
    if not mute?(params)
      # Keep the view pinned to the newest row only when it is already at
      # the top.
      activity_view.scroll_to_zero_lator! if activity_view.realized? and activity_view.vadjustment.value == 0.0
      iter = activity_view.model.prepend
      if params[:icon].is_a? String
        # Icon given as a URL: show a placeholder, swap in the real pixbuf
        # once it has loaded.
        iter[ActivityView::ICON] = Gdk::WebImageLoader.pixbuf(params[:icon], 24, 24){ |loaded_icon|
          iter[ActivityView::ICON] = loaded_icon }
      else
        iter[ActivityView::ICON] = params[:icon] end
      iter[ActivityView::KIND] = params[:kind].to_s
      iter[ActivityView::TITLE] = params[:title].tr("\n", "")
      iter[ActivityView::DATE] = params[:date].strftime('%Y/%m/%d %H:%M:%S')
      iter[ActivityView::PLUGIN] = params[:plugin]
      iter[ActivityView::ID] = 0
      iter[ActivityView::SERVICE] = params[:service]
      iter[ActivityView::EVENT] = params
      # Optionally mirror the notification into the timeline ...
      if (UserConfig[:activity_show_timeline] || []).include?(params[:kind].to_s)
        Plugin.call(:update, nil, [Message.new(message: params[:description], system: true, source: params[:plugin].to_s, created: params[:date])])
      end
      # ... and/or into the window status bar for 10 seconds.
      if (UserConfig[:activity_show_statusbar] || []).include?(params[:kind].to_s)
        Plugin.call(:gui_window_rewindstatus, Plugin::GUI::Window.instance(:default), "#{params[:kind]}: #{params[:title]}", 10)
      end
    end
  end

  # NOTE(review): `parma_link` below looks like a typo for `perma_link`,
  # but it is a method on the message object defined elsewhere — verify
  # against the Message class before renaming.
  on_favorite do |service, user, message|
    activity(:favorite, "#{message.user[:idname]}: #{message.to_s}",
             description:(_("@%{user} がふぁぼふぁぼしました") % {user: user[:idname]} + "\n" +
                          "@#{message.user[:idname]}: #{message.to_s}\n"+
                          message.parma_link),
             icon: user[:profile_image_url],
             related: message.user.me? || user.me?,
             service: service)
  end

  on_unfavorite do |service, user, message|
    activity(:unfavorite, "#{message.user[:idname]}: #{message.to_s}",
             description:(_("@%{user} があんふぁぼしました") % {user: user[:idname]} + "\n" +
                          "@#{message.user[:idname]}: #{message.to_s}\n"+
                          message.parma_link),
             icon: user[:profile_image_url],
             related: message.user.me? || user.me?,
             service: service)
  end

  on_retweet do |retweets|
    retweets.each { |retweet|
      retweet.retweet_source_d.next{ |source|
        activity(:retweet, retweet.to_s,
                 description:(_("@%{user} がリツイートしました") % {user: retweet.user[:idname]} + "\n" +
                              "@#{source.user[:idname]}: #{source.to_s}\n"+
                              source.parma_link),
                 icon: retweet.user[:profile_image_url],
                 date: retweet[:created],
                 related: (retweet.user.me? || source && source.user.me?),
                 service: Service.primary) }.terminate(_ 'リツイートソースが取得できませんでした') }
  end

  on_list_member_added do |service, user, list, source_user|
    # De-duplicate per (user, list) pair.
    if show_once(:list_member_added, user[:id], list[:id])
      title = _("@%{user}が%{list}に追加されました") % {
        user: user[:idname],
        list: list[:full_name] }
      desc_by_user = {
        description: list[:description],
        user: list.user[:idname] }
      activity(:list_member_added, title,
               description:("#{title}\n" +
                            _("%{description} (by @%{user})") % desc_by_user + "\n" +
                            "https://twitter.com/#{list.user[:idname]}/#{list[:slug]}"),
               icon: user[:profile_image_url],
               related: user.me? || source_user.me?,
               service: service) end
  end

  on_list_member_removed do |service, user, list, source_user|
    # De-duplicate per (user, list) pair.
    if show_once(:list_member_removed, user[:id], list[:id])
      title = _("@%{user}が%{list}から削除されました") % {
        user: user[:idname],
        list: list[:full_name] }
      desc_by_user = {
        description: list[:description],
        user: list.user[:idname] }
      activity(:list_member_removed, title,
               description:("#{title}\n"+
                            _("%{description} (by @%{user})") % desc_by_user + "\n" +
                            "https://twitter.com/#{list.user[:idname]}/#{list[:slug]}"),
               icon: user[:profile_image_url],
               related: user.me? || source_user.me?,
               service: service) end
  end

  # NOTE(review): the translated string contains "フョロー", which looks
  # like a typo for "フォロー" — it is a runtime/gettext string, so it is
  # left untouched here.
  on_follow do |by, to|
    if show_once(:follow, by[:id], to[:id])
      by_user_to_user = {
        followee: by[:idname],
        follower: to[:idname] }
      activity(:follow, _("@%{followee} が @%{follower} をフョローしました") % by_user_to_user,
               related: by.me? || to.me?,
               icon: (to.me? ? by : to)[:profile_image_url]) end
  end

  on_direct_messages do |service, dms|
    dms.each{ |dm|
      date = Time.parse(dm[:created_at])
      # Only announce DMs that arrived after this session started.
      if date > BOOT_TIME
        first_line = dm[:sender].me? ? _("ダイレクトメッセージを送信しました") : _("ダイレクトメッセージを受信しました")
        title = _("D %{recipient} %{text}") % {
          recipient: dm[:recipient][:idname],
          text: dm[:text] }
        activity(:dm, title,
                 description: ("#{first_line}\n" +
                               "@#{dm[:sender][:idname]}: #{title}"),
                 icon: dm[:sender][:profile_image_url],
                 service: service,
                 date: date) end }
  end

  onunload do
    Addon.remove_tab _('アクティビティ')
  end

  # Settings pane: one section per activity kind, ordered according to
  # UserConfig[:activity_kind_order].
  settings _("アクティビティ") do
    activity_kind = Plugin.filtering(:activity_kind, {})
    activity_kind_order = TypedArray(String).new
    if activity_kind
      activity_kind = activity_kind.last
      activity_kind.keys.each{ |kind|
        kind = kind.to_s
        i = where_should_insert_it(kind, activity_kind_order, UserConfig[:activity_kind_order])
        activity_kind_order.insert(i, kind) }
    else
      activity_kind_order = []
      activity_kind = {} end
    activity_kind_order.each do |kind|
      name = activity_kind[kind]
      ml_param = {name: name}
      settings name do
        boolean(_('%{name}を表示する') % ml_param, gen_listener_for_invisible_check(:activity_mute_kind, kind)).tooltip(_('%{name}を、アクティビティタイムラインに表示します。チェックを外すと、%{name}の他の設定は無効になります。') % ml_param)
        boolean(_('自分に関係ない%{name}も表示する') % ml_param, gen_listener_for_invisible_check(:activity_mute_kind_related, kind)).tooltip(_('自分に関係ない%{name}もアクティビティタイムラインに表示されるようになります。チェックを外すと、自分に関係ない%{name}は表示されません。') % ml_param)
        boolean(_('タイムラインに表示'), gen_listener_for_visible_check(:activity_show_timeline, kind)).tooltip(_('%{name}が通知された時に、システムメッセージで%{name}を通知します') % ml_param)
        boolean(_('ステータスバーに表示'), gen_listener_for_visible_check(:activity_show_statusbar, kind)).tooltip(_('%{name}が通知された時に、ステータスバーにしばらく表示します') % ml_param)
      end
    end
  end

  # Built-in activity kinds.
  defactivity "retweet", _("リツイート")
  defactivity "favorite", _("ふぁぼ")
  defactivity "follow", _("フォロー")
  defactivity "list_member_added", _("リストに追加")
  defactivity "list_member_removed", _("リストから削除")
  defactivity "dm", _("ダイレクトメッセージ")
  defactivity "system", _("システムメッセージ")
  defactivity "error", _("エラー")
end
|
// Doxygen-generated search index fragment: maps the lowercase search term
// 'comments' to the documented json11 enum-value anchor. Regenerated by
// Doxygen — do not edit by hand.
var searchData=
[
  ['comments',['COMMENTS',['../namespacejson11.html#a8ed5b6b8d6e11e5cdf2e213e192a817baab5f715cadd88a391f6d749c5f5910b0',1,'json11']]]
];
|
// class-dump results processed by bin/class-dump/dump.rb
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Jul 30 2018 09:07:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import <Foundation/Foundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <XCTest/XCUIElementTypes.h>
#import "CDStructures.h"
@protocol OS_dispatch_queue;
@protocol OS_xpc_object;
// Declarations recovered by class-dump; implementations are not visible
// here. Convenience constructors for XCUITest-domain NSError objects and
// a code-membership check (exact semantics unverifiable from this header).
@interface NSError (XCUITestingErrors)
+ (id)_xcui_error:(NSInteger)arg1 description:(id)arg2;
+ (id)_xcui_error:(NSInteger)arg1 userInfo:(id)arg2 description:(id)arg3;
+ (id)_xcui_errorWithDomain:(id)arg1 code:(NSInteger)arg2 description:(id)arg3;
+ (id)_xcui_errorWithDomain:(id)arg1 code:(NSInteger)arg2 userInfo:(id)arg3 description:(id)arg4 arguments:(char *)arg5;
- (BOOL)xcui_isUITestingError:(NSInteger)arg1;
@end
|
#!/bin/bash
# Parent directory of the git work tree. `dirname` replaces the original
# `sed 's|/[^/]*$||'` pipe — both strip the last path component, and
# `git rev-parse --show-toplevel` never emits a trailing slash.
top_dir=$(dirname "$(git rev-parse --show-toplevel)")
# run ansible: restart contiv networking using the binaries built in this tree
ansible-playbook -i .contiv_k8s_inventory ./contrib/ansible/cluster.yml --tags "contiv_restart" -e "networking=contiv contiv_fabric_mode=default contiv_bin_path=$top_dir/netplugin/bin"
|
package com.sa.web;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Application entry point for the sentiment-analysis web service.
 * {@code @SpringBootApplication} enables component scanning and
 * auto-configuration for this package.
 */
@SpringBootApplication
public class SentimentAnalysisWebApp {

    /**
     * Boots the embedded Spring context with this class as the primary
     * configuration source, forwarding all command-line arguments.
     */
    public static void main(String[] args) {
        new SpringApplication(SentimentAnalysisWebApp.class).run(args);
    }
}
|
import type { LayoutRectangle } from 'react-native';
/**
 * Reducer tracking a view's measured layout.
 * An `update` action merges the action's layout fields into the current
 * state; any other action type leaves the state unchanged.
 */
export function layoutReducer(
  state: LayoutReducerState,
  action: LayoutReducerAction
): LayoutReducerState {
  const { type, ...layoutFields } = action;
  if (type === LayoutReducerActionType.update) {
    return { ...state, ...layoutFields };
  }
  return state;
}

/** Partial layout rectangle — all measurement fields optional. */
export interface LayoutReducerState extends Partial<LayoutRectangle> {}

/** Layout fields to merge, tagged with the action type. */
export interface LayoutReducerAction extends Partial<LayoutReducerState> {
  type: LayoutReducerActionType;
}

export enum LayoutReducerActionType {
  update,
}
|
# Bootstrap the stack: create a local compose file from the sample,
# build the images, and start all services detached.
cp docker-compose.yml.sample docker-compose.yml
docker-compose build
docker-compose up -d
|
<gh_stars>0
import pulsar as psr
def load_ref_system():
    """ Returns d-arginine as found in the IQMol fragment library.
        All credit to https://github.com/nutjunkie/IQmol
    """
    # XYZ-style geometry: element symbol followed by x/y/z coordinates.
    geometry = """
N 2.1694 0.0784 -2.2820
C 1.5771 -0.9760 -1.4246
C 2.6968 -1.9224 -1.0071
O 2.4991 -3.2178 -1.3400
O 3.7267 -1.6735 -0.4078
C 0.8700 -0.4625 -0.1605
C -0.4699 0.1822 -0.4803
C -1.1452 0.6626 0.8009
N -2.4305 1.3405 0.4754
C -3.1434 1.8747 1.6238
N -2.4938 2.9235 2.3614
N -4.2980 1.3549 1.9103
H 2.8448 0.6053 -1.7684
H 1.4447 0.6871 -2.6001
H 0.8393 -1.5246 -2.0650
H 1.5210 0.2526 0.3844
H 0.7119 -1.3089 0.5385
H -1.1185 -0.5442 -1.0129
H -0.3347 1.0306 -1.1819
H -0.4888 1.3845 1.3284
H -1.2902 -0.1904 1.4987
H -3.0140 0.7284 -0.0582
H -3.1495 3.5736 2.7372
H -1.7906 3.3914 1.8321
H -4.8244 1.7731 2.6362
H 3.2412 -3.7454 -1.0612
"""
    return psr.make_system(geometry)
|
<reponame>cjeanGitHub/microservice-spring-cloud-159
package com.study.vcloud.vclouduser.user.service.impl;
import com.baomidou.mybatisplus.service.impl.ServiceImpl;
import com.study.vcloud.vclouduser.user.entity.SysRole;
import com.study.vcloud.vclouduser.user.mapper.SysRoleMapper;
import com.study.vcloud.vclouduser.user.service.SysRoleService;
import org.springframework.stereotype.Service;
/**
 * <p>
 * Service implementation class for {@code SysRole}.
 * </p>
 *
 * @author pwl
 * @since 2019-09-03
 */
@Service
public class SysRoleServiceImpl extends ServiceImpl<SysRoleMapper, SysRole> implements SysRoleService {
    // Intentionally empty: all CRUD behaviour is inherited from the
    // MyBatis-Plus ServiceImpl<SysRoleMapper, SysRole> base class.
}
|
#!/bin/bash
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
# Change directories to the parent directory of the one in which this
# script is located.
cd "$(dirname "${BASH_SOURCE[0]}")/.."
# Ensure the temp out file is removed when this program exits.
out="$(mktemp)"
on_exit() {
  [ -z "${out}" ] || [ ! -e "${out}" ] || rm -f "${out}"
}
trap on_exit EXIT
# Run goformat on all the sources.
# On Prow CI (PROW_JOB_ID set) prepend -d so diffs are printed instead of
# files being rewritten in place.
flags="-e -s -w"
[ -z "${PROW_JOB_ID-}" ] || flags="-d ${flags}"
eval "go run ./vendor/github.com/mbenkmann/goformat/goformat ${flags} ./cmd/ ./pkg/" | tee "${out}"
# Check to see if there any suggestions (any output at all means a file
# needed formatting).
goformat_exit_code=0; test -z "$(head -n 1 "${out}")" || goformat_exit_code=1
# Truncate the out file.
rm -f "${out}" && touch "${out}"
# Run goimports on all the sources.
flags="-e -w"
[ -z "${PROW_JOB_ID-}" ] || flags="-d ${flags}"
eval "go run ./vendor/golang.org/x/tools/cmd/goimports ${flags} ./cmd/ ./pkg/" | tee "${out}"
# Check to see if there any suggestions.
goimports_exit_code=0; test -z "$(head -n 1 "${out}")" || goimports_exit_code=1
# If running on Prow, exit with a non-zero code if either of the tests failed.
if [ -n "${PROW_JOB_ID-}" ]; then
  [ "${goformat_exit_code}" -eq "0" ] || exit "${goformat_exit_code}"
  [ "${goimports_exit_code}" -eq "0" ] || exit "${goimports_exit_code}"
fi
|
<filename>src/keypadConfig.h
#ifndef keyPadConfig_h
#define keyPadConfig_h
#include <Arduino.h>
/* Arduino pin assignments for a 4x4 matrix keypad. */
#define C1 4 // Col 1
#define C2 5 // Col 2
#define C3 6 // Col 3
#define C4 7 // Col 4
#define R1 8 // Row 1
#define R2 9 // Row 2
#define R3 10 // Row 3
#define R4 11 // Row 4
const byte ROWS = 4; //four rows
const byte COLS = 4; //four columns
/* Key legend laid out exactly as on the physical keypad. */
char keys[ROWS][COLS] = {
  {'1','2','3','A'},
  {'4','5','6','B'},
  {'7','8','9','C'},
  {'*','0','#','D'}
};
byte rowPins[4] = {R1, R2, R3, R4}; //connect to the row pinouts of the keypad
byte colPins[4] = {C1, C2, C3, C4}; //connect to the column pinouts of the keypad
#endif
|
# Forward all CLI arguments to the installer. "$@" (quoted) preserves
# arguments containing whitespace; the original bare $@ re-split them.
python3 install.py "$@"
|
import React from "react";
/**
 * Props accepted by the SectionBlock component (the component itself is
 * defined elsewhere).
 */
export interface SectionBlockProps {
  // Optional extra CSS class(es) — presumably applied to the section's
  // root element; confirm against the component implementation.
  className?: string;
  // NOTE(review): React.ReactNodeArray is deprecated in modern React
  // typings; React.ReactNode alone already covers arrays.
  children?: React.ReactNodeArray | React.ReactNode;
  isExperimental?: boolean;
  title: string;
}
|
import fetch, { Headers, Request, Response } from 'node-fetch'
import AbortController from 'abort-controller'
import { FormData, Blob } from 'formdata-node'
// Install WHATWG fetch primitives (and AbortController/FormData/Blob)
// on globalThis so browser-style code runs under Node. Each
// @ts-expect-error suppresses the type mismatch between the node-fetch /
// formdata-node implementations and the built-in lib.dom declarations;
// the directives must stay immediately above their assignment.
globalThis.AbortController = AbortController
// @ts-expect-error
globalThis.fetch = fetch
globalThis.Headers = Headers
// @ts-expect-error
globalThis.Request = Request
// @ts-expect-error
globalThis.Response = Response
// @ts-expect-error
globalThis.FormData = FormData
// @ts-expect-error
globalThis.Blob = Blob
|
/**
* Copyright 2018 hubohua
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.demoncat.dcapp;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.demoncat.dcapp.widget.FrameAnimation;
import com.demoncat.dcapp.widget.GlideRoundTransform;
/**
* @Class: FrameAnimActivity
* @Description: Frame animation activity
* @Author: hubohua
* @CreateDate: 2018/5/6
*/
public class FrameAnimActivity extends Activity {

    /**
     * Resolves drawables "p1".."p52" by name, plays them as a frame
     * animation on the vehicle ImageView, logs display metrics, and
     * wires the title view to launch {@link LeakMemActivity}.
     */
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_frame_anim);
        // Build the frame list by resolving drawable resources p1..p52 by name.
        int[] resIds = new int[52];
        for (int i = 0 ; i < resIds.length; i ++) {
            int id = getResources().
                    getIdentifier("p" + (i + 1), "drawable", getPackageName());
            resIds[i] = id;
        }
        // Third argument is 30 — assumed to be the frame interval/rate;
        // confirm against FrameAnimation's constructor contract.
        FrameAnimation frameAnimation =
                new FrameAnimation((ImageView) findViewById(R.id.img_vehicle), resIds, 30);
        frameAnimation.setAnimationListener(new FrameAnimation.OnAnimationListener() {
            @Override
            public void onAnimationStart() {
            }
            @Override
            public void onAnimationFinish() {
                // Shows "animation finished" toast (user-facing string kept as-is).
                Toast.makeText(getApplicationContext(), "动画完成", Toast.LENGTH_SHORT).show();
            }
        });
        frameAnimation.start();
        // Debug logging of screen metrics.
        Configuration configuration = getResources().getConfiguration();
        DisplayMetrics metrics = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(metrics);
        Log.d("test", "height: " + metrics.heightPixels + ", width: " + metrics.widthPixels);
        Log.d("test", "configuration.smallestScreenWidthDp: " + configuration.smallestScreenWidthDp);
        final ImageView imageView = findViewById(R.id.image_view);
        Log.d("test", "imageView: " + imageView);
        // GlideRoundTransform transformation =
        //         new GlideRoundTransform(getApplicationContext(),
        //                 dip2px(getApplicationContext(), 10));
        // transformation.setExceptCorner(false, false, false, false);
        // Glide.with(this).load("https://172.16.31.10:18088/cherym31t/m31t/download/?id=MS5qcGc=").
        //         asBitmap().
        //         skipMemoryCache(true).
        //         placeholder(R.drawable.banner_default).
        //         error(R.drawable.banner_default).
        //         transform(transformation).into(imageView);
        findViewById(R.id.title).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startActivity(new Intent(FrameAnimActivity.this, LeakMemActivity.class));
            }
        });
    }

    /**
     * Converts a dip value to integer pixels using the device density
     * (rounded to nearest via the +0.5f).
     */
    public static int dip2px(Context context, float dpValue) {
        final float scale = context.getResources().getDisplayMetrics().density;
        return (int) (dpValue * scale + 0.5f);
    }
}
|
# -*- coding: utf-8 -*-
"""Kuihao_FL_Sequential_Multi-Clients_Simulation
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/gist/kuihao/2b8b376fd307d83661fcd65679cc99ec/kuihao_fl_sequential_multi-clients_simulation.ipynb
#Prepare Dataset
"""
# Path to the pre-partitioned non-IID CIFAR-100 dataset (tfds format).
dataset_path = 'dataset/cifar100_noniid/content/zip/cifar100_noniid'
#dataset_path = r'C:\Users\kuiha\OneDrive - 國立成功大學 National Cheng Kung University\NCKU研究所\FL論文andCode\FlowerFL_code\實驗資料集\content\zip\cifar100_noniid'
"""#IMPORT PKG"""
import os
from tensorflow.python.saved_model.loader_impl import parse_saved_model
# Make TensorFlow logs less verbose
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
import sys
# add path to use my package
#sys.path.append('/Implement_FedAdativate')#/home/sheng/document/Kuihao
from datetime import datetime
import numpy as np
from mypkg import (
    ServerArg,
    ModelNameGenerator,
    secure_mkdir,
    mylog,
    Result_Avg_Last_N_round,
    Simulation_DynamicClientSample,
    Weighted_Aggregate,
    FedAdagrad_Aggregate,
    FedAdam_Aggregate,
    FedYogi_Aggregate,
)
#import tensorflow as tf
'''
from mypkg.TF import (
CNN_Model,
myResNet,
GoogleAdaptive_tfds_preprocessor,
simple_cifar100_preprocessor,
myLoadDS
)
'''
"""# Desktop Setting"""
# --------
# [Welcome prompt] Make model name
# --------
args = ServerArg()
model_name = ModelNameGenerator(args.name)
print(f"*** This model name: {model_name} ***\n")
# --------
# [Hardware setting] CPU only or limit the GPU usage
# --------
# CUDA_VISIBLE_DEVICES must be set before TensorFlow is imported below.
if args.cpu:
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ["CUDA_VISIBLE_DEVICES"] = ""
else:
    #from mypkg.TF import setGPU
    #setGPU(mode=1)
    if args.gpu is not None:
        os.environ["CUDA_VISIBLE_DEVICES"]= str(args.gpu)
        from mypkg.TF import setGPU
        setGPU(mode=3, device_num=args.gpu)
    else:
        from mypkg.TF import setGPU
        setGPU(mode=1)  # dataset size affects the GPU memory requirement
# --------
# [Model Recovery Setting]
# --------
# mode 1: resume from an interrupted run's npz snapshot (loaded later);
# mode 2: warm-start from another finished model's checkpoint directory.
checkpoint_load = None
prior_model = None
if args.mode == 1:
    '''The prior temporary checkpoint was interrupt accidentally.'''
    prior_model = args.prior_model_name
elif args.mode == 2:
    '''Load the other model train finished.'''
    checkpoint_load = args.checkpoint_path
import tensorflow as tf
import tensorflow_addons as tfa
from mypkg.TF import (
    CNN_Model,
    myResNet,
    GoogleAdaptive_tfds_preprocessor,
    simple_cifar100_preprocessor,
    myLoadDS
)
"""#[Hyperparemeter]"""
#model_name = 'FL_Simulattion'
SEED = 2021
'''fix random seed'''
np.random.seed(SEED)
tf.random.set_seed(SEED)
model_input_shape = (24,24,3)
model_class_number = 100 # This is LABEL
SAVE = True
'''(bool) save log or not'''
HyperSet_Aggregation, Aggregation_name = '', '' #Weighted_Aggregate
HyperSet_round = 600 # 4000*10 / 500 = 80
HyperSet_Train_all_connect_client_number = 500
HypHyperSet_Train_EveryRound_client_number = 500
HyperSet_Test_all_connect_client_number = 100
HypHyperSet_Test_EveryRound_client_number = 100
HyperSet_Server_eta = pow(10,(0)) #1e-3
HyperSet_Server_tau = None #pow(10,(-1)) #1e-2
HyperSet_Server_beta1 = None #0.9
HyperSet_Server_beta2 = None #0.99
HyperSet_Local_eta = None #pow(10,(-1/2)) #1e-1
'''Don't use this'''
HyperSet_Local_momentum = 0. #0.9
HyperSet_Local_batch_size = 20
HyperSet_Local_epoch = None
HyperSet_optimizer = tf.keras.optimizers.SGD(learning_rate=HyperSet_Server_eta, momentum=HyperSet_Local_momentum)
'''
optimizer = tfa.optimizers.Yogi(learning_rate=HyperSet_Server_eta,
epsilon=HyperSet_Server_tau,
beta1=HyperSet_Server_beta1,
beta2=HyperSet_Server_beta2,
)'''
#optimizer = tf.keras.optimizers.SGD(learning_rate=HyperSet_Local_eta, momentum=HyperSet_Local_momentum)
#optimizer = tf.keras.optimizers.Adam() #learning_rate=1e-5
# ---
# [Build-Model]
# ---
tf.keras.backend.clear_session()
model = myResNet().ResNet18(model_input_shape,model_class_number)
optimizer = HyperSet_optimizer
model.compile( optimizer,
tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
metrics=["accuracy", 'sparse_top_k_categorical_accuracy']) # sparse_top_k_categorical_accuracy, top_k_categorical_accuracy
# ---
# [Preprocessing Setting]
# ---
# Random number generator
rng = tf.random.Generator.from_seed(110, alg='philox')
preprocessor = GoogleAdaptive_tfds_preprocessor(
global_seed=SEED,
crop_size=24,
batch_zize=HyperSet_Local_batch_size,
shuffle_buffer=100,
prefetch_buffer=20,
)
# --------
# [Saving Setting]
# --------
# Aggregated client-side training metrics, filled from history after fit().
Training_result_distributed = {'loss':[],'accuracy':[],'sparse_top_k_categorical_accuracy':[]}
'''Clients Training 的聚合結果'''
# Server-side evaluation metrics (validation results).
Testing_result_centralized = {'loss':[],'accuracy':[],'sparse_top_k_categorical_accuracy':[]}
'''Server Testing 的結果'''
checkpoint_folder = secure_mkdir("ckpoint"+"/"+model_name)
'''保存weight的資料夾'''
checkpoint_path = checkpoint_folder+"/cp-{epoch:04d}.ckpt"
'''保存weight的儲存路徑'''
dataset_size = 50000
batch_counts_per_epoch = int(dataset_size/HyperSet_Local_batch_size)
'''steps_per_execution'''
# Checkpoint every 100 epochs' worth of batches.
cp_saver = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_path,
    save_weights_only=True,
    verbose=2,
    save_freq=100*batch_counts_per_epoch)
'''儲存機制: 開啟新的Epoch時(訓練前)先儲存,因此cp-0001其實是 epoch == 0的weights
save_freq: (int) the callback saves the model at end of this many batches.'''
#if prior_model is not None:
# tmpbackup_folder = secure_mkdir("/tmp/backup"+"/"+prior_model)
# '''暫存cp的資料夾'''
# cp_recovery = tf.keras.callbacks.experimental.BackupAndRestore(backup_dir=tmpbackup_folder)
#else:
# tmpbackup_folder = secure_mkdir("/tmp/backup"+"/"+model_name)
# '''暫存cp的資料夾'''
# cp_recovery = tf.keras.callbacks.experimental.BackupAndRestore(backup_dir=tmpbackup_folder)
#'''Store last cp in the tmp forder'''
# Resume path: restore weights from the interrupted run's npz snapshot
# (written by the KeyboardInterrupt handler below).
if prior_model is not None:
    '''cpbackup暫時有bug,因此改用npz暫存檔代替其功能'''
    weight_npzfile = np.load(f'ckpoint/{prior_model}/interrupt-round-weights.npz')
    weight_np_unzip = [weight_npzfile[ArrayName] for ArrayName in weight_npzfile.files]
    '''model weights format is *List of NumPy arrays* '''
    model.set_weights(weight_np_unzip) # mot use model.load_weights() that's load from file.h5
# Module-level epoch tracker, updated by CustomCallback below.
TraceCurrentEpoch = 0
class CustomCallback(tf.keras.callbacks.Callback):
    '''Custom Keras callback that tracks the currently running epoch in the
    module-level TraceCurrentEpoch (used by the interrupt handler's log).
    Ref.: https://ithelp.ithome.com.tw/articles/10235293'''
    def __init__(self):
        self.task_type=''
        self.epoch=0
        self.batch=0
    def on_epoch_begin(self, epoch, logs=None):
        #print(f"{self.task_type}第 {epoch} 執行週期結束.")
        # Record the epoch so an interrupt can report where training stopped.
        global TraceCurrentEpoch
        TraceCurrentEpoch = epoch
        #print(TraceCurrentEpoch)
# ---
# [Load Data]
# ---
# Load server-side train/test datasets (tfds format on disk).
server_train_data = myLoadDS(dataset_path+'/server/train/global_train_all', 'tfds')
#server_train_data = myLoadDS(dataset_path+f'/client/train/client_{1}_train', 'tfds')
server_test_data = myLoadDS(dataset_path+'/server/test/global_test_all', 'tfds')
"""
#Centralized
"""
# Centralized training with emergency save on interrupt.
try:
    tf.keras.backend.clear_session() #clear keras tmp data
    if checkpoint_load is not None:
        '''load model weights'''
        latest = tf.train.latest_checkpoint(checkpoint_load)
        model.load_weights(latest)
        # If load weight.npz
        #weight_npzfile = np.load(weight.npz)
        #model.load_weights(weight_npzfile['arr_0'])
        print(f"Sucessfully load {checkpoint_load}")
    model.save_weights(checkpoint_path.format(epoch=1))
    print("** checkpoint 001 (init.) saved **")
    history = model.fit(
        preprocessor.preprocess(server_train_data, rng, train=True, BruteForce_kill_nan=True, add_minmax=False, normal_mode=False),
        #tfds_train.map(server_train_data, num_parallel_calls=tf.data.AUTOTUNE).batch(HyperSet_Local_batch_size).prefetch(20),
        epochs=HyperSet_round,
        verbose=2,
        validation_data=preprocessor.preprocess(server_test_data, rng, train=False, BruteForce_kill_nan=True, add_minmax=False, normal_mode=False),
        callbacks=[CustomCallback(), cp_saver], #cp_recovery,
    )
    model.save_weights(checkpoint_path.format(epoch=HyperSet_round))
    print("** checkpoint final saved **")
    # Cache training/validation metrics from the fit history.
    Training_result_distributed["loss"] = history.history["loss"]
    Training_result_distributed["accuracy"] = history.history["accuracy"]
    Training_result_distributed["sparse_top_k_categorical_accuracy"] = history.history["sparse_top_k_categorical_accuracy"]
    Testing_result_centralized["loss"] = history.history["val_loss"]
    Testing_result_centralized["accuracy"] = history.history["val_accuracy"]
    Testing_result_centralized["sparse_top_k_categorical_accuracy"] = history.history["val_sparse_top_k_categorical_accuracy"]
    # Persist results and final weights.
    if SAVE:
        FL_Results_folder = secure_mkdir("FL_Results"+"/"+model_name)
        if Training_result_distributed is not None:
            np.savez(f"{FL_Results_folder}/Training_result_distributed.npz", Training_result_distributed)
        if Testing_result_centralized is not None:
            np.savez(f"{FL_Results_folder}/Testing_result_centralized.npz", Testing_result_centralized)
        checkpoint_folder = secure_mkdir("ckpoint"+"/"+model_name)
        print(f"****Saving model weights...****")
        GlobalModel_NewestWeight = model.get_weights()
        np.savez(f"{checkpoint_folder}/final-round-weights.npz", *GlobalModel_NewestWeight)
        #model.save_weights(checkpoint_path.format(epoch=epochs))
        #model.save(model_path)
    # Remove the emergency temp backup (disabled, see cp_recovery above).
    #os.rmdir(tmpbackup_folder)
# Emergency save on interrupt.
# BUG FIX: the original `except KeyboardInterrupt or InterruptedError:`
# evaluates the *expression* `KeyboardInterrupt or InterruptedError`, which
# is just KeyboardInterrupt — InterruptedError was never caught. Multiple
# exception types must be given as a tuple.
except (KeyboardInterrupt, InterruptedError):
    print("KeyboardInterrupt or InterruptedError!!")
    print("Saving model...")
    GlobalModel_NewestWeight = model.get_weights()
    np.savez(f"{checkpoint_folder}/interrupt-round-weights.npz", *GlobalModel_NewestWeight)
    print("Model saved.")
    print("Saving result...")
    FL_Results_folder = secure_mkdir("FL_Results"+"/"+model_name)
    if Training_result_distributed is not None:
        np.savez(f"{FL_Results_folder}/Training_result_distributed.npz", Training_result_distributed)
    if Testing_result_centralized is not None:
        np.savez(f"{FL_Results_folder}/Testing_result_centralized.npz", Testing_result_centralized)
    print("Result saved.")
    print("Logging...")
    now_time = datetime.now()
    time_str = now_time.strftime("%m_%d_%Y__%H_%M_%S")
    log_folder = secure_mkdir("FL_log"+"/"+"InterruptSaved_"+model_name)
    log_text = f'*** Centralized Traing Record ***\n \
*[This training was unexpectly interrupted.]*\n \
*[Interrupt at epoch = {TraceCurrentEpoch}]*\n \
Model Name: {model_name}\n \
FL Finish Time: {time_str}\n \
\n--- FL setting ---\n \
Aggregation: {Aggregation_name}\n \
Rounds: {HyperSet_round}\n \
Traing population: {HyperSet_Train_all_connect_client_number}\n \
Testing population: {HyperSet_Test_all_connect_client_number}\n \
Number of client per round (training): {HypHyperSet_Train_EveryRound_client_number}\n \
Number of client per round (testing): {HypHyperSet_Test_EveryRound_client_number}\n \
\n--- Server-side hyperparemeter ---\n \
Learning-rate: {HyperSet_Server_eta}\n \
Tau: {HyperSet_Server_tau}\n \
Beta-1: {HyperSet_Server_beta1}\n \
Beta-2: {HyperSet_Server_beta2}\n \
\n--- Client-side hyperparemeter ---\n \
Learning-rate: {HyperSet_Local_eta}\n \
Momentum: {HyperSet_Local_momentum}\n \
Local epoch: {HyperSet_Local_epoch}\n \
Local batch size: {HyperSet_Local_batch_size}\n \
\n--- Other env. setting ---\n \
Random Seed: {SEED}\n \
\n--- Result ---\nCannot save in this mode.'
    mylog(log_text, log_folder+'/log')
    print("Log saved.")
    sys.exit()
# Write the final run log (setup + last/averaged metrics) after a
# successful (non-interrupted) training run.
if SAVE:
    now_time = datetime.now()
    time_str = now_time.strftime("%m_%d_%Y__%H_%M_%S")
    N = 100 # To calculate the avg N rounds result.
    Train_Loss_avgN, Train_Acc_avgN, Train_TopKAcc_avgN = Result_Avg_Last_N_round(Training_result_distributed,N)
    Test_Loss_avgN, Test_Acc_avgN, Test_TopKAcc_avgN = Result_Avg_Last_N_round(Testing_result_centralized,N)
    log_folder = secure_mkdir("FL_log"+"/"+model_name)
    log_text = f'*** Centralized Traing Record ***\n' \
        f'Model Name: {model_name}\n' \
        f'FL Finish Time: {time_str}\n' \
        f'\n--- FL setting ---\n' \
        f'Aggregation: {Aggregation_name}\n' \
        f'Rounds: {HyperSet_round}\n' \
        f'Traing population: {HyperSet_Train_all_connect_client_number}\n' \
        f'Testing population: {HyperSet_Test_all_connect_client_number}\n' \
        f'Number of client per round (training): {HypHyperSet_Train_EveryRound_client_number}\n' \
        f'Number of client per round (testing): {HypHyperSet_Test_EveryRound_client_number}\n' \
        f'\n--- Server-side hyperparemeter ---\n' \
        f'Learning-rate: {HyperSet_Server_eta}\n' \
        f'Tau: {HyperSet_Server_tau}\n' \
        f'Beta-1: {HyperSet_Server_beta1}\n' \
        f'Beta-2: {HyperSet_Server_beta2}\n' \
        f'\n--- Client-side hyperparemeter ---\n' \
        f'Learning-rate: {HyperSet_Local_eta}\n' \
        f'Momentum: {HyperSet_Local_momentum}\n' \
        f'Local epoch: {HyperSet_Local_epoch}\n' \
        f'Local batch size: {HyperSet_Local_batch_size}\n' \
        f'\n--- Other env. setting ---\n' \
        f'Random Seed: {SEED}\n' \
        f'\n--- Result ---\n' \
        f'--Last result--\n\
*Last Train Acc.: {Training_result_distributed["accuracy"][-1]}\n \
Last Train TopK-Acc.: {Training_result_distributed["sparse_top_k_categorical_accuracy"][-1]}\n \
Last Train Loss: {Training_result_distributed["loss"][-1]}\n \
*Last Test Acc.: {Testing_result_centralized["accuracy"][-1]}\n \
Last Test TopK-Acc.: {Testing_result_centralized["sparse_top_k_categorical_accuracy"][-1]}\n \
Last Test Loss: {Testing_result_centralized["loss"][-1]}\n' \
        f'--Avg last {N} rounds result--\n' \
        f'*Train Acc. (Avg last {N} rounds): {Train_Acc_avgN}\n' \
        f'Train TopK-Acc. (Avg last {N} rounds): {Train_TopKAcc_avgN}\n' \
        f'Train Loss (Avg last {N} rounds): {Train_Loss_avgN}\n' \
        f'*Test Acc. (Avg last {N} rounds): {Test_Acc_avgN}\n' \
        f'Test TopK-Acc. (Avg last {N} rounds): {Test_TopKAcc_avgN}\n' \
        f'Test Loss (Avg last {N} rounds): {Test_Loss_avgN}\n'
    mylog(log_text, log_folder+'/log')
    print("log saved.")
|
<reponame>rachelkwaynick/redux-toolkit
import React from 'react'
import Footer from './features/filters/Footer'
import AddTodo from './features/todos/AddTodo'
import VisibleTodoList from './features/todos/VisibleTodoList'
const App = () => (
<div>
<AddTodo />
<VisibleTodoList />
<Footer />
</div>
)
export default App;
|
#!bash
# shellcheck disable=SC2239
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# shellcheck source=scripts/tmux_cmd_path.sh
source "$CURRENT_DIR/tmux_cmd_path.sh"
yank_line="y"
yank_line_option="@yank_line"
yank_pane_pwd="Y"
yank_pane_pwd_option="@yank_pane_pwd"
yank_default="y"
yank_option="@copy_mode_yank"
put_default="Y"
put_option="@copy_mode_put"
yank_put_default="M-y"
yank_put_option="@copy_mode_yank_put"
yank_wo_newline_default="!"
yank_wo_newline_option="@copy_mode_yank_wo_newline"
yank_selection_default="clipboard"
yank_selection_option="@yank_selection"
yank_selection_mouse_default="primary"
yank_selection_mouse_option="@yank_selection_mouse"
yank_with_mouse_default="on"
yank_with_mouse_option="@yank_with_mouse"
yank_action_default="copy-pipe-and-cancel"
yank_action_option="@yank_action"
shell_mode_default="emacs"
shell_mode_option="@shell_mode"
custom_copy_command_default=""
custom_copy_command_option="@custom_copy_command"
override_copy_command_default=""
override_copy_command_option="@override_copy_command"
# helper functions
get_tmux_option() {
  # Echo the global value of tmux option $1, falling back to $2 when the
  # option is unset or empty.
  local opt_value
  opt_value=$($TMUX_CMD_PATH show-option -gqv "$1")
  echo "${opt_value:-$2}"
}
# One accessor per setting: each echoes the user-configured tmux option
# value, falling back to the hard-coded default declared above.
yank_line_key() {
  get_tmux_option "$yank_line_option" "$yank_line"
}
yank_pane_pwd_key() {
  get_tmux_option "$yank_pane_pwd_option" "$yank_pane_pwd"
}
yank_key() {
  get_tmux_option "$yank_option" "$yank_default"
}
put_key() {
  get_tmux_option "$put_option" "$put_default"
}
yank_put_key() {
  get_tmux_option "$yank_put_option" "$yank_put_default"
}
yank_wo_newline_key() {
  get_tmux_option "$yank_wo_newline_option" "$yank_wo_newline_default"
}
yank_selection() {
  get_tmux_option "$yank_selection_option" "$yank_selection_default"
}
yank_selection_mouse() {
  get_tmux_option "$yank_selection_mouse_option" "$yank_selection_mouse_default"
}
yank_with_mouse() {
  get_tmux_option "$yank_with_mouse_option" "$yank_with_mouse_default"
}
yank_action() {
  get_tmux_option "$yank_action_option" "$yank_action_default"
}
shell_mode() {
  get_tmux_option "$shell_mode_option" "$shell_mode_default"
}
custom_copy_command() {
  get_tmux_option "$custom_copy_command_option" "$custom_copy_command_default"
}
override_copy_command() {
  get_tmux_option "$override_copy_command_option" "$override_copy_command_default"
}
# Ensures a message is displayed for 5 seconds in tmux prompt.
# Does not override the 'display-time' tmux option.
display_message() {
  # Show $1 in the tmux status line. An optional second argument overrides
  # the display duration in milliseconds (default 5000). The user's own
  # 'display-time' option is saved beforehand and restored afterwards.
  local message="$1"
  local display_duration="5000"
  if [ "$#" -eq 2 ]; then
    display_duration="$2"
  fi
  # remember the user-set 'display-time' so it can be restored
  local saved_display_time
  saved_display_time=$(get_tmux_option "display-time" "750")
  $TMUX_CMD_PATH set-option -gq display-time "$display_duration"
  $TMUX_CMD_PATH display-message "$message"
  $TMUX_CMD_PATH set-option -gq display-time "$saved_display_time"
}
command_exists() {
  # Succeed iff $1 resolves to a runnable command (binary, builtin,
  # function or alias).
  type "$1" >/dev/null 2>&1
}
# Echo the system clipboard command to pipe yanked text into. The elif
# order is the preference order: user override, macOS pbcopy, WSL, Wayland,
# X11 (xsel then xclip), cygwin, then a user-supplied fallback. $1 ("true"
# for mouse selections) picks which selection option to consult for X11.
clipboard_copy_command() {
  local mouse="${1:-false}"
  # installing reattach-to-user-namespace is recommended on OS X
  if [ -n "$(override_copy_command)" ]; then
    override_copy_command
  elif command_exists "pbcopy"; then
    if command_exists "reattach-to-user-namespace"; then
      echo "reattach-to-user-namespace pbcopy"
    else
      echo "pbcopy"
    fi
  elif command_exists "clip.exe"; then # WSL clipboard command
    echo "cat | clip.exe"
  elif command_exists "wl-copy"; then # wl-clipboard: Wayland clipboard utilities
    echo "wl-copy"
  elif command_exists "xsel"; then
    local xsel_selection
    if [[ $mouse == "true" ]]; then
      xsel_selection="$(yank_selection_mouse)"
    else
      xsel_selection="$(yank_selection)"
    fi
    echo "xsel -i --$xsel_selection"
  elif command_exists "xclip"; then
    local xclip_selection
    if [[ $mouse == "true" ]]; then
      xclip_selection="$(yank_selection_mouse)"
    else
      xclip_selection="$(yank_selection)"
    fi
    echo "xclip -selection $xclip_selection"
  elif command_exists "putclip"; then # cygwin clipboard command
    echo "putclip"
  elif [ -n "$(custom_copy_command)" ]; then
    custom_copy_command
  fi
}
# Cache the TMUX version for speed.
tmux_version="$($TMUX_CMD_PATH -V | cut -d ' ' -f 2 | sed 's/next-//')"

# Succeed when the running tmux version is >= $1. Exact match and the
# "master" development build pass immediately; otherwise the dotted
# version numbers are compared field by field (missing fields = 0).
tmux_is_at_least() {
  if [[ $tmux_version == "$1" ]] || [[ $tmux_version == master ]]; then
    return 0
  fi
  local i
  local -a current_version wanted_version
  IFS='.' read -ra current_version <<<"$tmux_version"
  IFS='.' read -ra wanted_version <<<"$1"
  # fill empty fields in current_version with zeros
  for ((i = ${#current_version[@]}; i < ${#wanted_version[@]}; i++)); do
    current_version[i]=0
  done
  # fill empty fields in wanted_version with zeros
  for ((i = ${#wanted_version[@]}; i < ${#current_version[@]}; i++)); do
    wanted_version[i]=0
  done
  # 10# forces base-10 so fields like "08" are not parsed as octal
  for ((i = 0; i < ${#current_version[@]}; i++)); do
    if ((10#${current_version[i]} < 10#${wanted_version[i]})); then
      return 1
    fi
    if ((10#${current_version[i]} > 10#${wanted_version[i]})); then
      return 0
    fi
  done
  return 0
}
|
import { Field } from '@nestjs/graphql';
import { InputType } from '@nestjs/graphql';
import { StringFieldUpdateOperationsInput } from '../prisma/string-field-update-operations.input';
import { PostUpdateManyWithoutAuthorInput } from '../post/post-update-many-without-author.input';
/**
 * GraphQL input type for a partial User update. Every field is optional;
 * scalar fields use Prisma-style "field update operations" wrappers and
 * posts uses the nested update-many input.
 */
@InputType()
export class UserUpdateInput {

    @Field(() => StringFieldUpdateOperationsInput, {nullable:true})
    id?: StringFieldUpdateOperationsInput;

    @Field(() => StringFieldUpdateOperationsInput, {nullable:true})
    email?: StringFieldUpdateOperationsInput;

    @Field(() => StringFieldUpdateOperationsInput, {nullable:true})
    name?: StringFieldUpdateOperationsInput;

    @Field(() => StringFieldUpdateOperationsInput, {nullable:true})
    password?: StringFieldUpdateOperationsInput;

    @Field(() => PostUpdateManyWithoutAuthorInput, {nullable:true})
    posts?: PostUpdateManyWithoutAuthorInput;
}
|
<filename>src/elements/FlowField.ts<gh_stars>0
import { IFlowField } from './ElementInterface';
import { ICanvasState } from '../components/Canvas/CanvasInterfaces';
import { getCoordinateAfterRotation } from '../utils/math';
export default class FlowField implements IFlowField {
  // Grid of 2-D vectors indexed [row][col]; one cell per `resolution`-sized
  // square of the canvas, produced by the constructor's fieldFunction.
  private fields: nj.NdArray[][] = [[]];
  width: number;
  height: number;
  resolution: number;
  // When true, display() draws an arrow per cell for debugging.
  showFields: boolean = false;

  constructor(width: number, height: number, resolution: number, fieldFunction: (width: number, height: number, resolution: number)=>nj.NdArray[][], showFields: boolean) {
    this.width = width;
    this.height = height;
    this.resolution = resolution;
    this.fields = fieldFunction(width, height, resolution);
    this.showFields = showFields || this.showFields;
  }

  /**
   * Returns the field vector for the grid cell containing `location`
   * (row/col derived from location / resolution, clamped to the grid).
   */
  getField(location: nj.NdArray): nj.NdArray {
    const row = Math.floor(location.get(0) / this.resolution);
    const col = Math.floor(location.get(1) / this.resolution);
    return this.fields[Math.min(Math.max(0, row), this.fields.length - 1)][Math.min(Math.max(0, col), this.fields[0].length - 1)];
  }

  /**
   * Debug rendering: draws one arrow per cell, oriented along the cell's
   * field vector, when showFields is set and a 2D context is available.
   */
  display(state: Readonly<ICanvasState>) {
    if (this.showFields && state.ctx) {
      for (let i = 0; i < this.fields.length; i++) {
        for (let j = 0; j < this.fields[i].length; j++) {
          const field = this.fields[i][j];
          const centerX = this.resolution * i + this.resolution / 2;
          const centerY = this.resolution * j + this.resolution / 2;
          const angle = Math.atan2(field.get(1), field.get(0));
          const magnitude = this.resolution / 3;
          // NOTE(review): the cell center is pre-rotated here AND the
          // context is rotated below — verify the double transform is
          // intentional (it cancels only if getCoordinateAfterRotation
          // applies the inverse rotation).
          const [cx, cy] = getCoordinateAfterRotation(centerX, centerY, angle);
          state.ctx.beginPath();
          state.ctx.rotate(angle);
          state.ctx.moveTo(cx - magnitude, cy);
          state.ctx.lineTo(cx + magnitude, cy);
          state.ctx.lineTo(cx + magnitude - 5, cy + 5);
          state.ctx.stroke();
          state.ctx.beginPath();
          state.ctx.moveTo(cx + magnitude, cy);
          state.ctx.lineTo(cx + magnitude - 5, cy - 5);
          state.ctx.stroke();
          state.ctx.resetTransform();
        }
      }
    }
  }
}
|
#!/bin/sh
# CocoaPods embed-frameworks script preamble (generated by CocoaPods).
# NOTE(review): this declares #!/bin/sh but uses bashisms — the `function`
# keyword, `set -o pipefail`, the ERR trap, and the array below — which a
# strict POSIX sh may not support; Xcode typically runs it under bash.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# Copies and strips a vendored framework.
# $1: path to the .framework, absolute or relative to BUILT_PRODUCTS_DIR.
# Copies into ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}, installs any
# bundled .bcsymbolmap files, strips architectures not being built for,
# re-signs if required, and (pre-Xcode 7) embeds Swift runtime dylibs.
install_framework()
{
  # Resolve the source: try the given path under BUILT_PRODUCTS_DIR, then
  # just its basename there, then the path as-is.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's executable; fall back to a bare dylib layout and
  # resolve a symlinked binary.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# Copies and strips a vendored dSYM.
# $1: path to the .dSYM bundle.
# $2: (optional) "true"/"false" — warn when no built architecture matches;
#     defaults to true.
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # FIX: the logged command previously said "${basename}.framework.dSYM"
      # while the executed rsync used "${basename}.dSYM" — the echo now
      # matches what is actually run.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      mkdir -p "${DWARF_DSYM_FOLDER_PATH}"
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
# 0 = binary now contains only valid (or already-valid) archs; 1 = no arch
# in the binary matches ARCHS, nothing was stripped.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
# $1: binary path; $2: (optional) warn on no matching arch, default true.
# Mutates the binary in place with lipo and sets STRIP_BINARY_RETVAL.
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}
# Copies the bcsymbolmap files of a vendored framework
# Copies the bcsymbolmap files of a vendored framework.
# $1: path to a .bcsymbolmap file; copied into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # FIX: the inner quotes in this echo were unescaped (unlike every other
    # logged rsync in this script), so the printed command line lost its
    # quoting; they are now escaped to match the executed command.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op unless an expanded signing
# identity is set and code signing is both required and allowed.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # When parallel signing is on, background the job; callers `wait` later.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    # Built as a string (and possibly backgrounded), so eval is required.
    eval "$code_sign_cmd"
  fi
}
# Entry point: embed the pod frameworks for the current build configuration.
# (Both configurations install the same framework here; the per-config
# branches are how CocoaPods generates this file.)
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/DBDebugToolkit/DBDebugToolkit.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/DBDebugToolkit/DBDebugToolkit.framework"
fi
# Wait for any backgrounded codesign jobs started by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.mobilestartup.services
import cats.implicits._
import play.api.libs.json.Json._
import play.api.libs.json.{JsObject, JsString, JsValue, Json}
import uk.gov.hmrc.http.{HeaderCarrier, HttpReads}
import uk.gov.hmrc.mobilestartup.connectors.GenericConnector
import uk.gov.hmrc.mobilestartup.model.{CidPerson, EnrolmentStoreResponse}
import uk.gov.hmrc.mobilestartup.{BaseSpec, TestF}
/**
  * Tests for StartupServiceImpl: verifies how the aggregated startup JSON is
  * assembled from the individual downstream calls (help-to-save, tax-credits
  * renewals, in-app messages, citizen details) and how each downstream
  * failure is reflected in the result (entry omitted, error entry, or empty
  * defaults).
  */
class StartupServiceImplSpec extends BaseSpec with TestF {

  // Keys looked up in the aggregated startup JSON.
  private val helpToSave = "helpToSave"
  private val taxCreditsRenewals = "taxCreditRenewals"
  private val messages = "messages"
  private val user = "user"

  // Canned downstream success responses.
  private val successfulResponse = JsString("success")

  private val htsSuccessResponse: JsValue = successfulResponse
  private val tcrSuccessResponse: JsValue = obj("submissionsState" -> "open")

  // In-app messages payload: one paye, one tc and two hts messages.
  private val messagesSuccessResponse: JsValue = Json.parse("""{
      | "paye": [
      |   {
      |     "type": "Info",
      |     "id": "paye-message-1",
      |     "headline": "Title2 - Has active date",
      |     "content": {
      |       "title": "title Content title",
      |       "body": "Content2"
      |     },
      |     "activeWindow": {
      |       "startTime": "2020-03-01T20:06:12.726",
      |       "endTime": "2020-05-24T20:06:12.726"
      |     }
      |   }
      | ],
      | "tc": [
      |   {
      |     "type": "Info",
      |     "id": "tc-message-1",
      |     "headline": "Title2 - Has active date",
      |     "content": {
      |       "title": "title Content title",
      |       "body": "Content2"
      |     },
      |     "activeWindow": {
      |       "startTime": "2020-03-01T20:06:12.726",
      |       "endTime": "2020-05-24T20:06:12.726"
      |     }
      |   }
      | ],
      | "hts": [
      |   {
      |     "type": "Warning",
      |     "id": "hts-message-1",
      |     "headline": "Title3",
      |     "content": {
      |       "body": "Content3"
      |     },
      |     "link": {
      |       "url": "URL3",
      |       "urlType": "Normal",
      |       "type": "Secondary",
      |       "message": "Click me"
      |     }
      |   },
      |   {
      |     "type": "Urgent",
      |     "id": "hts-message-2",
      |     "headline": "Title4",
      |     "content": {
      |       "body": "Content4"
      |     },
      |     "link": {
      |       "url": "URL4",
      |       "urlType": "Normal",
      |       "type": "Secondary",
      |       "message": "Click me"
      |     }
      |   }
      | ]
      |}
      |""".stripMargin)

  // Citizen-details payload the "user" entry is derived from.
  private val citizenDetailsSuccessResponse: JsValue = Json.parse("""{
      | "person": {
      |   "firstName": "Angus",
      |   "middleName": "John",
      |   "lastName": "Smith",
      |   "title": "Mr",
      |   "honours": null,
      |   "sex": "M",
      |   "dateOfBirth": -26092800000,
      |   "nino": "AA000006C"
      | },
      | "address": {
      |   "line1": "123456",
      |   "line2": "23456",
      |   "line3": "3456",
      |   "line4": "456",
      |   "line5": "55555",
      |   "postcode": "98765",
      |   "startDate": 946684800000,
      |   "country": "Test Country",
      |   "type": "Residential"
      | },
      | "correspondenceAddress": {
      |   "line1": "1 Main Street",
      |   "line2": "Central",
      |   "line3": "Anothertown",
      |   "line4": "Anothershire",
      |   "line5": "Anotherline",
      |   "postcode": "AA1 1AA",
      |   "startDate": 1341100800000,
      |   "country": null,
      |   "type": "Correspondence"
      | }
      | }
      |""".stripMargin)

  // Expected "user" entry built from the citizen-details response.
  // (The name is anonymised in this fixture.)
  private val userExpectedResponse: JsValue = Json.parse("""{
      | "name": "<NAME>"
      | }
      |""".stripMargin)

  // Connector stub answering each downstream service by name; operations
  // the tests never use are left unimplemented (???).
  private def dummyConnector(
    htsResponse:            TestF[JsValue] = htsSuccessResponse.pure[TestF],
    tcrResponse:            TestF[JsValue] = tcrSuccessResponse.pure[TestF],
    inAppMessagesResponse:  TestF[JsValue] = messagesSuccessResponse.pure[TestF],
    citizenDetailsResponse: TestF[JsValue] = citizenDetailsSuccessResponse.pure[TestF]
  ): GenericConnector[TestF] =
    new GenericConnector[TestF] {

      override def doGet(
        serviceName: String,
        path:        String,
        hc:          HeaderCarrier
      ): TestF[JsValue] =
        serviceName match {
          case "mobile-help-to-save"         => htsResponse
          case "mobile-tax-credits-renewal"  => tcrResponse
          case "mobile-in-app-messages"      => inAppMessagesResponse
          case "citizen-details"             => citizenDetailsResponse
          case _                             => obj().pure[TestF]
        }

      override def cidGet(
        serviceName: String,
        path:        String,
        hc:          HeaderCarrier
      ): TestF[CidPerson] = ???

      override def enrolmentStoreGet(
        serviceName: String,
        path:        String,
        hc:          HeaderCarrier
      ): TestF[EnrolmentStoreResponse] = ???

      override def doPost[T](
        json:        JsValue,
        serviceName: String,
        path:        String,
        hc:          HeaderCarrier
      )(implicit rds: HttpReads[T]
      ): TestF[T] = ???
    }

  "a fully successful response" should {
    "contain success entries for each service" in {
      val sut = new StartupServiceImpl[TestF](dummyConnector(),
                                              userPanelSignUp                         = false,
                                              enablePushNotificationTokenRegistration = false,
                                              enablePaperlessAlertDialogs             = false,
                                              enablePaperlessAdverts                  = false,
                                              enableHtsAdverts                        = false,
                                              enableAnnualTaxSummaryLink              = false)

      val result: JsObject = sut.startup("nino", journeyId)(HeaderCarrier()).unsafeGet

      (result \ helpToSave).toOption.value shouldBe htsSuccessResponse
      (result \ taxCreditsRenewals).toOption.value shouldBe tcrSuccessResponse
      (result \ "feature").get
        .as[List[FeatureFlag]] shouldBe List(
        FeatureFlag("userPanelSignUp", enabled = false),
        FeatureFlag("enablePushNotificationTokenRegistration", enabled = false),
        FeatureFlag("paperlessAlertDialogs", enabled = false),
        FeatureFlag("paperlessAdverts", enabled = false),
        FeatureFlag("htsAdverts", enabled = false),
        FeatureFlag("annualTaxSummaryLink", enabled = false)
      )
      (result \ messages).toOption.value shouldBe messagesSuccessResponse
      (result \ user).toOption.value shouldBe userExpectedResponse
    }
  }

  "a response" should {
    // hts failure: the helpToSave entry is simply omitted.
    "not contain an entry for help-to-save when the hts call fails" in {
      val sut = new StartupServiceImpl[TestF](dummyConnector(htsResponse = new Exception("hts failed").error),
                                              false,
                                              enablePushNotificationTokenRegistration = false,
                                              enablePaperlessAlertDialogs             = false,
                                              enablePaperlessAdverts                  = false,
                                              enableHtsAdverts                        = false,
                                              enableAnnualTaxSummaryLink              = false)

      val result: JsObject = sut.startup("nino", journeyId)(HeaderCarrier()).unsafeGet

      (result \ helpToSave).toOption shouldBe None
      (result \ taxCreditsRenewals).toOption.value shouldBe tcrSuccessResponse
      (result \ "feature").get
        .as[List[FeatureFlag]] shouldBe List(
        FeatureFlag("userPanelSignUp", enabled = false),
        FeatureFlag("enablePushNotificationTokenRegistration", enabled = false),
        FeatureFlag("paperlessAlertDialogs", enabled = false),
        FeatureFlag("paperlessAdverts", enabled = false),
        FeatureFlag("htsAdverts", enabled = false),
        FeatureFlag("annualTaxSummaryLink", enabled = false)
      )
      (result \ messages).toOption.value shouldBe messagesSuccessResponse
      (result \ user).toOption.value shouldBe userExpectedResponse
    }

    // tcr failure: a submissionsState of "error" replaces the real state.
    "contain an error entry for tcr when the tcr call fails" in {
      val sut = new StartupServiceImpl[TestF](dummyConnector(tcrResponse = new Exception("tcr failed").error),
                                              false,
                                              enablePushNotificationTokenRegistration = false,
                                              enablePaperlessAlertDialogs             = false,
                                              enablePaperlessAdverts                  = false,
                                              enableHtsAdverts                        = false,
                                              enableAnnualTaxSummaryLink              = false)

      val result: JsObject = sut.startup("nino", journeyId)(HeaderCarrier()).unsafeGet

      (result \ helpToSave).toOption.value shouldBe htsSuccessResponse
      (result \ taxCreditsRenewals).toOption.value shouldBe obj("submissionsState" -> "error")
      (result \ "feature").get
        .as[List[FeatureFlag]] shouldBe List(
        FeatureFlag("userPanelSignUp", enabled = false),
        FeatureFlag("enablePushNotificationTokenRegistration", enabled = false),
        FeatureFlag("paperlessAlertDialogs", enabled = false),
        FeatureFlag("paperlessAdverts", enabled = false),
        FeatureFlag("htsAdverts", enabled = false),
        FeatureFlag("annualTaxSummaryLink", enabled = false)
      )
      (result \ messages).toOption.value shouldBe messagesSuccessResponse
      (result \ user).toOption.value shouldBe userExpectedResponse
    }

    // messages failure: all message lists default to empty.
    "contain an empty lists entry for messages when the messages call fails" in {
      val sut = new StartupServiceImpl[TestF](
        dummyConnector(inAppMessagesResponse = new Exception("message call failed").error),
        false,
        enablePushNotificationTokenRegistration = false,
        enablePaperlessAlertDialogs             = false,
        enablePaperlessAdverts                  = false,
        enableHtsAdverts                        = false,
        enableAnnualTaxSummaryLink              = false)

      val result: JsObject = sut.startup("nino", journeyId)(HeaderCarrier()).unsafeGet

      (result \ helpToSave).toOption.value shouldBe htsSuccessResponse
      (result \ taxCreditsRenewals).toOption.value shouldBe obj("submissionsState" -> "open")
      (result \ "feature").get
        .as[List[FeatureFlag]] shouldBe List(
        FeatureFlag("userPanelSignUp", enabled = false),
        FeatureFlag("enablePushNotificationTokenRegistration", enabled = false),
        FeatureFlag("paperlessAlertDialogs", enabled = false),
        FeatureFlag("paperlessAdverts", enabled = false),
        FeatureFlag("htsAdverts", enabled = false),
        FeatureFlag("annualTaxSummaryLink", enabled = false)
      )
      (result \ messages).toOption.value shouldBe Json.parse("""{
          | "paye": [],
          | "tc": [],
          | "hts": [],
          | "tcp": []
          |}
          |""".stripMargin)
      (result \ user).toOption.value shouldBe userExpectedResponse
    }

    // citizen-details failure: the user entry is omitted.
    "not contain an entry for user when the citizen details call fails" in {
      val sut =
        new StartupServiceImpl[TestF](dummyConnector(citizenDetailsResponse = new Exception("cid failed").error),
                                      false,
                                      enablePushNotificationTokenRegistration = false,
                                      enablePaperlessAlertDialogs             = false,
                                      enablePaperlessAdverts                  = false,
                                      enableHtsAdverts                        = false,
                                      enableAnnualTaxSummaryLink              = false)

      val result: JsObject = sut.startup("nino", journeyId)(HeaderCarrier()).unsafeGet

      (result \ helpToSave).toOption.value shouldBe htsSuccessResponse
      (result \ taxCreditsRenewals).toOption.value shouldBe tcrSuccessResponse
      (result \ "feature").get
        .as[List[FeatureFlag]] shouldBe List(
        FeatureFlag("userPanelSignUp", enabled = false),
        FeatureFlag("enablePushNotificationTokenRegistration", enabled = false),
        FeatureFlag("paperlessAlertDialogs", enabled = false),
        FeatureFlag("paperlessAdverts", enabled = false),
        FeatureFlag("htsAdverts", enabled = false),
        FeatureFlag("annualTaxSummaryLink", enabled = false)
      )
      (result \ messages).toOption.value shouldBe messagesSuccessResponse
      (result \ user).toOption shouldBe None
    }
  }
}
|
<reponame>path64/assembler
//
// ELF object format
//
// Copyright (C) 2003-2007 <NAME>
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND OTHER CONTRIBUTORS ``AS IS''
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Notes
//
// elf-objfmt uses the "linking" view of an ELF file:
// ELF header, an optional program header table, several sections,
// and a section header table
//
// The ELF header tells us some overall program information,
// where to find the PHT (if it exists) with phnum and phentsize,
// and where to find the SHT with shnum and shentsize
//
// The PHT doesn't seem to be generated by NASM for elftest.asm
//
// The SHT
//
// Each Section is spatially disjoint, and has exactly one SHT entry.
//
#include "ElfObject.h"
#ifdef __FreeBSD__
#include <sys/param.h>
#endif
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/raw_ostream.h"
#include "yasmx/Basic/Diagnostic.h"
#include "yasmx/Basic/SourceManager.h"
#include "yasmx/Parse/Directive.h"
#include "yasmx/Parse/DirHelpers.h"
#include "yasmx/Parse/NameValue.h"
#include "yasmx/Support/bitcount.h"
#include "yasmx/Support/registry.h"
#include "yasmx/Support/scoped_array.h"
#include "yasmx/Arch.h"
#include "yasmx/BytecodeContainer.h"
#include "yasmx/BytecodeOutput.h"
#include "yasmx/Bytecode.h"
#include "yasmx/Bytes_util.h"
#include "yasmx/DebugFormat.h"
#include "yasmx/Expr_util.h"
#include "yasmx/Location_util.h"
#include "yasmx/Object.h"
#include "yasmx/Object_util.h"
#include "yasmx/Section.h"
#include "yasmx/StringTable.h"
#include "yasmx/Symbol_util.h"
#include "ElfMachine.h"
#include "ElfReloc.h"
#include "ElfSection.h"
#include "ElfSymbol.h"
#include "ElfTypes.h"
using namespace yasm;
using namespace yasm::objfmt;
// True when the symbol's visibility is purely local: either exactly
// Symbol::LOCAL or with the DLOCAL bit set.
static inline bool
isLocal(const Symbol& sym)
{
    const int visibility = sym.getVisibility();
    if (visibility == Symbol::LOCAL)
        return true;
    return (visibility & Symbol::DLOCAL) != 0;
}
static inline bool
byIndex(const Symbol& s1, const Symbol& s2)
{
const ElfSymbol* e1 = s1.getAssocData<ElfSymbol>();
const ElfSymbol* e2 = s2.getAssocData<ElfSymbol>();
if (e1 && !e2)
return true;
if (!e1 && e2)
return false;
if (!e1 && !e2)
return false; // doesn't matter
return e1->getSymbolIndex() < e2->getSymbolIndex();
}
// Groups start with no flags set; members are filled in later.
ElfGroup::ElfGroup()
    : flags(0)
{
}

ElfGroup::~ElfGroup()
{
}
// Construct an ELF object format for the given object.
// bits selects the ELF class: 32 -> ELFCLASS32, 64 -> ELFCLASS64,
// 0 -> default (ELFCLASS32); any other value asserts.
// Also creates and configures the architecture-specific ElfMachine.
ElfObject::ElfObject(const ObjectFormatModule& module,
                     Object& object,
                     unsigned int bits)
    : ObjectFormat(module, object)
    , m_machine(0)
    , m_file_elfsym(0)
    , m_dotdotsym(0)
    , m_symvers_owner(m_symvers)
    , m_groups_owner(m_groups)
{
    if (bits == 32)
        m_config.cls = ELFCLASS32;
    else if (bits == 64)
        m_config.cls = ELFCLASS64;
    else
    {
        assert(bits == 0 && "unknown ELF bits setting");
        m_config.cls = ELFCLASS32;
    }

    // Machine implementation depends on both the arch and the ELF class.
    m_machine.reset(CreateElfMachine(*m_object.getArch(),
                                     m_config.cls).release());
    m_machine->Configure(&m_config);
}

ElfObject::~ElfObject()
{
}
Elf32Object::~Elf32Object()
{
}

// An object can be written as ELF32 when its architecture has an
// ELFCLASS32 machine implementation.
bool
Elf32Object::isOkObject(Object& object)
{
    return isOkElfMachine(*object.getArch(), ELFCLASS32);
}
Elf64Object::~Elf64Object()
{
}

// An object can be written as ELF64 when its architecture has an
// ELFCLASS64 machine implementation.
bool
Elf64Object::isOkObject(Object& object)
{
    return isOkElfMachine(*object.getArch(), ELFCLASS64);
}
Elfx32Object::~Elfx32Object()
{
}

// x32 uses the ELFCLASS32 container (with an x86-64 machine type), so the
// check matches Elf32Object's.
bool
Elfx32Object::isOkObject(Object& object)
{
    return isOkElfMachine(*object.getArch(), ELFCLASS32);
}
// File sniffer: returns true when the buffer is a 32-bit i386 ELF object,
// filling in the arch keyword and machine name on success.
bool
Elf32Object::Taste(const MemoryBuffer& in,
                   /*@out@*/ std::string* arch_keyword,
                   /*@out@*/ std::string* machine)
{
    ElfConfig config;
    // Must parse as ELF and be the 32-bit class.
    if (!config.ReadProgramHeader(in) || config.cls != ELFCLASS32)
        return false;
    // Only i386 is recognized here.
    if (config.machine_type != EM_386)
        return false;
    arch_keyword->assign("x86");
    machine->assign("x86");
    return true;
}
// File sniffer: returns true when the buffer is a 64-bit x86-64 ELF object,
// filling in the arch keyword and machine name on success.
bool
Elf64Object::Taste(const MemoryBuffer& in,
                   /*@out@*/ std::string* arch_keyword,
                   /*@out@*/ std::string* machine)
{
    ElfConfig config;
    // Must parse as ELF and be the 64-bit class.
    if (!config.ReadProgramHeader(in) || config.cls != ELFCLASS64)
        return false;
    // Only x86-64 is recognized here.
    if (config.machine_type != EM_X86_64)
        return false;
    arch_keyword->assign("x86");
    machine->assign("amd64");
    return true;
}
// File sniffer: returns true when the buffer is an x32 ELF object — the
// 32-bit container class carrying an x86-64 machine type.
bool
Elfx32Object::Taste(const MemoryBuffer& in,
                    /*@out@*/ std::string* arch_keyword,
                    /*@out@*/ std::string* machine)
{
    ElfConfig config;
    // Must parse as ELF and be the 32-bit class.
    if (!config.ReadProgramHeader(in) || config.cls != ELFCLASS32)
        return false;
    // ...but with the 64-bit machine type (the x32 ABI).
    if (config.machine_type != EM_X86_64)
        return false;
    arch_keyword->assign("x86");
    machine->assign("amd64");
    return true;
}
// Load an ELF string-table section's raw bytes from the input buffer into
// strtab. Returns false (after reporting a diagnostic) when the section's
// byte range extends past the end of the buffer.
static inline bool
LoadStringTable(StringTable* strtab,
                const MemoryBuffer& in,
                const ElfSection& elfsect,
                DiagnosticsEngine& diags)
{
    size_t start = elfsect.getFileOffset();
    size_t size = elfsect.getSize().getUInt();
    StringRef buf = in.getBuffer().substr(start, size);
    // substr clamps to the buffer end, so a short result means truncation.
    if (buf.size() < size)
    {
        diags.Report(SourceLocation(), diag::err_string_table_unreadable);
        return false;
    }
    strtab->Read(buf);
    return true;
}
// Parse an existing ELF object file (the "linking" view): ELF header,
// section headers, section data, symbol table and relocations, populating
// m_object. Returns false after reporting a diagnostic on any failure.
bool
ElfObject::Read(SourceManager& sm, DiagnosticsEngine& diags)
{
    const MemoryBuffer& in = *sm.getBuffer(sm.getMainFileID());

    // Read header
    if (!m_config.ReadProgramHeader(in))
    {
        diags.Report(SourceLocation(), diag::err_not_file_type) << "ELF";
        return false;
    }

    // Can't handle files without section table yet
    if (m_config.secthead_pos == 0)
    {
        diags.Report(SourceLocation(), diag::err_no_section_table);
        return false;
    }

    // Read section string table (needed for section names)
    std::auto_ptr<ElfSection>
        shstrtab_sect(new ElfSection(m_config, in, m_config.shstrtab_index,
                                     diags));
    if (diags.hasErrorOccurred())
        return false;

    StringTable shstrtab;
    if (!LoadStringTable(&shstrtab, in, *shstrtab_sect, diags))
        return false;

    // Read all section headers
    // owned storage for "misc" sections (e.g. relocation sections)
    stdx::ptr_vector<ElfSection> misc_sections;
    stdx::ptr_vector_owner<ElfSection> misc_sections_owner(misc_sections);
    misc_sections.reserve(m_config.secthead_count);
    // indexed array of all ElfSections by section index
    util::scoped_array<ElfSection*>
        elfsects(new ElfSection*[m_config.secthead_count]);
    // indexed array of all Sections by section index
    util::scoped_array<Section*>
        sections(new Section*[m_config.secthead_count]);
    // special sections
    ElfSection* strtab_sect = 0;
    ElfSection* symtab_sect = 0;

    // read section headers
    for (unsigned int i=0; i<m_config.secthead_count; ++i)
    {
        // read section header and save by index
        std::auto_ptr<ElfSection> elfsect(new ElfSection(m_config, in, i,
                                                         diags));
        if (diags.hasErrorOccurred())
            return false;
        elfsects[i] = elfsect.get();

        StringRef sectname = shstrtab.getString(elfsect->getName());
        if (sectname == ".strtab")
        {
            strtab_sect = elfsect.get();
        }
        else if (sectname == ".symtab")
        {
            symtab_sect = elfsect.get();
        }

        ElfSectionType secttype = elfsect->getType();
        if (secttype == SHT_NULL ||
            secttype == SHT_SYMTAB ||
            secttype == SHT_STRTAB ||
            secttype == SHT_RELA ||
            secttype == SHT_REL)
        {
            misc_sections.push_back(elfsect.release());
            sections[i] = 0;

            // try to pick these up by section type if not set
            // FIX: previously used elfsect.get() here, but elfsect was
            // already release()d above, so these fallbacks always assigned
            // NULL; use the raw pointer saved in elfsects[i] instead.
            if (secttype == SHT_SYMTAB && symtab_sect == 0)
                symtab_sect = elfsects[i];
            else if (secttype == SHT_STRTAB && strtab_sect == 0)
                strtab_sect = elfsects[i];

            // if any section is RELA, set config to RELA
            if (secttype == SHT_RELA)
                m_config.rela = true;
        }
        else
        {
            std::auto_ptr<Section> section = elfsect->CreateSection(shstrtab);
            if (!elfsect->LoadSectionData(*section, in, diags))
                return false;
            sections[i] = section.get();

            // Associate section data with section
            section->AddAssocData(elfsect);

            // Add section to object
            m_object.AppendSection(section);
        }
    }

    // Symbol table by index (needed for relocation lookups by index)
    std::vector<SymbolRef> symtab;

    // read symtab string table and symbol table (if present)
    if (symtab_sect != 0)
    {
        // get string table section index from symtab link field if reasonable
        ElfSectionIndex link = symtab_sect->getLink();
        if (link < m_config.secthead_count &&
            elfsects[link]->getType() == SHT_STRTAB)
            strtab_sect = elfsects[link];

        if (strtab_sect == 0)
        {
            diags.Report(SourceLocation(), diag::err_no_symbol_string_table);
            return false;
        }

        // load symbol string table
        StringTable strtab;
        if (!LoadStringTable(&strtab, in, *strtab_sect, diags))
            return false;

        // load symbol table
        // FIX: restored "&sections[0]" — the '&' had been mangled into the
        // character '§' ("&sect" HTML-entity corruption).
        if (!m_config.ReadSymbolTable(in, *symtab_sect, symtab, m_object,
                                      strtab, &sections[0], diags))
            return false;
    }

    // go through misc sections to load relocations
    for (unsigned int i=0; i<m_config.secthead_count; ++i)
    {
        ElfSection* reloc_sect = elfsects[i];
        ElfSectionType secttype = reloc_sect->getType();
        if (secttype != SHT_REL && secttype != SHT_RELA)
            continue;

        // get symbol table section index from link field (if valid)
        ElfSection* rel_symtab_sect = symtab_sect;
        ElfSectionIndex link = reloc_sect->getLink();
        if (link < m_config.secthead_count &&
            elfsects[link]->getType() == SHT_SYMTAB)
        {
            if (rel_symtab_sect != elfsects[link])
            {
                diags.Report(SourceLocation(),
                             diag::err_multiple_symbol_tables);
                return false;
            }
        }

        // section relocs apply to is indicated by info field
        ElfSectionIndex info = reloc_sect->getInfo();
        // FIX: bounds-check 'info' (the index actually used below) instead
        // of 'link'; the old check allowed an out-of-bounds read of
        // sections[info] when info >= secthead_count.
        if (info >= m_config.secthead_count || sections[info] == 0)
            continue;

        // load relocations
        elfsects[info]->ReadRelocs(in, *reloc_sect, *sections[info],
                                   *m_machine, symtab, secttype == SHT_RELA);
    }

    return true;
}
// Create the standard special symbols for a new ELF object: the ".file"
// symbol (STT_FILE, symbol index 1 by convention), the NASM-only "..sym"
// special symbol, and any machine-specific special symbols.
void
ElfObject::InitSymbols(StringRef parser)
{
    // Set object options
    m_object.getOptions().DisableGlobalSubRelative = true;

    // Add .file symbol
    SymbolRef filesym = m_object.AppendSymbol(".file");
    filesym->DefineSpecial(Symbol::LOCAL);
    std::auto_ptr<ElfSymbol> elfsym(new ElfSymbol());
    elfsym->setSectionIndex(SHN_ABS);
    elfsym->setBinding(STB_LOCAL);
    elfsym->setType(STT_FILE);
    elfsym->setSymbolIndex(1);  // by convention
    // Keep a raw pointer before ownership transfers to the symbol.
    m_file_elfsym = elfsym.get();
    filesym->AddAssocData(elfsym);

    // Create ..sym special symbol (NASM only)
    if (parser.equals_lower("nasm"))
    {
        m_dotdotsym = m_object.AddSpecialSymbol("sym");
        m_dotdotsym->DefineSpecial(Symbol::EXTERN);
    }

    // Create machine-specific special symbols
    m_machine->AddSpecialSymbols(m_object, parser);
}
// Return the ElfSymbol data attached to sym, creating and attaching a
// fresh (default) one on first use.
ElfSymbol&
ElfObject::BuildSymbol(Symbol& sym)
{
    ElfSymbol* data = sym.getAssocData<ElfSymbol>();
    if (data != 0)
        return *data;
    data = new ElfSymbol;
    sym.AddAssocData(std::auto_ptr<ElfSymbol>(data));
    return *data;
}
// Finalize an EXTERN symbol declaration. String-valued objext qualifiers
// are not recognized for externs: the first one found produces a warning
// and aborts the build for this symbol. Otherwise the symbol's binding is
// promoted from LOCAL to GLOBAL.
void
ElfObject::BuildExtern(Symbol& sym, DiagnosticsEngine& diags)
{
    const NameValues* objext_nvs = getObjextNameValues(sym);
    if (objext_nvs)
    {
        for (NameValues::const_iterator nv=objext_nvs->begin(),
             end=objext_nvs->end(); nv != end; ++nv)
        {
            if (nv->isString())
            {
                diags.Report(nv->getValueRange().getBegin(),
                             diag::warn_unrecognized_symbol_type)
                    << nv->getString();
                return;
            }
        }
    }

    ElfSymbol& elfsym = BuildSymbol(sym);
    if (elfsym.getBinding() == STB_LOCAL)
        elfsym.setBinding(STB_GLOBAL);
}
// Fallback handler for GLOBAL-directive name/values not matched by a
// registered helper: a bare identifier is an unrecognized symbol type
// (warn, consume); the first expression value is taken as the symbol size;
// anything else falls through to the generic unrecognized-qualifier warning.
static bool
GlobalNameValueFallback(NameValue& nv,
                        SourceLocation dir_source,
                        DiagnosticsEngine& diags,
                        Object* object,
                        Expr::Ptr* size)
{
    if (!nv.isExpr() && nv.isId())
    {
        diags.Report(nv.getValueRange().getBegin(),
                     diag::warn_unrecognized_symbol_type)
            << nv.getId();
        return true;
    }
    else if (nv.isExpr() && size->get() == 0)
    {
        *size = nv.ReleaseExpr(*object);
        return true;
    }
    else
        return DirNameValueWarn(nv, dir_source, diags);
}
// Directive helper for visibility qualifiers (internal/hidden/protected):
// records the requested visibility, counts how many visibility qualifiers
// have been seen (so duplicates can be warned about later), and remembers
// where the value appeared for diagnostics.
static inline void
GlobalSetVis(NameValue& nv,
             DiagnosticsEngine& diags,
             ElfSymbolVis* vis_out,
             unsigned int* vis_count,
             SourceLocation* vis_source,
             ElfSymbolVis vis)
{
    *vis_out = vis;
    ++*vis_count;
    *vis_source = nv.getValueRange().getBegin();
}
// Finalize a GLOBAL symbol declaration: process objext qualifiers
// (function/data/object type, internal/hidden/protected visibility, and an
// optional size expression via the fallback), warn on multiple visibility
// qualifiers, promote LOCAL binding to GLOBAL, and record type, visibility
// and size on the ELF symbol.
void
ElfObject::BuildGlobal(Symbol& sym, DiagnosticsEngine& diags)
{
    Expr::Ptr size(0);
    unsigned long type = STT_NOTYPE;    // ElfSymbolType
    unsigned int nvis = 0;
    SourceLocation vis_source;
    ElfSymbol& elfsym = BuildSymbol(sym);
    ElfSymbolVis vis = elfsym.getVisibility();

    DirHelpers helpers;
    helpers.Add("function", false,
                TR1::bind(&DirResetFlag, _1, _2, &type, STT_FUNC));
    helpers.Add("data", false,
                TR1::bind(&DirResetFlag, _1, _2, &type, STT_OBJECT));
    // FIX: the "object" helper was registered twice with identical
    // arguments (copy-paste duplicate); registered once now.
    helpers.Add("object", false,
                TR1::bind(&DirResetFlag, _1, _2, &type, STT_OBJECT));
    helpers.Add("internal", false,
                TR1::bind(&GlobalSetVis, _1, _2, &vis, &nvis, &vis_source,
                          STV_INTERNAL));
    helpers.Add("hidden", false,
                TR1::bind(&GlobalSetVis, _1, _2, &vis, &nvis, &vis_source,
                          STV_HIDDEN));
    helpers.Add("protected", false,
                TR1::bind(&GlobalSetVis, _1, _2, &vis, &nvis, &vis_source,
                          STV_PROTECTED));

    NameValues* objext_nvs = getObjextNameValues(sym);
    if (objext_nvs)
    {
        helpers(objext_nvs->begin(), objext_nvs->end(), sym.getDeclSource(),
                diags, TR1::bind(&GlobalNameValueFallback, _1, _2, _3,
                                 &m_object, &size));
    }

    if (nvis > 1)
        diags.Report(vis_source, diag::warn_multiple_symbol_visibility);

    if (elfsym.getBinding() == STB_LOCAL)
        elfsym.setBinding(STB_GLOBAL);
    if (!elfsym.hasType())
        elfsym.setType(static_cast<ElfSymbolType>(type));
    elfsym.setVisibility(vis);
    if (size.get() != 0 && !elfsym.hasSize())
        elfsym.setSize(*size, sym.getDeclSource());
}
// Finalize a COMMON symbol declaration. An unnamed expression qualifier
// gives the alignment (must be an integer power of two); when no alignment
// is given, a default is derived from the symbol size (largest power of two
// <= size, capped at 16). The alignment is stored in the ELF symbol's value
// field, as the ELF convention for SHN_COMMON symbols.
void
ElfObject::BuildCommon(Symbol& sym, DiagnosticsEngine& diags)
{
    NameValues* objext_nvs = getObjextNameValues(sym);
    bool has_align = false;
    unsigned long addralign = 0;

    if (objext_nvs)
    {
        for (NameValues::iterator nv=objext_nvs->begin(),
             end=objext_nvs->end(); nv != end; ++nv)
        {
            if (!nv->getName().empty())
            {
                // Named qualifiers are not recognized for COMMON.
                diags.Report(nv->getNameSource(),
                             diag::warn_unrecognized_qualifier);
                continue;
            }
            if (!nv->isExpr())
            {
                diags.Report(nv->getValueRange().getBegin(),
                             diag::err_align_not_integer);
                return;
            }
            std::auto_ptr<Expr> align_expr = nv->ReleaseExpr(m_object);
            if (!align_expr->isIntNum())
            {
                diags.Report(nv->getValueRange().getBegin(),
                             diag::err_align_not_integer);
                return;
            }
            addralign = align_expr->getIntNum().getUInt();
            has_align = true;

            // Alignments must be a power of two.
            if (!isExp2(addralign))
            {
                diags.Report(nv->getValueRange().getBegin(),
                             diag::err_value_power2);
                return;
            }
        }
    }

    ElfSymbol& elfsym = BuildSymbol(sym);
    elfsym.setSectionIndex(SHN_COMMON);
    if (elfsym.getBinding() == STB_LOCAL)
        elfsym.setBinding(STB_GLOBAL);
    if (!elfsym.hasSize())
        elfsym.setSize(*getCommonSize(sym), sym.getDeclSource());
    if (!elfsym.hasType())
        elfsym.setType(STT_OBJECT);

    if (!has_align)
    {
        // No explicit alignment: derive one from the (simplified) size.
        Expr size = elfsym.getSize();
        if (!size.isEmpty())
        {
            if (!ExpandEqu(size))
            {
                diags.Report(elfsym.getSizeSource(),
                             diag::err_equ_circular_reference);
                return;
            }
            SimplifyCalcDist(size, diags);
            if (size.isIntNum())
            {
                unsigned long sz = size.getIntNum().getUInt();
                // set alignment to largest power of two <= size, up to 16.
                if (sz < 2)
                    addralign = 1;
                else if (sz < 4)
                    addralign = 2;
                else if (sz < 8)
                    addralign = 4;
                else if (sz < 16)
                    addralign = 8;
                else
                    addralign = 16;
            }
            else
                diags.Report(elfsym.getSizeSource(), diag::err_size_integer);
        }
    }
    // For SHN_COMMON symbols the value field holds the alignment.
    elfsym.setValue(addralign);
}
// If the symbol is a label, point its ELF symbol at the label's section
// and record the label's offset as the symbol value.  Non-label symbols
// (equs, externs) are left untouched.
void
ElfObject::setSymbolSectionValue(Symbol& sym, ElfSymbol& elfsym)
{
    Location label_loc;
    if (!sym.getLabel(&label_loc))
        return;             // not a label
    if (!label_loc.bc)
        return;             // label without a bytecode: nothing to record
    elfsym.setSection(label_loc.bc->getContainer()->getSection());
    elfsym.setValue(label_loc.getOffset());
}
// Finalize one symbol for output: apply EXTERN/COMMON/GLOBAL declaration
// handling, decide whether a purely local symbol belongs in the symbol
// table at all (local_names / section symbols / GCC ".L" and gas "L...\1"
// filtering), fill in its section/value, and intern its name in strtab.
void
ElfObject::FinalizeSymbol(Symbol& sym,
                          StringTable& strtab,
                          bool local_names,
                          DiagnosticsEngine& diags)
{
    int vis = sym.getVisibility();
    ElfSymbol* elfsym = sym.getAssocData<ElfSymbol>();

    // Declaration handling may create the ElfSymbol assoc data, so
    // re-fetch it after each Build* call.
    if (vis & Symbol::EXTERN)
    {
        BuildExtern(sym, diags);
        elfsym = sym.getAssocData<ElfSymbol>();
    }
    else if (vis & Symbol::COMMON)
    {
        BuildCommon(sym, diags);
        elfsym = sym.getAssocData<ElfSymbol>();
    }

    if (sym.isDefined())
    {
        // A defined GLOBAL (that is not COMMON) gets global binding etc.
        if ((vis & Symbol::COMMON) == 0 && (vis & Symbol::GLOBAL) != 0)
        {
            BuildGlobal(sym, diags);
            elfsym = sym.getAssocData<ElfSymbol>();
        }
        if (!elfsym)
        {
            Location loc = {0, 0};
            if (!sym.getLabel(&loc))
            {
                // Only labels, equs, and absolute symbols are of interest.
                if (!sym.getEqu() && !sym.isAbsoluteSymbol())
                    return;
            }
            Section* sect = 0;
            if (loc.bc)
                sect = loc.bc->getContainer()->getSection();

            // Locals (except when debugging) do not need to be
            // in the symbol table, unless they're a section.
            bool is_sect = false;
            if (sect)
            {
                ElfSection* elfsect = sect->getAssocData<ElfSection>();
                if (elfsect && sect->getSymbol() == &sym)
                    is_sect = true;
            }

            if (!is_sect)
            {
                if (!local_names)
                    return;

                // GCC names its internal symbols .Lxxxx; follow gas' lead and
                // don't output these symbols even if local_names is enabled.
                StringRef name = sym.getName();
                if (name.size() > 2 && name[0] == '.' && name[1] == 'L')
                    return;
                // Don't output GAS parser local labels.
                if (name.size() > 1 && name[0] == 'L' &&
                    name.rfind('\001') != StringRef::npos)
                    return;
            }

            elfsym = &BuildSymbol(sym);
            if (is_sect)
                elfsym->setType(STT_SECTION);
        }
        setSymbolSectionValue(sym, *elfsym);
    }

    if (!elfsym)
        return;

    elfsym->Finalize(sym, diags);
    // Section symbols are named via shstrtab, not strtab, so skip them.
    if (elfsym->isInTable() && !elfsym->hasName() &&
        elfsym->getType() != STT_SECTION)
        elfsym->setName(strtab.getIndex(sym.getName()));
}
namespace {
// Bytecode output adapter for ELF: streams section contents to the output
// file and converts unresolved symbolic values into ELF relocations as
// they are emitted.  Also tracks whether a _GLOBAL_OFFSET_TABLE_ symbol
// needs to be synthesized (m_needs_GOT).
class ElfOutput : public BytecodeStreamOutput
{
public:
    ElfOutput(raw_fd_ostream& os,
              ElfObject& objfmt,
              Object& object,
              DiagnosticsEngine& diags);
    ~ElfOutput();

    // Write a SHT_GROUP section's contents (flags word + member indices).
    void OutputGroup(ElfGroup& group);
    // Write one user section's bytecodes and name its relocation section.
    void OutputSection(Section& sect, StringTable& shstrtab);

    // OutputBytecode overrides
    bool ConvertValueToBytes(Value& value,
                             Location loc,
                             NumericOutput& num_out);
    bool ConvertSymbolToBytes(SymbolRef sym,
                              Location loc,
                              NumericOutput& num_out);

    // True if some value required GOT-relative handling and no
    // _GLOBAL_OFFSET_TABLE_ symbol existed at construction time.
    bool NeedsGOT() const { return m_needs_GOT; }

private:
    ElfObject& m_objfmt;
    Object& m_object;
    raw_fd_ostream& m_fd_os;        // same stream as m_os, kept for seek()
    BytecodeNoOutput m_no_output;   // sink used for BSS sections
    SymbolRef m_GOT_sym;            // _GLOBAL_OFFSET_TABLE_, if present
    bool m_needs_GOT;
};
} // anonymous namespace
// Construct the ELF bytecode outputter; looks up a pre-existing
// _GLOBAL_OFFSET_TABLE_ symbol (may be null; NeedsGOT() reports whether
// one must be created after output).
ElfOutput::ElfOutput(raw_fd_ostream& os,
                     ElfObject& objfmt,
                     Object& object,
                     DiagnosticsEngine& diags)
    : BytecodeStreamOutput(os, diags)
    , m_objfmt(objfmt)
    , m_object(object)
    , m_fd_os(os)
    , m_no_output(diags)
    , m_GOT_sym(object.FindSymbol("_GLOBAL_OFFSET_TABLE_"))
    , m_needs_GOT(false)
{
}
// Out-of-line destructor (anchor for the vtable); nothing to release.
ElfOutput::~ElfOutput()
{
}
// Emit a bare symbol reference: output a zero placeholder of the
// requested size and attach a (non-PC-relative) relocation for the
// symbol to the current section.
bool
ElfOutput::ConvertSymbolToBytes(SymbolRef sym,
                                Location loc,
                                NumericOutput& num_out)
{
    std::auto_ptr<ElfReloc> reloc =
        m_objfmt.m_machine->MakeReloc(sym, loc.getOffset());
    if (reloc->setRel(false, m_GOT_sym, num_out.getSize(), false))
    {
        // allocate .rel[a] sections on a need-basis
        Section* sect = loc.bc->getContainer()->getSection();
        sect->AddReloc(std::auto_ptr<Reloc>(reloc.release()));
    }
    else
    {
        Diag(num_out.getSource(), diag::err_reloc_invalid_size);
    }

    // The relocation carries the symbol; the in-section bytes are zero.
    m_object.getArch()->setEndian(num_out.getBytes());
    num_out.OutputInteger(0);
    return true;
}
// Convert a value to output bytes, generating an ELF relocation for any
// relative (symbolic) portion that cannot be resolved at assembly time.
// Returns false on values too complex to express as a single relocation.
bool
ElfOutput::ConvertValueToBytes(Value& value,
                               Location loc,
                               NumericOutput& num_out)
{
    m_object.getArch()->setEndian(num_out.getBytes());

    IntNum intn(0);
    // Fully resolvable values need no relocation.
    if (value.OutputBasic(num_out, &intn, getDiagnostics()))
        return true;

    if (value.isRelative())
    {
        // We can't handle these types of values
        if (value.isSegOf() || value.isSectionRelative() ||
            value.getRShift() > 0 || value.getShift() > 0)
        {
            Diag(value.getSource().getBegin(), diag::err_reloc_too_complex);
            return false;
        }

        SymbolRef sym = value.getRelative();
        SymbolRef wrt = value.getWRT();

        // create GOT symbol if we don't already have one.
        if (wrt && !m_GOT_sym && isWRTElfNeedsGOT(*wrt))
            m_needs_GOT = true;

        // "..sym" WRT means "relocate against the symbol itself": drop it.
        if (wrt && wrt == m_objfmt.m_dotdotsym)
            wrt = SymbolRef(0);
        else if (wrt && isWRTElfSymRelative(*wrt))
            ;
        else if (wrt && isWRTElfPosAdjusted(*wrt))
            intn += loc.getOffset();
        else if (isLocal(*sym))
        {
            // Local symbols need relocation to their section's start, and
            // add in the offset of the bytecode (within the target section)
            // into the abs portion.
            //
            // This is only done if the symbol is relocated against the
            // section instead of the symbol itself.
            // Also, if the symbol's section has the merge flag set, we can't
            // relocate against the section.
            Location symloc;
            if (sym->getLabel(&symloc))
            {
                Section* sym_sect = symloc.bc->getContainer()->getSection();
                // NOTE(review): assumes every label's section carries
                // ElfSection assoc data -- confirm against BuildSymbol/
                // AppendSection invariants.
                ElfSection* elfsect = sym_sect->getAssocData<ElfSection>();
                if ((elfsect->getFlags() & SHF_MERGE) == 0)
                {
                    // Relocate to section start
                    sym = sym_sect->getSymbol();
                    intn += symloc.getOffset();
                }
            }
        }

        bool pc_rel = false;
        IntNum intn2;
        if (value.CalcPCRelSub(&intn2, loc))
        {
            // Create PC-relative relocation type and fix up absolute portion.
            pc_rel = true;
            intn += intn2;
        }
        else if (value.hasSubRelative())
        {
            Diag(value.getSource().getBegin(), diag::err_reloc_too_complex);
            return false;
        }

        // Create relocation
        Section* sect = loc.bc->getContainer()->getSection();
        std::auto_ptr<ElfReloc> reloc =
            m_objfmt.m_machine->MakeReloc(sym, loc.getOffset());
        if (wrt)
        {
            if (!reloc->setWrt(wrt, value.getSize()))
            {
                Diag(value.getSource().getBegin(), diag::err_invalid_wrt);
            }
        }
        else
        {
            if (!reloc->setRel(pc_rel, m_GOT_sym, value.getSize(),
                               value.isSigned()))
            {
                Diag(value.getSource().getBegin(),
                     diag::err_reloc_invalid_size);
            }
        }
        if (reloc->isValid())
        {
            // REL vs RELA: the addend may live in the section data, so
            // let the reloc decide what to do with the absolute portion.
            reloc->HandleAddend(&intn, m_objfmt.m_config, value.getInsnStart());
            sect->AddReloc(std::auto_ptr<Reloc>(reloc.release()));
        }
    }

    num_out.OutputInteger(intn);
    return true;
}
// Write a SHT_GROUP section's contents at the current file position:
// a 32-bit flags word followed by the (sorted, deduplicated) section
// header indices of the group members.
void
ElfOutput::OutputGroup(ElfGroup& group)
{
    uint64_t pos = m_os.tell();
    if (m_os.has_error())
    {
        Diag(SourceLocation(), diag::err_file_output_position);
        return;
    }
    m_fd_os.seek(group.elfsect->setFileOffset(pos));
    if (m_os.has_error())
    {
        Diag(SourceLocation(), diag::err_file_output_seek);
        return;
    }

    Bytes& scratch = getScratch();
    m_objfmt.m_config.setEndian(scratch);

    // sort and uniquify sections before output
    std::sort(group.sects.begin(), group.sects.end());
    std::vector<Section*>::iterator it =
        std::unique(group.sects.begin(), group.sects.end());
    group.sects.resize(it - group.sects.begin());

    Write32(scratch, group.flags);
    for (std::vector<Section*>::const_iterator i=group.sects.begin(),
         end=group.sects.end(); i != end; ++i)
    {
        ElfSection* elfsect = (*i)->getAssocData<ElfSection>();
        assert(elfsect != 0);
        Write32(scratch, elfsect->getIndex());
    }
    group.elfsect->setSize(scratch.size());
    OutputBytes(scratch, SourceLocation());
}
// Write one user section's bytecodes to the file (BSS sections are
// size-accounted but not written), record its file offset/size/name in
// the ElfSection, and, if it has relocations, reserve the .rel[a].foo
// name in shstrtab.
void
ElfOutput::OutputSection(Section& sect, StringTable& shstrtab)
{
    BytecodeOutput* outputter = this;

    ElfSection* elfsect = sect.getAssocData<ElfSection>();
    assert(elfsect != 0);

    // Alignment wasn't set by a directive; inherit the section's own.
    if (elfsect->getAlign() == 0)
        elfsect->setAlign(sect.getAlign());

    elfsect->setName(shstrtab.getIndex(sect.getName()));

    uint64_t pos;
    if (sect.isBSS())
    {
        // Don't output BSS sections.
        outputter = &m_no_output;
        pos = 0;    // position = 0 because it's not in the file
    }
    else
    {
        pos = m_os.tell();
        if (m_os.has_error())
        {
            Diag(SourceLocation(), diag::err_file_output_position);
            return;
        }
        m_fd_os.seek(elfsect->setFileOffset(pos));
        if (m_os.has_error())
        {
            Diag(SourceLocation(), diag::err_file_output_seek);
            return;
        }
    }

    // Output bytecodes
    for (Section::bc_iterator i=sect.bytecodes_begin(),
         end=sect.bytecodes_end(); i != end; ++i)
    {
        if (i->Output(*outputter))
            elfsect->AddSize(i->getTotalLen());
    }
    if (getDiagnostics().hasErrorOccurred())
        return;

    // Sanity check final section size
    assert(elfsect->getSize() == sect.bytecodes_back().getNextOffset());

    // Empty? Go on to next section
    if (elfsect->isEmpty())
        return;

    // No relocations? Go on to next section
    if (sect.getRelocs().size() == 0)
        return;

    // name the relocation section .rel[a].foo
    std::string relname = m_objfmt.m_config.getRelocSectionName(sect.getName());
    elfsect->setRelName(shstrtab.getIndex(relname));
}
// Advance the output file position to the next multiple of the given
// power-of-two alignment (no-op if already aligned).  Returns the
// resulting file position, or 0 after reporting an output error.
static unsigned long
ElfAlignOutput(raw_fd_ostream& os,
               unsigned int align,
               DiagnosticsEngine& diags)
{
    assert(isExp2(align) && "requested alignment not a power of two");

    uint64_t pos = os.tell();
    if (os.has_error())
    {
        diags.Report(SourceLocation(), diag::err_file_output_position);
        return 0;
    }

    // Round up with the usual power-of-two mask trick.
    uint64_t aligned = (pos + align - 1) & ~static_cast<uint64_t>(align - 1);
    if (aligned != pos)
    {
        os.seek(aligned);
        if (os.has_error())
        {
            diags.Report(SourceLocation(), diag::err_file_output_seek);
            return 0;
        }
    }
    return static_cast<unsigned long>(aligned);
}
// Write the complete ELF object file: symbol finalization, section data,
// group sections, string/symbol tables, relocation sections, section
// headers, and finally the ELF header (written last, at offset 0).
void
ElfObject::Output(raw_fd_ostream& os,
                  bool all_syms,
                  DebugFormat& dbgfmt,
                  DiagnosticsEngine& diags)
{
    StringTable shstrtab, strtab;
    // Table alignment in the file depends on ELF class (32 vs 64 bit).
    unsigned int align = (m_config.cls == ELFCLASS32) ? 4 : 8;

    // XXX: ugly workaround to prevent all_syms from kicking in
    if (dbgfmt.getModule().getKeyword() == "cfi")
        all_syms = false;

    // Add filename to strtab and set as .file symbol name
    if (m_file_elfsym)
    {
        m_file_elfsym->setName(strtab.getIndex(m_object.getSourceFilename()));
    }

    // Create .note.GNU-stack if we need to advise linker about executable
    // stack.
    Object::Config& oconfig = m_object.getConfig();
    if (oconfig.ExecStack || oconfig.NoExecStack)
    {
        Section* gnu_stack = m_object.FindSection(".note.GNU-stack");
        if (!gnu_stack)
            gnu_stack = AppendSection(".note.GNU-stack", SourceLocation(),
                                      diags);
        gnu_stack->setAlign(0);

        ElfSection* elfsect = gnu_stack->getAssocData<ElfSection>();
        if (oconfig.ExecStack)
        {
            gnu_stack->setCode(true);
            elfsect->setTypeFlags(SHT_PROGBITS, SHF_EXECINSTR);
        }
        else
        {
            gnu_stack->setCode(false);
            elfsect->setTypeFlags(SHT_PROGBITS, 0);
        }
    }

    // Allocate space for Ehdr by seeking forward
    os.seek(m_config.getProgramHeaderSize());
    if (os.has_error())
    {
        diags.Report(SourceLocation(), diag::err_file_output_seek);
        return;
    }

    // Generate version symbols (.symver directives): rename and/or alias
    // symbols to the ELF "name@version" / "name@@version" convention.
    for (SymVers::const_iterator i=m_symvers.begin(), end=m_symvers.end();
         i != end; ++i)
    {
        SymbolRef sym = m_object.FindSymbol(i->m_real);
        if (!sym)
            continue;
        ElfSymbol& elfsym = BuildSymbol(*sym);

        SmallString<64> newname;
        if (i->m_mode == ElfSymVersion::Standard)
        {
            // rename to name@version
            std::string oldname = sym->getName();
            newname += i->m_name;
            newname += '@';
            newname += i->m_version;
            m_object.RenameSymbol(sym, newname.str());

            // if it was defined, create a new alias.
            if (sym->isDefined())
            {
                SymbolRef sym2 = m_object.getSymbol(oldname);
                ElfSymbol& elfsym2 = BuildSymbol(*sym2);

                // copy visibility and binding
                sym2->Declare(
                    static_cast<Symbol::Visibility>(sym->getVisibility()));
                elfsym2.setBinding(elfsym.getBinding());

                sym2->DefineEqu(Expr(sym));
            }
        }
        else if (i->m_mode == ElfSymVersion::Default)
        {
            // create a name@@version alias pointing at the real symbol
            newname += i->m_name;
            newname += "@@";
            newname += i->m_version;
            SymbolRef sym2 = m_object.getSymbol(newname.str());
            ElfSymbol& elfsym2 = BuildSymbol(*sym2);

            // copy visibility and binding
            sym2->Declare(
                static_cast<Symbol::Visibility>(sym->getVisibility()));
            elfsym2.setBinding(elfsym.getBinding());

            sym2->DefineEqu(Expr(sym));
        }
        else if (i->m_mode == ElfSymVersion::Auto)
        {
            // rename to name@@version (defined) or name@version (undefined)
            std::string oldname = sym->getName();
            newname += i->m_name;
            if (sym->isDefined())
                newname += "@@";
            else
                newname += '@';
            newname += i->m_version;
            m_object.RenameSymbol(sym, newname.str());
        }
    }

    m_config.secthead_count = 0;

    // dummy section header
    ElfSection null_sect(m_config, SHT_NULL, 0);
    null_sect.setIndex(m_config.secthead_count++);

    ElfOutput out(os, *this, m_object, diags);

    // Group sections.
    ElfStringIndex groupname_index = 0;
    for (Groups::iterator i=m_groups.begin(), end=m_groups.end(); i != end; ++i)
    {
        ElfGroup& group = *i;
        group.elfsect.reset(new ElfSection(m_config, SHT_GROUP, 0));
        i->sym = m_object.FindSymbol(i->name);
        ElfSymbol* elfsym = 0;
        if (i->sym)
        {
            // Group signature symbol exists; name the section ".group".
            if (groupname_index == 0)
                groupname_index = shstrtab.getIndex(".group");
            group.elfsect->setName(groupname_index);
            elfsym = i->sym->getAssocData<ElfSymbol>();
            if (!elfsym)
                elfsym = &BuildSymbol(*i->sym);
        }
        else
        {
            // No signature symbol: synthesize a section symbol and use
            // the group name as the section name.
            i->sym = m_object.getSymbol(i->name);
            group.elfsect->setName(shstrtab.getIndex(i->name));
            elfsym = &BuildSymbol(*i->sym);
            elfsym->setType(STT_SECTION);
            elfsym->setSectionIndex(m_config.secthead_count);
        }
        group.elfsect->setIndex(m_config.secthead_count++);
        group.elfsect->setEntSize(4);
        group.elfsect->setAlign(4);
    }

    // Finalize symbol table, handling any objfmt-specific extensions given
    // during parse phase. If all_syms is true, add all local symbols and
    // include name information.
    for (Object::symbol_iterator i=m_object.symbols_begin(),
         end=m_object.symbols_end(); i != end; ++i)
    {
        FinalizeSymbol(*i, strtab, all_syms, diags);
    }

    // Number user sections (numbering required for group sections).
    for (Object::section_iterator i=m_object.sections_begin(),
         end=m_object.sections_end(); i != end; ++i)
    {
        ElfSection* elfsect = i->getAssocData<ElfSection>();
        assert(elfsect != 0);
        elfsect->setIndex(m_config.secthead_count++);
    }

    // Output group sections.
    for (Groups::iterator i=m_groups.begin(), end=m_groups.end(); i != end; ++i)
    {
        out.OutputGroup(*i);
    }

    // Output user sections.
    // Assign indices and names as we go (including relocation section names).
    for (Object::section_iterator i=m_object.sections_begin(),
         end=m_object.sections_end(); i != end; ++i)
    {
        out.OutputSection(*i, shstrtab);
    }

    // Go through relocations and force referenced symbols into symbol table,
    // because relocation needs a symtab index.
    for (Object::section_iterator sect=m_object.sections_begin(),
         endsect=m_object.sections_end(); sect != endsect; ++sect)
    {
        for (Section::reloc_iterator reloc=sect->relocs_begin(),
             endreloc=sect->relocs_end(); reloc != endreloc; ++reloc)
        {
            SymbolRef sym = reloc->getSymbol();
            // NOTE(review): when all_syms is false this re-builds and
            // re-finalizes symbols that already have ElfSymbol data --
            // confirm that is intentional (Finalize appears idempotent).
            if (!all_syms || !sym->getAssocData<ElfSymbol>())
            {
                ElfSymbol& elfsym = BuildSymbol(*sym);
                elfsym.setName(strtab.getIndex(sym->getName()));
                setSymbolSectionValue(*sym, elfsym);
                elfsym.setInTable(true);
                elfsym.Finalize(*sym, diags);
            }
        }
    }

    // Create GOT symbol if required
    SymbolRef GOT_sym = m_object.FindSymbol("_GLOBAL_OFFSET_TABLE_");
    if (!GOT_sym && out.NeedsGOT())
    {
        GOT_sym = m_object.getSymbol("_GLOBAL_OFFSET_TABLE_");
        GOT_sym->Declare(Symbol::EXTERN);
        FinalizeSymbol(*GOT_sym, strtab, false, diags);
    }

    // Partition symbol table to put local symbols first
    stdx::stable_partition(m_object.symbols_begin(), m_object.symbols_end(),
                           isLocal);

    // Number symbols. Start at 2 due to undefined symbol (0)
    // and file symbol (1).
    ElfSymbolIndex symtab_nlocal = 2;
    // The first symbols should be the section names in the same order as the
    // sections themselves.
    for (Object::section_iterator i=m_object.sections_begin(),
         end=m_object.sections_end(); i != end; ++i)
    {
        SymbolRef sectsym = i->getSymbol();
        ElfSymbol* elfsectsym = sectsym->getAssocData<ElfSymbol>();
        elfsectsym->setSymbolIndex(symtab_nlocal++);
    }
    // The remainder of the symbols.
    m_config.AssignSymbolIndices(m_object, &symtab_nlocal);

    // Sort the symbols by symbol index.
    stdx::sort(m_object.symbols_begin(), m_object.symbols_end(), byIndex);

    unsigned long offset, size;
    ElfStringIndex shstrtab_name = shstrtab.getIndex(".shstrtab");
    ElfStringIndex strtab_name = shstrtab.getIndex(".strtab");
    ElfStringIndex symtab_name = shstrtab.getIndex(".symtab");

    // section header string table (.shstrtab)
    offset = ElfAlignOutput(os, align, diags);
    size = shstrtab.getSize();
    shstrtab.Write(os);

    ElfSection shstrtab_sect(m_config, SHT_STRTAB, 0);
    m_config.shstrtab_index = m_config.secthead_count;
    shstrtab_sect.setName(shstrtab_name);
    shstrtab_sect.setIndex(m_config.secthead_count++);
    shstrtab_sect.setFileOffset(offset);
    shstrtab_sect.setSize(size);

    // string table (.strtab)
    offset = ElfAlignOutput(os, align, diags);
    size = strtab.getSize();
    strtab.Write(os);

    ElfSection strtab_sect(m_config, SHT_STRTAB, 0);
    strtab_sect.setName(strtab_name);
    strtab_sect.setIndex(m_config.secthead_count++);
    strtab_sect.setFileOffset(offset);
    strtab_sect.setSize(size);

    // symbol table (.symtab)
    offset = ElfAlignOutput(os, align, diags);
    size = m_config.WriteSymbolTable(os, m_object, diags, out.getScratch());

    ElfSection symtab_sect(m_config, SHT_SYMTAB, 0, true);
    symtab_sect.setName(symtab_name);
    symtab_sect.setIndex(m_config.secthead_count++);
    symtab_sect.setFileOffset(offset);
    symtab_sect.setSize(size);
    symtab_sect.setInfo(symtab_nlocal);
    symtab_sect.setLink(strtab_sect.getIndex());    // link to .strtab

    // output relocations
    for (Object::section_iterator i=m_object.sections_begin(),
         end=m_object.sections_end(); i != end; ++i)
    {
        // No relocations to output? Go on to next section
        if (i->getRelocs().size() == 0)
            continue;

        ElfSection* elfsect = i->getAssocData<ElfSection>();
        assert(elfsect != 0);

        // need relocation section; set it up
        elfsect->setRelIndex(m_config.secthead_count++);
        elfsect->WriteRelocs(os, *i, out.getScratch(), *m_machine, diags);
    }

    // output section header table
    m_config.secthead_pos = ElfAlignOutput(os, 16, diags);

#if 0
    // stabs debugging support
    if (strcmp(yasm_dbgfmt_keyword(object->dbgfmt), "stabs")==0)
    {
        Section* stabsect = m_object.find_section(".stab");
        Section* stabstrsect = m_object.find_section(".stabstr");
        if (stabsect && stabstrsect)
        {
            ElfSection* stab = get_elf(*stabsect);
            ElfSection* stabstr = get_elf(*stabstrsect);
            assert(stab && stabstr && "missing .stab or .stabstr section/data");
            stab->set_link(stabstr->get_index());
        }
    }
#endif

    // null section header
    null_sect.Write(os, out.getScratch());

    // group section headers
    for (Groups::iterator i=m_groups.begin(), end=m_groups.end(); i != end; ++i)
    {
        ElfGroup& group = *i;
        group.elfsect->setLink(symtab_sect.getIndex());
        ElfSymbol* elfsym = i->sym->getAssocData<ElfSymbol>();
        group.elfsect->setInfo(elfsym->getSymbolIndex());
        group.elfsect->Write(os, out.getScratch());
    }

    // user section headers
    for (Object::section_iterator i=m_object.sections_begin(),
         end=m_object.sections_end(); i != end; ++i)
    {
        ElfSection* elfsect = i->getAssocData<ElfSection>();
        assert(elfsect != 0);
        elfsect->Write(os, out.getScratch());
    }

    // standard section headers
    shstrtab_sect.Write(os, out.getScratch());
    strtab_sect.Write(os, out.getScratch());
    symtab_sect.Write(os, out.getScratch());

    // relocation section headers
    for (Object::section_iterator i=m_object.sections_begin(),
         end=m_object.sections_end(); i != end; ++i)
    {
        ElfSection* elfsect = i->getAssocData<ElfSection>();
        assert(elfsect != 0);
        // relocation entries for .foo are stored in section .rel[a].foo
        elfsect->WriteRel(os, symtab_sect.getIndex(), *i, out.getScratch());
    }

    // output Ehdr
    os.seek(0);
    if (os.has_error())
    {
        diags.Report(SourceLocation(), diag::err_file_output_seek);
        return;
    }
    m_config.WriteProgramHeader(os, out.getScratch());
}
// Create the default output section (.text), marked as a default so a
// later explicit section directive can still override its attributes.
Section*
ElfObject::AddDefaultSection()
{
    // AppendSection requires a diagnostics engine; build a throwaway one
    // since no user source location is involved here.
    IntrusiveRefCntPtr<DiagnosticIDs> ids(new DiagnosticIDs);
    DiagnosticsEngine scratch_diags(ids);
    Section* text = AppendSection(".text", SourceLocation(), scratch_diags);
    text->setDefault(true);
    return text;
}
// Create a new section with ELF-appropriate defaults derived from
// well-known section names (type, flags, alignment), append it to the
// object, define a start label for it, and attach its ElfSection
// associated data.  Returns the new section.
//
// Fix: the start-label initializer had been corrupted by an HTML-entity
// mangling ("&sect" rendered as the section-sign character), which does
// not compile; restored to "&section->bytecodes_front()".
Section*
ElfObject::AppendSection(StringRef name,
                         SourceLocation source,
                         DiagnosticsEngine& diags)
{
    ElfSectionType type = SHT_PROGBITS;
    ElfSectionFlags flags = SHF_ALLOC;
    unsigned long align = 4;
    if (name == ".bss")
    {
        type = SHT_NOBITS;
        flags = SHF_ALLOC + SHF_WRITE;
    }
    else if (name == ".data" || name == ".ctors" || name == ".dtors")
    {
        type = SHT_PROGBITS;
        flags = SHF_ALLOC + SHF_WRITE;
    }
    else if (name == ".tdata")
    {
        type = SHT_PROGBITS;
        flags = SHF_ALLOC + SHF_WRITE + SHF_TLS;
    }
    else if (name == ".rodata")
    {
        type = SHT_PROGBITS;
        flags = SHF_ALLOC;
    }
    else if (name == ".eh_frame")
    {
        flags = SHF_ALLOC;
#ifdef __sun
        // Solaris toolchains expect unwind data in SHT_UNWIND sections.
        type = SHT_UNWIND;
        if (m_config.cls == ELFCLASS32)
            flags |= SHF_WRITE;
#else
        type = SHT_PROGBITS;
#endif
    }
    else if (name == ".text")
    {
        align = 16;
        type = SHT_PROGBITS;
        flags = SHF_ALLOC + SHF_EXECINSTR;
    }
    else if (name == ".comment")
    {
        align = 0;
        type = SHT_PROGBITS;
        flags = 0;
    }
    else if (name.startswith(".debug_"))
    {
        align = 0;
        type = SHT_PROGBITS;
        flags = 0;
    }
    else
    {
        // Default to code, but align=1
        align = 1;
    }

    bool code = (flags & SHF_EXECINSTR) != 0;
    bool bss = (type == SHT_NOBITS);
    Section* section = new Section(name, code, bss, source);
    m_object.AppendSection(std::auto_ptr<Section>(section));
    section->setAlign(align);

    // Define a label for the start of the section
    Location start = {&section->bytecodes_front(), 0};
    SymbolRef sym = m_object.getSymbol(name);
    if (!sym->isDefined())
    {
        sym->DefineLabel(start);
        sym->setDefSource(source);
    }
    section->setSymbol(sym);

    // Add ELF data to the section
    ElfSection* elfsect = new ElfSection(m_config, type, flags);
    section->AddAssocData(std::auto_ptr<ElfSection>(elfsect));

    return section;
}
// Handle the gas-syntax .section directive:
//   .section "name"[, "flags"[, @type[, entsize][, "groupname"[, comdat]]]]
// Creates/selects the section, then parses the flag string, the @type
// keyword, the SHF_MERGE entity size, and SHF_GROUP group membership.
void
ElfObject::DirGasSection(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    assert(info.isObject(m_object));
    NameValues& nvs = info.getNameValues();
    // NOTE(review): assumes the directive framework guarantees at least
    // one name/value here (the section name).

    NameValues::iterator nv = nvs.begin();
    if (!nv->isString())
    {
        diags.Report(nv->getValueRange().getBegin(),
                     diag::err_value_string_or_id);
        return;
    }
    StringRef sectname = nv->getString();

    Section* sect = m_object.FindSection(sectname);
    if (!sect)
        sect = AppendSection(sectname, info.getSource(), diags);

    m_object.setCurSection(sect);
    sect->setDefault(false);

    ++nv;
    // No name/values, so nothing more to do
    if (nv == nvs.end())
        return;

    // Section flags must be a string.
    if (!nv->isString())
    {
        diags.Report(nv->getValueRange().getBegin(),
                     diag::err_expected_flag_string);
        return;
    }

    // Parse section flags
    ElfSection* elfsect = sect->getAssocData<ElfSection>();
    assert(elfsect != 0);

    int flags = 0, type = elfsect->getType();
    StringRef flagstr = nv->getString();

    // Single-letter flags, matching gas: a=alloc w=write x=exec M=merge
    // S=strings G=group T=tls; anything else gets a per-character warning.
    for (size_t i=0; i<flagstr.size(); ++i)
    {
        switch (flagstr[i])
        {
            case 'a':
                flags |= SHF_ALLOC;
                break;
            case 'w':
                flags |= SHF_WRITE;
                break;
            case 'x':
                flags |= SHF_EXECINSTR;
                break;
            case 'M':
                flags |= SHF_MERGE;
                break;
            case 'S':
                flags |= SHF_STRINGS;
                break;
            case 'G':
                flags |= SHF_GROUP;
                break;
            case 'T':
                flags |= SHF_TLS;
                break;
            default:
            {
                char print_flag[2] = {flagstr[i], 0};
                diags.Report(nv->getValueRange().getBegin()
                             .getLocWithOffset(i),
                             diag::warn_unrecognized_section_attribute)
                    << print_flag;
            }
        }
    }

    ++nv;

    // Parse section type
    if (nv != nvs.end())
    {
        // gas requires the type keyword to be introduced by '@'.
        if (!nv->isToken() || nv->getToken().isNot(Token::at))
        {
            diags.Report(nv->getValueRange().getBegin(),
                         diag::err_expected_at);
            return;
        }

        ++nv;
        if (nv == nvs.end())
        {
            diags.Report((nv-1)->getValueRange().getEnd(),
                         diag::err_expected_ident);
            return;
        }

        if (!nv->isId())
        {
            diags.Report(nv->getValueRange().getBegin(),
                         diag::err_expected_ident);
            return;
        }

        StringRef typestr = nv->getId();
        if (typestr == "progbits")
            type = SHT_PROGBITS;
        else if (typestr == "nobits")
            type = SHT_NOBITS;
        else if (typestr == "note")
            type = SHT_NOTE;
        else if (typestr == "init_array")
            type = SHT_INIT_ARRAY;
        else if (typestr == "fini_array")
            type = SHT_FINI_ARRAY;
        else if (typestr == "preinit_array")
            type = SHT_PREINIT_ARRAY;
        else if (typestr == "unwind")
#if defined(__FreeBSD_version) && __FreeBSD_version < 900000
            // work around a bug in ld 2.15
            type = SHT_PROGBITS;
#else
            type = SHT_UNWIND;
#endif
        ++nv;
    }

    // Handle merge entity size
    if ((flags & SHF_MERGE) != 0)
    {
        if (nv != nvs.end())
        {
            IntNum merge;
            bool merge_ok;
            DirIntNum(*nv, diags, &m_object, &merge, &merge_ok);
            if (!merge_ok)
                return;
            elfsect->setEntSize(merge.getUInt());
            ++nv;
        }
        else
        {
            // 'M' given but no entity size: warn and drop the flag.
            diags.Report((nv-1)->getValueRange().getEnd(),
                         diag::warn_expected_merge_entity_size);
            flags &= ~SHF_MERGE;
        }
    }

    // Handle group name
    if ((flags & SHF_GROUP) != 0)
    {
        if (nv == nvs.end())
        {
            diags.Report((nv-1)->getValueRange().getEnd(),
                         diag::err_expected_group_name);
            return;
        }
        if (!nv->isString())
        {
            diags.Report(nv->getValueRange().getBegin(),
                         diag::err_value_string_or_id);
            return;
        }
        // Look up or create the named group and add this section to it.
        llvm::StringMapEntry<ElfGroup*>& entry =
            m_group_map.GetOrCreateValue(nv->getString());
        ElfGroup* group;
        if (entry.getValue() != 0)
            group = entry.getValue();
        else
        {
            group = new ElfGroup;
            group->name = nv->getString();
            entry.setValue(group);
            m_groups.push_back(group);
        }
        group->sects.push_back(sect);
        ++nv;

        // look for comdat flag
        if (nv != nvs.end() && nv->isId() && nv->getId() == "comdat")
        {
            group->flags |= 1;
            ++nv;
        }

        // also treat sections named ".gnu.linkonce" as comdat
        if (sectname.startswith(".gnu.linkonce"))
            group->flags |= 1;
    }

    elfsect->setTypeFlags(static_cast<ElfSectionType>(type),
                          static_cast<ElfSectionFlags>(flags));
    sect->setBSS(type == SHT_NOBITS);
    sect->setCode((flags & SHF_EXECINSTR) != 0);
}
// Handle the NASM-style SECTION directive: create/select the section and
// parse its keyword qualifiers (alloc/exec/write/tls and their "no"
// variants, progbits/nobits, align=, merge=).  Qualifiers on a section
// already seen non-default are ignored with a warning.
void
ElfObject::DirSection(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    assert(info.isObject(m_object));
    NameValues& nvs = info.getNameValues();

    NameValue& sectname_nv = nvs.front();
    if (!sectname_nv.isString())
    {
        diags.Report(sectname_nv.getValueRange().getBegin(),
                     diag::err_value_string_or_id);
        return;
    }
    StringRef sectname = sectname_nv.getString();

    Section* sect = m_object.FindSection(sectname);
    bool first = true;
    if (sect)
        first = sect->isDefault();
    else
        sect = AppendSection(sectname, info.getSource(), diags);

    m_object.setCurSection(sect);
    sect->setDefault(false);

    // No name/values, so nothing more to do
    if (nvs.size() <= 1)
        return;

    // Ignore flags if we've seen this section before
    if (!first)
    {
        diags.Report(info.getSource(), diag::warn_section_redef_flags);
        return;
    }

    // Parse section flags
    ElfSection* elfsect = sect->getAssocData<ElfSection>();
    assert(elfsect != 0);
    IntNum align;
    bool has_align = false;
    IntNum merge;
    bool has_merge = false;
    unsigned long type = elfsect->getType();
    unsigned long flags = elfsect->getFlags();

    DirHelpers helpers;
    // Paired enable/disable keywords for each togglable section flag.
    static const struct
    {
        const char* enable;
        const char* disable;
        unsigned int flag;
    }
    name_flags[] =
    {
        {"alloc",   "noalloc",  SHF_ALLOC},
        {"exec",    "noexec",   SHF_EXECINSTR},
        {"write",   "nowrite",  SHF_WRITE},
        {"tls",     "notls",    SHF_TLS},
    };
    for (size_t i=0; i<sizeof(name_flags)/sizeof(name_flags[0]); ++i)
    {
        helpers.Add(name_flags[i].enable, false,
                    TR1::bind(&DirSetFlag, _1, _2, &flags, name_flags[i].flag));
        helpers.Add(name_flags[i].disable, false,
                    TR1::bind(&DirClearFlag, _1, _2, &flags,
                              name_flags[i].flag));
    }
    // "noprogbits" is accepted as a synonym for "nobits".
    helpers.Add("noprogbits", false,
                TR1::bind(&DirResetFlag, _1, _2, &type, SHT_NOBITS));
    helpers.Add("nobits", false,
                TR1::bind(&DirResetFlag, _1, _2, &type, SHT_NOBITS));
    helpers.Add("progbits", false,
                TR1::bind(&DirResetFlag, _1, _2, &type, SHT_PROGBITS));
    helpers.Add("align", true, TR1::bind(&DirIntNumPower2, _1, _2, &m_object,
                                         &align, &has_align));
    helpers.Add("merge", true, TR1::bind(&DirIntNum, _1, _2, &m_object,
                                         &merge, &has_merge));
    // Skip the section name (first value); warn on unrecognized ones.
    helpers(++nvs.begin(), nvs.end(), info.getSource(), diags,
            DirNameValueWarn);

    // handle align
    if (has_align)
        sect->setAlign(align.getUInt());

    // Handle merge entity size
    if (has_merge)
    {
        flags |= SHF_MERGE;
        elfsect->setEntSize(merge.getUInt());
    }

    elfsect->setTypeFlags(static_cast<ElfSectionType>(type),
                          static_cast<ElfSectionFlags>(flags));
    sect->setBSS(type == SHT_NOBITS);
    sect->setCode((flags & SHF_EXECINSTR) != 0);
}
// Handle the .type directive: set the ELF symbol type of the named
// symbol.  Accepts gas syntax with an optional leading '@' before the
// type keyword; unrecognized type keywords only warn.
void
ElfObject::DirType(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    assert(info.isObject(m_object));
    NameValues& nvs = info.getNameValues();
    NameValues::iterator nv = nvs.begin(), nv_end = nvs.end();

    SymbolRef sym = info.getObject().getSymbol(nv->getId());
    sym->Use(info.getSource());
    ++nv;

    ElfSymbol& elfsym = BuildSymbol(*sym);

    // Throw away @ sign if provided (gas syntax)
    if (nv != nv_end && nv->isToken() && nv->getToken().is(Token::at))
        ++nv;

    // Pull new type from param
    if (nv == nv_end)
    {
        diags.Report((nv-1)->getValueRange().getEnd(), diag::err_expected_ident);
        return;
    }
    if (!nv->isId())
    {
        diags.Report(nv->getValueRange().getBegin(), diag::err_expected_ident);
        return;
    }

    StringRef typestr = nv->getId();
    if (typestr.equals_lower("notype"))
        elfsym.setType(STT_NOTYPE);
    else if (typestr.equals_lower("function"))
        elfsym.setType(STT_FUNC);
    else if (typestr.equals_lower("object"))
        elfsym.setType(STT_OBJECT);
    else if (typestr.equals_lower("tls_object"))
        elfsym.setType(STT_TLS);
    else
        diags.Report(nv->getValueRange().getBegin(),
                     diag::warn_unrecognized_symbol_type) << typestr;
}
// Handle the .size directive: attach a size expression to the named
// symbol's ELF data.
void
ElfObject::DirSize(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    assert(info.isObject(m_object));
    NameValues& nvs = info.getNameValues();

    NameValue& symname_nv = nvs.front();
    SymbolRef sym = info.getObject().getSymbol(symname_nv.getId());
    sym->Use(symname_nv.getValueRange().getBegin());

    // The second value is the size expression; it is required.
    if (nvs.size() < 2)
    {
        diags.Report(info.getSource(), diag::err_no_size);
        return;
    }
    NameValue& size_nv = nvs[1];
    if (!size_nv.isExpr())
    {
        diags.Report(info.getSource(), diag::err_size_expression)
            << size_nv.getValueRange();
        return;
    }

    Expr size_expr = size_nv.getExpr(info.getObject());
    BuildSymbol(*sym).setSize(size_expr, size_nv.getValueRange().getBegin());
}
// Handle the .weak directive: declare each listed symbol global and set
// its ELF binding to STB_WEAK.  Non-identifier values are diagnosed and
// skipped; the rest of the list is still processed.
void
ElfObject::DirWeak(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    assert(info.isObject(m_object));
    NameValues& nvs = info.getNameValues();
    for (NameValues::iterator it = nvs.begin(), it_end = nvs.end();
         it != it_end; ++it)
    {
        if (!it->isId())
        {
            diags.Report(it->getValueRange().getBegin(), diag::err_value_id);
            continue;
        }
        SymbolRef sym = info.getObject().getSymbol(it->getId());
        sym->CheckedDeclare(Symbol::GLOBAL, it->getValueRange().getBegin(),
                            diags);
        BuildSymbol(*sym).setBinding(STB_WEAK);
    }
}
// Handle the .weakref directive: make the first symbol a weak reference
// aliased (via equ) to the second.  The target is marked as the
// referenced side unless it already has ELF symbol data.
void
ElfObject::DirWeakRef(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    assert(info.isObject(m_object));
    NameValues& nvs = info.getNameValues();

    // Exactly two identifier arguments: alias, target.
    bool args_ok = (nvs.size() == 2 && nvs[0].isId() && nvs[1].isId());
    if (!args_ok)
    {
        diags.Report(info.getSource(), diag::err_value_id);
        return;
    }

    SymbolRef alias = info.getObject().getSymbol(nvs[0].getId());
    BuildSymbol(*alias).setWeakRef(true);

    SymbolRef target = info.getObject().getSymbol(nvs[1].getId());
    if (!target->getAssocData<ElfSymbol>())
        BuildSymbol(*target).setWeakRefr(true);

    alias->CheckedDefineEqu(Expr(target), nvs[0].getValueRange().getBegin(),
                            diags);
}
// Shared implementation of the visibility directives (.internal,
// .hidden, .protected): declare each named symbol global and record the
// requested ELF visibility on it.
//
// Fix: unlike the parallel DirWeak, this loop called nv->getId()
// without first checking nv->isId(); a non-identifier value is now
// diagnosed (err_value_id) and skipped, consistent with DirWeak.
void
ElfObject::VisibilityDir(DirectiveInfo& info,
                         DiagnosticsEngine& diags,
                         ElfSymbolVis vis)
{
    assert(info.isObject(m_object));
    NameValues& namevals = info.getNameValues();
    for (NameValues::iterator nv = namevals.begin(), end = namevals.end();
         nv != end; ++nv)
    {
        // Each value must be a bare identifier (symbol name).
        if (!nv->isId())
        {
            diags.Report(nv->getValueRange().getBegin(), diag::err_value_id);
            continue;
        }
        SymbolRef sym = info.getObject().getSymbol(nv->getId());
        sym->CheckedDeclare(Symbol::GLOBAL, nv->getValueRange().getBegin(),
                            diags);
        ElfSymbol& elfsym = BuildSymbol(*sym);
        elfsym.setVisibility(vis);
    }
}
// .internal directive: mark each listed symbol STV_INTERNAL.
void
ElfObject::DirInternal(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    VisibilityDir(info, diags, STV_INTERNAL);
}
// .hidden directive: mark each listed symbol STV_HIDDEN.
void
ElfObject::DirHidden(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    VisibilityDir(info, diags, STV_HIDDEN);
}
// .protected directive: mark each listed symbol STV_PROTECTED.
void
ElfObject::DirProtected(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    VisibilityDir(info, diags, STV_PROTECTED);
}
// Handle the .symver directive:
//   .symver realname, name@version      (Standard: rename/alias)
//   .symver realname, name@@version     (Default: default-version alias)
//   .symver realname, name@@@version    (Auto: pick @@ / @ by definedness)
// The parsed request is queued in m_symvers and applied during Output().
//
// Fix: three diagnostics dereferenced the name/value end iterator when
// the directive ended early (e.g. ".symver foo, bar" with no '@'); they
// now report at the previous value's end location instead, matching the
// (nv-1) pattern used by the other directive handlers.
void
ElfObject::DirSymVer(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    assert(info.isObject(m_object));
    NameValues& namevals = info.getNameValues();
    NameValues::iterator nv = namevals.begin(), end = namevals.end();

    // NOTE(review): assumes the directive framework guarantees at least
    // one value (the real symbol name).
    StringRef real = nv->getId();
    ++nv;

    // name
    if (nv == end || !nv->isId())
    {
        // Don't dereference the end iterator for the location.
        SourceLocation where = (nv == end)
            ? (nv-1)->getValueRange().getEnd()
            : nv->getValueRange().getBegin();
        diags.Report(where, diag::err_expected_ident);
        return;
    }
    StringRef name = nv->getId();
    ++nv;

    // @, @@, @@@ portion
    int numat = 0;
    while (nv != end && nv->isToken() && nv->getToken().is(Token::at))
    {
        ++numat;
        if (numat > 3)
        {
            diags.Report(nv->getValueRange().getBegin(),
                         diag::err_expected_ident);
            return;
        }
        ++nv;
    }
    if (numat == 0)
    {
        SourceLocation where = (nv == end)
            ? (nv-1)->getValueRange().getEnd()
            : nv->getValueRange().getBegin();
        diags.Report(where, diag::err_expected_at);
        return;
    }

    // version
    if (nv == end || !nv->isId())
    {
        SourceLocation where = (nv == end)
            ? (nv-1)->getValueRange().getEnd()
            : nv->getValueRange().getBegin();
        diags.Report(where, diag::err_expected_ident);
        return;
    }
    StringRef version = nv->getId();

    ElfSymVersion::Mode mode;
    switch (numat)
    {
        case 1: mode = ElfSymVersion::Standard; break;
        case 2: mode = ElfSymVersion::Default; break;
        case 3: mode = ElfSymVersion::Auto; break;
        default:
            assert(false && "unexpected number of @ tokens");
            return;     // unreachable (numat is 1..3); avoids UB if NDEBUG
    }
    m_symvers.push_back(new ElfSymVersion(real, name, version, mode));
}
void
ElfObject::DirIdent(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    // ".ident": append the ident strings to the standard ".comment" section.
    assert(info.isObject(m_object));
    DirIdentCommon(*this, ".comment", info, diags);
}
void
ElfObject::DirVersion(DirectiveInfo& info, DiagnosticsEngine& diags)
{
    // ".version": emit each string operand as an ELF note record in the
    // ".note" section, creating the section on first use.
    assert(info.isObject(m_object));
    // Put version data into .note section
    Section* note = info.getObject().FindSection(".note");
    if (!note)
        note = AppendSection(".note", info.getSource(), diags);
    for (NameValues::const_iterator nv=info.getNameValues().begin(),
         end=info.getNameValues().end(); nv != end; ++nv)
    {
        // Non-string operands are diagnosed and skipped.
        if (!nv->isString())
        {
            diags.Report(nv->getValueRange().getBegin(),
                         diag::err_value_string);
            continue;
        }
        StringRef str = nv->getString();
        EndianState endian;
        m_config.setEndian(endian);
        // Note header: namesz, descsz, type -- all 4-byte words.
        // NOTE(review): the SysV ABI counts the name's terminating NUL in
        // namesz, but str.size() omits it -- confirm whether the string
        // AppendData overload below accounts for the NUL.
        AppendData(*note, str.size(), 4, endian); // name size
        AppendData(*note, 0, 4, endian); // desc size
        AppendData(*note, 1, 4, endian); // type
        AppendData(*note, str, 4, true); // name
        // empty desc
    }
}
std::vector<StringRef>
ElfObject::getDebugFormatKeywords()
{
static const char* keywords[] =
{
"null",
"stabs",
"cfi",
"dwarf",
"dwarfpass",
"dwarf2",
"dwarf2pass"
};
size_t keywords_size = sizeof(keywords)/sizeof(keywords[0]);
return std::vector<StringRef>(keywords, keywords+keywords_size);
}
void
ElfObject::AddDirectives(Directives& dirs, StringRef parser)
{
    // Register ELF-specific directives for the active parser flavor.
    // NASM-style names are bare; GAS/GNU names carry the customary dot.
    static const Directives::Init<ElfObject> nasm_dirs[] =
    {
        {"section", &ElfObject::DirSection, Directives::ARG_REQUIRED},
        {"segment", &ElfObject::DirSection, Directives::ARG_REQUIRED},
        {"type", &ElfObject::DirType, Directives::ID_REQUIRED},
        {"size", &ElfObject::DirSize, Directives::ID_REQUIRED},
        {"weak", &ElfObject::DirWeak, Directives::ID_REQUIRED},
        {"weakref", &ElfObject::DirWeakRef, Directives::ID_REQUIRED},
        {"internal", &ElfObject::DirInternal, Directives::ID_REQUIRED},
        {"hidden", &ElfObject::DirHidden, Directives::ID_REQUIRED},
        {"protected", &ElfObject::DirProtected, Directives::ID_REQUIRED},
        {"ident", &ElfObject::DirIdent, Directives::ANY},
    };
    static const Directives::Init<ElfObject> gas_dirs[] =
    {
        {".section", &ElfObject::DirGasSection, Directives::ARG_REQUIRED},
        {".type", &ElfObject::DirType, Directives::ID_REQUIRED},
        {".size", &ElfObject::DirSize, Directives::ID_REQUIRED},
        {".weak", &ElfObject::DirWeak, Directives::ID_REQUIRED},
        {".weakref", &ElfObject::DirWeakRef, Directives::ID_REQUIRED},
        {".internal", &ElfObject::DirInternal, Directives::ID_REQUIRED},
        {".hidden", &ElfObject::DirHidden, Directives::ID_REQUIRED},
        {".protected", &ElfObject::DirProtected, Directives::ID_REQUIRED},
        {".symver", &ElfObject::DirSymVer, Directives::ID_REQUIRED},
        {".ident", &ElfObject::DirIdent, Directives::ANY},
        {".version", &ElfObject::DirVersion, Directives::ARG_REQUIRED},
    };
    if (parser.equals_lower("nasm"))
        dirs.AddArray(this, nasm_dirs);
    else if (parser.equals_lower("gas") || parser.equals_lower("gnu"))
        dirs.AddArray(this, gas_dirs);
    // Other parsers get no ELF-specific directives.
}
#if 0
static const char *elf_nasm_stdmac[] = {
"%imacro type 1+.nolist",
"[type %1]",
"%endmacro",
"%imacro size 1+.nolist",
"[size %1]",
"%endmacro",
"%imacro weak 1+.nolist",
"[weak %1]",
"%endmacro",
NULL
};
static const yasm_stdmac elf_objfmt_stdmacs[] = {
{ "nasm", "nasm", elf_nasm_stdmac },
{ NULL, NULL, NULL }
};
#endif
void
yasm_objfmt_elf_DoRegister()
{
    // Register the generic ELF object format plus the explicit 32-bit,
    // 64-bit, and x32 variants under their module names.
    RegisterModule<ObjectFormatModule,
                   ObjectFormatModuleImpl<ElfObject> >("elf");
    RegisterModule<ObjectFormatModule,
                   ObjectFormatModuleImpl<Elf32Object> >("elf32");
    RegisterModule<ObjectFormatModule,
                   ObjectFormatModuleImpl<Elf64Object> >("elf64");
    RegisterModule<ObjectFormatModule,
                   ObjectFormatModuleImpl<Elfx32Object> >("elfx32");
}
|
import {getElement} from "./functions";
// Factory for a simple modal controller.  All methods return the
// controller itself so calls can be chained.
var modal = function (modalId) {
    const root = getElement(modalId);
    const alertBox = root.querySelector('.alert.error');

    return {
        // Reveal the modal (with a clean error banner) and its overlay.
        show: function () {
            alertBox.classList.remove('show');
            root.classList.add('show');
            getElement('.modal-overlay').classList.add('show');
            return this;
        },
        // Display an error message inside the modal, auto-hidden after 5s.
        showError: function (message) {
            alertBox.classList.add('show');
            alertBox.textContent = message;
            setTimeout(function () {
                alertBox.classList.remove('show');
            }, 5000);
            return this;
        },
        // Hide the modal and its overlay.
        close: function () {
            root.classList.remove('show');
            getElement('.modal-overlay').classList.remove('show');
            return this;
        },
    };
};
export default modal;
|
A suitable natural language processing task for summarizing a given article is extractive summarization. This task involves extracting key phrases from the text; the extracted phrases are then used to generate an overview of the article by rephrasing them into more concise sentences. An algorithm such as TextRank can be used to perform extractive summarization.
|
#!/bin/sh
set -e

# Database and security mode come from the environment (defaults: redis, secure).
db_type="${EDGEX_DB:-redis}"
secure_mode="${EDGEX_SECURE:-true}"

# Set default config
cp /edgex/config/00-consul.json /consul/config/00-consul.json

# Install the health-check config matching the selected database.
case "$db_type" in
    redis)
        echo "Installing redis health checks"
        cp /edgex/config/database/01-redis.json /consul/config/01-redis.json
        ;;
    mongo)
        echo "Installing mongo health checks"
        cp /edgex/config/database/01-mongo.json /consul/config/01-mongo.json
        ;;
esac

# Install security health checks when running in secure mode.
if [ "$secure_mode" = 'true' ]; then
    echo "Installing security health checks"
    cp /edgex/config/secure/*.json /consul/config/
fi

# Copy health check scripts
cp -r /edgex/scripts/* /consul/scripts/

echo "Chaining to original entrypoint"
exec "docker-entrypoint.sh" "$@"
|
#!/usr/bin/env bash
# Build Pype using existing virtual environment.
# Print the OpenPype ASCII-art banner; the caller wraps it in color codes.
art () {
  cat <<-EOF
             . . .. . ..
             _oOOP3OPP3Op_. .
          .PPpo~·   ··   ~2p.  ··  ····  ·  ·
         ·Ppo · .pPO3Op.· · O:· · · ·
        .3Pp · oP3'· 'P33· · 4 ·· · ·  · ·  ·
       ·~OP    3PO·  .Op3    : · ··  _____  _____  _____
       ·P3O  · oP3oP3O3P' · · ·   · /    /·/    /·/    /
        O3:·   O3p~ ·       ·:· · ·/____/·/____/ /____/
        'P ·   3p3·  oP3~· ·.P:· ·  · ··  ·  ·   · ·· ·  ·   ·
       · ':  · Po'  ·Opo'· .3O· .  o[ by Pype Club ]]]==- - - ·  · ·
          · '_ ..  ·    . _OP3··  ·  ·https://openpype.io·· ·
           ~P3·OPPPO3OP~ · ··  ·
             ·  ' '· ·  ·· · · · ··  ·
EOF
}
# Colors for terminal
RST='\033[0m'             # Text Reset
# Regular Colors
Black='\033[0;30m'        # Black
Red='\033[0;31m'          # Red
Green='\033[0;32m'        # Green
Yellow='\033[0;33m'       # Yellow
Blue='\033[0;34m'         # Blue
Purple='\033[0;35m'       # Purple
Cyan='\033[0;36m'         # Cyan
White='\033[0;37m'        # White
# Bold
BBlack='\033[1;30m'       # Black
BRed='\033[1;31m'         # Red
BGreen='\033[1;32m'       # Green
BYellow='\033[1;33m'      # Yellow
BBlue='\033[1;34m'        # Blue
BPurple='\033[1;35m'      # Purple
BCyan='\033[1;36m'        # Cyan
BWhite='\033[1;37m'       # White
# Bold High Intensity
BIBlack='\033[1;90m'      # Black
BIRed='\033[1;91m'        # Red
BIGreen='\033[1;92m'      # Green
BIYellow='\033[1;93m'     # Yellow
BIBlue='\033[1;94m'       # Blue
BIPurple='\033[1;95m'     # Purple
BICyan='\033[1;96m'       # Cyan
BIWhite='\033[1;97m'      # White
# Keep the raw argument list; parse the flags this script understands and
# stop at "--" so anything after it is passed through untouched.
args=$@
disable_submodule_update=0
while :; do
  case $1 in
    --no-submodule-update)
      disable_submodule_update=1
      ;;
    --)
      shift
      break
      ;;
    *)
      break
  esac
  shift
done
##############################################################################
# Detect required version of python
# Globals:
#   colors
#   PYTHON
# Arguments:
#   None
# Returns:
#   1 when python is missing or the version is outside the supported range
###############################################################################
detect_python () {
  echo -e "${BIGreen}>>>${RST} Using python \c"
  command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.7 installed to continue.${RST}"; return 1; }
  local version_command
  version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))"
  local python_version
  python_version="$(python <<< ${version_command})"
  # Split "major.minor" into $1 and $2.
  oIFS="$IFS"
  IFS=.
  set -- $python_version
  IFS="$oIFS"
  if [ "$1" -ge "3" ] && [ "$2" -ge "6" ] ; then
    if [ "$2" -gt "7" ] ; then
      echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.7.x${RST}"; return 1;
    else
      echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}"
    fi
  else
    # Fix: the old branch re-tested 'command -v python' (always true here),
    # so unsupported old versions passed silently; report and fail instead.
    echo -e "${BIWhite}[${RST} ${BIRed}$1.$2${RST} ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"
    return 1
  fi
}
##############################################################################
# Clean pyc files in specified directory
# Globals:
#   openpype_root (default when no path argument is given)
# Arguments:
#   Optional path to clean
# Returns:
#   None
###############################################################################
clean_pyc () {
  local path
  # Fix: honor the documented optional path argument (was always
  # $openpype_root regardless of arguments).
  path=${1:-$openpype_root}
  echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c"
  # Fix: the old '-path ./build' pattern was relative while find emits
  # absolute paths, so it never matched and the build tree was not
  # protected.  Prune the build directory explicitly, then delete caches.
  find "$path" -path "$path/build" -prune -o -regex '^.*\(__pycache__\|\.py[co]\)$' -delete
  echo -e "${BIGreen}DONE${RST}"
}
##############################################################################
# Return absolute path
# Globals:
#   None
# Arguments:
#   Path to resolve
# Returns:
#   None (prints the absolute path on stdout)
###############################################################################
realpath () {
  # Fix: quote every expansion so paths containing spaces or glob
  # characters resolve correctly instead of being word-split.
  echo "$(cd "$(dirname "$1")" || return; pwd)/$(basename "$1")"
}
# Main
# Orchestrates the whole build: banner, python check, cleanup, Poetry
# bootstrap, optional submodule update, cx_Freeze build and packaging.
main () {
  echo -e "${BGreen}"
  art
  echo -e "${RST}"
  detect_python || return 1
  # Directories
  openpype_root=$(dirname $(dirname "$(realpath ${BASH_SOURCE[0]})"))
  pushd "$openpype_root" > /dev/null || return > /dev/null
  # Read __version__ out of openpype/version.py without importing the package.
  version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);"
  openpype_version="$(python <<< ${version_command})"
  _inside_openpype_tool="1"
  if [[ -z $POETRY_HOME ]]; then
    export POETRY_HOME="$openpype_root/.poetry"
  fi
  echo -e "${BIYellow}---${RST} Cleaning build directory ..."
  rm -rf "$openpype_root/build" && mkdir "$openpype_root/build" > /dev/null
  echo -e "${BIGreen}>>>${RST} Building OpenPype ${BIWhite}[${RST} ${BIGreen}$openpype_version${RST} ${BIWhite}]${RST}"
  echo -e "${BIGreen}>>>${RST} Cleaning cache files ..."
  clean_pyc
  echo -e "${BIGreen}>>>${RST} Reading Poetry ... \c"
  # Bootstrap Poetry and the virtual environment on first run.
  if [ -f "$POETRY_HOME/bin/poetry" ]; then
    echo -e "${BIGreen}OK${RST}"
  else
    echo -e "${BIYellow}NOT FOUND${RST}"
    echo -e "${BIYellow}***${RST} We need to install Poetry and virtual env ..."
    . "$openpype_root/tools/create_env.sh" || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return 1; }
  fi
  if [ "$disable_submodule_update" == 1 ]; then
    echo -e "${BIYellow}***${RST} Not updating submodules ..."
  else
    echo -e "${BIGreen}>>>${RST} Making sure submodules are up-to-date ..."
    git submodule update --init --recursive
  fi
  echo -e "${BIGreen}>>>${RST} Building ..."
  # Platform-specific cx_Freeze targets; the build log is shown on failure.
  if [[ "$OSTYPE" == "linux-gnu"* ]]; then
    "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" build &> "$openpype_root/build/build.log" || { echo -e "${BIRed}------------------------------------------${RST}"; cat "$openpype_root/build/build.log"; echo -e "${BIRed}------------------------------------------${RST}"; echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return 1; }
  elif [[ "$OSTYPE" == "darwin"* ]]; then
    "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" bdist_mac &> "$openpype_root/build/build.log" || { echo -e "${BIRed}------------------------------------------${RST}"; cat "$openpype_root/build/build.log"; echo -e "${BIRed}------------------------------------------${RST}"; echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return 1; }
  fi
  "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/build_dependencies.py"
  if [[ "$OSTYPE" == "darwin"* ]]; then
    # fix code signing issue
    codesign --remove-signature "$openpype_root/build/OpenPype.app/Contents/MacOS/lib/Python"
    # Package a drag-and-drop DMG installer when create-dmg is available.
    if command -v create-dmg > /dev/null 2>&1; then
      create-dmg \
        --volname "OpenPype Installer" \
        --window-pos 200 120 \
        --window-size 600 300 \
        --app-drop-link 100 50 \
        "$openpype_root/build/OpenPype-Installer.dmg" \
        "$openpype_root/build/OpenPype.app"
    else
      echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available."
    fi
  fi
  echo -e "${BICyan}>>>${RST} All done. You will find OpenPype and build log in \c"
  echo -e "${BIWhite}$openpype_root/build${RST} directory."
}
# Run the build and propagate main's failure code as the script exit status.
return_code=0
main || return_code=$?
exit $return_code
|
<reponame>SamOdum/timeuler-mite
import { fail } from "../fail"
/** Resolve the mite account name from the environment, failing loudly when unset. */
export function getMiteAccountName() {
    const name = process.env.MITE_ACCOUNT_NAME!
    if (!name) {
        fail("MITE_ACCOUNT_NAME not found")
    }
    return name
}
/** Resolve the mite API key from the environment, failing loudly when unset. */
export function getMiteAccessToken() {
    const token = process.env.MITE_API_KEY!
    if (!token) {
        fail("MITE_API_KEY not found")
    }
    return token
}
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = syntheticClick;
var _notifier = require('./notifier');
var _constants = require('./constants');
var blockClick = false;       // true while a browser-generated follow-up click must be swallowed
var blockClickTimerID = null; // pending timer that resets the blocking state
var blockClickCounter = 0;    // number of follow-up clicks still to block
// call node.click() on tap, and block a subsequent click fired by the browser if there is one,
// note that blocking the subsequent click event fired by the browser is required because
// when the tap/node.click() call results in a change to the layout of the DOM,
// e.g. hide something, the subsequent click event fired by the browser will land
// on the DOM in it's new layout, and if where the tap occurred now has something different
// that has a click handler, e.g. a link, then the subsequent click event will land on
// and trigger the click handler, which is very bad, so it needs to be blocked.
function syntheticClick(node) {
  // only block one subsequent click event per node.click() call
  blockClickCounter++;
  if (blockClickTimerID !== null) window.clearTimeout(blockClickTimerID);
  // lower the guard so the synthetic click itself passes handleClick,
  // then raise it for the browser's follow-up click
  blockClick = false;
  node.click();
  blockClick = true;
  // reset click blocking if subsequent click isn't added to browser's queue w/in queueTime
  blockClickTimerID = window.setTimeout(function () {
    blockClick = false;
    blockClickCounter = 0;
    blockClickTimerID = null;
  }, _constants.queueTime);
}
// Capture-side click listener: swallow browser-generated clicks that follow
// a syntheticClick() call while the guard is raised.
function handleClick(e) {
  if (!blockClick) return;
  e.stopPropagation();
  e.preventDefault();
  blockClickCounter--;
  // reset click blocking if the number of clicks to block is met
  if (blockClickCounter === 0) {
    blockClick = false;
    if (blockClickTimerID !== null) {
      window.clearTimeout(blockClickTimerID);
      blockClickTimerID = null;
    }
  }
}
// only required for touch devices because called on touchend tap to control tapClick
if (_constants.deviceHasTouch) (0, _notifier.notifyOfAll)(['click'], handleClick);
module.exports = exports['default'];
|
#!/usr/bin/env bash
# Small script to follow a "refresh"-type redirect inside an HTML file.
# WARNING: this was written for a personal page structure!
# The script will need adapting for other structures.
# zf181114.1606
# source: https://stackoverflow.com/questions/428109/extract-substring-in-bash
# note:
shopt -s extglob # ask bash to support the pipe (|) in glob patterns
# Fetch the page (1s budget) and keep only the meta-refresh line.
t1=`curl --max-time 1 $1 2>err.txt |grep 'http-equiv="refresh"'`
tmp=${t1#*+(url|URL)=} # remove prefix ending in "url|URL="
b=${tmp%\"*} # remove suffix starting with "
# Collect curl's network errors, dropping the leading character.
e=`cat err.txt |grep -e 'Connection timed out after' -e 'Could not resolve host' `
e="${e:1}" # drop the first character
if [ "$e" != "" ]
then
  echo -e "err: "$1", "$e >> err.log
fi
# Absolute targets (containing "http") are used as-is; otherwise the
# target is resolved relative to the requested URL.
if [ "`echo $b |grep http`" != "" ]
then
  r=$b
else
  r=$1"/"$b
fi
echo $r
if [ "$b" != "" ]
then
  echo -e "redirect: "$1", "$r >> redir.log
fi
|
#!/bin/sh
# this script prepare database and redis instance to run acceptance test
#
# NOTE: assumes existance of a "template_postgis" loaded with
# compatible version of postgis (legacy.sql included)
PREPARE_REDIS=yes
PREPARE_PGSQL=yes
# Flags: --skip-pg / --skip-redis disable the corresponding setup step.
while [ -n "$1" ]; do
  if test "$1" = "--skip-pg"; then
    PREPARE_PGSQL=no
    shift; continue
  elif test "$1" = "--skip-redis"; then
    PREPARE_REDIS=no
    shift; continue
  fi
done
# Print an error message to stderr and abort the script.
die() {
  echo "$1" >&2
  exit 1
}
# This is where postgresql connection parameters are read from
TESTENV=../config/environments/test.js
# Extract postgres configuration
PGHOST=`node -e "console.log(require('${TESTENV}').db_host || '')"`
echo "PGHOST: [$PGHOST]"
PGPORT=`node -e "console.log(require('${TESTENV}').db_port || '')"`
echo "PGPORT: [$PGPORT]"
PUBLICUSER=`node -e "console.log(require('${TESTENV}').db_pubuser || 'xxx')"`
PUBLICPASS=`node -e "console.log(require('${TESTENV}').db_pubuser_pass || 'xxx')"`
echo "PUBLICUSER: [${PUBLICUSER}]"
echo "PUBLICPASS: [${PUBLICPASS}]"
TESTUSERID=1
# User/database names embed a "<%= user_id %>" template; substitute it.
TESTUSER=`node -e "console.log(require('${TESTENV}').db_user || '')"`
if test -z "$TESTUSER"; then
  echo "Missing db_user from ${TESTENV}" >&2
  exit 1
fi
TESTUSER=`echo ${TESTUSER} | sed "s/<%= user_id %>/${TESTUSERID}/"`
echo "TESTUSER: [${TESTUSER}]"
TESTPASS=`node -e "console.log(require('${TESTENV}').db_user_pass || '')"`
TESTPASS=`echo ${TESTPASS} | sed "s/<%= user_id %>/${TESTUSERID}/"`
echo "TESTPASS: [${TESTPASS}]"
TEST_DB=`node -e "console.log(require('${TESTENV}').db_base_name || '')"`
if test -z "$TEST_DB"; then
  echo "Missing db_base_name from ${TESTENV}" >&2
  exit 1
fi
TEST_DB=`echo ${TEST_DB} | sed "s/<%= user_id %>/${TESTUSERID}/"`
# Export so psql/createdb below pick up the connection parameters.
export PGHOST PGPORT
if test x"$PREPARE_PGSQL" = xyes; then
  echo "preparing postgres..."
  # Recreate the test database from the postgis template and load the
  # fixtures, substituting user/password placeholders along the way.
  dropdb ${TEST_DB} # 2> /dev/null # error expected if doesn't exist, but not otherwise
  createdb -Ttemplate_postgis -EUTF8 ${TEST_DB} || die "Could not create test database"
  cat test.sql |
    sed "s/:PUBLICUSER/${PUBLICUSER}/" |
    sed "s/:PUBLICPASS/${PUBLICPASS}/" |
    sed "s/:TESTUSER/${TESTUSER}/" |
    sed "s/:TESTPASS/${TESTPASS}/" |
    psql -v ON_ERROR_STOP=1 ${TEST_DB} || exit 1
  echo "Populating windshaft_test database with reduced populated places data"
  cat ./fixtures/populated_places_simple_reduced.sql |
    sed "s/:PUBLICUSER/${PUBLICUSER}/" |
    sed "s/:PUBLICPASS/${PUBLICPASS}/" |
    sed "s/:TESTUSER/${TESTUSER}/" |
    sed "s/:TESTPASS/${TESTPASS}/" |
    psql -v ON_ERROR_STOP=1 ${TEST_DB} || exit 1
  # TODO: send in a single run, togheter with test.sql
  psql -c "CREATE EXTENSION plpythonu;" ${TEST_DB}
  # Pull the CartoDB helper functions used by the tests.
  curl -L -s https://github.com/CartoDB/cartodb-postgresql/raw/cdb/scripts-available/CDB_QueryStatements.sql -o support/CDB_QueryStatements.sql
  curl -L -s https://github.com/CartoDB/cartodb-postgresql/raw/cdb/scripts-available/CDB_QueryTables.sql -o support/CDB_QueryTables.sql
  psql -f support/CDB_QueryStatements.sql ${TEST_DB}
  psql -f support/CDB_QueryTables.sql ${TEST_DB}
fi
if test x"$PREPARE_REDIS" = xyes; then
  REDIS_PORT=`node -e "console.log(require('${TESTENV}').redis_port || '6336')"`
  echo "preparing redis..."
  # delete previous publicuser
  cat <<EOF | redis-cli -p ${REDIS_PORT} -n 5
HDEL rails:users:vizzuality database_host
HDEL rails:users:vizzuality database_publicuser
EOF
  # Seed the legacy "vizzuality" user metadata (db 5).
  cat <<EOF | redis-cli -p ${REDIS_PORT} -n 5
HMSET rails:users:vizzuality \
 id 1 \
 database_name ${TEST_DB} \
 database_host localhost \
 map_key 1234
SADD rails:users:vizzuality:map_key 1235
EOF
  # A user configured as with cartodb-2.5.0+
  cat <<EOF | redis-cli -p ${REDIS_PORT} -n 5
HMSET rails:users:cartodb250user \
 id ${TESTUSERID} \
 database_name ${TEST_DB} \
 database_host localhost \
 database_password ${TESTPASS} \
 map_key 1234
EOF
  # OAuth credentials live in redis db 3.
  cat <<EOF | redis-cli -p ${REDIS_PORT} -n 3
HMSET rails:oauth_access_tokens:l0lPbtP68ao8NfStCiA3V3neqfM03JKhToxhUQTR \
 consumer_key fZeNGv5iYayvItgDYHUbot1Ukb5rVyX6QAg8GaY2 \
 consumer_secret IBLCvPEefxbIiGZhGlakYV4eM8AbVSwsHxwEYpzx \
 access_token_token l0lPbtP68ao8NfStCiA3V3neqfM03JKhToxhUQTR \
 access_token_secret 22zBIek567fMDEebzfnSdGe8peMFVFqAreOENaDK \
 user_id 1 \
 time sometime
EOF
fi
echo "ok, you can run test now"
|
<gh_stars>10-100
const path = require("path");
const fs = require("fs").promises;
const BUILD_PATH = path.join(__dirname, "..", "dist");
// Assets that must not go into the service-worker precache list.
const ignoreSet = new Set(["index.html", "robots.txt", "service-worker.js"]);
// Post-build step: inject the list of built assets (plus "/") into
// service-worker.js by replacing its CACHE_LIST placeholder.
(async () => {
  const files = await fs.readdir(BUILD_PATH);
  const filteredFiles = files.filter((file) => !ignoreSet.has(file));
  const cacheList = ["/", ...filteredFiles];
  const serviceWorkerPath = path.join(BUILD_PATH, "service-worker.js");
  const serviceWorker = await fs.readFile(serviceWorkerPath, "utf-8");
  // NOTE(review): cacheList is an Array, so String.replace stringifies it
  // comma-joined and unquoted -- verify the CACHE_LIST placeholder in
  // service-worker.js expects that form (JSON.stringify may be intended).
  return fs.writeFile(
    serviceWorkerPath,
    serviceWorker.replace("CACHE_LIST", cacheList)
  );
})();
|
from math import acos, degrees, sqrt
def angle_planar_3d(normal1, normal2):
    """Return the angle in degrees between two 3D vectors.

    Args:
        normal1, normal2: length-3 sequences of vector components.

    Returns:
        The angle between the vectors, in degrees, within [0, 180].

    Raises:
        ZeroDivisionError: if either vector has zero length.
    """
    # Fix: the original called math.sqrt but only imported acos/degrees,
    # raising NameError at runtime.
    length1 = sqrt(normal1[0]**2 + normal1[1]**2 + normal1[2]**2)
    length2 = sqrt(normal2[0]**2 + normal2[1]**2 + normal2[2]**2)
    dot_product = (normal1[0]*normal2[0] + normal1[1]*normal2[1]
                   + normal1[2]*normal2[2])
    cos_angle = dot_product / (length1 * length2)
    # Clamp away float rounding so acos never sees a magnitude above 1.
    cos_angle = max(-1.0, min(1.0, cos_angle))
    return degrees(acos(cos_angle))
# Demo: perpendicular unit vectors -> 90 degrees.  Guarded so importing
# this module no longer prints as a side effect.
if __name__ == "__main__":
    normal1 = [1, 0, 0]
    normal2 = [0, 1, 0]
    print(angle_planar_3d(normal1, normal2))
|
#!/bin/sh
# Install the Python dependencies, then run the hello-world test.
sudo pip install -r requirements.txt
# The two arguments are the addresses passed to the test (both loopback here).
python test_hello_world.py 127.0.0.1 127.0.0.1
|
<filename>qiita_db/test/test_portal.py
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import TestCase, main
import numpy.testing as npt
from qiita_core.util import qiita_test_checker
from qiita_core.qiita_settings import qiita_config
import qiita_db as qdb
@qiita_test_checker()
class TestPortal(TestCase):
    """Tests for qdb.portal.Portal: creation/deletion and study/analysis
    membership across portals.

    NOTE(review): assertItemsEqual is Python 2 unittest API (renamed
    assertCountEqual in Python 3) -- confirm the target interpreter.
    """
    def setUp(self):
        # Remember the active portal so tearDown can restore it.
        self.portal = qiita_config.portal
        self.study = qdb.study.Study(1)
        self.analysis = qdb.analysis.Analysis(1)
        self.qiita_portal = qdb.portal.Portal('QIITA')
        self.emp_portal = qdb.portal.Portal('EMP')
    def tearDown(self):
        qiita_config.portal = self.portal
    def test_list_portals(self):
        obs = qdb.portal.Portal.list_portals()
        exp = ['EMP']
        self.assertEqual(obs, exp)
    def test_add_portal(self):
        obs = qdb.portal.Portal.create("NEWPORTAL", "SOMEDESC")
        obs = self.conn_handler.execute_fetchall(
            "SELECT * FROM qiita.portal_type")
        exp = [[1, 'QIITA', 'QIITA portal. Access to all data stored '
                'in database.'],
               [2, 'EMP', 'EMP portal'],
               [4, 'NEWPORTAL', 'SOMEDESC']]
        self.assertItemsEqual(obs, exp)
        obs = self.conn_handler.execute_fetchall(
            "SELECT * FROM qiita.analysis_portal")
        exp = [[1, 1], [2, 1], [3, 1], [4, 1], [5, 1], [6, 1], [7, 2], [8, 2],
               [9, 2], [10, 2], [11, 4], [12, 4], [13, 4], [14, 4]]
        self.assertItemsEqual(obs, exp)
        # Duplicate portal names are rejected.
        with self.assertRaises(qdb.exceptions.QiitaDBDuplicateError):
            qdb.portal.Portal.create("EMP", "DOESNTMATTERFORDESC")
        qdb.portal.Portal.delete('NEWPORTAL')
    def test_remove_portal(self):
        qdb.portal.Portal.create("NEWPORTAL", "SOMEDESC")
        # Select some samples on a default analysis
        qiita_config.portal = "NEWPORTAL"
        a = qdb.user.User("<EMAIL>").default_analysis
        a.add_samples({1: ['1.SKB8.640193', '1.SKD5.640186']})
        qdb.portal.Portal.delete("NEWPORTAL")
        obs = self.conn_handler.execute_fetchall(
            "SELECT * FROM qiita.portal_type")
        exp = [[1, 'QIITA', 'QIITA portal. Access to all data stored '
                'in database.'],
               [2, 'EMP', 'EMP portal']]
        self.assertItemsEqual(obs, exp)
        obs = self.conn_handler.execute_fetchall(
            "SELECT * FROM qiita.analysis_portal")
        exp = [[1, 1], [2, 1], [3, 1], [4, 1], [5, 1], [6, 1], [7, 2], [8, 2],
               [9, 2], [10, 2]]
        self.assertItemsEqual(obs, exp)
        # Unknown portals and the built-in QIITA portal cannot be deleted.
        with self.assertRaises(qdb.exceptions.QiitaDBLookupError):
            qdb.portal.Portal.delete("NOEXISTPORTAL")
        with self.assertRaises(qdb.exceptions.QiitaDBError):
            qdb.portal.Portal.delete("QIITA")
        qdb.portal.Portal.create("NEWPORTAL2", "SOMEDESC")
        # Add study to this new portal and make sure error raised
        info = {
            "timeseries_type_id": 1,
            "metadata_complete": True,
            "mixs_compliant": True,
            "number_samples_collected": 25,
            "number_samples_promised": 28,
            "study_alias": "FCM",
            "study_description": "Microbiome of people who eat nothing but "
                                 "fried chicken",
            "study_abstract": "Exploring how a high fat diet changes the "
                              "gut microbiome",
            "emp_person_id": qdb.study.StudyPerson(2),
            "principal_investigator_id": qdb.study.StudyPerson(3),
            "lab_person_id": qdb.study.StudyPerson(1)
        }
        qdb.portal.Portal.create("NEWPORTAL3", "SOMEDESC")
        qiita_config.portal = "NEWPORTAL3"
        qdb.study.Study.create(
            qdb.user.User('<EMAIL>'), "Fried chicken microbiome", info)
        qiita_config.portal = "QIITA"
        # A portal that still owns a study cannot be deleted.
        with self.assertRaises(qdb.exceptions.QiitaDBError):
            qdb.portal.Portal.delete("NEWPORTAL3")
    def test_check_studies(self):
        with self.assertRaises(qdb.exceptions.QiitaDBError):
            self.qiita_portal._check_studies([2000000000000, 122222222222222])
    def test_check_analyses(self):
        with self.assertRaises(qdb.exceptions.QiitaDBError):
            self.qiita_portal._check_analyses([2000000000000, 122222222222222])
        with self.assertRaises(qdb.exceptions.QiitaDBError):
            self.qiita_portal._check_analyses([8, 9])
    def test_get_studies_by_portal(self):
        obs = self.emp_portal.get_studies()
        self.assertEqual(obs, set())
        obs = self.qiita_portal.get_studies()
        self.assertEqual(obs, {qdb.study.Study(1)})
    def test_add_study_portals(self):
        obs = qdb.portal.Portal.create("NEWPORTAL4", "SOMEDESC")
        obs.add_studies([self.study.id])
        self.assertItemsEqual(self.study._portals, ['NEWPORTAL4', 'QIITA'])
        # Re-adding an existing study only warns.
        npt.assert_warns(qdb.exceptions.QiitaDBWarning, obs.add_studies,
                         [self.study.id])
        obs.remove_studies([self.study.id])
        qdb.portal.Portal.delete("NEWPORTAL4")
    def test_remove_study_portals(self):
        with self.assertRaises(ValueError):
            self.qiita_portal.remove_studies([self.study.id])
        self.emp_portal.add_studies([1])
        # Set up the analysis in EMP portal
        self.emp_portal.add_analyses([self.analysis.id])
        obs = self.analysis._portals
        self.assertItemsEqual(obs, ['QIITA', 'EMP'])
        # Test study removal failure
        with self.assertRaises(qdb.exceptions.QiitaDBError):
            self.emp_portal.remove_studies([self.study.id])
        obs = self.study._portals
        self.assertItemsEqual(obs, ['QIITA', 'EMP'])
        # Test study removal
        self.emp_portal.remove_analyses([self.analysis.id])
        self.emp_portal.remove_studies([self.study.id])
        obs = self.study._portals
        self.assertEqual(obs, ['QIITA'])
        obs = npt.assert_warns(
            qdb.exceptions.QiitaDBWarning, self.emp_portal.remove_studies,
            [self.study.id])
    def test_get_analyses_by_portal(self):
        qiita_config.portal = 'EMP'
        exp = {qdb.analysis.Analysis(7), qdb.analysis.Analysis(8),
               qdb.analysis.Analysis(9), qdb.analysis.Analysis(10)}
        obs = self.emp_portal.get_analyses()
        self.assertEqual(obs, exp)
        qiita_config.portal = 'QIITA'
        exp = {qdb.analysis.Analysis(1), qdb.analysis.Analysis(2),
               qdb.analysis.Analysis(3), qdb.analysis.Analysis(4),
               qdb.analysis.Analysis(5), qdb.analysis.Analysis(6)}
        obs = self.qiita_portal.get_analyses()
        self.assertEqual(obs, exp)
    def test_add_analysis_portals(self):
        obs = self.analysis._portals
        self.assertEqual(obs, ['QIITA'])
        # An analysis cannot join a portal its study is not part of.
        with self.assertRaises(qdb.exceptions.QiitaDBError):
            self.emp_portal.add_analyses([self.analysis.id])
        obs = self.analysis._portals
        self.assertEqual(obs, ['QIITA'])
        self.emp_portal.add_studies([1])
        self.emp_portal.add_analyses([self.analysis.id])
        obs = self.analysis._portals
        self.assertEqual(obs, ['EMP', 'QIITA'])
        npt.assert_warns(
            qdb.exceptions.QiitaDBWarning, self.emp_portal.add_analyses,
            [self.analysis.id])
        self.emp_portal.remove_analyses([self.analysis.id])
        self.emp_portal.remove_studies([1])
    def test_remove_analysis_portals(self):
        with self.assertRaises(ValueError):
            self.qiita_portal.remove_analyses([self.analysis.id])
        # set up the analysis in EMP portal
        self.emp_portal.add_studies([1])
        self.emp_portal.add_analyses([self.analysis.id])
        obs = self.analysis._portals
        self.assertItemsEqual(obs, ['QIITA', 'EMP'])
        # Test removal
        self.emp_portal.remove_analyses([self.analysis.id])
        obs = self.analysis._portals
        self.assertEqual(obs, ['QIITA'])
        obs = npt.assert_warns(
            qdb.exceptions.QiitaDBWarning, self.emp_portal.remove_analyses,
            [self.analysis.id])
        self.emp_portal.remove_studies([1])
# Run the portal test suite when executed directly.
if __name__ == '__main__':
    main()
|
<reponame>depp/raycast
#include "defs.h"
#include "imath.h"
#include "world.h"
#include <stdlib.h>
// #include <stdio.h>
/* Allocate and return a new world with the player at the origin, facing
 * angle 0 and at rest.  Allocation goes through xmalloc (presumably
 * aborts on failure -- confirm in defs.h). */
struct world *world_new(void)
{
    struct world *w = xmalloc(sizeof(*w));
    struct obj *o = &w->player;
    /* previous (x0,y0) and current (x1,y1) positions start coincident */
    o->x0 = 0;
    o->y0 = 0;
    o->x1 = 0;
    o->y1 = 0;
    o->angle = 0;
    o->speed = 0;
    o->strafe = 0;
    return w;
}
/* Free a world previously created by world_new. */
void world_delete(struct world *w)
{
    free(w);
}
/* Advance the player one simulation step: save the previous position,
 * then move along the heading by speed (forward) and strafe (sideways). */
void world_update(struct world *w)
{
    int ax, ay, dx, dy;
    struct obj *o = &w->player;
    o->x0 = o->x1;
    o->y0 = o->y1;
    /* heading vector from the integer trig tables; the >>14 rescale below
     * suggests icos/isin return 2.14 fixed-point -- TODO confirm in imath.h */
    ax = icos(o->angle);
    ay = isin(o->angle);
    /* combine forward and sideways motion, then drop the fraction bits */
    dx = ax * o->speed - ay * o->strafe;
    dy = ay * o->speed + ax * o->strafe;
    o->x1 += dx / (1 << 14);
    o->y1 += dy / (1 << 14);
    // printf("dx %d dy %d\n", dx / (1 << 14), dy / (1 << 14));
}
|
import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { environment } from 'src/environments/environment';
import { Device } from '../_models/device';
@Injectable({
  providedIn: 'root'
})
export class DeviceService {
  /** REST API root taken from the environment configuration. */
  baseUrl = environment.apiUrl;

  constructor(private httpClient: HttpClient) { }

  /** GET the full device collection. */
  getDevices() {
    return this.httpClient.get(`${this.baseUrl}devices`);
  }

  /** GET a single device by id. */
  getDevice(id: number) {
    return this.httpClient.get(`${this.baseUrl}devices/${id}`);
  }

  /** POST a new device. */
  createDevice(device: Device) {
    return this.httpClient.post(`${this.baseUrl}devices`, device);
  }

  /** PUT updated fields for an existing device. */
  updateDevice(id: number, device: Device) {
    return this.httpClient.put(`${this.baseUrl}devices/${id}`, device);
  }

  /** DELETE a device by id. */
  deleteDevice(id: number) {
    return this.httpClient.delete(`${this.baseUrl}devices/${id}`);
  }
}
|
#!/bin/bash
# This simulates a button press for a fixed period of seconds for the power switch
# similar to the press of a power switch for an ATX PSU
# Setting GPIO18 to 1 is the same as pushing the power switch for the X735
# Fix: validate the optional sleep argument BEFORE driving the GPIO high,
# so a bad argument can no longer exit with the virtual button stuck in
# the pressed state.
SLEEP=${1:-4}
re='^[0-9\.]+$'
if ! [[ $SLEEP =~ $re ]] ; then
   echo "error: sleep time not a number" >&2; exit 1
fi
BUTTON=18
echo "$BUTTON" > /sys/class/gpio/export;
echo "out" > /sys/class/gpio/gpio$BUTTON/direction
# Press the virtual button.
echo "1" > /sys/class/gpio/gpio$BUTTON/value
echo "X730 Shutting down..."
/bin/sleep $SLEEP
#restore GPIO 18 (release the virtual button)
echo "0" > /sys/class/gpio/gpio$BUTTON/value
|
#include "icon.hpp"
/*!
 * \brief Default constructor; creates an Icon not bound to any GUI widget.
 *
 */
Icon::Icon(){}
/*!
 * \brief Construct an Icon bound to the named builder widget.
 *
 * Looks the widget up by \p name in the GUI's refBuilder and initializes
 * it to the error icon (icon_error at icon_size -- presumably class-level
 * defaults; confirm in icon.hpp).
 * \n input: gui -- owning GUI wrapper; name -- widget id in the builder
 * \n output: none
 */
Icon::Icon(GUI & gui, std::string name){
    my_gui = &gui;
    _name = name;
    my_gui->refBuilder->get_widget(_name, icon);
    icon->set_from_icon_name(icon_error,icon_size);
}
// Construct an Icon bound to the named builder widget, showing a specific
// initial icon instead of the error icon.
Icon::Icon(GUI & gui, std::string name, std::string icon_name){
    my_gui = &gui;
    _name = name;
    my_gui->refBuilder->get_widget(_name, icon);
    icon->set_from_icon_name(icon_name,icon_size);
}
// Construct an aggregate Icon that manages a vector of child icons;
// used together with on_timeout to refresh status indicators.
Icon::Icon(GUI & gui, std::vector<Icon> icon_vector){
    my_gui = &gui;
    _name = "icons";
    icons = icon_vector;
}
// Trivial destructor; the wrapped widget is presumably owned by the builder.
Icon::~Icon(){}
/*!
 * \brief Periodic timer callback refreshing the aggregated status icons.
 *
 * For each child icon, shows the OK icon and clears the flag when the
 * corresponding my_gui->status entry is set, otherwise shows the error
 * icon.  Returns true so the timer keeps firing; timer_number is unused.
 */
bool Icon::on_timeout(int timer_number){
    //std::cerr << "icon timer" << std::endl;
    // Fix: use an unsigned index to match std::vector::size() and avoid
    // the signed/unsigned comparison warning.
    for(std::size_t i = 0; i < icons.size(); ++i){
        if(my_gui->status[i]){
            icons[i].set_icon(icon_ok);
            my_gui->status[i] = false;
        }
        else{
            icons[i].set_icon(icon_error);
        }
    }
    return true;
}
// Swap the displayed icon to \p icon_name, keeping the configured size.
void Icon::set_icon(std::string icon_name){
    //std::cerr << "updating icon for: " << _name << std::endl;
    icon->set_from_icon_name(icon_name,icon_size);
}
|
<filename>tests/test_triangle.py
from star_tracker.modules.triangle import Triangle
from numpy import pi
import pytest
def test_print():
    """__str__ renders the star ids and the two float features to 2 decimals."""
    t = Triangle([0,0,0],3,5)
    p = str(t)
    assert p == '| 0,0,0 --> 3.00 5.00 |'
def test_validArea():
    """validArea accepts a range containing the area (3) and rejects one that excludes it."""
    t = Triangle([0,0,0],3,5)
    s = [t.validArea(1,2), t.validArea(1,1)]
    assert s == [True, False]
def test_validMoment():
    """validMoment accepts a range containing the moment (5) and rejects one that excludes it."""
    t = Triangle([0,0,0],3,5)
    s = [t.validMoment(1,2), t.validMoment(5,1)]
    assert s == [False, True]
def test_isContained():
    """isContained matches triangles sharing star ids (order-insensitive) and is reflexive."""
    t1 = Triangle([142, 151, 166],3,5)
    t2 = Triangle([148, 151, 142],3,5)
    t3 = Triangle([ 70,  63,  66],3,5)
    assert True  == t1.isContained(t2)
    assert False == t1.isContained(t3)
    assert True  == t1.isContained(t1)
def test_isContainedinList():
    """A triangle matches a list iff at least one member contains it."""
    candidates = [
        Triangle([113, 104, 86], 3, 5),
        Triangle([122, 134, 111], 3, 5),
        Triangle([172, 154, 175], 3, 5),
        Triangle([142, 151, 166], 3, 5),
    ]
    catalog = [
        Triangle([322, 323, 336], 3, 5),
        Triangle([148, 151, 142], 3, 5),
        Triangle([70, 63, 66], 3, 5),
        Triangle([397, 401, 411], 3, 5),
        Triangle([456, 457, 467], 3, 5),
    ]
    # The first three candidates have no close match in the catalog.
    for miss in candidates[:3]:
        assert miss.isContainedinList(catalog) == False
    # The last one matches catalog[1] — and trivially matches itself
    # when searched in its own list.
    assert candidates[3].isContainedinList(catalog) == True
    assert candidates[3].isContainedinList(candidates) == True
|
#!/bin/bash
# Copyright 2019 The Vitess Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tears down the container engine cluster, removes rules/pools
# Zone and cluster name are overridable via the environment.
GKE_ZONE=${GKE_ZONE:-'us-central1-b'}
GKE_CLUSTER_NAME=${GKE_CLUSTER_NAME:-'example'}
base_ssd_name="$GKE_CLUSTER_NAME-vt-ssd-"

# Delete the cluster first so the SSDs below are no longer attached.
# All expansions are quoted to survive names containing IFS characters;
# $(...) replaces the legacy backtick command substitution.
gcloud container clusters delete "$GKE_CLUSTER_NAME" -z "$GKE_ZONE" -q

# Count this cluster's SSDs in the zone, then delete them by their
# sequential names: ${base_ssd_name}1 .. ${base_ssd_name}N.
num_ssds=$(gcloud compute disks list | awk -v name="$base_ssd_name" -v zone="$GKE_ZONE" '$1~name && $2==zone' | wc -l)
for i in $(seq 1 "$num_ssds"); do
  gcloud compute disks delete "$base_ssd_name$i" --zone "$GKE_ZONE" -q
done
|
<gh_stars>0
"""List every member of zipfile.zip with its uncompressed size."""
import zipfile

# Context manager guarantees the archive handle is closed even on error
# (the original left the file open and used Python 2 print statements).
with zipfile.ZipFile("zipfile.zip", "r") as archive:
    for filename in archive.namelist():
        # 'data' instead of the original name, which shadowed builtin 'bytes'.
        data = archive.read(filename)
        print('File:', filename, 'has', len(data), 'bytes')
|
<gh_stars>1000+
from .execution import (
ScheduledExecutionFailed,
ScheduledExecutionResult,
ScheduledExecutionSkipped,
ScheduledExecutionSuccess,
)
from .scheduler import (
DagsterDaemonScheduler,
DagsterScheduleDoesNotExist,
DagsterScheduleReconciliationError,
DagsterSchedulerError,
Scheduler,
SchedulerDebugInfo,
)
|
package pantry
import (
"fmt"
"reflect"
"strings"
ref "github.com/yazver/golibs/reflect"
)
// addIfNotSpace stores v in m under the normalised (trimmed, lower-cased)
// key k, skipping the insert when either side is blank after trimming.
func addIfNotSpace(m map[string]string, k, v string) {
	key := strings.TrimSpace(strings.ToLower(k))
	value := strings.TrimSpace(v)
	if key == "" || value == "" {
		return
	}
	m[key] = value
}
// parseTagSettings collects pantry-relevant settings from a struct field tag.
// Entries inside the config tag ("key:value;key:value") are parsed first;
// dedicated flag/env tags (named "<config>.<flag>" / "<config>.<env>") are
// then merged in under their plain names.
func parseTagSettings(tag reflect.StructTag, opt *Options) map[string]string {
	settings := map[string]string{}
	configName := opt.Tags.Config.Name
	if raw, ok := tag.Lookup(configName); ok {
		for _, entry := range strings.Split(raw, ";") {
			if pair := strings.SplitN(entry, ":", 2); len(pair) == 2 {
				addIfNotSpace(settings, pair[0], pair[1])
			}
		}
	}
	prefix := configName
	if prefix != "" {
		prefix += "."
	}
	for _, key := range []string{opt.Tags.Flag.Name, opt.Tags.Env.Name} {
		if value, ok := tag.Lookup(prefix + key); ok {
			addIfNotSpace(settings, key, value)
		}
	}
	return settings
}
// parseFlagSettings builds a Flag description from the raw flag tag value.
// A leading "-" disables the flag (the sentinel name "-" is returned).
// Otherwise the format is "name [short]|usage": the part before '|' holds
// the name (and optional short alias), the part after it the usage text.
func parseFlagSettings(str string) *Flag {
	result := &Flag{Name: "-"}
	if strings.HasPrefix(str, "-") {
		// Explicitly disabled: keep the sentinel name.
		return result
	}
	splitNTo(str, "|", &result.Name, &result.Usage)
	fieldsTo(strings.TrimSpace(result.Name), &result.Name, &result.Short)
	return result
}
// processDefaultValues walks the struct v and assigns to every field the
// value declared in its default tag. A no-op when default tags are disabled.
func processDefaultValues(v interface{}, opt *Options) error {
	if !opt.Tags.Default.Use {
		return nil
	}
	applyDefault := func(value reflect.Value, name string, tag reflect.StructTag, state *ref.State) error {
		defaultValue, ok := tag.Lookup(opt.Tags.Default.Name)
		if !ok {
			// No default declared for this field.
			return nil
		}
		if err := ref.AssignStringToValue(value, defaultValue); err != nil {
			return fmt.Errorf("Unable to assign default value \"%s\" to field \"%s\": %s", defaultValue, name, err)
		}
		return nil
	}
	return traverseStruct(v, applyDefault)
}
// traverseState carries the accumulated flag/env naming context while a
// struct is traversed recursively (see processTags).
type traverseState struct {
	Flag string // flag-name prefix built from parent fields; "-" disables flags below
	FlagHierarchically bool // whether nested fields extend the flag prefix
	Env string // env-variable prefix built from parent fields; "-" disables env below
	EnvHierarchically bool // whether nested fields extend the env prefix
}
// processTags wires struct fields to command-line flags and environment
// variables according to their tags, then parses the flags.
// Hierarchical names are built with "-" (flags) and "_" (env variables);
// a tag value of "-" disables the field and its descendants.
func processTags(v interface{}, opt *Options) error {
	flags := &(opt.Flags)
	enviropment := &(opt.Enviropment)
	if opt.Tags.Config.Use && (flags.Using != FlagsDontUse || opt.Enviropment.Use) {
		initFlags := func(value reflect.Value, name string, tag reflect.StructTag, s *ref.State) error {
			state, _ := s.Value.(traverseState)
			settings := parseTagSettings(tag, opt)
			// Flag handling: skipped entirely once a parent disabled flags ("-").
			if flagSettings, ok := settings[opt.Tags.Flag.Name]; opt.Tags.Flag.Use && state.Flag != "-" && ok {
				flag := parseFlagSettings(flagSettings)
				if flag.Name != "" && flag.Name != "-" {
					if state.FlagHierarchically {
						if state.Flag != "" {
							flag.Name = state.Flag + "-" + flag.Name
						}
						state.Flag = flag.Name
					}
					if opt.Tags.Description.Use {
						if description, ok := tag.Lookup(opt.Tags.Description.Name); ok {
							flag.Usage = description
						}
					}
					if opt.Tags.Default.Use {
						if defValue, ok := tag.Lookup(opt.Tags.Default.Name); ok {
							flag.DefValue = defValue
						}
					}
					flag.Value = value
					if err := flags.Add(flag); err != nil {
						return err
					}
				} else {
					// "-" (or empty) disables this field and everything below it.
					state.Flag = "-"
					state.FlagHierarchically = false
				}
			}
			// Environment handling, mirroring the flag logic with "_" separators.
			if env, ok := settings[opt.Tags.Env.Name]; opt.Tags.Env.Use && state.Env != "-" && ok {
				if env != "-" {
					if state.EnvHierarchically {
						if state.Env != "" {
							// BUG FIX: the prefix must come from the env chain,
							// not the flag chain (was: state.Flag + "_" + env).
							env = state.Env + "_" + env
						}
						state.Env = env
					}
					if err := enviropment.Get(value, env); err != nil {
						return err
					}
				} else {
					state.Env = "-"
					state.EnvHierarchically = false
				}
			}
			return nil
		}
		if flags.Using != FlagsDontUse || enviropment.Use {
			if err := traverseStruct(v, initFlags); err != nil {
				return err
			}
		}
		return flags.Process()
	}
	return nil
}
|
<reponame>Gr1f0n6x/TarantoolORM<filename>src/main/java/org/tarantool/orm/annotations/IndexedFieldParams.java
package org.tarantool.orm.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks the participation of an entity field in a named Tarantool index.
 * Consumed at source level only (annotation processing); not retained at runtime.
 */
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.SOURCE)
public @interface IndexedFieldParams {
    /** Name of the index this field belongs to. */
    String indexName();
    /** 1-based position (part) of the field inside the composite index. */
    int part() default 1;
    /** Whether the indexed field may contain null values. */
    boolean isNullable() default false;
}
|
package owlmoney.logic.command.transaction;
import static owlmoney.commons.log.LogsCenter.getLogger;
import java.util.logging.Logger;
import owlmoney.logic.command.Command;
import owlmoney.model.bank.exception.BankException;
import owlmoney.model.profile.Profile;
import owlmoney.model.transaction.exception.TransactionException;
import owlmoney.ui.Ui;
/**
 * Executes DeleteDepositCommand to delete a deposit transaction.
 */
public class DeleteDepositCommand extends Command {
    private final int expNumber;
    private final String from;
    // Fixed: the logger was previously created for EditDepositCommand.class,
    // which mislabelled every log line emitted by this command.
    private static final Logger logger = getLogger(DeleteDepositCommand.class);

    /**
     * Creates an instance of DeleteDepositCommand.
     *
     * @param bankName Bank account name.
     * @param index    Transaction number.
     */
    public DeleteDepositCommand(String bankName, int index) {
        this.expNumber = index;
        this.from = bankName;
    }

    /**
     * Executes the function to delete a deposit transaction.
     *
     * @param profile Profile of the user.
     * @param ui      Ui of OwlMoney.
     * @return false so OwlMoney will not terminate yet.
     * @throws BankException        If bank account does not exist.
     * @throws TransactionException If transaction is not a deposit.
     */
    public boolean execute(Profile profile, Ui ui) throws BankException, TransactionException {
        profile.profileDeleteDeposit(this.expNumber, this.from, ui, false);
        logger.info("Successful execution of DeleteDepositCommand");
        return this.isExit;
    }
}
|
// Save state backing this view; assigning it refreshes all display labels.
// Held weakly so the view does not keep the save state alive.
weak var saveState: PVSaveState? {
    didSet {
        if let saveState = saveState {
            titleLabel.text = saveState.title
            let dateFormatter = DateFormatter()
            // Fixed short format, e.g. "Mar 4, 9:41 PM".
            dateFormatter.dateFormat = "MMM d, h:mm a"
            timeStampLabel.text = dateFormatter.string(from: saveState.timestamp)
            coreLabel.text = saveState.coreDetails
        }
        // NOTE(review): labels are left unchanged when saveState becomes nil —
        // confirm stale text is acceptable (e.g. cell is reconfigured on reuse).
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.bug = void 0;
var bug = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill-rule": "evenodd",
"d": "M11 10h3V9h-3V8l3.17-1.03-.34-.94L11 7V6c0-.55-.45-1-1-1V4c0-.48-.36-.88-.83-.97L10.2 2H12V1H9.8l-2 2h-.59L5.2 1H3v1h1.8l1.03 1.03C5.36 3.12 5 3.51 5 4v1c-.55 0-1 .45-1 1v1l-2.83-.97-.34.94L4 8v1H1v1h3v1L.83 12.03l.34.94L4 12v1c0 .55.45 1 1 1h1l1-1V6h1v7l1 1h1c.55 0 1-.45 1-1v-1l2.83.97.34-.94L11 11v-1zM9 5H6V4h3v1z"
},
"children": []
}],
"attribs": {}
};
exports.bug = bug;
|
<reponame>Zueuk/JitCat<filename>src/jitcat/ExpressionAny.cpp<gh_stars>0
/*
This file is part of the JitCat library.
Copyright (C) <NAME> 2018
Distributed under the MIT License (license terms are at http://opensource.org/licenses/MIT).
*/
#include "jitcat/ExpressionAny.h"
#include "jitcat/CatASTNodes.h"
#include "jitcat/Configuration.h"
#include "jitcat/Document.h"
#include "jitcat/ExpressionErrorManager.h"
#include "jitcat/JitCat.h"
#include "jitcat/SLRParseResult.h"
#include "jitcat/Tools.h"
#include "jitcat/TypeInfo.h"
using namespace jitcat;
using namespace jitcat::AST;
// Default constructor: no expression yet; the native function pointer stays
// cleared until handleCompiledFunction() is called after compilation.
ExpressionAny::ExpressionAny():
	nativeFunctionAddress(0)
{
}
// Constructs from a C-string expression; compilation is deferred.
ExpressionAny::ExpressionAny(const char* expression):
	ExpressionBase(expression),
	nativeFunctionAddress(0)
{
}
// Constructs from a std::string expression; compilation is deferred.
ExpressionAny::ExpressionAny(const std::string& expression):
	ExpressionBase(expression),
	nativeFunctionAddress(0)
{
}
// Constructs and immediately compiles the expression in the given context.
ExpressionAny::ExpressionAny(CatRuntimeContext* compileContext, const std::string& expression):
	ExpressionBase(compileContext, expression),
	nativeFunctionAddress(0)
{
	compile(compileContext);
}
// Returns the expression's current value.
// Constant expressions return the value cached at compile time. Otherwise the
// expression is evaluated either through the LLVM-compiled native function
// (when LLVM support is enabled) or by interpreting the typed AST.
const std::any ExpressionAny::getValue(CatRuntimeContext* runtimeContext)
{
	if (isConstant)
	{
		return cachedValue;
	}
	else if (parseResult->astRootNode != nullptr)
	{
		// Fall back to the shared default context when the caller passed none.
		if (runtimeContext == nullptr)
		{
			runtimeContext = &CatRuntimeContext::defaultContext;
		}
		if constexpr (Configuration::enableLLVM)
		{
			// Dispatch on the statically-known result type: each branch casts the
			// raw native function address to the matching signature and calls it.
			if (valueType.isIntType()) return std::any(reinterpret_cast<int(*)(CatRuntimeContext*)>(nativeFunctionAddress)(runtimeContext));
			else if (valueType.isVoidType()) {reinterpret_cast<void(*)(CatRuntimeContext*)>(nativeFunctionAddress)(runtimeContext); return std::any();}
			else if (valueType.isFloatType()) return std::any(reinterpret_cast<float(*)(CatRuntimeContext*)>(nativeFunctionAddress)(runtimeContext));
			else if (valueType.isDoubleType()) return std::any(reinterpret_cast<double(*)(CatRuntimeContext*)>(nativeFunctionAddress)(runtimeContext));
			else if (valueType.isBoolType()) return std::any(reinterpret_cast<bool(*)(CatRuntimeContext*)>(nativeFunctionAddress)(runtimeContext));
			// Object results come back as a raw pointer and are re-wrapped by the type caster.
			else if (valueType.isPointerToReflectableObjectType()) return valueType.getPointeeType()->getObjectType()->getTypeCaster()->castFromRawPointer(reinterpret_cast<uintptr_t(*)(CatRuntimeContext*)>(nativeFunctionAddress)(runtimeContext));
			else
			{
				// Unsupported result type: return an empty value.
				return std::any();
			}
		}
		else
		{
			// Interpreted path: execute the AST, then drop evaluation temporaries.
			std::any result = parseResult->getNode<CatTypedExpression>()->execute(runtimeContext);
			runtimeContext->clearTemporaries();
			return result;
		}
	}
	else
	{
		// Compilation failed (no AST root): nothing to evaluate.
		return std::any();
	}
}
// Always evaluates through the AST interpreter, bypassing any LLVM-compiled
// code (useful for testing/verifying the compiled path).
// NOTE(review): unlike getValue(), a null runtimeContext is not replaced with
// the default context here, and the qualifier style (jitcat::) differs from
// the other definitions in this file — confirm both are intentional.
const std::any jitcat::ExpressionAny::getInterpretedValue(CatRuntimeContext* runtimeContext)
{
	if (isConstant)
	{
		return cachedValue;
	}
	else if (parseResult->astRootNode != nullptr)
	{
		std::any result = parseResult->getNode<CatTypedExpression>()->execute(runtimeContext);
		runtimeContext->clearTemporaries();
		return result;
	}
	else
	{
		// No AST (compile failed): return an empty value.
		return std::any();
	}
}
// Parses (and, via the base class, compiles) the expression in the given
// context. Accepts any result type (CatGenericType() places no constraint).
// Constant expressions are evaluated once here and cached for getValue().
void ExpressionAny::compile(CatRuntimeContext* context)
{
	if (context == nullptr)
	{
		context = &CatRuntimeContext::defaultContext;
		// NOTE(review): errors are cleared only when falling back to the
		// default context, not for caller-supplied contexts — confirm.
		context->getErrorManager()->clear();
	}
	if (parse(context, context->getErrorManager(), this, CatGenericType()) && isConstant)
	{
		cachedValue = parseResult->getNode<CatTypedExpression>()->execute(context);
	}
}
// Callback invoked when LLVM finishes compiling this expression: stores the
// address of the generated native function used by getValue().
void ExpressionAny::handleCompiledFunction(uintptr_t functionAddress)
{
	nativeFunctionAddress = functionAddress;
}
|
<reponame>reportportal/plugin-bts-azure
/*
* WorkItemTracking
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 6.1-preview
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.epam.reportportal.extension.azure.rest.client.api;
import com.epam.reportportal.extension.azure.rest.client.ApiCallback;
import com.epam.reportportal.extension.azure.rest.client.ApiClient;
import com.epam.reportportal.extension.azure.rest.client.ApiException;
import com.epam.reportportal.extension.azure.rest.client.ApiResponse;
import com.epam.reportportal.extension.azure.rest.client.Configuration;
import com.epam.reportportal.extension.azure.rest.client.Pair;
import com.epam.reportportal.extension.azure.rest.client.ProgressRequestBody;
import com.epam.reportportal.extension.azure.rest.client.ProgressResponseBody;
import com.epam.reportportal.extension.azure.rest.client.model.workitem.JsonPatchOperation;
import com.epam.reportportal.extension.azure.rest.client.model.workitem.WorkItem;
import com.google.gson.reflect.TypeToken;
import com.squareup.okhttp.Call;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.threeten.bp.OffsetDateTime;
public class WorkItemsApi {
    // HTTP client wrapper used for every call issued by this API instance.
    private ApiClient apiClient;
    /** Creates the API bound to the process-wide default {@link ApiClient}. */
    public WorkItemsApi() {
        this(Configuration.getDefaultApiClient());
    }
    /** Creates the API bound to the given client. */
    public WorkItemsApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }
    /** Returns the client used for all calls made by this API. */
    public ApiClient getApiClient() {
        return apiClient;
    }
    /** Replaces the client used for subsequent calls. */
    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }
/**
* Build call for workItemsGetWorkItem
* @param organization The name of the Azure DevOps organization. (required)
* @param id The work item id (required)
* @param project Project ID or project name (required)
* @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
* @param fields Comma-separated list of requested fields (optional)
* @param asOf AsOf UTC date time string (optional)
* @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
* @param progressListener Progress listener
* @param progressRequestListener Progress request listener
* @return Call to execute
* @throws ApiException If fail to serialize the request body object
*/
public Call workItemsGetWorkItemCall(String organization, Integer id, String project, String apiVersion, String fields, OffsetDateTime asOf, String expand, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
Object localVarPostBody = null;
// create path and map variables
String localVarPath = "/{organization}/{project}/_apis/wit/workitems/{id}"
.replaceAll("\\{" + "organization" + "\\}", apiClient.escapeString(organization.toString()))
.replaceAll("\\{" + "id" + "\\}", apiClient.escapeString(id.toString()))
.replaceAll("\\{" + "project" + "\\}", apiClient.escapeString(project.toString()));
List<Pair> localVarQueryParams = new ArrayList<Pair>();
List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
if (fields != null) {
localVarQueryParams.addAll(apiClient.parameterToPair("fields", fields));
}
if (asOf != null) {
localVarQueryParams.addAll(apiClient.parameterToPair("asOf", asOf));
}
if (expand != null) {
localVarQueryParams.addAll(apiClient.parameterToPair("$expand", expand));
}
if (apiVersion != null) {
localVarQueryParams.addAll(apiClient.parameterToPair("api-version", apiVersion));
}
Map<String, String> localVarHeaderParams = new HashMap<String, String>();
Map<String, Object> localVarFormParams = new HashMap<String, Object>();
final String[] localVarAccepts = {
"application/json"
};
final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
if (localVarAccept != null) {
localVarHeaderParams.put("Accept", localVarAccept);
}
final String[] localVarContentTypes = {
};
final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
localVarHeaderParams.put("Content-Type", localVarContentType);
if(progressListener != null) {
apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
@Override
public com.squareup.okhttp.Response intercept(Chain chain) throws IOException {
com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
return originalResponse.newBuilder()
.body(new ProgressResponseBody(originalResponse.body(), progressListener))
.build();
}
});
}
String[] localVarAuthNames = new String[] { "oauth2", "accessToken" };
return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
    /** Null-checks all required parameters, then delegates to the call builder. */
    @SuppressWarnings("rawtypes")
    private Call workItemsGetWorkItemValidateBeforeCall(String organization, Integer id, String project, String apiVersion, String fields, OffsetDateTime asOf, String expand, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'organization' is set
        if (organization == null) {
            throw new ApiException("Missing the required parameter 'organization' when calling workItemsGetWorkItem(Async)");
        }
        // verify the required parameter 'id' is set
        if (id == null) {
            throw new ApiException("Missing the required parameter 'id' when calling workItemsGetWorkItem(Async)");
        }
        // verify the required parameter 'project' is set
        if (project == null) {
            throw new ApiException("Missing the required parameter 'project' when calling workItemsGetWorkItem(Async)");
        }
        // verify the required parameter 'apiVersion' is set
        if (apiVersion == null) {
            throw new ApiException("Missing the required parameter 'apiVersion' when calling workItemsGetWorkItem(Async)");
        }
        Call call = workItemsGetWorkItemCall(organization, id, project, apiVersion, fields, asOf, expand, progressListener, progressRequestListener);
        return call;
    }
/**
*
* Returns a single work item.
* @param organization The name of the Azure DevOps organization. (required)
* @param id The work item id (required)
* @param project Project ID or project name (required)
* @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
* @param fields Comma-separated list of requested fields (optional)
* @param asOf AsOf UTC date time string (optional)
* @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
* @return WorkItem
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
    public WorkItem workItemsGetWorkItem(String organization, Integer id, String project, String apiVersion, String fields, OffsetDateTime asOf, String expand) throws ApiException {
        // Synchronous convenience overload: unwraps the ApiResponse payload.
        ApiResponse<WorkItem> resp = workItemsGetWorkItemWithHttpInfo(organization, id, project, apiVersion, fields, asOf, expand);
        return resp.getData();
    }
/**
*
* Returns a single work item.
* @param organization The name of the Azure DevOps organization. (required)
* @param id The work item id (required)
* @param project Project ID or project name (required)
* @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
* @param fields Comma-separated list of requested fields (optional)
* @param asOf AsOf UTC date time string (optional)
* @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
* @return ApiResponse<WorkItem>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
    public ApiResponse<WorkItem> workItemsGetWorkItemWithHttpInfo(String organization, Integer id, String project, String apiVersion, String fields, OffsetDateTime asOf, String expand) throws ApiException {
        // Validate, build and execute the call synchronously, deserialising
        // the response body into a WorkItem via Gson's TypeToken.
        Call call = workItemsGetWorkItemValidateBeforeCall(organization, id, project, apiVersion, fields, asOf, expand, null, null);
        Type localVarReturnType = new TypeToken<WorkItem>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }
/**
* (asynchronously)
* Returns a single work item.
* @param organization The name of the Azure DevOps organization. (required)
* @param id The work item id (required)
* @param project Project ID or project name (required)
* @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
* @param fields Comma-separated list of requested fields (optional)
* @param asOf AsOf UTC date time string (optional)
* @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
* @param callback The callback to be executed when the API call finishes
* @return The request call
* @throws ApiException If fail to process the API call, e.g. serializing the request body object
*/
    public Call workItemsGetWorkItemAsync(String organization, Integer id, String project, String apiVersion, String fields, OffsetDateTime asOf, String expand, final ApiCallback<WorkItem> callback) throws ApiException {
        // Bridge the generic ApiCallback onto the progress-listener interfaces
        // consumed by the call builder.
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        Call call = workItemsGetWorkItemValidateBeforeCall(organization, id, project, apiVersion, fields, asOf, expand, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<WorkItem>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
/**
* Build call for workItemsCreate
* @param organization The name of the Azure DevOps organization. (required)
* @param body The JSON Patch document representing the work item (required)
* @param project Project ID or project name (required)
* @param type The work item type of the work item to create (required)
* @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
* @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
* @param bypassRules Do not enforce the work item type rules on this update (optional)
* @param suppressNotifications Do not fire any notifications for this change (optional)
* @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
* @param progressListener Progress listener
* @param progressRequestListener Progress request listener
* @return Call to execute
* @throws ApiException If fail to serialize the request body object
*/
    public Call workItemsCreateCall(String organization, List<JsonPatchOperation> body, String project, String type, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // The JSON Patch document is posted as the request body.
        Object localVarPostBody = body;
        // create path and map variables
        // Note: the literal '$' before {type} is part of the Azure DevOps route.
        String localVarPath = "/{organization}/{project}/_apis/wit/workitems/${type}"
            .replaceAll("\\{" + "organization" + "\\}", apiClient.escapeString(organization.toString()))
            .replaceAll("\\{" + "project" + "\\}", apiClient.escapeString(project.toString()))
            .replaceAll("\\{" + "type" + "\\}", apiClient.escapeString(type.toString()));
        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
        if (validateOnly != null) {
            localVarQueryParams.addAll(apiClient.parameterToPair("validateOnly", validateOnly));
        }
        if (bypassRules != null) {
            localVarQueryParams.addAll(apiClient.parameterToPair("bypassRules", bypassRules));
        }
        if (suppressNotifications != null) {
            localVarQueryParams.addAll(apiClient.parameterToPair("suppressNotifications", suppressNotifications));
        }
        if (expand != null) {
            localVarQueryParams.addAll(apiClient.parameterToPair("$expand", expand));
        }
        if (apiVersion != null) {
            localVarQueryParams.addAll(apiClient.parameterToPair("api-version", apiVersion));
        }
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();
        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) {
            localVarHeaderParams.put("Accept", localVarAccept);
        }
        final String[] localVarContentTypes = {
            "application/json-patch+json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);
        if(progressListener != null) {
            // Wrap every response body so download progress is reported.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
                }
            });
        }
        String[] localVarAuthNames = new String[] { "oauth2", "accessToken" };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }
    /** Null-checks all required parameters, then delegates to the call builder. */
    @SuppressWarnings("rawtypes")
    private Call workItemsCreateValidateBeforeCall(String organization, List<JsonPatchOperation> body, String project, String type, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'organization' is set
        if (organization == null) {
            throw new ApiException("Missing the required parameter 'organization' when calling workItemsCreate(Async)");
        }
        // verify the required parameter 'body' is set
        if (body == null) {
            throw new ApiException("Missing the required parameter 'body' when calling workItemsCreate(Async)");
        }
        // verify the required parameter 'project' is set
        if (project == null) {
            throw new ApiException("Missing the required parameter 'project' when calling workItemsCreate(Async)");
        }
        // verify the required parameter 'type' is set
        if (type == null) {
            throw new ApiException("Missing the required parameter 'type' when calling workItemsCreate(Async)");
        }
        // verify the required parameter 'apiVersion' is set
        if (apiVersion == null) {
            throw new ApiException("Missing the required parameter 'apiVersion' when calling workItemsCreate(Async)");
        }
        Call call = workItemsCreateCall(organization, body, project, type, apiVersion, validateOnly, bypassRules, suppressNotifications, expand, progressListener, progressRequestListener);
        return call;
    }
/**
*
* Creates a single work item.
* @param organization The name of the Azure DevOps organization. (required)
* @param body The JSON Patch document representing the work item (required)
* @param project Project ID or project name (required)
* @param type The work item type of the work item to create (required)
* @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
* @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
* @param bypassRules Do not enforce the work item type rules on this update (optional)
* @param suppressNotifications Do not fire any notifications for this change (optional)
* @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
* @return WorkItem
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
    public WorkItem workItemsCreate(String organization, List<JsonPatchOperation> body, String project, String type, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand) throws ApiException {
        // Synchronous convenience overload: unwraps the ApiResponse payload.
        ApiResponse<WorkItem> resp = workItemsCreateWithHttpInfo(organization, body, project, type, apiVersion, validateOnly, bypassRules, suppressNotifications, expand);
        return resp.getData();
    }
/**
*
* Creates a single work item.
* @param organization The name of the Azure DevOps organization. (required)
* @param body The JSON Patch document representing the work item (required)
* @param project Project ID or project name (required)
* @param type The work item type of the work item to create (required)
* @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
* @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
* @param bypassRules Do not enforce the work item type rules on this update (optional)
* @param suppressNotifications Do not fire any notifications for this change (optional)
* @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
* @return ApiResponse<WorkItem>
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/
    public ApiResponse<WorkItem> workItemsCreateWithHttpInfo(String organization, List<JsonPatchOperation> body, String project, String type, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand) throws ApiException {
        // Validate, build and execute the call synchronously, deserialising
        // the response body into a WorkItem via Gson's TypeToken.
        Call call = workItemsCreateValidateBeforeCall(organization, body, project, type, apiVersion, validateOnly, bypassRules, suppressNotifications, expand, null, null);
        Type localVarReturnType = new TypeToken<WorkItem>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }
/**
 * (asynchronously)
 * Creates a single work item.
 * @param organization The name of the Azure DevOps organization. (required)
 * @param body The JSON Patch document representing the work item (required)
 * @param project Project ID or project name (required)
 * @param type The work item type of the work item to create (required)
 * @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
 * @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
 * @param bypassRules Do not enforce the work item type rules on this update (optional)
 * @param suppressNotifications Do not fire any notifications for this change (optional)
 * @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call workItemsCreateAsync(String organization, List<JsonPatchOperation> body, String project, String type, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand, final ApiCallback<WorkItem> callback) throws ApiException {
    // Progress listeners are only attached when the caller supplied a callback;
    // they forward byte-level download/upload progress to it.
    ProgressResponseBody.ProgressListener progressListener = null;
    ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
    if (callback != null) {
        progressListener = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    // Fully qualified okhttp Call type for consistency with the update-family methods.
    com.squareup.okhttp.Call call = workItemsCreateValidateBeforeCall(organization, body, project, type, apiVersion, validateOnly, bypassRules, suppressNotifications, expand, progressListener, progressRequestListener);
    Type localVarReturnType = new TypeToken<WorkItem>(){}.getType();
    apiClient.executeAsync(call, localVarReturnType, callback);
    return call;
}
/**
 * Build call for workItemsUpdate
 * @param organization The name of the Azure DevOps organization. (required)
 * @param body The JSON Patch document representing the update (required)
 * @param id The id of the work item to update (required)
 * @param project Project ID or project name (required)
 * @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
 * @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
 * @param bypassRules Do not enforce the work item type rules on this update (optional)
 * @param suppressNotifications Do not fire any notifications for this change (optional)
 * @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
 * @param progressListener Progress listener
 * @param progressRequestListener Progress request listener
 * @return Call to execute
 * @throws ApiException If fail to serialize the request body object
 */
public com.squareup.okhttp.Call workItemsUpdateCall(String organization, List<JsonPatchOperation> body, Integer id, String project, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    // The JSON Patch document is sent as the request body.
    Object localVarPostBody = body;
    // create path and map variables
    String localVarPath = "/{organization}/{project}/_apis/wit/workitems/{id}"
        .replaceAll("\\{" + "organization" + "\\}", apiClient.escapeString(organization.toString()))
        .replaceAll("\\{" + "id" + "\\}", apiClient.escapeString(id.toString()))
        .replaceAll("\\{" + "project" + "\\}", apiClient.escapeString(project.toString()));
    // Optional flags and the required api-version are passed as query parameters;
    // nulls are simply omitted from the query string.
    List<Pair> localVarQueryParams = new ArrayList<Pair>();
    List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();
    if (validateOnly != null) {
        localVarQueryParams.addAll(apiClient.parameterToPair("validateOnly", validateOnly));
    }
    if (bypassRules != null) {
        localVarQueryParams.addAll(apiClient.parameterToPair("bypassRules", bypassRules));
    }
    if (suppressNotifications != null) {
        localVarQueryParams.addAll(apiClient.parameterToPair("suppressNotifications", suppressNotifications));
    }
    if (expand != null) {
        localVarQueryParams.addAll(apiClient.parameterToPair("$expand", expand));
    }
    if (apiVersion != null) {
        localVarQueryParams.addAll(apiClient.parameterToPair("api-version", apiVersion));
    }
    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
    Map<String, Object> localVarFormParams = new HashMap<String, Object>();
    // Negotiate response/request media types; the body is a JSON Patch document.
    final String[] localVarAccepts = {
        "application/json"
    };
    final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
    if (localVarAccept != null) {
        localVarHeaderParams.put("Accept", localVarAccept);
    }
    final String[] localVarContentTypes = {
        "application/json-patch+json"
    };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
    localVarHeaderParams.put("Content-Type", localVarContentType);
    // NOTE(review): this registers a new network interceptor on the shared
    // OkHttp client each time a progress listener is supplied; interceptors
    // accumulate for the lifetime of the client. Standard swagger-codegen
    // behavior, but worth confirming it is acceptable here.
    if(progressListener != null) {
        apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
            @Override
            public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
            }
        });
    }
    String[] localVarAuthNames = new String[] { "oauth2", "accessToken" };
    return apiClient.buildCall(localVarPath, "PATCH", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
/**
 * Validates that every required parameter of workItemsUpdate is non-null
 * before delegating to {@link #workItemsUpdateCall}.
 *
 * @throws ApiException if any required parameter is missing
 */
@SuppressWarnings("rawtypes")
private com.squareup.okhttp.Call workItemsUpdateValidateBeforeCall(String organization, List<JsonPatchOperation> body, Integer id, String project, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    // Reject the first missing required parameter with a descriptive error.
    if (organization == null) {
        throw new ApiException("Missing the required parameter 'organization' when calling workItemsUpdate(Async)");
    }
    if (body == null) {
        throw new ApiException("Missing the required parameter 'body' when calling workItemsUpdate(Async)");
    }
    if (id == null) {
        throw new ApiException("Missing the required parameter 'id' when calling workItemsUpdate(Async)");
    }
    if (project == null) {
        throw new ApiException("Missing the required parameter 'project' when calling workItemsUpdate(Async)");
    }
    if (apiVersion == null) {
        throw new ApiException("Missing the required parameter 'apiVersion' when calling workItemsUpdate(Async)");
    }
    // All required parameters are present; build and return the HTTP call.
    return workItemsUpdateCall(organization, body, id, project, apiVersion, validateOnly, bypassRules, suppressNotifications, expand, progressListener, progressRequestListener);
}
/**
 * Updates a single work item and returns the updated entity.
 * @param organization The name of the Azure DevOps organization. (required)
 * @param body The JSON Patch document representing the update (required)
 * @param id The id of the work item to update (required)
 * @param project Project ID or project name (required)
 * @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
 * @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
 * @param bypassRules Do not enforce the work item type rules on this update (optional)
 * @param suppressNotifications Do not fire any notifications for this change (optional)
 * @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
 * @return WorkItem
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public WorkItem workItemsUpdate(String organization, List<JsonPatchOperation> body, Integer id, String project, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand) throws ApiException {
    // Delegate to the WithHttpInfo variant and unwrap the payload.
    return workItemsUpdateWithHttpInfo(organization, body, id, project, apiVersion, validateOnly, bypassRules, suppressNotifications, expand).getData();
}
/**
 * Updates a single work item, returning the full HTTP response wrapper.
 * @param organization The name of the Azure DevOps organization. (required)
 * @param body The JSON Patch document representing the update (required)
 * @param id The id of the work item to update (required)
 * @param project Project ID or project name (required)
 * @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
 * @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
 * @param bypassRules Do not enforce the work item type rules on this update (optional)
 * @param suppressNotifications Do not fire any notifications for this change (optional)
 * @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
 * @return ApiResponse&lt;WorkItem&gt;
 * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
 */
public ApiResponse<WorkItem> workItemsUpdateWithHttpInfo(String organization, List<JsonPatchOperation> body, Integer id, String project, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand) throws ApiException {
    // Validate required parameters and build the PATCH call (no progress listeners).
    final com.squareup.okhttp.Call httpCall = workItemsUpdateValidateBeforeCall(organization, body, id, project, apiVersion, validateOnly, bypassRules, suppressNotifications, expand, null, null);
    // Execute synchronously, deserializing the response body as a WorkItem.
    final Type returnType = new TypeToken<WorkItem>(){}.getType();
    return apiClient.execute(httpCall, returnType);
}
/**
 * (asynchronously)
 * Updates a single work item.
 * @param organization The name of the Azure DevOps organization. (required)
 * @param body The JSON Patch document representing the update (required)
 * @param id The id of the work item to update (required)
 * @param project Project ID or project name (required)
 * @param apiVersion Version of the API to use. This should be set to '6.1-preview.3' to use this version of the api. (required)
 * @param validateOnly Indicate if you only want to validate the changes without saving the work item (optional)
 * @param bypassRules Do not enforce the work item type rules on this update (optional)
 * @param suppressNotifications Do not fire any notifications for this change (optional)
 * @param expand The expand parameters for work item attributes. Possible options are { None, Relations, Fields, Links, All }. (optional)
 * @param callback The callback to be executed when the API call finishes
 * @return The request call
 * @throws ApiException If fail to process the API call, e.g. serializing the request body object
 */
public com.squareup.okhttp.Call workItemsUpdateAsync(String organization, List<JsonPatchOperation> body, Integer id, String project, String apiVersion, Boolean validateOnly, Boolean bypassRules, Boolean suppressNotifications, String expand, final ApiCallback<WorkItem> callback) throws ApiException {
    // Progress listeners are only wired up when a callback was supplied; they
    // forward byte-level download/upload progress to the caller's callback.
    ProgressResponseBody.ProgressListener progressListener = null;
    ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
    if (callback != null) {
        progressListener = new ProgressResponseBody.ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, boolean done) {
                callback.onDownloadProgress(bytesRead, contentLength, done);
            }
        };
        progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
            @Override
            public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                callback.onUploadProgress(bytesWritten, contentLength, done);
            }
        };
    }
    // Validate required parameters, build the PATCH call, and execute it
    // asynchronously; results are delivered through the callback.
    com.squareup.okhttp.Call call = workItemsUpdateValidateBeforeCall(organization, body, id, project, apiVersion, validateOnly, bypassRules, suppressNotifications, expand, progressListener, progressRequestListener);
    Type localVarReturnType = new TypeToken<WorkItem>(){}.getType();
    apiClient.executeAsync(call, localVarReturnType, callback);
    return call;
}
}
|
<filename>app/src/main/java/com/ergdyne/tasktimer/EditTaskActivity.java
package com.ergdyne.tasktimer;
import android.app.AlertDialog;
import android.app.TimePickerDialog;
import android.content.DialogInterface;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.FrameLayout;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.SimpleCursorAdapter;
import android.widget.TextView;
import android.widget.TimePicker;
import com.ergdyne.lib.AppConstants;
import com.ergdyne.lib.DBMap;
import com.ergdyne.lib.ErgAlert;
import com.ergdyne.lib.ErgFormats;
/**
* Created by j on 4/3/17.
*/
//Similar to Edit Tag activity
/**
 * Activity for editing a single task: renaming it, deleting it (by merging its
 * records into the default task), attaching/removing tags, and setting a
 * reminder length. Structurally similar to the Edit Tag activity.
 *
 * Fix: {@code onCreate} was missing its {@code @Override} annotation.
 */
public class EditTaskActivity extends AppCompatActivity {

    /**********************/
    //Variable definitions
    /**********************/

    /** Intent extra key carrying the id of the task to edit. */
    public static final String TASK_ID = "taskID";
    private static final String TAG = "EditTaskActivity";

    private long taskID;        // id of the task being edited
    private long newID;         // merge/delete target task id (used by dialog callbacks)
    private long relationID;    // task-tag relation row selected for deletion
    private TextView editedTaskName;
    private AutoCompleteTextView renameTask; //will be reusing a bit of code... Maybe a list provider is a better way?
    private CheckBox deleteTask;
    private AutoCompleteTextView tag;
    private ListView taskTagList;
    private TextView reminderDisplay;
    private RelativeLayout reminder;
    private Button addTag;
    private DBHelper dbHelper;
    private String currentTaskName;
    private long reminderLength;    // reminder length as stored in the DB
    private long newReminderLength; // pending reminder length chosen in the time picker

    /**********************/
    //Activity lifecycle
    /**********************/

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        //Cancel and confirm toolbar could be a separate class implemented here.
        {
            try {
                final LayoutInflater inflater = (LayoutInflater) getSupportActionBar().getThemedContext()
                        .getSystemService(LAYOUT_INFLATER_SERVICE);
                final View customActionBarView = inflater.inflate(
                        R.layout.actionbar_edit, null);
                // Cancel closes the activity without saving anything.
                customActionBarView.findViewById(R.id.actionbar_cancel).setOnClickListener(
                        new View.OnClickListener() {
                            @Override
                            public void onClick(View v) {
                                finish();
                            }
                        });
                // Confirm validates and applies all pending edits.
                customActionBarView.findViewById(R.id.actionbar_confirm).setOnClickListener(
                        new View.OnClickListener() {
                            @Override
                            public void onClick(View v) {
                                inputProcessing(dbHelper,
                                        taskID,
                                        currentTaskName,
                                        renameTask.getText().toString(),
                                        deleteTask.isChecked(), reminderLength, newReminderLength);
                            }
                        }
                );
                final ActionBar actionBar = getSupportActionBar();
                actionBar.setDisplayOptions(
                        ActionBar.DISPLAY_SHOW_CUSTOM,
                        ActionBar.DISPLAY_SHOW_CUSTOM | ActionBar.DISPLAY_SHOW_HOME
                                | ActionBar.DISPLAY_HOME_AS_UP);
                actionBar.setCustomView(customActionBarView,
                        new ActionBar.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT));
            } catch (NullPointerException e) {
                Log.e(TAG, "getThemedContext returned null");
            }
        }
        setContentView(R.layout.activity_two_pane);
        taskID = getIntent().getLongExtra(TASK_ID, DBMap.TaskTable.defaultID);
        //Bind the views.
        {
            FrameLayout editTaskFrame = (FrameLayout) findViewById(R.id.fragment_A);
            getLayoutInflater().inflate(R.layout.fragment_edit_task_or_tag, editTaskFrame);
            editedTaskName = (TextView) findViewById(R.id.textView_editedTaskName);
            renameTask = (AutoCompleteTextView) findViewById(R.id.autoCompleteTextView_renameTask);
            deleteTask = (CheckBox) findViewById(R.id.checkBox_deleteTask);
            reminder = (RelativeLayout) findViewById(R.id.relativeLayout_reminder);
            reminderDisplay = (TextView) findViewById(R.id.textView_reminder);
            //add tags section
            FrameLayout addTagsFrame = (FrameLayout) findViewById(R.id.fragment_B);
            getLayoutInflater().inflate(R.layout.fragment_tag_a_task, addTagsFrame);
            tag = (AutoCompleteTextView) findViewById(R.id.autoCompleteTextView_tag);
            addTag = (Button) findViewById(R.id.button_addTag);
            taskTagList = (ListView) findViewById(R.id.listView_tags);
            dbHelper = new DBHelper(EditTaskActivity.this);
        }
        //Push values to views.
        {
            currentTaskName = dbHelper.getTaskName(taskID);
            editedTaskName.setText(currentTaskName);
            ArrayAdapter<String> taskAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, dbHelper.getTaskList());
            renameTask.setThreshold(AppConstants.SUGGESTION_THRESHOLD);
            renameTask.setAdapter(taskAdapter);
            //Get reminder value.
            reminderLength = dbHelper.getReminderLength(taskID);
            newReminderLength = reminderLength;
            reminderDisplay.setText(ErgFormats.durationHM(reminderLength));
            ArrayAdapter<String> tagSuggestions = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, dbHelper.getTagList());
            tag.setThreshold(AppConstants.SUGGESTION_THRESHOLD);
            tag.setAdapter(tagSuggestions);
            addTag.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    addTagToTask(dbHelper, tag.getText().toString(), taskID);
                    tag.setText(null); // clear the field for the next tag
                }
            });
            final int setHour = ErgFormats.hours(reminderLength);
            final int setMinute = ErgFormats.minutes(reminderLength);
            reminder.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    TimePickerDialog picker;
                    picker = new TimePickerDialog(EditTaskActivity.this,
                            new TimePickerDialog.OnTimeSetListener() {
                                @Override
                                public void onTimeSet(TimePicker view, int hourOfDay, int minute) {
                                    // Picker yields hours/minutes; store as seconds.
                                    newReminderLength = (hourOfDay * 60 + minute) * 60;
                                    reminderDisplay.setText(ErgFormats.durationHM(newReminderLength));
                                }
                            }, setHour, setMinute, true);
                    picker.setTitle(getResources().getString(R.string.hint_set_alarm_title));
                    picker.show();
                }
            });
        }
        loadTaskTags();
        // Show at most one tutorial toast per visit, in priority order.
        if (!TutorialToast.make(this, TutorialToast.TUT_ADD_TAG,
                getResources().getString(R.string.tut_add_tag), TutorialToast.ADD_TAG_LENGTH)) {
            if (!TutorialToast.make(this, TutorialToast.TUT_REMINDER,
                    getResources().getString(R.string.tut_reminder), TutorialToast.REMINDER_LENGTH)) {
                //add new layer here if needed
            }
        }
    }

    /**
     * (Re)loads the list of tags attached to the current task into the list
     * view. A long-press on a row asks for confirmation, then deletes the
     * task-tag relation and refreshes the list.
     */
    private void loadTaskTags() {
        //the list of tags attached to the task
        Cursor cursor = dbHelper.getTaskTagNames(taskID);
        SimpleCursorAdapter taskTagsAdapter = new SimpleCursorAdapter(EditTaskActivity.this,
                R.layout.row_item,
                cursor,
                new String[]{DBMap.TagTable.name},
                new int[]{R.id.textView_row_item_item}, 0
        );
        taskTagList.setLongClickable(true);
        taskTagList.setAdapter(taskTagsAdapter);
        taskTagList.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
            @Override
            public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
                // Remember which relation row was long-pressed; the dialog
                // callbacks below run later and read it from the field.
                relationID = id;
                AlertDialog.Builder b = new AlertDialog.Builder(EditTaskActivity.this);
                b.setPositiveButton(getResources().getString(R.string.but_delete),
                        new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                dbHelper.deleteByID(DBMap.TaskTagTable.table, relationID);
                                loadTaskTags();
                            }
                        });
                b.setNegativeButton(getResources().getString(R.string.but_cancel),
                        new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                // do nothing
                            }
                        });
                b.show();
                return true;
            }
        });
    }

    /**********************/
    //functions used in Lifecycle
    /**********************/

    /**
     * Attaches a tag (creating it if needed) to the given task, dismisses the
     * add-tag tutorial toast, and refreshes the tag list display.
     */
    private void addTagToTask(DBHelper db, String tagName, long idTask) {
        TutorialToast.remove(this, TutorialToast.TUT_ADD_TAG);
        db.findOrInsertTaskTag(tagName, idTask);
        //Refresh the display.
        loadTaskTags();
    }

    /**********************/
    //Input processing
    /**********************/

    /**
     * Applies the confirmed edits: reminder change, rename (with merge dialog
     * if the new name already exists), or delete (merge into default task,
     * after confirmation). Returns false only when the user tried to delete
     * the default task.
     */
    //Similar to "functional" formatting from other input processing.
    private boolean inputProcessing(DBHelper db, long id, String currentName, String newName,
                                    boolean delete, long oldReminder, long newReminder) {
        if (delete && id == DBMap.TaskTable.defaultID) {
            // The default task must always exist; refuse to delete it.
            ErgAlert.alert(EditTaskActivity.this, "Error cannot delete default Task");
            return false;
        } else {
            if (!delete) {
                //Since confirm was pressed, we are going to go ahead and do a change to reminder if needed.
                if (oldReminder != newReminder) {
                    db.deleteTaskReminder(id);
                    db.insertTaskReminder(id, newReminder);
                }
                //Did the name change?
                if (newName.length() == 0 || newName.equals(currentName)) {
                    finish();
                    return true;
                } else {
                    //Process name change.
                    long existingTaskID = db.getTaskID(newName);
                    if (existingTaskID == 0) {
                        //Task name does not exist, so just rename the task and exit.
                        db.updateTaskName(id, newName);
                        finish();
                        return true;
                    } else {
                        //Task name exists, so it is a little more complicated, ask about merging first.
                        newID = existingTaskID;
                        AlertDialog.Builder b = new AlertDialog.Builder(EditTaskActivity.this);
                        b.setMessage(getResources().getString(R.string.warn_task_name_merge));
                        b.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                mergeTasks(dbHelper, taskID, newID);
                                finish();
                            }
                        });
                        b.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                            }
                        });
                        b.setIcon(android.R.drawable.ic_dialog_alert);
                        b.show();
                        return true;
                    }
                }
            } else {
                //Confirm deleting: deletion is implemented as a merge into the default task.
                newID = DBMap.TaskTable.defaultID;
                AlertDialog.Builder b = new AlertDialog.Builder(EditTaskActivity.this);
                b.setMessage(getResources().getString(R.string.warn_task_name_delete));
                b.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        //in events
                        mergeTasks(dbHelper, taskID, newID);
                        finish();
                    }
                });
                b.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                    }
                });
                b.setIcon(android.R.drawable.ic_dialog_alert);
                b.show();
                return true;
            }
        }
    }

    /**
     * Re-points all records of {@code oldID} to {@code updateID}, updates the
     * currently-running task if it was the old one, then removes the old
     * task row (unless it is the default) and its relations/reminder.
     */
    private void mergeTasks(DBHelper db, long oldID, long updateID) {
        db.updateTaskIDs(oldID, updateID);
        //Update the current task if needed.
        if (oldID == db.getCurrentTaskID()) {
            db.updateSettings(db.getCurrentStart(), updateID);
        }
        //Delete the task unless it is default... before the default check was added, this caused an unrecoverable error.
        if (oldID != DBMap.TaskTable.defaultID) {
            db.deleteByID(DBMap.TaskTable.table, oldID);
        }
        db.deleteTaskRelations(oldID);
        db.deleteTaskReminder(oldID);
    }
}
|
timing_test(function() {
  // Expected 'left' values for the four .anim elements at each sample time.
  // Each entry: [time in seconds, [left value per element]].
  var expectations = [
    [0,   ['100px', '100px', '0px',   '0px'  ]],
    [0.5, ['150px', '125px', '0px',   '0px'  ]],
    [1.0, ['200px', '150px', '0px',   '0px'  ]],
    [1.5, ['200px', '175px', '0px',   '0px'  ]],
    [2.0, ['200px', '200px', '100px', '100px']],
    [2.5, ['200px', '200px', '150px', '125px']],
    [3.0, ['200px', '200px', '200px', '150px']],
    [3.5, ['200px', '200px', '200px', '175px']],
  ];
  expectations.forEach(function(entry) {
    var time = entry[0];
    var lefts = entry[1];
    at(time, function() {
      assert_styles(
          '.anim',
          lefts.map(function(left) {
            return {'left': left};
          }));
    }, "Autogenerated");
  });
  // At the final sample every element has settled at the same position.
  at(4.0, function() {
    assert_styles('.anim', {'left':'200px'});
  }, "Autogenerated");
}, "Autogenerated checks.");
|
package se.sundsvall.messaging.model;
import javax.validation.Valid;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
 * Sender configuration for outgoing messages, grouping the SMS and e-mail
 * sender identities. Accessors, equality, constructors and the "with"-prefixed
 * builder are generated by Lombok.
 */
@Getter
@Setter
@EqualsAndHashCode
@NoArgsConstructor
@AllArgsConstructor(access = AccessLevel.PRIVATE)
@Builder(setterPrefix = "with")
public class Sender {

    // SMS sender settings; @Valid cascades bean validation into the nested object.
    @Valid
    private Sender.Sms sms;

    // E-mail sender settings; @Valid cascades bean validation into the nested object.
    @Valid
    private Sender.Email email;

    /** SMS sender identity (the display name shown to the recipient). */
    @Getter
    @Setter
    @EqualsAndHashCode
    @NoArgsConstructor
    @AllArgsConstructor(access = AccessLevel.PRIVATE)
    @Builder(setterPrefix = "with")
    public static class Sms {

        // Max 11 characters — matches the documented maxLength in the schema.
        @NotBlank
        @Size(max = 11)
        @Schema(description = "The sender of the SMS", maxLength = 11, example = "sender")
        private String name;
    }

    /** E-mail sender identity: display name, from-address and optional reply-to. */
    @Getter
    @Setter
    @EqualsAndHashCode
    @NoArgsConstructor
    @AllArgsConstructor(access = AccessLevel.PRIVATE)
    @Builder(setterPrefix = "with")
    public static class Email {

        @NotBlank
        @Schema(description = "The sender of the e-mail")
        private String name;

        // Must be present and a syntactically valid e-mail address.
        @NotBlank
        @Schema(description = "Sender e-mail address", example = "<EMAIL>")
        @javax.validation.constraints.Email
        private String address;

        // Optional; validated as an e-mail address only when present.
        @Schema(description = "Reply-to e-mail address", example = "<EMAIL>")
        @javax.validation.constraints.Email
        private String replyTo;
    }
}
|
package br.edu.up.pessoa.model;
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import org.springframework.lang.NonNull;
/**
 * JPA entity representing a person (Pessoa), carrying naming details, contact
 * preferences and a reference to the person's credentials (Senha).
 *
 * NOTE(review): the @NonNull annotations here are Spring's — they are
 * compile-time/IDE hints only and do not generate database NOT NULL
 * constraints; confirm nullability is also enforced at the schema level.
 */
@Entity
public class Pessoa {

    // Surrogate primary key, generated by the persistence provider.
    @Id
    @GeneratedValue
    private Integer entidadeEmpresaId;

    // One-to-one link to the person's credentials row.
    @OneToOne
    @JoinColumn
    private Senha senha;

    // Person type discriminator (semantics defined by callers).
    @NonNull
    private String tipoPessoa;

    private String estiloNome;
    private String titulo;

    @NonNull
    private String primeiroNome;
    @NonNull
    private String segundoNome;
    @NonNull
    private String sobreNome;
    private String sufixo;

    private String emailPromocional;
    private String informacoesAdicionais;
    private String informacoesSociais;

    // Last-modified timestamp; defaulted to "now" by the no-arg constructor.
    @NonNull
    private Date dataModificacao;

    /** Default constructor required by JPA; stamps the modification date. */
    public Pessoa() {
        super();
        dataModificacao = new Date();
    }

    /** All-fields constructor (id excluded — it is generated). */
    public Pessoa(Senha senha, String tipoPessoa, String estiloNome, String titulo, String primeiroNome,
            String segundoNome, String sobreNome, String sufixo, String emailPromocional, String informacoesAdicionais,
            String informacoesSociais, Date dataModificacao) {
        super();
        this.senha = senha;
        this.tipoPessoa = tipoPessoa;
        this.estiloNome = estiloNome;
        this.titulo = titulo;
        this.primeiroNome = primeiroNome;
        this.segundoNome = segundoNome;
        this.sobreNome = sobreNome;
        this.sufixo = sufixo;
        this.emailPromocional = emailPromocional;
        this.informacoesAdicionais = informacoesAdicionais;
        this.informacoesSociais = informacoesSociais;
        this.dataModificacao = dataModificacao;
    }

    // --- Plain accessors (no additional logic) ---

    public Integer getEntidadeEmpresaId() {
        return entidadeEmpresaId;
    }
    public void setEntidadeEmpresaId(Integer entidadeEmpresaId) {
        this.entidadeEmpresaId = entidadeEmpresaId;
    }
    public Senha getSenha() {
        return senha;
    }
    public void setSenha(Senha senha) {
        this.senha = senha;
    }
    public String getTipoPessoa() {
        return tipoPessoa;
    }
    public void setTipoPessoa(String tipoPessoa) {
        this.tipoPessoa = tipoPessoa;
    }
    public String getEstiloNome() {
        return estiloNome;
    }
    public void setEstiloNome(String estiloNome) {
        this.estiloNome = estiloNome;
    }
    public String getTitulo() {
        return titulo;
    }
    public void setTitulo(String titulo) {
        this.titulo = titulo;
    }
    public String getPrimeiroNome() {
        return primeiroNome;
    }
    public void setPrimeiroNome(String primeiroNome) {
        this.primeiroNome = primeiroNome;
    }
    public String getSegundoNome() {
        return segundoNome;
    }
    public void setSegundoNome(String segundoNome) {
        this.segundoNome = segundoNome;
    }
    public String getSobreNome() {
        return sobreNome;
    }
    public void setSobreNome(String sobreNome) {
        this.sobreNome = sobreNome;
    }
    public String getSufixo() {
        return sufixo;
    }
    public void setSufixo(String sufixo) {
        this.sufixo = sufixo;
    }
    public String getEmailPromocional() {
        return emailPromocional;
    }
    public void setEmailPromocional(String emailPromocional) {
        this.emailPromocional = emailPromocional;
    }
    public String getInformacoesAdicionais() {
        return informacoesAdicionais;
    }
    public void setInformacoesAdicionais(String informacoesAdicionais) {
        this.informacoesAdicionais = informacoesAdicionais;
    }
    public String getInformacoesSociais() {
        return informacoesSociais;
    }
    public void setInformacoesSociais(String informacoesSociais) {
        this.informacoesSociais = informacoesSociais;
    }
    public Date getDataModificacao() {
        return dataModificacao;
    }
    public void setDataModificacao(Date dataModificacao) {
        this.dataModificacao = dataModificacao;
    }
}
|
# Exercise IBC fungible-token transfers between stargaze, gaia and osmosis
# using a Hermes relayer, then print the resulting balances.
# -e: abort on first failure; -x: echo each command for CI logs.
set -ex
# Install the CI relayer configuration and the three chains' keys.
mkdir -p ~/.hermes/
cp ./scripts/ci/hermes/config.toml ~/.hermes/
hermes keys add stargaze -f $PWD/scripts/ci/hermes/stargaze.json
hermes keys add gaia -f $PWD/scripts/ci/hermes/gaia.json
hermes keys add osmosis -f $PWD/scripts/ci/hermes/osmosis.json
# Round-trip transfers stargaze<->gaia, then stargaze<->osmosis.
# The sleeps give the relayer time to deliver packets between rounds.
hermes tx raw ft-transfer stargaze gaia transfer channel-0 9999 -d stake -o 1000 -n 2
hermes tx raw ft-transfer gaia stargaze transfer channel-0 9999 -d ustarx -o 1000 -n 2
sleep 10
hermes tx raw ft-transfer stargaze osmosis transfer channel-0 9999 -d uosmo -o 1000 -n 2
sleep 10
hermes tx raw ft-transfer osmosis stargaze transfer channel-1 9999 -d ustarx -o 1000 -n 2
sleep 30
# Well-known CI test account addresses on each chain.
export GAIA_ADDRESS=cosmos1wt3khka7cmn5zd592x430ph4zmlhf5gfztgha6
export STARGAZE_ADDRESS=stars12g0xe2ld0k5ws3h7lmxc39d4rpl3fyxp5qys69
export OSMOSIS_ADDRESS=osmo1qk2rqkk28z8v3d7npupz33zqc6dae6n9a2x5v4
# Dump final balances (pretty-printed) so the CI log shows the transfer results.
curl -s http://gaia:1317/bank/balances/$GAIA_ADDRESS | jq '.'
curl -s http://stargaze:1317/bank/balances/$STARGAZE_ADDRESS | jq '.'
curl -s http://osmosis:1317/bank/balances/$OSMOSIS_ADDRESS | jq '.'
|
<filename>src/shared/drivers/gps/ublox/UbloxGPSData.h
/* Copyright (c) 2020 Skyward Experimental Rocketry
* Author: <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include "sensors/SensorData.h"
/// Log record for the u-blox GPS driver. The data fields themselves are
/// inherited from GPSData (declared in sensors/SensorData.h); this struct only
/// adds CSV serialization helpers for the on-board logger.
struct UbloxGPSData : public GPSData
{
    /// CSV header line; column order must match print() below.
    static std::string header()
    {
        return "gps_timestamp,latitude,longitude,height,velocity_north,"
               "velocity_east,velocity_down,speed,track,num_satellites,fix\n";
    }

    /// Append one CSV row for this sample to the stream.
    /// The int casts force num_satellites and fix to print as numbers —
    /// presumably they are 8-bit integer types that would otherwise be
    /// streamed as characters (TODO confirm against GPSData).
    void print(std::ostream &os) const
    {
        os << gps_timestamp << "," << latitude << "," << longitude << ","
           << height << "," << velocity_north << "," << velocity_east << ","
           << velocity_down << "," << speed << "," << track << ","
           << (int)num_satellites << "," << (int)fix << "\n";
    }
};
|
import React, { useState } from 'react';
function App() {
const [items, setItems] = useState([
{ name: 'Apples', selected: false },
{ name: 'Oranges', selected: false },
{ name: 'Bananas', selected: false },
]);
const toggleItemSelection = (index) => {
const updatedItems = [...items];
updatedItems[index].selected = !updatedItems[index].selected;
setItems(updatedItems);
}
return (
<div>
{items.map((item, index) => (
<div key={item.name}>
<input
type="checkbox"
checked={item.selected}
onChange={() => toggleItemSelection(index)}
/>
<label>{item.name}</label>
</div>
))}
</div>
);
}
export default App;
|
# Launch the KBase Narrative dev container.
#
# Required: ENV   - deployment environment: ci, next, appdev, or prod
# Optional: PORT  - host port to publish Jupyter on (default 8888)
#           mount - "t" (default) bind-mounts the local source dirs into the
#                   container for live development; any other value skips it.

root=$(git rev-parse --show-toplevel)
kbase_dir=kbase-extension
src_dir=src
test_dir=test
ext_components_dir=kbase-extension/static/ext_components
nbextension_dir=nbextensions
container_root=/kb/dev_container/narrative/

if [ -z "$ENV" ]; then
    echo "The 'ENV' environment variable is required, set to either ci, next, appdev, or prod"
    exit 1
fi

if [ -z "$PORT" ]; then
    # BUG FIX: was `$PORT=8888`, which expands to `=8888` (command not found)
    # and never assigns the default port.
    PORT=8888
fi

echo "Starting Narrative for environment '${ENV}'"

mount_local_dirs="${mount:-t}"
if [ "${mount_local_dirs}" == "t" ]; then
    echo "Mounting local dirs ${mount_local_dirs}"
    docker run \
        --dns=8.8.8.8 \
        -e "CONFIG_ENV=${ENV}" \
        -p ${PORT}:8888 \
        --network=kbase-dev \
        --name=narrative \
        --mount type=bind,src=${root}/${kbase_dir},dst=${container_root}/${kbase_dir} \
        --mount type=bind,src=${root}/${test_dir},dst=${container_root}/${test_dir} \
        --mount type=bind,src=${root}/${src_dir},dst=${container_root}/${src_dir} \
        --mount type=bind,src=${root}/${nbextension_dir},dst=${container_root}/kbase-extension/static/${nbextension_dir} \
        --rm -it \
        kbase/narrative:dev
else
    echo "Not mounting local dirs ${mount}"
    docker run \
        --dns=8.8.8.8 \
        -e "CONFIG_ENV=${ENV}" \
        -p ${PORT}:8888 \
        --network=kbase-dev \
        --name=narrative \
        --rm -it \
        kbase/narrative:dev
fi
|
# Capistrano deployment settings for the sample app.

# Pin the Capistrano version this config file was written against.
lock "~> 3.11.0"
#
# Change this to your own GitHub user name
#
set :repo_url, "https://github.com/kenjimanishi/20180430-schoo.git"
#
# Set the deploy target directory path to match your web server environment
#
set :deploy_to, "/var/www/schoo"
|
# import libraries
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import accuracy_score
# create features using count vectorizer
# NOTE(review): `words` and `y` are not defined in this snippet — presumably
# built earlier (raw document text and matching labels); confirm upstream.
# NOTE(review): wrapping `words` in a one-element list builds a ONE-document
# corpus, so X has a single row and the 80/20 split below cannot produce both
# a train and a test set — `words` likely needs to be a list of documents.
# TODO confirm intent against the surrounding notebook/script.
corpus = [words]
vect = CountVectorizer(stop_words='english')
X = vect.fit_transform(corpus)
# split the dataset into train and test (fixed random_state for reproducibility)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1)
# fit the model to the training set
model = MultinomialNB().fit(X_train, y_train)
# make predictions on the test data
y_pred = model.predict(X_test)
# determine the accuracy of the predictions
score = accuracy_score(y_test, y_pred)
# print the accuracy as a percentage with two decimals
print("Accuracy:", round(score*100, 2), "%")
|
#!/bin/bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# from scala-lang 2.10.4
# restore stty settings (echo in particular)
# Restore the terminal settings captured at startup (re-enables echo), then
# clear saved_stty so a repeated call becomes a no-op.
function restoreSttySettings() {
  if [[ -n $SCALA_RUNNER_DEBUG ]]; then
    echo "restoring stty:"
    echo "$saved_stty"
  fi
  # intentionally unquoted: saved_stty holds a whole stty argument list
  stty $saved_stty
  saved_stty=""
}
# Exit hook: put the terminal back (only if we saved settings) and exit.
# NOTE(review): scala_exit_status is never assigned anywhere in this script,
# so this exits with an empty status (treated as 0) — confirm intent.
function onExit() {
  [[ "$saved_stty" != "" ]] && restoreSttySettings
  exit $scala_exit_status
}
# to reenable echo if we are interrupted before completing.
trap onExit INT

# save terminal settings
saved_stty=$(stty -g 2>/dev/null)
# clear on error so we don't later try to restore them.
# BUG FIX: the original test was `[[ ! $? ]]`, which is never true because
# `$?` always expands to a non-empty string; compare the status numerically
# so saved_stty really is cleared when `stty -g` failed.
if [[ $? -ne 0 ]]; then
  saved_stty=""
fi

bin=`dirname "$0"`
bin=`cd "$bin"; pwd`

. "$bin"/config.sh

FLINK_CLASSPATH=`constructFlinkClassPath`

# https://issues.scala-lang.org/browse/SI-6502, cant load external jars interactively
# in scala shell since 2.10, has to be done at startup
# checks arguments for additional classpath and adds it to the "standard classpath"
EXTERNAL_LIB_FOUND=false
for ((i=1;i<=$#;i++))
do
    if [[ ${!i} = "-a" || ${!i} = "--addclasspath" ]]
    then
        EXTERNAL_LIB_FOUND=true

        #adding to classpath
        k=$((i+1))
        j=$((k+1))
        echo " "
        echo "Additional classpath:${!k}"
        echo " "
        EXT_CLASSPATH="${!k}"
        FLINK_CLASSPATH="$FLINK_CLASSPATH:${!k}"

        # drop the consumed "-a <path>" pair from the positional parameters
        set -- "${@:1:$((i-1))}" "${@:j}"
    fi
done

if [ "$FLINK_IDENT_STRING" = "" ]; then
    FLINK_IDENT_STRING="$USER"
fi

MODE=$1
LOG=$FLINK_LOG_DIR/flink-$FLINK_IDENT_STRING-scala-shell-$MODE-$HOSTNAME.log

if [[ $MODE = "yarn" ]]
then
    LOG4J_CONFIG=log4j-session.properties
    LOGBACK_CONFIG=logback-session.xml
    FLINK_CLASSPATH=$FLINK_CLASSPATH:$HADOOP_CLASSPATH:$HADOOP_CONF_DIR:$YARN_CONF_DIR
else
    # Set the default log config when MODE is something other than yarn. eg: local, remote or other invalid mode.
    LOG4J_CONFIG=log4j.properties
    LOGBACK_CONFIG=logback.xml
fi

log_setting=("-Dlog.file=$LOG" "-Dlog4j.configuration=file:$FLINK_CONF_DIR/$LOG4J_CONFIG" "-Dlog4j.configurationFile=file:$FLINK_CONF_DIR/$LOG4J_CONFIG" "-Dlogback.configurationFile=file:$FLINK_CONF_DIR/$LOGBACK_CONFIG")

# ROBUSTNESS: "$@" is quoted so user arguments containing spaces survive
# word splitting (was unquoted $@).
if ${EXTERNAL_LIB_FOUND}
then
    "$JAVA_RUN" -Dscala.color -cp "$FLINK_CLASSPATH" "${log_setting[@]}" org.apache.flink.api.scala.FlinkShell "$@" --addclasspath "$EXT_CLASSPATH"
else
    "$JAVA_RUN" -Dscala.color -cp "$FLINK_CLASSPATH" "${log_setting[@]}" org.apache.flink.api.scala.FlinkShell "$@"
fi

#restore echo
onExit
|
<!DOCTYPE html>
<html>
<head>
<title>States and Capitals</title>
</head>
<body>
<h1>States and Capitals</h1>
<ul>
<li>Alabama - Montgomery</li>
<li>Alaska - Juneau</li>
<li>Arizona - Phoenix</li>
<li>Arkansas - Little Rock</li>
<li>California - Sacramento</li>
<li>Colorado - Denver</li>
<li>Connecticut - Hartford</li>
<li>Delaware - Dover</li>
<li>Florida - Tallahassee</li>
<li>Georgia - Atlanta</li>
<li>Hawaii - Honolulu</li>
<li>Idaho - Boise</li>
<li>Illinois - Springfield</li>
<li>Indiana - Indianapolis</li>
<li>Iowa - Des Moines</li>
<li>Kansas - Topeka</li>
<li>Kentucky - Frankfort</li>
<li>Louisiana - Baton Rouge</li>
<li>Maine - Augusta</li>
<li>Maryland - Annapolis</li>
<li>Massachusetts - Boston</li>
<li>Michigan - Lansing</li>
<li>Minnesota - Saint Paul</li>
<li>Mississippi - Jackson</li>
<li>Missouri - Jefferson City</li>
<li>Montana - Helena</li>
<li>Nebraska - Lincoln</li>
<li>Nevada - Carson City</li>
<li>New Hampshire - Concord</li>
<li>New Jersey - Trenton</li>
<li>New Mexico - Santa Fe</li>
<li>New York - Albany</li>
<li>North Carolina - Raleigh</li>
<li>North Dakota - Bismarck</li>
<li>Ohio - Columbus</li>
<li>Oklahoma - Oklahoma City</li>
<li>Oregon - Salem</li>
<li>Pennsylvania - Harrisburg</li>
<li>Rhode Island - Providence</li>
<li>South Carolina - Columbia</li>
<li>South Dakota - Pierre</li>
<li>Tennessee - Nashville</li>
<li>Texas - Austin</li>
<li>Utah - Salt Lake City</li>
<li>Vermont - Montpelier</li>
<li>Virginia - Richmond</li>
<li>Washington - Olympia</li>
<li>West Virginia - Charleston</li>
<li>Wisconsin - Madison</li>
<li>Wyoming - Cheyenne</li>
</ul>
</body>
</html>
|
<filename>data_structures/Academind JavaScript Data Structures/course_resources/01_Getting_Started/01-arrays/app.js
// Array basics demo: indexing, length, iteration, push, findIndex, splice.
const names = ['Max', 'Manu', 'Julie', 'Max'];

// Index starts at zero
console.log(names[1]);
console.log(names.length);

for (const el of names) {
  console.log(el);
}

names.push('Julie');
console.log(names.length);

// BUG FIX: julieIndex was computed but never used — the original spliced the
// hard-coded index 2, which only removes 'Julie' while she happens to sit at
// position 2. Remove the first 'Julie' wherever it actually is, and guard
// against the not-found case (findIndex returns -1).
const julieIndex = names.findIndex(el => el === 'Julie');
if (julieIndex !== -1) {
  names.splice(julieIndex, 1);
}
console.log(names);
|
<gh_stars>0
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.extension.elytron;
import static org.wildfly.extension.elytron._private.ElytronSubsystemMessages.ROOT_LOGGER;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import javax.naming.directory.Attributes;
import javax.naming.ldap.LdapName;
import org.jboss.msc.inject.Injector;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StartException;
import org.jboss.msc.service.StopContext;
import org.jboss.msc.value.InjectedValue;
import org.wildfly.extension.elytron.capabilities.DirContextSupplier;
import org.wildfly.security.keystore.LdapKeyStore;
import org.wildfly.security.keystore.UnmodifiableKeyStore;
/**
* A {@link Service} responsible for a single {@link LdapKeyStore} instance.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
/*
 * MSC service wrapping one LdapKeyStore: holds the configuration injected
 * from the management model, builds the keystore on start(), and exposes it
 * in two flavours — a modifiable instance and an unmodifiable wrapper (the
 * service value).
 */
class LdapKeyStoreService implements ModifiableKeyStoreService {
    // Supplier of the DirContext used to reach the LDAP server; injected by MSC.
    private final InjectedValue<DirContextSupplier> dirContextSupplierInjector = new InjectedValue<>();
    // Search configuration; null values leave the builder defaults in place (see start()).
    private final String searchPath;
    private final String filterAlias;
    private final String filterCertificate;
    private final String filterIterate;
    // Configuration for creating new LDAP entries.
    private final LdapName createPath;
    private final String createRdn;
    private final Attributes createAttributes;
    // LDAP attribute names/types mapping entries to keystore content.
    private final String aliasAttribute;
    private final String certificateAttribute;
    private final String certificateType;
    private final String certificateChainAttribute;
    private final String certificateChainEncoding;
    private final String keyAttribute;
    private final String keyType;
    // Both views of the keystore; volatile because start/stop and reads may
    // happen on different threads.
    private volatile KeyStore modifiableKeyStore = null;
    private volatile KeyStore unmodifiableKeyStore = null;
    LdapKeyStoreService(String searchPath, String filterAlias, String filterCertificate,
            String filterIterate, LdapName createPath, String createRdn, Attributes createAttributes,
            String aliasAttribute, String certificateAttribute, String certificateType,
            String certificateChainAttribute, String certificateChainEncoding,
            String keyAttribute, String keyType) {
        this.searchPath = searchPath;
        this.filterAlias = filterAlias;
        this.filterCertificate = filterCertificate;
        this.filterIterate = filterIterate;
        this.createPath = createPath;
        this.createRdn = createRdn;
        this.createAttributes = createAttributes;
        this.aliasAttribute = aliasAttribute;
        this.certificateAttribute = certificateAttribute;
        this.certificateType = certificateType;
        this.certificateChainAttribute = certificateChainAttribute;
        this.certificateChainEncoding = certificateChainEncoding;
        this.keyAttribute = keyAttribute;
        this.keyType = keyType;
    }
    // Injection point wired up by the resource definition when the service is installed.
    Injector<DirContextSupplier> getDirContextSupplierInjector() {
        return dirContextSupplierInjector;
    }
    /*
     * Service Lifecycle Related Methods
     */
    @Override
    public void start(StartContext startContext) throws StartException {
        try {
            LdapKeyStore.Builder builder = LdapKeyStore.builder()
                    .setDirContextSupplier(dirContextSupplierInjector.getValue())
                    .setSearchPath(searchPath);
            // Optional settings: only override the builder defaults when the
            // management model supplied a value.
            if (filterAlias != null) builder.setFilterAlias(filterAlias);
            if (filterCertificate != null) builder.setFilterCertificate(filterCertificate);
            if (filterIterate != null) builder.setFilterIterate(filterIterate);
            if (createPath != null) builder.setCreatePath(createPath);
            if (createRdn != null) builder.setCreateRdn(createRdn);
            if (createAttributes != null) builder.setCreateAttributes(createAttributes);
            if (aliasAttribute != null) builder.setAliasAttribute(aliasAttribute);
            if (certificateAttribute != null) builder.setCertificateAttribute(certificateAttribute);
            if (certificateType != null) builder.setCertificateType(certificateType);
            if (certificateChainAttribute != null) builder.setCertificateChainAttribute(certificateChainAttribute);
            if (certificateChainEncoding != null) builder.setCertificateChainEncoding(certificateChainEncoding);
            if (keyAttribute != null) builder.setKeyAttribute(keyAttribute);
            if (keyType != null) builder.setKeyType(keyType);
            KeyStore keyStore = builder.build();
            keyStore.load(null); // initialize
            this.modifiableKeyStore = keyStore;
            this.unmodifiableKeyStore = UnmodifiableKeyStore.unmodifiableKeyStore(keyStore);
        } catch (GeneralSecurityException | IOException e) {
            throw ROOT_LOGGER.unableToStartService(e);
        }
    }
    @Override
    public void stop(StopContext stopContext) {
        // Drop both references so a stopped service cannot hand out stale keystores.
        this.modifiableKeyStore = null;
        this.unmodifiableKeyStore = null;
    }
    // Service value: the read-only view handed to ordinary consumers.
    @Override
    public KeyStore getValue() throws IllegalStateException, IllegalArgumentException {
        return unmodifiableKeyStore;
    }
    // Modifiable view for management operations that alter keystore content.
    public KeyStore getModifiableValue() {
        return modifiableKeyStore;
    }
}
|
<filename>lib/active_scaffold/helpers/association_helpers.rb<gh_stars>100-1000
module ActiveScaffold
  module Helpers
    # Helpers for building, caching and sorting the record lists shown for
    # association columns (select dropdowns, record-select widgets, subforms).
    module AssociationHelpers
      # Cache the options for select.
      # Memoizes the yielded result, keyed by association/classes and the
      # conditions, when the scaffold config enables caching and the caller
      # allows it; otherwise just yields.
      def cache_association_options(association, conditions, klass, cache = true)
        if active_scaffold_config.cache_association_options && cache
          @_associations_cache ||= Hash.new { |h, k| h[k] = {} }
          key = [association.name, association.inverse_klass.name, klass.name].join('/')
          @_associations_cache[key][conditions] ||= yield
        else
          yield
        end
      end
      # Provides a way to honor the :conditions on an association while searching the association's klass.
      # Returns an Array of candidate records. For a polymorphic belongs_to
      # the class comes from the record's foreign-type column (empty Array if
      # blank). Caching is skipped when a block customizes the relation.
      def association_options_find(association, conditions = nil, klass = nil, record = nil)
        if klass.nil? && association.polymorphic?
          class_name = record.send(association.foreign_type) if association.belongs_to?
          return [] if class_name.blank?
          klass = class_name.constantize
          cache = !block_given?
        else
          cache = !block_given? && klass.nil?
          klass ||= association.klass
        end
        conditions ||= options_for_association_conditions(association, record)
        cache_association_options(association, conditions, klass, cache) do
          klass = association_klass_scoped(association, klass, record)
          relation = klass.where(conditions)
          column = column_for_association(association, record)
          if column&.includes
            include_assoc = column.includes.find { |assoc| assoc.is_a?(Hash) && assoc.include?(association.name) }
            relation = relation.includes(include_assoc[association.name]) if include_assoc
          end
          if column&.sort && column.sort&.dig(:sql)
            # with threadsafe enabled, column.sort[:sql] returns proxied strings and
            # regexp capture won't work (Rails uses it internally), so to_s is needed
            relation = relation.order(Array(column.sort[:sql]).map(&:to_s))
          end
          relation = yield(relation) if block_given?
          relation.to_a
        end
      end
      # Looks up the scaffold column config for the association on the
      # record's class; logs a warning and returns nil if that lookup raises.
      def column_for_association(association, record)
        active_scaffold_config_for(record.class).columns[association.name]
      rescue StandardError => e
        message = "Error on config for #{record.class.name}:"
        Rails.logger.warn "#{message}\n#{e.message}\n#{e.backtrace.join("\n")}"
        nil
      end
      # When rendering inside a nested through-association, narrows klass to
      # the records reachable through the parent; otherwise returns klass.
      def association_klass_scoped(association, klass, record)
        if nested? && nested.through_association? && nested.child_association&.through_reflection == association
          # only ActiveRecord associations
          if nested.association.through_reflection.collection?
            nested_parent_record.send(nested.association.through_reflection.name)
          else
            klass.where(association.association_primary_key => nested_parent_record.send(nested.association.through_reflection.name)&.id)
          end
        else
          klass
        end
      end
      # Sorts the options for select.
      # Ruby-side sort (label_method or to_label) only when the column has no
      # SQL sort clause of its own.
      def sorted_association_options_find(association, conditions = nil, record = nil)
        options = association_options_find(association, conditions, nil, record)
        column = column_for_association(association, record)
        unless column&.sort && column.sort&.dig(:sql)
          method = column.options[:label_method] if column
          options = options.sort_by(&(method || :to_label).to_sym)
        end
        options
      end
      # Count of association targets matching the given conditions.
      def association_options_count(association, conditions = nil)
        association.klass.where(conditions).count
      end
      # Count using the same conditions association_options_find would apply.
      def options_for_association_count(association, record)
        conditions = options_for_association_conditions(association, record)
        association_options_count(association, conditions)
      end
      # A useful override for customizing the records present in an association dropdown.
      # Should work in both the subform and form_ui=>:select modes.
      # Check association.name to specialize the conditions per-column.
      def options_for_association_conditions(association, record = nil)
        return nil if association.through?
        return nil unless association.has_one? || association.has_many?
        # Find only orphaned objects
        {association.foreign_key => nil}
      end
      # record_select options: wires the selection callback to the
      # edit-associated URL for the "add existing" action.
      def record_select_params_for_add_existing(association, edit_associated_url_options, record)
        {:onselect => "ActiveScaffold.record_select_onselect(#{url_for(edit_associated_url_options).to_json}, #{active_scaffold_id.to_json}, id);"}
      end
    end
  end
end
|
package models
import org.joda.time.DateTime
import play.api.libs.json.{Writes, Reads, Json}
/**
* Created by Jo on 2/4/16.
*/
// Domain model for a user group; `created`/`updated` are Joda-Time stamps.
case class Group (
  groupId: Long,
  groupName: String,
  creator: String,
  profileImg: String,
  description: String,
  created: DateTime,
  updated: DateTime
)
// Play-JSON (de)serializers so Group can be read from / written to JSON.
object Group {
  implicit val writer: Writes[Group] = Json.writes[Group]
  implicit val reader: Reads[Group] = Json.reads[Group]
}
|
package com.decathlon.ara.service.dto.executedscenario;
import java.util.Date;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Transfer object for one executed Cucumber scenario: identification
 * (feature file/name/tags, cucumber id, line, content), run metadata
 * (severity, start time, API server, Selenium node) and the URLs of the
 * artifacts captured for the run (screenshot, video, logs, HTTP requests,
 * JavaScript errors, diff and Cucumber reports).
 * Getters/setters/equals/hashCode/toString and constructors are generated
 * by Lombok.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ExecutedScenarioDTO {
    private Long id;
    private String featureFile;
    private String featureName;
    private String featureTags;
    private String tags;
    private String severity;
    private String name;
    private String cucumberId;
    private int line;
    private String content;
    private Date startDateTime;
    private String screenshotUrl;
    private String videoUrl;
    private String logsUrl;
    private String httpRequestsUrl;
    private String javaScriptErrorsUrl;
    private String diffReportUrl;
    private String cucumberReportUrl;
    private String apiServer;
    private String seleniumNode;
}
|
#!/bin/bash
# ec2_get_root_volume_sizes
# [AWS_PROFILE=<aws cli profile name>] ./ec2_get_root_volume_sizes <environment> <project>
#
# Lists each matching instance's root EBS volume id and size, filtered by the
# environment/project tags.

environment="${1}";
project="${2}";

# BUG FIX: the usage line above says AWS_PROFILE is optional, but the original
# always passed `--profile ${AWS_PROFILE}`, which breaks the aws CLI call when
# the variable is unset (empty profile name). Build the profile argument only
# when a profile was actually supplied.
profile_args=()
if [ -n "${AWS_PROFILE}" ]; then
    profile_args=(--profile "${AWS_PROFILE}")
fi

echo -e "
EBS Root Volume Sizes
##############################################################
|     Name    |    Instance    |   Volume Id    |    Size    |
##############################################################";

# Each input line is "<name> <instance-id> <volume-id>"; look up the size of
# the volume (field 3) and append it to the line.
while read line;
do
    echo -e "$line $(aws "${profile_args[@]}" \
        ec2 \
        describe-volumes \
        --volume-id \
        "$(echo $line \
            | cut -f3 -d" "
        )" \
        --query "Volumes[*].Size"
    )";
done<<<"$(aws "${profile_args[@]}" \
    ec2 describe-instances \
    --filters "Name=tag:environment,Values=${environment}" \
        "Name=tag:project,Values=${project}" \
    --query "Reservations[*]
        .Instances[*]
        .[
            BlockDeviceMappings[?DeviceName=='/dev/sda1'].Ebs.VolumeId,
            InstanceId,
            Tags[?Key=='Name'].Value[]
        ]" \
    | sed 'N;N;s/\n/ /g' \
    | awk '{printf "%-30s %-10s %s\n",$3, $1, $2}' \
    | sort -t- -k1,1 -k2,2n
)";
|
#!/bin/bash
# Build the PX4/MAVLink aarch64 cross-compile image, run it once (interactive)
# with ./proj shared so firmware sources/artifacts persist, then build the
# SITL runtime image.

docker build -f Dockerfile_build_px4_mavlink_aarch64 -t px4_mavlink_build .
# creates a shared proj directory where firmware sources can live
mkdir -p $PWD/proj
# --rm discards the container afterwards; only ./proj contents survive
docker run --rm --name px4_build0 -v $PWD/proj:/proj -it px4_mavlink_build
# This embeds px4_sitl in a container for easy running
docker build -f Dockerfile_sitl_mavlink_px4_aarch64 -t px4_mavlink_sitl .
|
/*
* Copyright © 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.http.netty;
import io.servicetalk.concurrent.SingleSource;
import io.servicetalk.transport.netty.internal.ChannelInitializer;
import io.servicetalk.transport.netty.internal.StacklessClosedChannelException;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.handler.ssl.SniCompletionEvent;
import javax.annotation.Nullable;
import static io.servicetalk.transport.netty.internal.ChannelCloseUtils.assignConnectionError;
/**
 * A Single that completes with the {@link SniCompletionEvent} fired when SNI
 * resolution finishes on the channel, or fails if the channel errors or goes
 * inactive first.
 */
final class SniCompleteChannelSingle extends ChannelInitSingle<SniCompletionEvent> {
    SniCompleteChannelSingle(final Channel channel, final ChannelInitializer channelInitializer) {
        super(channel, channelInitializer);
    }
    @Override
    protected ChannelHandler newChannelHandler(final Subscriber<? super SniCompletionEvent> subscriber) {
        return new SniCompleteChannelHandler(subscriber);
    }
    /**
     * Pipeline handler that delivers the SniCompletionEvent to the subscriber.
     * The subscriber field is nulled once terminated so the subscriber is
     * signalled at most once; later errors flow down the pipeline instead.
     */
    private static final class SniCompleteChannelHandler extends ChannelInboundHandlerAdapter {
        @Nullable
        private Subscriber<? super SniCompletionEvent> subscriber; // null once a terminal signal was sent
        SniCompleteChannelHandler(Subscriber<? super SniCompletionEvent> subscriber) {
            this.subscriber = subscriber;
        }
        @Override
        public void handlerAdded(final ChannelHandlerContext ctx) throws Exception {
            super.handlerAdded(ctx);
            // Force a read to get the SSL handshake started. We initialize pipeline before
            // SslHandshakeCompletionEvent will complete, therefore, no data will be propagated before we finish
            // initialization.
            ctx.read();
        }
        @Override
        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
            if (evt instanceof SniCompletionEvent && subscriber != null) {
                // Job done: remove ourselves, then signal success exactly once.
                ctx.pipeline().remove(this);
                Subscriber<? super SniCompletionEvent> subscriberCopy = subscriber;
                subscriber = null;
                subscriberCopy.onSuccess((SniCompletionEvent) evt);
            }
            // Always forward the event so later handlers also observe it.
            ctx.fireUserEventTriggered(evt);
        }
        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
            if (subscriber != null) {
                propagateError(ctx.channel(), StacklessClosedChannelException.newInstance(
                        SniCompleteChannelHandler.class, "exceptionCaught(...)").initCause(cause));
            } else {
                // Propagate exception in the pipeline if subscriber is already complete
                ctx.fireExceptionCaught(cause);
                ctx.close();
            }
        }
        @Override
        public void channelInactive(ChannelHandlerContext ctx) {
            if (subscriber != null) {
                // Channel closed before SNI completed: fail the Single.
                propagateError(ctx.channel(), StacklessClosedChannelException.newInstance(
                        SniCompleteChannelHandler.class, "channelInactive(...)"));
            } else {
                ctx.fireChannelInactive();
            }
        }
        // Records the error on the channel, then signals onError exactly once.
        private void propagateError(Channel channel, Throwable cause) {
            assert subscriber != null;
            assignConnectionError(channel, cause);
            final SingleSource.Subscriber<? super SniCompletionEvent> subscriberCopy = subscriber;
            subscriber = null;
            subscriberCopy.onError(cause);
        }
    }
}
|
# Termux build recipe for ncmpcpp (NCurses Music Player Client Plus Plus).
TERMUX_PKG_HOMEPAGE=https://rybczak.net/ncmpcpp/
TERMUX_PKG_DESCRIPTION="NCurses Music Player Client (Plus Plus)"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_VERSION=0.8.2
TERMUX_PKG_REVISION=5
TERMUX_PKG_SHA256=650ba3e8089624b7ad9e4cc19bc1ac6028edb7523cc111fa1686ea44c0921554
TERMUX_PKG_SRCURL=https://rybczak.net/ncmpcpp/stable/ncmpcpp-${TERMUX_PKG_VERSION}.tar.bz2
TERMUX_PKG_DEPENDS="fftw, boost, readline, libcurl, libmpdclient, ncurses"
TERMUX_PKG_BUILD_IN_SRC=yes
TERMUX_PKG_KEEP_SHARE_DOC=yes
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="--enable-visualizer --enable-outputs --enable-clock"
# Generate the configure script, then force the wide-character ncurses API.
# NOTE(review): -U_XOPEN_SOURCE presumably avoids hiding wide-char prototypes
# in Termux's ncurses headers — confirm before changing.
termux_step_pre_configure() {
	./autogen.sh
	CXXFLAGS+=" -DNCURSES_WIDECHAR -U_XOPEN_SOURCE"
}
|
<filename>site/app/features/shared/components/notFound.js
import React from 'react';
import {compose} from '@truefit/bach';
import {withGATracker} from '../enhancers';
const NotFound = () => <div>Sorry I wasnt able to find the page</div>;
export default compose(withGATracker())(NotFound);
|
<gh_stars>0
package org.jooby.internal.couchbase;
import static org.junit.Assert.assertEquals;
import org.jooby.couchbase.GeneratedValue;
import org.junit.Test;
import com.couchbase.client.java.repository.annotation.Id;
/**
 * Unit tests for IdGenerator: reading, naming and generating entity ids via
 * reflection — a field literally named "id", a field annotated with
 * {@code @Id}, or a {@code @GeneratedValue} Long filled in on demand.
 */
public class IdGeneratorTest {
  // Fixture: id declared on a superclass, to prove inherited fields are found.
  public static class Base {
    String id;
  }
  public static class Sub extends Base {
  }
  @Test
  public void getId() {
    // Covers the (otherwise unused) default constructor.
    new IdGenerator();
    // A field named "id" is picked up directly...
    assertEquals("id1", IdGenerator.getId(new Object() {
      private String id = "id1";

      @Override
      public String toString() {
        return id;
      }
    }));
    // ...and any field annotated with @Id works regardless of its name.
    assertEquals(1L, IdGenerator.getId(new Object() {
      @Id
      private Long beerId = 1L;
    }));
  }
  @Test
  public void getIdName() {
    // The resolved name is the declared field name, not a fixed constant.
    assertEquals("id", IdGenerator.getIdName(new Object() {
      private String id = "id1";

      @Override
      public String toString() {
        return id;
      }
    }));
    assertEquals("beerId", IdGenerator.getIdName(new Object() {
      @Id
      private Long beerId = 1L;
    }));
  }
  @Test
  public void idFromSuper() {
    // Fields inherited from a superclass are also resolved.
    assertEquals("id", IdGenerator.getIdName(new Sub()));
  }
  @Test
  public void getOrGen() {
    // Without @GeneratedValue a null id stays null — the supplier is ignored.
    assertEquals(null, IdGenerator.getOrGenId(new Object() {
      private String id;

      @Override
      public String toString() {
        return id;
      }
    }, () -> 7L));
    // Existing values win over the generator...
    assertEquals("id1", IdGenerator.getOrGenId(new Object() {
      private String id = "id1";

      @Override
      public String toString() {
        return id;
      }
    }, () -> 7L));
    assertEquals(3L, IdGenerator.getOrGenId(new Object() {
      private Long id = 3L;

      @Override
      public String toString() {
        return id.toString();
      }
    }, () -> 7L));
    // ...but a null @GeneratedValue id is filled from the supplier.
    assertEquals(7L, IdGenerator.getOrGenId(new Object() {
      @GeneratedValue
      private Long id;

      @Override
      public String toString() {
        return id.toString();
      }
    }, () -> 7L));
  }
  @Test
  public void generatedValue() {
    // Generation mutates the entity: the generated id is visible afterwards.
    Object entity = new Object() {
      @GeneratedValue
      private Long id;

      @Override
      public String toString() {
        return id.toString();
      }
    };
    assertEquals(null, IdGenerator.getId(entity));
    assertEquals(7L, IdGenerator.getOrGenId(entity, () -> 7L));
    assertEquals(7L, IdGenerator.getId(entity));
  }
  @Test(expected = IllegalArgumentException.class)
  public void generatedValueMustBeLong() {
    // @GeneratedValue is only supported on Long fields.
    IdGenerator.getOrGenId(new Object() {
      @GeneratedValue
      private Integer id;

      @Override
      public String toString() {
        return id.toString();
      }
    }, () -> 7L);
  }
  @Test(expected = IllegalArgumentException.class)
  public void noId() {
    // An object with no id field at all is rejected.
    IdGenerator.getOrGenId(new Object(), () -> 7L);
  }
  @Test(expected = IllegalArgumentException.class)
  public void noId2() {
    // A field that is neither named "id" nor annotated doesn't count.
    IdGenerator.getOrGenId(new Object() {
      @SuppressWarnings("unused")
      String foo;
    }, () -> 7L);
  }
  @Test(expected = IllegalStateException.class)
  public void errorWhileGeneratingValue() {
    // Exceptions thrown by the id supplier propagate to the caller.
    IdGenerator.getOrGenId(new Object() {
      @GeneratedValue
      private Long id;

      @Override
      public String toString() {
        return id.toString();
      }
    }, () -> {
      throw new IllegalStateException("intentional errr");
    });
  }
}
|
#!/bin/bash
# Container entrypoint supervising three processes: PostgreSQL, the Spark
# Thrift server, and the Jupyter notebook server. Exits (taking the container
# down) if PostgreSQL or the notebook server disappears.

# Start PostgreSQL in the background as the postgres user
runuser -l postgres -c '/usr/lib/postgresql/9.3/bin/postgres -D /var/lib/postgresql/9.3/main -c config_file=/etc/postgresql/9.3/main/postgresql.conf' &
# NOTE: $? after `&` only reflects whether backgrounding itself failed.
status=$?
if [ $status -ne 0 ]; then
  echo "Failed to start postgres: $status"
  exit $status
fi

# Start the Spark Thrift server in the background
/usr/local/spark/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 &

# Start the Jupyter notebook server.
# BUG FIX: this previously ran in the foreground, so the supervision loop
# below never executed until the notebook had already exited. Background it
# so the loop actually monitors both services.
/usr/local/bin/start-notebook.sh &
status=$?
if [ $status -ne 0 ]; then
  echo "Failed to start the notebook server: $status"
  exit $status
fi

# Naive check runs checks once a minute to see if either of the processes exited.
# This illustrates part of the heavy lifting you need to do if you want to run
# more than one service in a container. The container exits with an error
# if it detects that either of the processes has exited.
# Otherwise it loops forever, waking up every 60 seconds
# BUG FIX: the greps searched for the template placeholders
# "my_first_process"/"my_second_process", which never match, so the container
# always exited after the first 60 seconds. Match the real process names.
while sleep 60; do
  ps aux |grep postgres |grep -q -v grep
  PROCESS_1_STATUS=$?
  ps aux |grep -E 'start-notebook|jupyter' |grep -q -v grep
  PROCESS_2_STATUS=$?
  # If the greps above find anything, they exit with 0 status
  # If they are not both 0, then something is wrong
  if [ $PROCESS_1_STATUS -ne 0 -o $PROCESS_2_STATUS -ne 0 ]; then
    echo "One of the processes has already exited."
    exit 1
  fi
done
|
<filename>src/main/java/org/wildfly/extension/elytron/KeyStoreAliasDefinition.java<gh_stars>0
/*
* JBoss, Home of Professional Open Source.
* Copyright 2015 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.extension.elytron;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.wildfly.extension.elytron.CertificateChainAttributeDefinitions.CERTIFICATE;
import static org.wildfly.extension.elytron.CertificateChainAttributeDefinitions.getNamedCertificateList;
import static org.wildfly.extension.elytron.CertificateChainAttributeDefinitions.writeCertificate;
import static org.wildfly.extension.elytron.CertificateChainAttributeDefinitions.writeCertificates;
import static org.wildfly.extension.elytron.ElytronExtension.ISO_8601_FORMAT;
import static org.wildfly.extension.elytron.ElytronExtension.getRequiredService;
import static org.wildfly.extension.elytron._private.ElytronSubsystemMessages.ROOT_LOGGER;
import java.security.KeyStore;
import java.security.KeyStore.PrivateKeyEntry;
import java.security.KeyStore.SecretKeyEntry;
import java.security.KeyStore.TrustedCertificateEntry;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.PathElement;
import org.jboss.as.controller.ResourceDefinition;
import org.jboss.as.controller.SimpleAttributeDefinition;
import org.jboss.as.controller.SimpleAttributeDefinitionBuilder;
import org.jboss.as.controller.SimpleResourceDefinition;
import org.jboss.as.controller.registry.ManagementResourceRegistration;
import org.jboss.as.controller.registry.OperationEntry;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.ModelType;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceName;
import org.wildfly.security.keystore.PasswordEntry;
/**
* A {@link ResourceDefinition} for an alias stored within a {@link KeyStore}.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
class KeyStoreAliasDefinition extends SimpleResourceDefinition {

    // Used to derive the ServiceName of the parent key-store service from an
    // operation's address so the runtime handlers below can reach the KeyStore.
    private final ServiceUtil<KeyStore> keyStoreServiceUtil;

    // Runtime-only (non-persistent) attribute exposing the creation date of the
    // alias entry, formatted with ISO_8601_FORMAT (presumably an ISO-8601
    // date pattern defined elsewhere in this package — confirm at the constant).
    static final SimpleAttributeDefinition CREATION_DATE = new SimpleAttributeDefinitionBuilder(ElytronDescriptionConstants.CREATION_DATE, ModelType.STRING)
            .setStorageRuntime()
            .build();

    // Runtime-only attribute reporting the concrete KeyStore entry type of the
    // alias; values are restricted to the four known entry classes plus "Other"
    // as the fallback for anything unrecognised.
    static final SimpleAttributeDefinition ENTRY_TYPE = new SimpleAttributeDefinitionBuilder(ElytronDescriptionConstants.ENTRY_TYPE, ModelType.STRING)
            .setStorageRuntime()
            .setAllowedValues(PasswordEntry.class.getSimpleName(), PrivateKeyEntry.class.getSimpleName(),
                    SecretKeyEntry.class.getSimpleName(), TrustedCertificateEntry.class.getSimpleName(), "Other")
            .build();

    /**
     * Construct the resource definition for a single {@code alias=*} child of a key-store resource.
     *
     * The resource is runtime-only: add requires no restart, while remove
     * restarts dependent resource services (see the {@link OperationEntry.Flag}
     * values passed to the {@code Parameters} below).
     *
     * @param keyStoreServiceUtil utility for resolving the parent key-store's
     *                            {@link ServiceName} from an operation address
     */
    KeyStoreAliasDefinition(final ServiceUtil<KeyStore> keyStoreServiceUtil) {
        super(new Parameters(PathElement.pathElement(ElytronDescriptionConstants.ALIAS), ElytronExtension.getResourceDescriptionResolver(ElytronDescriptionConstants.KEY_STORE, ElytronDescriptionConstants.ALIAS))
                .setRemoveHandler(new RemoveHandler(keyStoreServiceUtil))
                .setAddRestartLevel(OperationEntry.Flag.RESTART_NONE)
                .setRemoveRestartLevel(OperationEntry.Flag.RESTART_RESOURCE_SERVICES)
                .setRuntime());
        this.keyStoreServiceUtil = keyStoreServiceUtil;
    }

    /**
     * Register the read-only runtime attributes of an alias:
     * creation date, entry type, single certificate, and certificate chain.
     *
     * Each handler is best-effort: if the underlying {@link KeyStore} throws,
     * the failure is logged at trace level and the attribute is simply left
     * undefined rather than failing the whole read.
     */
    @Override
    public void registerAttributes(ManagementResourceRegistration resourceRegistration) {
        resourceRegistration.registerReadOnlyAttribute(CREATION_DATE, new KeyStoreRuntimeOnlyHandler(false, false, keyStoreServiceUtil) {

            @Override
            protected void performRuntime(ModelNode result, ModelNode operation, ModifiableKeyStoreService keyStoreService) throws OperationFailedException {
                // SimpleDateFormat is not thread-safe, so a fresh instance is
                // created per invocation instead of being shared.
                SimpleDateFormat sdf = new SimpleDateFormat(ISO_8601_FORMAT);
                String alias = alias(operation);
                Date creationDate;
                try {
                    creationDate = keyStoreService.getValue().getCreationDate(alias);
                } catch (KeyStoreException | RuntimeException e) {
                    // Best-effort: leave the attribute undefined on failure.
                    ROOT_LOGGER.tracef(e, "Unable to populate %s", CREATION_DATE);
                    return;
                }
                // getCreationDate returns null if the alias does not exist.
                if (creationDate != null) {
                    result.set(sdf.format(creationDate));
                }
            }
        });
        resourceRegistration.registerReadOnlyAttribute(ENTRY_TYPE, new KeyStoreRuntimeOnlyHandler(false, false, keyStoreServiceUtil) {

            @Override
            protected void performRuntime(ModelNode result, ModelNode operation, ModifiableKeyStoreService keyStoreService)
                    throws OperationFailedException {
                KeyStore keyStore = keyStoreService.getValue();
                String alias = alias(operation);
                try {
                    // Probe the known entry types in turn; "Other" covers
                    // anything that matches none of them.
                    if (keyStore.entryInstanceOf(alias, PrivateKeyEntry.class)) {
                        result.set(PrivateKeyEntry.class.getSimpleName());
                    } else if (keyStore.entryInstanceOf(alias, SecretKeyEntry.class)) {
                        result.set(SecretKeyEntry.class.getSimpleName());
                    } else if (keyStore.entryInstanceOf(alias, TrustedCertificateEntry.class)) {
                        result.set(TrustedCertificateEntry.class.getSimpleName());
                    } else if (keyStore.entryInstanceOf(alias, PasswordEntry.class)) {
                        result.set(PasswordEntry.class.getSimpleName());
                    } else {
                        result.set("Other");
                    }
                } catch (KeyStoreException | RuntimeException e) {
                    // Best-effort: leave the attribute undefined on failure.
                    ROOT_LOGGER.tracef(e, "Unable to populate %s", ENTRY_TYPE);
                    return;
                }
            }
        });
        resourceRegistration.registerReadOnlyAttribute(CERTIFICATE, new KeyStoreRuntimeOnlyHandler(false, false, keyStoreServiceUtil) {

            @Override
            protected void performRuntime(ModelNode result, ModelNode operation, ModifiableKeyStoreService keyStoreService) throws OperationFailedException {
                String alias = alias(operation);
                KeyStore keyStore = keyStoreService.getValue();
                // If we have a certificate chain don't waste time reporting what would just be the first cert in the chain.
                try {
                    if (keyStore.getCertificateChain(alias) == null) {
                        Certificate cert = keyStore.getCertificate(alias);
                        if (cert != null) {
                            writeCertificate(result, cert);
                        }
                    }
                } catch (KeyStoreException | NoSuchAlgorithmException| RuntimeException | CertificateEncodingException e) {
                    // Best-effort: leave the attribute undefined on failure.
                    ROOT_LOGGER.tracef(e, "Unable to populate %s", CERTIFICATE);
                    return;
                }
            }
        });
        resourceRegistration.registerReadOnlyAttribute(getNamedCertificateList(ElytronDescriptionConstants.CERTIFICATE_CHAIN), new KeyStoreRuntimeOnlyHandler(false, false, keyStoreServiceUtil) {

            @Override
            protected void performRuntime(ModelNode result, ModelNode operation, ModifiableKeyStoreService keyStoreService) throws OperationFailedException {
                String alias = alias(operation);
                KeyStore keyStore = keyStoreService.getValue();
                try {
                    Certificate[] chain = keyStore.getCertificateChain(alias);
                    if (chain != null) {
                        writeCertificates(result, chain);
                    }
                } catch (KeyStoreException | CertificateEncodingException | NoSuchAlgorithmException | RuntimeException e) {
                    // Best-effort: leave the attribute undefined on failure.
                    ROOT_LOGGER.tracef(e, "Unable to populate %s", ElytronDescriptionConstants.CERTIFICATE_CHAIN);
                    return;
                }
            }
        });
    }

    /**
     * Extract the alias name from the address of {@code operation}.
     *
     * Walks the address from the end towards (but not including) the root
     * element, returning the value of the first {@code alias=X} element found.
     *
     * @param operation the operation whose {@code OP_ADDR} is searched
     * @return the alias value from the address
     * @throws RuntimeException (via {@code ROOT_LOGGER.operationAddressMissingKey})
     *         if the address contains no {@code alias} element
     */
    static String alias(ModelNode operation) {
        String aliasName = null;
        PathAddress pa = PathAddress.pathAddress(operation.require(OP_ADDR));
        // Search back-to-front; index 0 is the subsystem element and is skipped.
        for (int i = pa.size() - 1; i > 0; i--) {
            PathElement pe = pa.getElement(i);
            if (ElytronDescriptionConstants.ALIAS.equals(pe.getKey())) {
                aliasName = pe.getValue();
                break;
            }
        }

        if (aliasName == null) {
            throw ROOT_LOGGER.operationAddressMissingKey(ElytronDescriptionConstants.ALIAS);
        }

        return aliasName;
    }

    /**
     * Base handler for runtime operations against the parent key-store service.
     *
     * Resolves the key-store {@link ServiceController} from the operation
     * address and checks its state before delegating to
     * {@code performRuntime(...)}. Subclasses override one of the two
     * {@code performRuntime} variants.
     */
    abstract static class KeyStoreRuntimeOnlyHandler extends ElytronRuntimeOnlyHandler {

        // When true, a service that is not UP causes the operation to fail;
        // when false, the handler silently returns (attribute stays undefined).
        private final boolean serviceMustBeUp;
        // When true, the service registry is obtained with write access
        // (needed by mutating handlers such as RemoveHandler).
        private final boolean writeAccess;
        private final ServiceUtil<KeyStore> keyStoreServiceUtil;

        KeyStoreRuntimeOnlyHandler(final boolean serviceMustBeUp, final boolean writeAccess, final ServiceUtil<KeyStore> keyStoreServiceUtil) {
            this.serviceMustBeUp = serviceMustBeUp;
            this.writeAccess = writeAccess;
            this.keyStoreServiceUtil = keyStoreServiceUtil;
        }

        @Override
        protected void executeRuntimeStep(OperationContext context, ModelNode operation) throws OperationFailedException {
            ServiceName serviceName = keyStoreServiceUtil.serviceName(operation);

            ServiceController<KeyStore> serviceContainer = getRequiredService(context.getServiceRegistry(writeAccess), serviceName, KeyStore.class);
            ServiceController.State serviceState;
            if ((serviceState = serviceContainer.getState()) != ServiceController.State.UP) {
                if (serviceMustBeUp) {
                    throw ROOT_LOGGER.requiredServiceNotUp(serviceName, serviceState);
                }
                // Service not up but not required: skip quietly.
                return;
            }

            performRuntime(context.getResult(), context, operation, (ModifiableKeyStoreService) serviceContainer.getService());
        }

        // Simple variant for handlers that do not need the OperationContext;
        // default implementation is a no-op.
        protected void performRuntime(ModelNode result, ModelNode operation, ModifiableKeyStoreService keyStoreService) throws OperationFailedException {}

        // Full variant; the default delegates to the simple variant above.
        protected void performRuntime(ModelNode result, OperationContext context, ModelNode operation,  ModifiableKeyStoreService keyStoreService) throws OperationFailedException {
            performRuntime(result, operation, keyStoreService);
        }

    }

    /**
     * Handler for the resource's remove operation: deletes the alias entry
     * from the modifiable key store. Requires the key-store service to be UP
     * and the registry to be obtained with write access.
     */
    private static class RemoveHandler extends KeyStoreRuntimeOnlyHandler {

        RemoveHandler(final ServiceUtil<KeyStore> keyStoreServiceUtil) {
            // serviceMustBeUp = true, writeAccess = true: removal mutates the store.
            super(true, true, keyStoreServiceUtil);
        }

        @Override
        protected void performRuntime(ModelNode result, ModelNode operation, ModifiableKeyStoreService keyStoreService) throws OperationFailedException {
            String alias = alias(operation);

            KeyStore keyStore = keyStoreService.getModifiableValue();
            try {
                keyStore.deleteEntry(alias);
            } catch (KeyStoreException e) {
                // Unlike the read-only attribute handlers, a failed delete is a
                // real operation failure and is surfaced to the caller.
                throw new OperationFailedException(e);
            }
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.