text stringlengths 1 1.05M |
|---|
package org.hiro.things.scrolltype;
import org.hiro.IOUtil;
import org.hiro.Misc;
import org.hiro.character.Player;
import org.hiro.things.Scroll;
public class WakeUpMonster extends Scroll {

    /** Audible cue reported to the hero after the scroll is read. */
    private static final String HUM_MESSAGE = "you hear a high pitched humming noise";

    public WakeUpMonster() {
        super();
    }

    /**
     * Aggravates every monster on the current level, setting them running
     * towards the hero, then reports the humming-noise message.
     *
     * @param player the hero reading the scroll (not used directly here)
     */
    @Override
    public void read(Player player) {
        Misc.aggravate();
        IOUtil.msg(HUM_MESSAGE);
    }
}
|
#!/bin/bash
# TODO: find out why we are using the if/else and if it's still needed for kubernetes
# If the route already exists, patch its labels/annotations/spec in place;
# otherwise render the route template and pipe it to outputToYaml.
# Fix: "${OPENSHIFT_PROJECT}" is now quoted everywhere (previously unquoted
# on the get/patch calls and on the -p OPENSHIFT_PROJECT line, inconsistent
# with the other parameters and subject to word splitting).
if oc --insecure-skip-tls-verify -n "${OPENSHIFT_PROJECT}" get route "$ROUTE_DOMAIN" &> /dev/null; then
  # Update the dioscuri migrate label, the tls-acme/HSTS annotations, the
  # target service and the insecure-edge-termination policy.
  oc --insecure-skip-tls-verify -n "${OPENSHIFT_PROJECT}" patch route "$ROUTE_DOMAIN" -p "{\"metadata\":{\"labels\":{\"dioscuri.amazee.io/migrate\": \"${ROUTE_MIGRATE}\"},\"annotations\":{\"kubernetes.io/tls-acme\":\"${ROUTE_TLS_ACME}\",\"haproxy.router.openshift.io/hsts_header\":\"${ROUTE_HSTS}\"}},\"spec\":{\"to\":{\"name\":\"${ROUTE_SERVICE}\"},\"tls\":{\"insecureEdgeTerminationPolicy\":\"${ROUTE_INSECURE}\"}}}"
else
  oc process --local -o yaml --insecure-skip-tls-verify \
    -n "${OPENSHIFT_PROJECT}" \
    -f /oc-build-deploy/openshift-templates/route.yml \
    -p SAFE_BRANCH="${SAFE_BRANCH}" \
    -p SAFE_PROJECT="${SAFE_PROJECT}" \
    -p BRANCH="${BRANCH}" \
    -p PROJECT="${PROJECT}" \
    -p LAGOON_GIT_SHA="${LAGOON_GIT_SHA}" \
    -p OPENSHIFT_PROJECT="${OPENSHIFT_PROJECT}" \
    -p ROUTE_DOMAIN="${ROUTE_DOMAIN}" \
    -p ROUTE_SERVICE="${ROUTE_SERVICE}" \
    -p ROUTE_TLS_ACME="${ROUTE_TLS_ACME}" \
    -p ROUTE_INSECURE="${ROUTE_INSECURE}" \
    -p ROUTE_HSTS="${ROUTE_HSTS}" \
    -p ROUTE_MIGRATE="${ROUTE_MIGRATE}" \
    | outputToYaml
fi
|
package io.chronetic.data.measure;
import org.jetbrains.annotations.NotNull;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;
import static java.util.Objects.requireNonNull;
/**
* Holds the minimum/maximum scale ChronoScaleUnits can take as well
* as the current enabled and disabled units.
*
* @version 1.0
* @since 1.0
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class ChronoScale {

    /** Registry of scale units, keyed by their own ChronoUnit. */
    private final Map<ChronoUnit, ChronoScaleUnit> chronoScaleMap;

    public ChronoScale() {
        chronoScaleMap = new EnumMap<>(ChronoUnit.class);
    }

    /**
     * Returns random enabled ChronoScaleUnit.
     *
     * @param random Random to use
     * @return random enabled ChronoScaleUnit
     * @throws IllegalStateException if no enabled unit exists
     */
    @NotNull
    public ChronoScaleUnit getRandomEnabledChronoScaleUnit(@NotNull Random random) {
        // Delegate to the validating accessor instead of duplicating the
        // same stream pipeline and emptiness check.
        List<ChronoScaleUnit> enabledUnits = getEnabledChronoScaleUnits();
        return enabledUnits.get(requireNonNull(random).nextInt(enabledUnits.size()));
    }

    /**
     * Returns all enabled ChronoScaleUnit(s).
     *
     * @return all enabled ChronoScaleUnit(s)
     * @throws IllegalStateException if no enabled unit exists
     */
    @NotNull
    public List<ChronoScaleUnit> getEnabledChronoScaleUnits() {
        List<ChronoScaleUnit> enabledUnits = chronoScaleMap.values().stream()
                .filter(chronoScaleUnit -> !chronoScaleUnit.isDisabled())
                .collect(Collectors.toList());
        if (enabledUnits.isEmpty()) {
            throw new IllegalStateException("Couldn't find any enabled chrono scale units");
        }
        return enabledUnits;
    }

    /**
     * Returns the ChronoScaleUnit for the given ChronoUnit.
     *
     * @param chronoUnit desired ChronoUnit
     * @return ChronoScaleUnit for given ChronoUnit
     * @throws IllegalStateException if no unit is registered for chronoUnit
     */
    @NotNull
    public ChronoScaleUnit getChronoScaleUnit(@NotNull ChronoUnit chronoUnit) {
        // The map is keyed by ChronoUnit (see updateChronoScaleUnit), so a
        // direct O(1) lookup replaces the previous O(n) stream scan that
        // filtered every value by its getChronoUnit().
        ChronoScaleUnit unit = chronoScaleMap.get(requireNonNull(chronoUnit));
        if (unit == null) {
            throw new IllegalStateException("Couldn't find any chrono scale units");
        }
        return unit;
    }

    /**
     * Returns the parent ChronoScaleUnit limit based on the given Duration:
     * walking from the coarsest unit down, the first unit the duration does
     * not reach (count between now and now+duration is 0) is returned.
     *
     * @param duration desired Duration
     * @return parent limit ChronoScaleUnit
     */
    @NotNull
    public ChronoScaleUnit getParentChronoScaleUnitLimit(@NotNull Duration duration) {
        // Fix: capture the reference time once. The original called
        // LocalDateTime.now() twice per unit, so every comparison used a
        // slightly different base instant.
        final LocalDateTime now = LocalDateTime.now();
        final LocalDateTime end = now.plus(requireNonNull(duration));
        // Same cascade order as before (WEEKS intentionally absent, as in
        // the original).
        // NOTE(review): for any duration of a decade or more every count
        // below is non-zero, so this falls all the way through to NANOS.
        // That behavior is preserved exactly, but it looks questionable —
        // confirm the intended semantics with callers.
        final ChronoUnit[] cascade = {
                ChronoUnit.DECADES, ChronoUnit.YEARS, ChronoUnit.MONTHS,
                ChronoUnit.DAYS, ChronoUnit.HOURS, ChronoUnit.MINUTES,
                ChronoUnit.SECONDS, ChronoUnit.MILLIS, ChronoUnit.MICROS
        };
        for (ChronoUnit unit : cascade) {
            if (unit.between(now, end) == 0) {
                return getChronoScaleUnit(unit);
            }
        }
        return getChronoScaleUnit(ChronoUnit.NANOS);
    }

    /**
     * Returns the parent ChronoScaleUnit for the given ChronoUnit.
     *
     * @param chronoUnit desired ChronoUnit
     * @return parent ChronoScaleUnit of given ChronoUnit
     */
    @NotNull
    public ChronoScaleUnit getParentChronoScaleUnit(@NotNull ChronoUnit chronoUnit) {
        return getChronoScaleUnit(getParentChronoUnit(requireNonNull(chronoUnit)));
    }

    /**
     * Returns the child ChronoScaleUnit for the given ChronoUnit.
     * If there is no child ChronoScaleUnit, empty optional is returned.
     *
     * @param chronoUnit desired ChronoUnit
     * @return the child ChronoScaleUnit, if present
     */
    @NotNull
    public Optional<ChronoScaleUnit> getChildScaleUnit(@NotNull ChronoUnit chronoUnit) {
        final ChronoUnit childChronoUnit;
        try {
            childChronoUnit = getChildChronoUnit(chronoUnit);
        } catch (Exception ex) {
            // getChildChronoUnit throws for units with no child; translate
            // that into an empty Optional for callers that probe.
            return Optional.empty();
        }
        return Optional.of(getChronoScaleUnit(childChronoUnit));
    }

    /**
     * Returns the parent ChronoUnit for the given ChronoUnit.
     * All sub-second units (NANOS/MICROS/MILLIS) share SECONDS as parent.
     *
     * @param chronoUnit desired ChronoUnit
     * @return parent ChronoUnit of the given ChronoUnit
     * @throws UnsupportedOperationException for units without a defined parent
     */
    @NotNull
    public static ChronoUnit getParentChronoUnit(@NotNull ChronoUnit chronoUnit) {
        switch (requireNonNull(chronoUnit)) {
            case NANOS:
            case MICROS:
            case MILLIS:
                return ChronoUnit.SECONDS;
            case SECONDS:
                return ChronoUnit.MINUTES;
            case MINUTES:
                return ChronoUnit.HOURS;
            case HOURS:
                return ChronoUnit.DAYS;
            case DAYS:
                return ChronoUnit.WEEKS;
            case WEEKS:
                return ChronoUnit.MONTHS;
            case MONTHS:
                return ChronoUnit.YEARS;
            case YEARS:
                return ChronoUnit.DECADES;
            case DECADES:
                return ChronoUnit.CENTURIES;
            case CENTURIES:
                return ChronoUnit.ERAS;
            default:
                throw new UnsupportedOperationException("Unsupported chrono unit: " + chronoUnit);
        }
    }

    /**
     * Returns the child ChronoUnit for the given ChronoUnit.
     *
     * NOTE(review): this mapping is not the exact inverse of
     * {@link #getParentChronoUnit} — here ERAS -&gt; MILLENNIA -&gt; DECADES,
     * while the parent chain runs DECADES -&gt; CENTURIES -&gt; ERAS. Preserved
     * as-is; confirm whether the asymmetry is intentional.
     *
     * @param chronoUnit desired ChronoUnit
     * @return child ChronoUnit of the given ChronoUnit
     * @throws UnsupportedOperationException for units without a defined child
     */
    @NotNull
    public static ChronoUnit getChildChronoUnit(@NotNull ChronoUnit chronoUnit) {
        switch (requireNonNull(chronoUnit)) {
            case MICROS:
                return ChronoUnit.NANOS;
            case MILLIS:
                return ChronoUnit.MICROS;
            case SECONDS:
                return ChronoUnit.MILLIS;
            case MINUTES:
                return ChronoUnit.SECONDS;
            case HOURS:
                return ChronoUnit.MINUTES;
            case DAYS:
                return ChronoUnit.HOURS;
            case WEEKS:
                return ChronoUnit.DAYS;
            case MONTHS:
                return ChronoUnit.WEEKS;
            case YEARS:
                return ChronoUnit.MONTHS;
            case DECADES:
                return ChronoUnit.YEARS;
            case MILLENNIA:
                return ChronoUnit.DECADES;
            case ERAS:
                return ChronoUnit.MILLENNIA;
            default:
                throw new UnsupportedOperationException("Unsupported chrono unit: " + chronoUnit);
        }
    }

    /**
     * Updates the current stored ChronoScaleUnit with the given ChronoScaleUnit.
     *
     * @param chronoScaleUnit updated ChronoScaleUnit
     */
    public void updateChronoScaleUnit(@NotNull ChronoScaleUnit chronoScaleUnit) {
        chronoScaleMap.put(requireNonNull(chronoScaleUnit).getChronoUnit(), chronoScaleUnit);
    }

    /**
     * Returns the actual minimum value for the given ChronoUnit.
     *
     * @param chronoUnit desired ChronoUnit
     * @return actual minimum value for the given ChronoUnit
     * @throws IllegalArgumentException if no unit is registered for chronoUnit
     */
    public long getActualMinimum(@NotNull ChronoUnit chronoUnit) {
        if (!chronoScaleMap.containsKey(requireNonNull(chronoUnit))) {
            throw new IllegalArgumentException("Missing chrono unit: " + chronoUnit);
        }
        return chronoScaleMap.get(chronoUnit).getActualMinimum();
    }

    /**
     * Returns the actual maximum value for the given ChronoUnit.
     *
     * @param chronoUnit desired ChronoUnit
     * @return actual maximum value for the given ChronoUnit
     * @throws IllegalArgumentException if no unit is registered for chronoUnit
     */
    public long getActualMaximum(@NotNull ChronoUnit chronoUnit) {
        if (!chronoScaleMap.containsKey(requireNonNull(chronoUnit))) {
            throw new IllegalArgumentException("Missing chrono unit: " + chronoUnit);
        }
        return chronoScaleMap.get(chronoUnit).getActualMaximum();
    }

    /**
     * Returns the factual minimum value for the given ChronoUnit
     * (always 0 for every supported unit).
     *
     * @param chronoUnit desired ChronoUnit
     * @return factual minimum value for the given ChronoUnit
     * @throws UnsupportedOperationException for unsupported units
     */
    public static long getFactualMinimum(@NotNull ChronoUnit chronoUnit) {
        switch (requireNonNull(chronoUnit)) {
            case NANOS:
            case MICROS:
            case MILLIS:
            case SECONDS:
            case MINUTES:
            case HOURS:
            case DAYS:
            case WEEKS:
            case MONTHS:
            case YEARS:
            case DECADES:
            case CENTURIES:
                return 0;
            default:
                throw new UnsupportedOperationException("Unsupported chrono unit: " + chronoUnit);
        }
    }

    /**
     * Returns the factual maximum value for the given ChronoUnit,
     * i.e. the count of this unit contained in its parent unit
     * (e.g. 60 seconds per minute, 24 hours per day, 7 days per week).
     * SECONDS intentionally falls through to the MINUTES value (60).
     *
     * @param chronoUnit desired ChronoUnit
     * @return factual maximum value for the given ChronoUnit
     * @throws UnsupportedOperationException for unsupported units
     */
    public static long getFactualMaximum(@NotNull ChronoUnit chronoUnit) {
        switch (requireNonNull(chronoUnit)) {
            case NANOS:
                return 1_000_000_000;
            case MICROS:
                return 1_000_000;
            case MILLIS:
                return 1000;
            case SECONDS:
            case MINUTES:
                return 60;
            case HOURS:
                return 24;
            case DAYS:
                return 7;
            case WEEKS:
                return 6;
            case MONTHS:
                return 12;
            case YEARS:
                return 4000; //someone else should probably take over from here
            case DECADES:
            case CENTURIES:
                return 10;
            default:
                throw new UnsupportedOperationException("Unsupported chrono unit: " + chronoUnit);
        }
    }

    /**
     * Returns the equivalent ChronoField for the given ChronoUnit.
     *
     * @param chronoUnit desired ChronoUnit
     * @return equivalent ChronoField of the given ChronoUnit
     * @throws UnsupportedOperationException for units with no field mapping
     */
    @NotNull
    public static ChronoField getChronoField(@NotNull ChronoUnit chronoUnit) {
        switch (requireNonNull(chronoUnit)) {
            case NANOS:
                return ChronoField.NANO_OF_SECOND;
            case MICROS:
                return ChronoField.MICRO_OF_SECOND;
            case MILLIS:
                return ChronoField.MILLI_OF_SECOND;
            case SECONDS:
                return ChronoField.SECOND_OF_MINUTE;
            case MINUTES:
                return ChronoField.MINUTE_OF_HOUR;
            case HOURS:
                return ChronoField.HOUR_OF_DAY;
            case DAYS:
                return ChronoField.DAY_OF_WEEK;
            case WEEKS:
                return ChronoField.ALIGNED_WEEK_OF_MONTH;
            case MONTHS:
                return ChronoField.MONTH_OF_YEAR;
            case YEARS:
                return ChronoField.YEAR_OF_ERA;
            default:
                throw new UnsupportedOperationException("Unsupported chrono unit: " + chronoUnit);
        }
    }
}
|
# Specs for FaHarnessTools::CheckSchedule#verify?.
# Each context freezes the wall clock (UTC) with Timecop and asserts the
# [allowed, reason] pair returned at that moment.
# Dates used: 2019-10-28 is a Monday, 2019-11-01 a Friday, 2019-11-02 a Saturday.
describe FaHarnessTools::CheckSchedule do
  describe "#verify?" do
    # Always restore real time so a frozen clock never leaks into other examples.
    after do
      Timecop.return
    end
    context "before 9am on Monday" do
      before do
        # NOTE: 07 is an octal integer literal in Ruby (value 7); harmless here.
        Timecop.freeze(Time.utc(2019, 10, 28, 07, 0))
      end
      it "returns false outside deployment window" do
        expect(subject.verify?).to eq([false, "outside deployment schedule"])
      end
    end
    context "at 9am on Monday" do
      before do
        Timecop.freeze(Time.utc(2019, 10, 28, 9, 0))
      end
      it "returns true inside deployment window" do
        expect(subject.verify?).to eq([true, "scheduled deploy time"])
      end
    end
    context "at 4pm on Monday" do
      before do
        # After the deployment window closes for the day.
        Timecop.freeze(Time.utc(2019, 10, 28, 16, 0))
      end
      it "returns false outside deployment window" do
        expect(subject.verify?).to eq([false, "outside deployment schedule"])
      end
    end
    context "at 9am on Friday" do
      before do
        Timecop.freeze(Time.utc(2019, 11, 1, 9, 0))
      end
      it "returns true inside deployment window" do
        expect(subject.verify?).to eq([true, "scheduled deploy time"])
      end
    end
    context "at 12pm on Friday" do
      before do
        # Fridays appear to have a shorter window than Mondays.
        Timecop.freeze(Time.utc(2019, 11, 1, 12, 0))
      end
      it "returns false outside deployment window" do
        expect(subject.verify?).to eq([false, "outside deployment schedule"])
      end
    end
    context "at 9am on Saturday" do
      before do
        # Weekends are never inside the deployment window.
        Timecop.freeze(Time.utc(2019, 11, 2, 9, 0))
      end
      it "returns false outside deployment window" do
        expect(subject.verify?).to eq([false, "outside deployment schedule"])
      end
    end
    context "at 9am in summer time" do
      before do
        # 08:00 UTC == 09:00 local during summer time, so the schedule is
        # presumably defined in local time — TODO confirm in CheckSchedule.
        Timecop.freeze(Time.utc(2019, 8, 5, 8, 0))
      end
      it "returns true inside deployment window" do
        expect(subject.verify?).to eq([true, "scheduled deploy time"])
      end
    end
  end
end
|
package sword.langbook3.android.db;
import android.os.Parcel;
import sword.collections.ImmutableMap;
import sword.collections.MutableHashMap;
import sword.collections.MutableMap;
import sword.langbook3.android.models.Conversion;
/**
 * Serializes {@link Conversion} objects to and from Android {@link Parcel}s.
 * Wire layout: source alphabet, target alphabet, pair count, then each
 * source/target string pair in order. Non-instantiable utility class.
 */
public final class ConversionParceler {

    /**
     * Reads a Conversion previously written by {@link #write}.
     * Must consume the parcel in exactly the order write() produced it.
     *
     * @param in parcel positioned at the start of a serialized conversion
     * @return the reconstructed Conversion
     */
    public static Conversion<AlphabetId> read(Parcel in) {
        final AlphabetId sourceAlphabet = AlphabetIdParceler.read(in);
        final AlphabetId targetAlphabet = AlphabetIdParceler.read(in);
        final int mapSize = in.readInt();
        final MutableMap<String, String> map = MutableHashMap.empty();
        for (int i = 0; i < mapSize; i++) {
            final String source = in.readString();
            final String target = in.readString();
            map.put(source, target);
        }
        return new Conversion<>(sourceAlphabet, targetAlphabet, map);
    }

    /**
     * Writes the conversion into the parcel: alphabets, pair count, then
     * each key/value pair by index (sword collections expose positional
     * keyAt/valueAt access).
     *
     * @param out destination parcel
     * @param conversion conversion to serialize
     */
    public static void write(Parcel out, Conversion<AlphabetId> conversion) {
        AlphabetIdParceler.write(out, conversion.getSourceAlphabet());
        AlphabetIdParceler.write(out, conversion.getTargetAlphabet());
        final ImmutableMap<String, String> map = conversion.getMap();
        final int mapSize = map.size();
        out.writeInt(mapSize);
        for (int i = 0; i < mapSize; i++) {
            out.writeString(map.keyAt(i));
            out.writeString(map.valueAt(i));
        }
    }

    // Utility class: prevent instantiation.
    private ConversionParceler() {
    }
}
|
/*
* Copyright 2015 lixiaobo
*
* VersionUpgrade project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.cats.ui.optiondlg;
import java.awt.BorderLayout;
import java.awt.Cursor;
import java.awt.FlowLayout;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.WindowEvent;
import java.awt.event.WindowFocusListener;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JList;
import com.cats.ui.custome.JBgColorButton;
import com.cats.utils.SwingUtil;
/**
* @author xblia 2015年10月28日
*/
/**
 * Undecorated, always-on-top popup that lets the user pick one option
 * (e.g. Pass/Fail/Block) from a list. Non-"pass" selections open an
 * InputDialog to capture a fail reason. The chosen value and reason are
 * delivered asynchronously through the {@link ResultCallback}, because
 * this dialog is NOT modal (see note in {@link #show}).
 */
public class AlertDialogForOptionSelection extends JDialog implements JDialogParent,
        MouseListener, WindowFocusListener
{
    private static final long serialVersionUID = 1L;
    // Fixed popup dimensions, set in the constructor.
    private int width;
    private int height;
    // The selectable option labels shown in the list.
    private String[] message;
    // Currently selected option value (starts at the caller's default).
    private String value;
    // Fail reason entered via the InputDialog (starts at the caller's default).
    private String failReason;
    private JList<String> resultList;
    private InputDialog inputDialog;
    private ResultCallback resultCallback;
    // True while the pointer is over the list; used to ignore focus-loss
    // closes that were triggered by clicking back into this dialog.
    private boolean isMouseOver = false;

    /**
     * Builds the popup. Pass x == -1 or y == -1 to center it on screen,
     * otherwise it is placed at (x, y).
     */
    private AlertDialogForOptionSelection(ResultCallback resultCallback, String defaultVal, String defaultFailReason, String[] message, int x, int y)
    {
        this.resultCallback = resultCallback;
        this.value = defaultVal;
        this.failReason = defaultFailReason;
        this.message = message;
        this.width = 150;
        this.height = 90;
        //this.setModal(true);
        this.setUndecorated(true);
        this.setAlwaysOnTop(true);
        this.setCursor(new Cursor(Cursor.HAND_CURSOR));
        this.setSize(width, height);
        if(-1 == x || -1 == y)
        {
            SwingUtil.centerWindow(width, height, this);
        }else
        {
            this.setLocation(x, y);
        }
        initView();
    }

    /** Builds the option list, preselects the default value, and wires listeners. */
    private void initView()
    {
        this.resultList = new JList<String>(this.message);
        this.resultList.setCellRenderer(new OptionListCellRenderer());
        this.setLayout(new BorderLayout());
        this.add(resultList);
        if(null != value) resultList.setSelectedValue(this.value, false);
        this.resultList.addMouseListener(this);
        this.addWindowFocusListener(this);
    }

    /**
     * Shows the popup and returns immediately.
     *
     * @return the default {value, failReason} pair — NOT the user's choice;
     *         the real result arrives later via the ResultCallback.
     */
    public static String[] show(ResultCallback resultCallback, String[] message, String defaultSelectitem, String failReason, int x, int y)
    {
        AlertDialogForOptionSelection dlg = new AlertDialogForOptionSelection(resultCallback, defaultSelectitem, failReason,
                message, x, y);
        dlg.setVisible(true);
        return new String[]{dlg.value, dlg.failReason};//This Result value invalid, because this is not modal dialog.
    }

    /** Manual demo: a frame with an "Open" button that launches the popup. */
    public static void main(String[] args)
    {
        JFrame frame = new JFrame();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setSize(500, 500);
        SwingUtil.centerWindow(500, 500, frame);
        frame.setLayout(new FlowLayout());
        JBgColorButton btn = new JBgColorButton("Open");
        frame.add(btn);
        frame.setVisible(true);
        btn.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent e)
            {
                AlertDialogForOptionSelection.show(new ResultCallback()
                {
                    @Override
                    public void onResult(String result, String lParam, String wParam)
                    {
                        System.err.println("OnResult: " + result + " " +lParam + " " + wParam);
                    }
                }, new String[]{ "Pass", "Fail", "Block" }, null, null, -1, -1);
            }
        });
    }

    @Override
    public void mouseClicked(MouseEvent arg0)
    {
    }

    @Override
    public void mouseEntered(MouseEvent arg0)
    {
        isMouseOver = true;
    }

    @Override
    public void mouseExited(MouseEvent arg0)
    {
        isMouseOver = false;
    }

    @Override
    public void mousePressed(MouseEvent arg0)
    {
    }

    /**
     * Commits the list selection on mouse release. Changing away from the
     * previous value clears the stale fail reason. A non-"pass" choice
     * opens an InputDialog at the popup's right edge to collect a reason;
     * "pass" closes the popup immediately.
     */
    @Override
    public void mouseReleased(MouseEvent arg0)
    {
        String value = this.resultList.getSelectedValue();
        if(value != null && this.value != null && !this.value.equals(value))
        {
            this.failReason = "";
        }
        if (null != value)
        {
            this.value = value;
            if(null != inputDialog)
            {
                inputDialog.dispose();
            }
            if(!value.toLowerCase().equals("pass"))
            {
                Point point = new Point();
                point.x = (int)(this.getLocation().getX() + this.getWidth());
                point.y = (int)MouseInfo.getPointerInfo().getLocation().getY();
                inputDialog = new InputDialog(this, point, failReason);
                inputDialog.setVisible(true);
            }else
            {
                this.dispose();
            }
        }
    }

    /**
     * JDialogParent callback from the child InputDialog. wParam carries the
     * entered fail reason. Ignores focus-loss closes that happen while the
     * pointer is over this popup (the user is clicking back into the list);
     * otherwise stores the reason, closes, and fires the result callback.
     * NOTE(review): result and lParam are intentionally unused here — confirm
     * against the JDialogParent contract.
     */
    @Override
    public void dispose(String result, String wParam, String lParam, CLOSE_TYPE closeType)
    {
        if(closeType == CLOSE_TYPE.BY_LOSTFOCUS && isMouseOver)
        {
            return;
        }
        this.failReason = wParam;
        this.dispose();
        this.resultCallBack();
    }

    @Override
    public void windowGainedFocus(WindowEvent e)
    {
    }

    /**
     * Closes the popup when it loses focus, unless focus moved to the still
     * visible InputDialog (in which case the popup stays open).
     */
    @Override
    public void windowLostFocus(WindowEvent e)
    {
        if(null != inputDialog)
        {
            if(inputDialog.isVisible())
            {
                return;
            }
        }
        this.dispose();
        resultCallBack();
    }

    /** Delivers the final (value, failReason) pair to the caller, if any. */
    private void resultCallBack()
    {
        if(null != resultCallback)
        {
            resultCallback.onResult(value, failReason, null);
        }
    }
}
|
-- Optimize the program by applying indexes to columns used in WHERE and ORDER BY clauses.
-- Fix: "order" is a reserved word in SQL, so the table name must be quoted
-- (standard double quotes; use backticks on MySQL or brackets on SQL Server).
CREATE INDEX discount_index
ON "order" (discount);
-- Optimize the program by changing the order of operations.
SELECT item, price, quantity
FROM "order"
WHERE discount > 0
ORDER BY discount DESC
FETCH FIRST 10 ROWS ONLY;
<filename>src/reduxUtils/modules/footer.js<gh_stars>0
/* eslint-disable import/prefer-default-export */
import reducerRegistry from '../../reduxUtils/reducerRegistry';
import makeRequest from '../../utils/makeRequest';
const reducerName = 'footer';
// Namespaces action types as "toi/footer/<NAME>" so they cannot collide with
// actions from other reducers registered in the same store.
const createActionName = name => `toi/${reducerName}/${name}`;
// actions
export const RECEIVE_DATA = createActionName('RECEIVE_DATA');
export const HANDLE_DATA_ERROR = createActionName('HANDLE_DATA_ERROR');
// Action creator: wraps a successfully fetched footer payload for the reducer.
function receiveFooterData(payload) {
  return { type: RECEIVE_DATA, payload };
}
// Action creator: wraps a fetch failure for the reducer.
function handleFooterError(error) {
  return { type: HANDLE_DATA_ERROR, error };
}
// Builds the footer feed URL from the view flags and issues the GET request.
// msid is only appended for plain web views (neither wap nor app).
function fetchFooterData(params) {
  const { isWapView, isAppView, msid, fullPath } = params;
  const wapParameter = isWapView ? ',frmwap-yes' : '';
  const appParameter = isAppView ? ',frmapp-yes' : '';
  const msidParameter = !isWapView && !isAppView && msid ? `,msid-${msid}` : '';
  const encodedPath = encodeURIComponent(fullPath);
  const url = `/feeds/feed_footer/feedtype-json${wapParameter}${appParameter}${msidParameter}.cms?path=${encodedPath}`;
  return makeRequest.get(url);
}
// Thunk: fetches the footer feed and dispatches success or error actions.
export function loadFooterData(params) {
  return function (dispatch) {
    return fetchFooterData(params)
      .then(response => dispatch(receiveFooterData(response.data)))
      .catch(err => dispatch(handleFooterError(err)));
  };
}
/**
 * Footer reducer. Stores the fetched payload under `data` and, on failure,
 * the error under `error`.
 */
export default function reducer(state = {}, action) {
  switch (action.type) {
    case RECEIVE_DATA:
      return {
        ...state,
        data: action.payload,
      };
    case HANDLE_DATA_ERROR:
      // Fix: HANDLE_DATA_ERROR is dispatched by loadFooterData but was
      // previously ignored here, so fetch failures were silently dropped.
      return {
        ...state,
        error: action.error,
      };
    default:
      return state;
  }
}
reducerRegistry.register(reducerName, reducer);
|
<filename>src/components/Footer/styles.js
import styled from "styled-components";
// Full-width footer wrapper; colors come from the app-level CSS custom
// properties (--black / --white).
export const Container = styled.section`
background-color: var(--black);
color: var(--white);
`;
// Centered footer column, capped at 1450px. Inline links and spans inside
// paragraphs are muted grey; links highlight to the primary color on hover.
export const FooterContent = styled.div`
padding: 6vh 4vw;
display: flex;
flex-direction: column;
align-items: center;
max-width: 1450px;
margin: 0 auto;
p {
span {
color: #747474;
}
a {
color: #747474;
transition: 0.2s all ease-in-out;
&:hover {
color: var(--primary);
}
}
}
`;
|
TERMUX_PKG_HOMEPAGE=https://rustscan.github.io/RustScan
TERMUX_PKG_DESCRIPTION="The modern,fast,smart and effective port scanner"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="Krishna Kanhaiya @kcubeterm"
TERMUX_PKG_VERSION=2.0.1
TERMUX_PKG_DEPENDS="nmap"
TERMUX_PKG_SRCURL=https://github.com/RustScan/RustScan/archive/${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=1d458cb081cbed2db38472ff33f9546a6640632148b4396bd12f0229ca9de7eb
TERMUX_PKG_AUTO_UPDATE=true
TERMUX_PKG_BUILD_IN_SRC=true

termux_step_pre_configure() {
	# Remove the upstream Makefile so the standard cargo-based build is used.
	# Fix: add -f so the step is idempotent — plain "rm -r" fails (and aborts
	# the build) if the Makefile is already gone, e.g. on a re-run.
	rm -rf Makefile
}
|
package net.cabezudo.sofia.core.sites;
import java.nio.file.Path;
import java.sql.SQLException;
import java.util.Iterator;
import net.cabezudo.json.JSONPair;
import net.cabezudo.json.values.JSONArray;
import net.cabezudo.json.values.JSONObject;
import net.cabezudo.json.values.JSONValue;
import net.cabezudo.sofia.core.list.EntryList;
/**
* @author <a href="http://cabezudo.net"><NAME></a>
* @version 0.01.00, 2019.01.28
*/
// TODO Extends from Sites, this is a paginated list of sites.
// TODO Extends from Sites, this is a paginated list of sites.
public class SiteList extends EntryList<Site> {

    // Backing collection for the current page of sites.
    Sites sites = new Sites();

    /**
     * Creates an empty paginated site list.
     *
     * @param offset zero-based row offset of the first entry in this page
     * @param pageSize maximum number of entries per page
     */
    public SiteList(int offset, int pageSize) {
        super(offset, pageSize);
    }

    @Override
    public Iterator<Site> iterator() {
        return sites.iterator();
    }

    public void add(Site s) throws SQLException {
        sites.add(s);
    }

    /**
     * Serializes this page as {"list": [...]} where each entry carries its
     * absolute row number (offset-based), id, name, base path, and the beta
     * and actual version subtrees.
     */
    @Override
    public JSONValue toJSONTree() {
        JSONObject listObject = new JSONObject();
        JSONArray jsonList = new JSONArray();
        JSONPair jsonListPair = new JSONPair("list", jsonList);
        listObject.add(jsonListPair);
        // Row numbers are absolute across pages, so they start at the offset.
        int row = super.getOffset();
        for (Site site : sites) {
            JSONObject jsonSite = new JSONObject();
            jsonSite.add(new JSONPair("row", row));
            jsonSite.add(new JSONPair("id", site.getId()));
            jsonSite.add(new JSONPair("name", site.getName()));
            jsonSite.add(new JSONPair("basePath", site.getFullBasePath().toString()));
            jsonSite.add(new JSONPair("betaVersion", site.getBetaVersion().toJSONTree()));
            jsonSite.add(new JSONPair("actualVersion", site.getActualVersion().toJSONTree()));
            jsonList.add(jsonSite);
            row++;
        }
        return listObject;
    }

    // Package-private bulk-add used when hydrating from a database row.
    void add(int id, String name, Path basePath, int baseDomainNameId, Version betaVersion, Version actualVersion, int domainNameId, String domainNameName) {
        sites.add(id, name, basePath, baseDomainNameId, betaVersion, actualVersion, domainNameId, domainNameName);
    }

    void create() throws SQLException {
        sites.create();
    }

    /** Not implemented for site lists. */
    @Override
    public void toFormatedString(StringBuilder sb, int indent, boolean includeFirst) {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
|
/**
Copyright 2019 University of Denver
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const metadataModule = (function () {
'use strict';
const api = configModule.getApi();
let obj = {};
/**
 * Constructs the full metadata display for a record by concatenating each
 * section fragment in a fixed order (title, pid, uri, dates, extents,
 * identifiers, language, names, notes, parts, subjects, abstract).
 * @param record
 * @returns {string}
 */
obj.createDisplay = function (record) {
    const sections = [
        createTitle(record),
        createPid(record),
        createUri(record),
        createDates(record),
        createExtents(record),
        createIdentifiers(record),
        createLanguage(record),
        createNames(record),
        createNotes(record),
        createParts(record),
        createSubjects(record),
        createAbstract(record),
    ];
    return sections.join('');
};
/**
 * Creates the thumbnail URL for a record.
 * Resolution order: no stored thumbnail -> tn endpoint keyed by the record
 * pid; absolute http(s) thumbnail -> used verbatim (sanitized); otherwise
 * collections use the stored thumbnail uuid while objects fall back to the
 * record pid. The user token is appended for the API-backed URLs.
 * NOTE(review): an object_type outside collection/object in the final branch
 * returns '' — presumably unreachable; confirm upstream.
 * @param record
 */
obj.createThumbnailLink = function (record) {
    let tn = '';
    let token = userModule.getUserToken();
    if (record.thumbnail === undefined || record.thumbnail === null) {
        tn = api + '/api/admin/v1/repo/object/tn?uuid=' + DOMPurify.sanitize(record.pid) + '&type=' + DOMPurify.sanitize(record.mime_type) + '&t=' + token;
    } else if (record.thumbnail.search('http') === 0) {
        tn = DOMPurify.sanitize(record.thumbnail);
    } else {
        if (record.object_type === 'collection') {
            tn = api + '/api/admin/v1/repo/object/tn?uuid=' + DOMPurify.sanitize(record.thumbnail) + '&type=' + DOMPurify.sanitize(record.mime_type) + '&t=' + token;
        } else if (record.object_type === 'object') {
            tn = api + '/api/admin/v1/repo/object/tn?uuid=' + DOMPurify.sanitize(record.pid) + '&type=' + DOMPurify.sanitize(record.mime_type) + '&t=' + token;
        }
    }
    return tn;
};
/**
 * Creates thumbnail display fragment. Objects get the thumbnail wrapped in
 * a link to the object viewer; all other types get the bare image.
 * @param record
 * @param tn thumbnail URL (from createThumbnailLink)
 * @returns {string}
 */
obj.createThumbnailDisplay = function (record, tn) {
    let tnDisplay = '';
    let token = userModule.getUserToken();
    // Fix: the style attribute previously contained a stray closing quote
    // ('...200px;" display: block;...') which truncated the inline style and
    // injected malformed attributes into the <img> tag.
    const img = '<img style="max-height: 200px; max-width: 200px; display: block; padding: 5px;" src="' + tn + '" alt="image" />';
    if (record.object_type === 'object') {
        tnDisplay += '<a href="' + api + '/api/admin/v1/repo/object/viewer?uuid=' + DOMPurify.sanitize(record.pid) + '&t=' + token + '" target="_blank">';
        tnDisplay += img;
        tnDisplay += '</a>';
    } else {
        tnDisplay += img;
    }
    return tnDisplay;
};
/**
* Creates collection menu fragment
* @param record
* @returns {string}
*/
obj.createCollectionMenu = function(record) {
let menu = '';
let is_published = parseInt(record.is_published);
if (record.object_type === 'collection') {
menu += '<p><small style="background: skyblue; padding: 3px; color: white">Collection</small></p>';
if (is_published === 1) {
menu += '<p><small style="background: green; padding: 3px; color: white">Published</small></p>';
menu += '<p><a href="#" onclick="objectsModule.unpublishObject(\'' + DOMPurify.sanitize(record.pid) + '\', \'collection\'); return false;"><i class="fa fa-cloud-download"></i> Unpublish</a></p>';
} else if (is_published === 0) {
menu += '<p><small style="background: red; padding: 3px; color: white">Not published</small></p>';
menu += '<p><a href="#" onclick="objectsModule.publishObject(\'' + DOMPurify.sanitize(record.pid) + '\', \'collection\'); return false;"><i class="fa fa-cloud-upload"></i> Publish</a></p>';
}
menu += '<p><a href="' + api + '/dashboard/objects/unpublished?pid=' + DOMPurify.sanitize(record.pid) + '"><i class="fa fa-info-circle"></i> Unpublished objects</a></p>';
menu += '<p><a href="' + api + '/dashboard/object/thumbnail?pid=' + DOMPurify.sanitize(record.pid) + '"><i class="fa fa-edit"></i> Change Thumbnail</a></p>';
menu += '<p><a href="#" onclick="collectionsModule.updateCollectionMetadata(\'' + DOMPurify.sanitize(record.pid) + '\', \'collection\'); return false;"><i class="fa fa-code"></i> Update Collection Metadata</a></p>';
}
return menu;
};
/**
* Creates object menu fragment
* @param record
* @returns {string}
*/
obj.createObjectMenu = function(record) {
let menu = '';
let is_published = parseInt(record.is_published);
let is_compound = parseInt(record.is_compound);
if (record.object_type === 'object') {
if (is_compound === 1) {
menu += '<p><small style="background: cadetblue; padding: 3px; color: white">Compound Object</small></p>';
} else {
menu += '<p><small style="background: cadetblue; padding: 3px; color: white">Object</small></p>';
}
if (is_published === 1) {
menu += '<p><small style="background: green; padding: 3px; color: white">Published</small></p>';
menu += '<p><a href="#" onclick="objectsModule.unpublishObject(\'' + DOMPurify.sanitize(record.pid) + '\', \'object\'); return false;"><i class="fa fa-cloud-download"></i> Unpublish</a></p>';
} else if (is_published === 0) {
menu += '<p><small style="background: red; padding: 3px; color: white">Not published</small></p>';
menu += '<p><a href="#" onclick="objectsModule.publishObject(\'' + DOMPurify.sanitize(record.pid) + '\', \'object\'); return false;"><i class="fa fa-cloud-upload"></i> Publish</a></p>';
}
menu += '<p><a href="#" onclick="objectsModule.updateMetadata(\'' + DOMPurify.sanitize(record.pid) + '\', \'object\'); return false;"><i class="fa fa-code"></i> Update Metadata</a></p>';
}
return menu;
};
/**
 * Creates the title fragment: collections link to the collection's object
 * list, objects show a plain title, anything else shows "No Title".
 * NOTE(review): an object with an undefined display_record.title takes the
 * second branch and renders the string "undefined" — confirm whether object
 * titles are guaranteed upstream.
 * @param record
 * @returns {string}
 */
function createTitle(record) {
    let title = '';
    if (record.display_record.title !== undefined && record.object_type === 'collection') {
        title += '<h4><a href="' + api + '/dashboard/objects/?pid=' + DOMPurify.sanitize(record.pid) + '">' + DOMPurify.sanitize(record.display_record.title) + '</a></h4>';
    } else if (record.object_type === 'object') {
        title += '<h4>' + DOMPurify.sanitize(record.display_record.title) + '</h4>';
    } else {
        title += '<h4>No Title</h4>';
    }
    return title;
}
/**
 * Creates the pid fragment: a single list item linking the sanitized pid
 * to its (sanitized) handle, opened in a new tab.
 * @param record
 * @returns {string}
 */
function createPid(record) {
    const handle = DOMPurify.sanitize(record.handle);
    const cleanPid = DOMPurify.sanitize(record.pid);
    return '<ul>'
        + '<li><strong>Pid:</strong> <a target="_blank" href="' + handle + '">' + cleanPid + '</a> <i class="fa fa-external-link"></i></li>'
        + '</ul>';
}
/**
 * Creates the uri fragment, or '' when the record has no uri.
 * @param record
 * @returns {string}
 */
function createUri(record) {
    const { uri } = record.display_record;
    if (uri === undefined) {
        return '';
    }
    return '<ul><li><strong>Uri:</strong> ' + DOMPurify.sanitize(uri) + '</li></ul>';
}
/**
 * Creates the dates fragment: a nested list of "expression ( kind )" items.
 * Collection records store the date kind under "date_type"; object records
 * store it under "type".
 * @param record
 * @returns {string}
 */
function createDates(record) {
    let dates = '';
    if (record.display_record.dates !== undefined && record.display_record.dates.length !== 0) {
        dates += '<ul>';
        dates += '<li><strong>Dates:</strong></li>';
        dates += '<ul>';
        for (let j = 0; j < record.display_record.dates.length; j++) {
            const date = record.display_record.dates[j];
            const kind = record.object_type === 'collection' ? date.date_type : date.type;
            // Fix: each entry previously ended with an orphan "</a>" closing
            // tag even though no anchor was ever opened.
            dates += '<li>' + DOMPurify.sanitize(date.expression) + ' ( ' + DOMPurify.sanitize(kind) + ' )</li>';
        }
        dates += '</ul></ul>';
    }
    return dates;
}
/**
 * Creates the extents fragment. Collection extents arrive as objects whose
 * known properties are rendered as labelled items; object extents arrive as
 * plain strings and are rendered directly.
 * @param record
 * @returns {string}
 */
function createExtents(record) {
    let extents = '';
    if (record.display_record.extents !== undefined && record.display_record.extents.length !== 0) {
        extents += '<ul>';
        extents += '<li><strong>Extents:</strong></li>';
        extents += '<ul>';
        for (let i = 0; i < record.display_record.extents.length; i++) {
            // collection object
            if (typeof record.display_record.extents[i] === 'object') {
                // Only the whitelisted properties below are rendered; any
                // other keys on the extent object are ignored.
                for (let prop in record.display_record.extents[i]) {
                    if (prop === 'number') {
                        extents += '<li>number: ' + DOMPurify.sanitize(record.display_record.extents[i][prop]) + '</li>';
                    } else if (prop === 'container_summary') {
                        extents += '<li>container summary: ' + DOMPurify.sanitize(record.display_record.extents[i][prop]) + '</li>';
                    } else if (prop === 'created_by') {
                        extents += '<li>created by: ' + DOMPurify.sanitize(record.display_record.extents[i][prop]) + '</li>';
                    } else if (prop === 'last_modified_by') {
                        extents += '<li>last modified by: ' + DOMPurify.sanitize(record.display_record.extents[i][prop]) + '</li>';
                    } else if (prop === 'portion') {
                        extents += '<li>portion: ' + DOMPurify.sanitize(record.display_record.extents[i][prop]) + '</li>';
                    } else if (prop ==='extent_type') {
                        extents += '<li>extent type: ' + DOMPurify.sanitize(record.display_record.extents[i][prop]) + '</li>';
                    }
                }
            } else {
                extents += '<li>' + DOMPurify.sanitize(record.display_record.extents[i]) + '</li>';
            }
        }
        extents += '</ul></ul>';
    }
    return extents;
}
/**
 * Creates the identifiers fragment: each identifier with its type in parens.
 * @param record
 * @returns {string}
 */
function createIdentifiers(record) {
    const identifiers = record.display_record.identifiers;
    if (identifiers === undefined || identifiers.length === 0) {
        return '';
    }
    let html = '<ul>' + '<li><strong>Identifiers:</strong></li>' + '<ul>';
    for (let i = 0; i < identifiers.length; i++) {
        html += '<li>' + DOMPurify.sanitize(identifiers[i].identifier) + ' ( ' + DOMPurify.sanitize(identifiers[i].type) + ' )</li>';
    }
    return html + '</ul></ul>';
}
/**
 * Creates the language fragment. The field may be an array of
 * {text, authority} objects (one list item each) or a plain string.
 * @param record
 * @returns {string}
 */
function createLanguage(record) {
    const language = record.display_record.language;
    if (language === undefined || language.length === 0) {
        return '';
    }
    if (typeof language === 'object') {
        let html = '<ul>';
        for (let i = 0; i < language.length; i++) {
            html += '<li><strong>Language:</strong> ' + DOMPurify.sanitize(language[i].text) + ' ( ' + DOMPurify.sanitize(language[i].authority) + ' )</li>';
        }
        return html + '</ul>';
    }
    return '<ul><li><strong>Language:</strong> ' + DOMPurify.sanitize(language) + '</li></ul>';
}
/**
 * Creates the names fragment: each name title with its source in parens.
 * @param record
 * @returns {string}
 */
function createNames(record) {
    const names = record.display_record.names;
    if (names === undefined || names.length === 0) {
        return '';
    }
    let html = '<ul>' + '<li><strong>Names:</strong></li>' + '<ul>';
    for (let i = 0; i < names.length; i++) {
        html += '<li>' + DOMPurify.sanitize(names[i].title) + ' ( ' + DOMPurify.sanitize(names[i].source) + ' )</li>';
    }
    return html + '</ul></ul>';
}
/**
 * Creates the notes fragment; notes without content are skipped entirely.
 * @param record
 * @returns {string}
 */
function createNotes(record) {
    const notes = record.display_record.notes;
    if (notes === undefined || notes.length === 0) {
        return '';
    }
    let html = '<ul>' + '<li><strong>Notes:</strong></li>' + '<ul>';
    for (let i = 0; i < notes.length; i++) {
        // Content may be an array; toString() joins it the way the UI expects.
        if (notes[i].content !== undefined) {
            html += '<li>' + DOMPurify.sanitize(notes[i].content.toString()) + ' ( ' + DOMPurify.sanitize(notes[i].type) + ' )</li>';
        }
    }
    return html + '</ul></ul>';
}
/**
 * Creates parts fragment
 * @param record
 * @returns {string}
 */
function createParts(record) {
    let parts = '';
    // NOTE(review): renders only when the part count is *not exactly one* —
    // presumably a single part is the object itself; confirm this shouldn't be `!== 0`.
    if (record.display_record.parts !== undefined && record.display_record.parts.length !== 1) {
        parts += '<ul>';
        parts += '<li><strong>Parts:</strong></li>';
        parts += '<ul>';
        for (let i = 0; i < record.display_record.parts.length; i++) {
            // Cap the listing at 10 parts to keep the fragment small.
            if (i === 10) {
                parts += '<li><strong>Only showing ' + i + ' out of ' + DOMPurify.sanitize(record.display_record.parts.length) + ' parts.</strong></li>';
                break;
            } else {
                parts += '<li>' + DOMPurify.sanitize(record.display_record.parts[i].title) + ' ( ' + DOMPurify.sanitize(record.display_record.parts[i].type) + ' ) order: ' + DOMPurify.sanitize(record.display_record.parts[i].order);
                // Thumbnail URL resolved by the shared helper; second arg is the fallback.
                let tn = helperModule.getTn(DOMPurify.sanitize(record.display_record.parts[i].thumbnail), '');
                parts += '<br><img src="' + tn + '" width="100px" height="100px"></li>';
            }
        }
        parts += '</ul></ul>';
    }
    return parts;
}
/**
 * Creates the subjects fragment (objects only; collections render none).
 * Subjects with an authority_id link out to the authority record.
 * @param record
 * @returns {string}
 */
function createSubjects(record) {
    const subjects = record.display_record.subjects;
    if (record.object_type === 'collection' || subjects === undefined || subjects.length === 0) {
        return '';
    }
    let html = '<ul>' + '<li><strong>Subjects:</strong></li>' + '<ul>';
    for (let i = 0; i < subjects.length; i++) {
        const title = DOMPurify.sanitize(subjects[i].title);
        const authority = DOMPurify.sanitize(subjects[i].authority);
        if (subjects[i].authority_id !== undefined) {
            html += '<li>' + title + ' ( <a target="_blank" href="' + DOMPurify.sanitize(subjects[i].authority_id) + '">' + authority + '</a> )</li>';
        } else {
            html += '<li>' + title + ' ( ' + authority + ' )</li>';
        }
    }
    return html + '</ul></ul>';
}
/**
 * Creates the abstract fragment. Note: reads record.abstract directly,
 * not record.display_record.
 * @param record
 * @returns {string}
 */
function createAbstract(record) {
    if (record.abstract === undefined) {
        return '';
    }
    return '<ul>'
        + '<li><strong>Abstract:</strong></li>'
        + '<ul>'
        + '<li style="min-height: 75px">' + DOMPurify.sanitize(record.abstract) + '</li>'
        + '</ul></ul>';
}
return obj;
}()); |
"use strict";
// TypeScript compiler helper (emitted output — regenerate from the .ts source rather
// than editing by hand). Applies an array of decorators to a class, member or
// property descriptor, delegating to Reflect.decorate when a polyfill provides it.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript compiler helper: records design-time type metadata when the
// reflect-metadata polyfill is loaded; otherwise a no-op.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
// used only when the data passed is an object
var core_1 = require('@angular/core');
// articles pipes
// Compiled output of the Angular 'SortArticlesTitle' pipe — prefer editing the
// TypeScript source and regenerating. Sorts articles by Title, ascending when the
// argument is 'A', otherwise descending; non-arrays pass through unchanged.
// NOTE(review): this pipe inspects args[0] while the sibling pipes compare args
// directly — confirm which form the templates pass before unifying.
var SortTitlePipe = (function () {
    function SortTitlePipe() {
    }
    SortTitlePipe.prototype.transform = function (value, args) {
        // Pass through anything that is not a sortable array.
        if (!value || !value.sort) {
            return value;
        }
        if (args !== null) {
            if (args[0] === 'A') {
                return value.sort(function (a, b) {
                    if (a.Title < b.Title) {
                        return -1;
                    }
                    if (a.Title > b.Title) {
                        return 1;
                    }
                    return 0;
                });
            }
            else {
                return value.sort(function (a, b) {
                    if (a.Title > b.Title) {
                        return -1;
                    }
                    if (a.Title < b.Title) {
                        return 1;
                    }
                    return 0;
                });
            }
        }
        else {
            return value;
        }
    };
    SortTitlePipe = __decorate([
        core_1.Pipe({ name: 'SortArticlesTitle' }),
        __metadata('design:paramtypes', [])
    ], SortTitlePipe);
    return SortTitlePipe;
}());
exports.SortTitlePipe = SortTitlePipe;
// Compiled 'SortArticlesCategory' pipe: sorts by CategoryName, ascending when
// args === 'A', otherwise descending (emitted output — edit the .ts source).
var SortCategoryPipe = (function () {
    function SortCategoryPipe() {
    }
    SortCategoryPipe.prototype.transform = function (value, args) {
        if (!value || !value.sort) {
            return value;
        }
        if (args !== null) {
            if (args === 'A') {
                return value.sort(function (a, b) {
                    if (a.CategoryName < b.CategoryName) {
                        return -1;
                    }
                    if (a.CategoryName > b.CategoryName) {
                        return 1;
                    }
                    return 0;
                });
            }
            else {
                return value.sort(function (a, b) {
                    if (a.CategoryName > b.CategoryName) {
                        return -1;
                    }
                    if (a.CategoryName < b.CategoryName) {
                        return 1;
                    }
                    return 0;
                });
            }
        }
        else {
            return value;
        }
    };
    SortCategoryPipe = __decorate([
        core_1.Pipe({ name: 'SortArticlesCategory' }),
        __metadata('design:paramtypes', [])
    ], SortCategoryPipe);
    return SortCategoryPipe;
}());
exports.SortCategoryPipe = SortCategoryPipe;
// Compiled 'SortArticlesName' pipe: same pattern, keyed on AuthorName.
var SortNamePipe = (function () {
    function SortNamePipe() {
    }
    SortNamePipe.prototype.transform = function (value, args) {
        if (!value || !value.sort) {
            return value;
        }
        if (args !== null) {
            if (args === 'A') {
                return value.sort(function (a, b) {
                    if (a.AuthorName < b.AuthorName) {
                        return -1;
                    }
                    if (a.AuthorName > b.AuthorName) {
                        return 1;
                    }
                    return 0;
                });
            }
            else {
                return value.sort(function (a, b) {
                    if (a.AuthorName > b.AuthorName) {
                        return -1;
                    }
                    if (a.AuthorName < b.AuthorName) {
                        return 1;
                    }
                    return 0;
                });
            }
        }
        else {
            return value;
        }
    };
    SortNamePipe = __decorate([
        core_1.Pipe({ name: 'SortArticlesName' }),
        __metadata('design:paramtypes', [])
    ], SortNamePipe);
    return SortNamePipe;
}());
exports.SortNamePipe = SortNamePipe;
// Compiled 'SortArticlesDate' pipe: same pattern, keyed on DatePublished.
var SortDatePipe = (function () {
    function SortDatePipe() {
    }
    SortDatePipe.prototype.transform = function (value, args) {
        if (!value || !value.sort) {
            return value;
        }
        if (args !== null) {
            if (args === 'A') {
                return value.sort(function (a, b) {
                    if (a.DatePublished < b.DatePublished) {
                        return -1;
                    }
                    if (a.DatePublished > b.DatePublished) {
                        return 1;
                    }
                    return 0;
                });
            }
            else {
                return value.sort(function (a, b) {
                    if (a.DatePublished > b.DatePublished) {
                        return -1;
                    }
                    if (a.DatePublished < b.DatePublished) {
                        return 1;
                    }
                    return 0;
                });
            }
        }
        else {
            return value;
        }
    };
    SortDatePipe = __decorate([
        core_1.Pipe({ name: 'SortArticlesDate' }),
        __metadata('design:paramtypes', [])
    ], SortDatePipe);
    return SortDatePipe;
}());
exports.SortDatePipe = SortDatePipe;
// Compiled 'TopArticles' pipe: returns the first `args` articles, defaulting to 3.
// (Emitted output — mirror this fix in the TypeScript source when regenerating.)
var TopArticlesPipe = (function () {
    function TopArticlesPipe() {
    }
    TopArticlesPipe.prototype.transform = function (value, args) {
        // Bug fix: the sibling pipes guard against missing input, but this one
        // crashed on value.slice when value was null/undefined. Pass it through.
        if (!value || !value.slice) {
            return value;
        }
        if (args !== null) {
            return value.slice(0, args);
        }
        else {
            return value.slice(0, 3);
        }
    };
    TopArticlesPipe = __decorate([
        core_1.Pipe({ name: 'TopArticles' }),
        __metadata('design:paramtypes', [])
    ], TopArticlesPipe);
    return TopArticlesPipe;
}());
exports.TopArticlesPipe = TopArticlesPipe;
// authors pipes
// Compiled 'SortedAuthors' pipe: always sorts ascending by Name (no direction arg).
var SortAuthorsPipe = (function () {
    function SortAuthorsPipe() {
    }
    SortAuthorsPipe.prototype.transform = function (value, args) {
        if (!value || !value.sort) {
            return value;
        }
        return value.sort(function (a, b) {
            if (a.Name < b.Name) {
                return -1;
            }
            if (a.Name > b.Name) {
                return 1;
            }
            return 0;
        });
    };
    SortAuthorsPipe = __decorate([
        core_1.Pipe({ name: 'SortedAuthors' }),
        __metadata('design:paramtypes', [])
    ], SortAuthorsPipe);
    return SortAuthorsPipe;
}());
exports.SortAuthorsPipe = SortAuthorsPipe;
// teams pipes
// general pipes
// Compiled 'InitCaps' pipe: lower-cases the string then upper-cases the first
// letter of each whitespace-delimited word.
var InitCapsPipe = (function () {
    function InitCapsPipe() {
    }
    InitCapsPipe.prototype.transform = function (value, args) {
        return value
            .toLowerCase()
            .replace(/(?:^|\s)[a-z]/g, function (m) {
            return m.toUpperCase();
        });
    };
    InitCapsPipe = __decorate([
        core_1.Pipe({ name: 'InitCaps' }),
        __metadata('design:paramtypes', [])
    ], InitCapsPipe);
    return InitCapsPipe;
}());
exports.InitCapsPipe = InitCapsPipe;
// Compiled 'Caps' pipe: 'L' argument lower-cases; anything else (or no
// argument) upper-cases.
var CapsPipe = (function () {
    function CapsPipe() {
    }
    CapsPipe.prototype.transform = function (value, args) {
        if (args != null) {
            if (args === 'L') {
                return value.toLowerCase();
            }
            else {
                return value.toUpperCase();
            }
        }
        else {
            // default to upper case with no argument
            return value.toUpperCase();
        }
    };
    CapsPipe = __decorate([
        core_1.Pipe({ name: 'Caps' }),
        __metadata('design:paramtypes', [])
    ], CapsPipe);
    return CapsPipe;
}());
exports.CapsPipe = CapsPipe;
//@Pipe({ name: 'values' })
//class ValuesPipe implements PipeTransform {
// transform(value: any, args?: any[]): Object[] {
// let keyArr = Object.keys(value),
// dataArr = [],
// keyName = args[0];
// keyArr.forEach(key => {
// value[key][keyName] = key;
// dataArr.push(value[key])
// });
// if (args[1]) {
// dataArr.sort((a: Object, b: Object): number => {
// return a[keyName] > b[keyName] ? 1 : -1;
// });
// }
// return dataArr;
// }
//}
//# sourceMappingURL=pipes.js.map |
/**
* @typedef {object} Phaser.Types.Physics.Arcade.ArcadeWorldTreeMinMax
* @since 3.0.0
*
* @property {number} minX - The minimum x value used in RTree searches.
* @property {number} minY - The minimum y value used in RTree searches.
* @property {number} maxX - The maximum x value used in RTree searches.
* @property {number} maxY - The maximum y value used in RTree searches.
*/
|
import { Injectable } from '@angular/core'
import { HttpClient } from '@angular/common/http'
@Injectable({
providedIn: 'root'
})
export class ApiService {
constructor(private http: HttpClient) {}
getData() {
return this.http.get('http://myapi.com/data');
}
} |
class Date:
    """A simple calendar date with leap-year and day-difference helpers."""

    def __init__(self, year, month, day):
        self.year = year    # four-digit year
        self.month = month  # 1-12
        self.day = day      # 1-31

    def is_leap_year(self):
        """Return True if this date's year is a Gregorian leap year."""
        # Divisible by 4 but not by 100, unless also divisible by 400.
        return (self.year % 4 == 0 and self.year % 100 != 0) or self.year % 400 == 0

    def days_until(self, other_date):
        """Return the signed number of days from this date to ``other_date``.

        Positive when ``other_date`` is later, negative when earlier.
        """
        from datetime import date as dt_date
        current_date = dt_date(self.year, self.month, self.day)
        target_date = dt_date(other_date.year, other_date.month, other_date.day)
        return (target_date - current_date).days

    def __str__(self):
        """Format as ISO 8601 (YYYY-MM-DD), zero-padded."""
        return f"{self.year:04d}-{self.month:02d}-{self.day:02d}"
# Exercise the Date class.
start = Date(2020, 1, 24)
end = Date(2020, 6, 24)
print(start.is_leap_year())   # Output: True
print(end.is_leap_year())     # Output: True
print(start.days_until(end))  # Output: 152
print(end.days_until(start))  # Output: -152
print(start)                  # Output: 2020-01-24
print(end)                    # Output: 2020-06-24
/*
* Copyright (c) 2020 Ubique Innovation AG <https://www.ubique.ch>
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*
* SPDX-License-Identifier: MPL-2.0
*/
package org.dpppt.android.sdk.internal.backend;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import androidx.annotation.NonNull;
import okhttp3.Cache;
import okhttp3.OkHttpClient;
import okhttp3.Request;
/**
 * Shared backend plumbing: builds an OkHttp client preconfigured with a
 * user-agent interceptor, a 50 MB disk cache and certificate pinning.
 */
interface Repository {

	/**
	 * Creates an {@link OkHttpClient.Builder} for backend requests.
	 *
	 * @param context used to resolve the app's package name/version and cache directory
	 * @return a builder with user-agent header, response cache and certificate pinning applied
	 */
	default OkHttpClient.Builder getClientBuilder(@NonNull Context context) {
		String versionName;
		PackageManager manager = context.getPackageManager();
		try {
			PackageInfo info = manager.getPackageInfo(context.getPackageName(), 0);
			versionName = info.versionName;
		} catch (PackageManager.NameNotFoundException e) {
			// Looking up our own package should not fail; fall back defensively.
			versionName = "unknown";
		}
		// User agent format: <package>;<version>;Android;<API level>
		String userAgent = context.getPackageName() + ";" + versionName + ";Android;" + Build.VERSION.SDK_INT;
		OkHttpClient.Builder okHttpBuilder = new OkHttpClient.Builder();
		okHttpBuilder.addInterceptor(chain -> {
			Request request = chain.request()
					.newBuilder()
					.header("User-Agent", userAgent)
					.build();
			return chain.proceed(request);
		});
		int cacheSize = 50 * 1024 * 1024; // 50 MB
		Cache cache = new Cache(context.getCacheDir(), cacheSize);
		okHttpBuilder.cache(cache);
		okHttpBuilder.certificatePinner(CertificatePinning.getCertificatePinner());
		return okHttpBuilder;
	}
}
|
<gh_stars>0
/*
* This file is generated by jOOQ.
*/
package jooq.generated.entities.mappings.tables.records;
import java.sql.Timestamp;
import java.util.UUID;
import javax.annotation.Generated;
import jooq.generated.entities.mappings.tables.AreaMapper;
import org.jooq.Field;
import org.jooq.Record3;
import org.jooq.Row3;
import org.jooq.impl.TableRecordImpl;
/**
* This class is generated by jOOQ.
*/
@Generated(
value = {
"http://www.jooq.org",
"jOOQ version:3.9.2"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class AreaMapperRecord extends TableRecordImpl<AreaMapperRecord> implements Record3<String, UUID, Timestamp> {

    // Generated by jOOQ 3.9.2 — do not edit by hand; regenerate from the schema.
    // Columns: gtfs_id (String, index 0), id (UUID, index 1), update (Timestamp, index 2).
    private static final long serialVersionUID = 78257641;

    /**
     * Setter for <code>public.area_mapper.gtfs_id</code>.
     */
    public AreaMapperRecord setGtfsId(String value) {
        set(0, value);
        return this;
    }

    /**
     * Getter for <code>public.area_mapper.gtfs_id</code>.
     */
    public String getGtfsId() {
        return (String) get(0);
    }

    /**
     * Setter for <code>public.area_mapper.id</code>.
     */
    public AreaMapperRecord setId(UUID value) {
        set(1, value);
        return this;
    }

    /**
     * Getter for <code>public.area_mapper.id</code>.
     */
    public UUID getId() {
        return (UUID) get(1);
    }

    /**
     * Setter for <code>public.area_mapper.update</code>.
     */
    public AreaMapperRecord setUpdate(Timestamp value) {
        set(2, value);
        return this;
    }

    /**
     * Getter for <code>public.area_mapper.update</code>.
     */
    public Timestamp getUpdate() {
        return (Timestamp) get(2);
    }

    // -------------------------------------------------------------------------
    // Record3 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<String, UUID, Timestamp> fieldsRow() {
        return (Row3) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<String, UUID, Timestamp> valuesRow() {
        return (Row3) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field1() {
        return AreaMapper.AREA_MAPPER.GTFS_ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<UUID> field2() {
        return AreaMapper.AREA_MAPPER.ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Timestamp> field3() {
        return AreaMapper.AREA_MAPPER.UPDATE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value1() {
        return getGtfsId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UUID value2() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Timestamp value3() {
        return getUpdate();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public AreaMapperRecord value1(String value) {
        setGtfsId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public AreaMapperRecord value2(UUID value) {
        setId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public AreaMapperRecord value3(Timestamp value) {
        setUpdate(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public AreaMapperRecord values(String value1, UUID value2, Timestamp value3) {
        value1(value1);
        value2(value2);
        value3(value3);
        return this;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached AreaMapperRecord
     */
    public AreaMapperRecord() {
        super(AreaMapper.AREA_MAPPER);
    }

    /**
     * Create a detached, initialised AreaMapperRecord
     */
    public AreaMapperRecord(String gtfsId, UUID id, Timestamp update) {
        super(AreaMapper.AREA_MAPPER);

        set(0, gtfsId);
        set(1, id);
        set(2, update);
    }
}
|
/**
 * Returns the elements of `array` that are strictly greater than `num`.
 * @param {number[]} array
 * @param {number} num
 * @returns {number[]} a new array; the input is not modified
 */
const greaterThan = (array, num) => array.filter(function (item) {
    return item > num;
});
// Demo: print every sample element above the threshold.
const sample = [1, 2, 3, 4, 5];
const threshold = 3;
const filtered = greaterThan(sample, threshold);
console.log(filtered); // [4,5]
<filename>test/ehonda/typed_message_test.rb
require_relative '../test_helper'
require 'active_attr'
require 'ostruct'
require 'ehonda/typed_message'
describe Ehonda::TypedMessage do
before do
@typed_message = Ehonda::TypedMessage
@valid_message = {
headers: {
id: SecureRandom.uuid,
type: 'some-type',
version: 1
},
body: {
some_key: 'some value'
}}
@valid_message_json = ActiveSupport::JSON.encode @valid_message
end
it 'can be built from a valid message hash' do
message = @typed_message.new @valid_message
message.to_h['body']['some_key'].must_equal 'some value'
end
it 'can be built from valid message json' do
message = @typed_message.new @valid_message_json
message.to_h['body']['some_key'].must_equal 'some value'
end
it 'can be built from valid non-raw format message json' do
non_raw_hash = { 'Message' => @valid_message_json }
message = @typed_message.new non_raw_hash
message.to_h['body']['some_key'].must_equal 'some value'
end
it 'fails when building from invalid json' do
invalid_json = ActiveSupport::JSON.encode(blah: 123)
->{ @typed_message.new(invalid_json).to_h }.must_raise Ehonda::MessageSanitizer::InvalidMessageError
end
it 'can be built from an existing typed message' do
message = @typed_message.new @valid_message
message2 = @typed_message.new message
message2.to_h['body']['some_key'].must_equal 'some value'
end
it 'can be built from an active attr model' do
MyMessage = Class.new do
include ActiveAttr::Model
attribute :foo
end
message = @typed_message.new MyMessage.new(foo: 121)
message.to_h['body']['foo'].must_equal 121
end
it 'can be built from an Shoryuken::Message' do
if defined? ::Shoryuken::Message
shoryuken_message = Shoryuken::Message.new(
Object.new,
'http://example.org/queue1',
OpenStruct.new(body: @valid_message_json))
message = @typed_message.new shoryuken_message
message.to_h['body']['some_key'].must_equal 'some value'
end
end
it 'can be built from an Shoryuken::Message' do
if defined? ::Aws::SQS::Message
sqs_message = Aws::SQS::Message.new(
queue_url: 'http://example.org/queue1',
client: Object.new,
receipt_handle: SecureRandom.uuid,
data: { body: @valid_message_json })
message = @typed_message.new sqs_message
message.to_h['body']['some_key'].must_equal 'some value'
end
end
end
|
<filename>src/sections/home/home.ctrl.js
'use strict';
angular
.module('app.core', ['ui.bootstrap'])
.controller('HomeController', function ($scope, $uibModal, PageValues) {
//Set page title and description
PageValues.title = "HOME";
PageValues.description = "Learn AngularJS using best practice real world examples.";
//Setup view model object
var vm = this;
$scope.showdetail = function (args) {
console.log(args);
var modalInstance = $uibModal.open({
templateUrl: 'sections/home/modal.tpl.html',
scope: $scope, //passed current scope to the modal
controller: 'ChartDrilController', //passed current scope to the modal
size: 'lg'
});
}
$scope.showdetailpie = function (args) {
console.log(args);
}
$scope.data1 = {
data: [{
"org2": "Chemical",
"Member_Only": 0,
"Leadership_Role": 1,
"Committee_Member": 10,
},
{
"org2": "Fuels_Lubricants",
"Member_Only": 0,
"Leadership_Role": 4,
"Committee_Member": 0,
},
{
"org2": "Refining_Supply",
"Member_Only": 2,
"Leadership_Role": 207,
"Committee_Member": 423,
},
{
"org2": "IOL",
"Member_Only": 0,
"Leadership_Role": 13,
"Committee_Member": 17,
},
],
title: "Refining & Supply (Org Lvl 1) Participation by Org Lvl 2 - 6",
subtitle: "(With Participation Type Detail)",
axes: [{
name: "org2",
type: "categoryX",
label: "org2",
title: "Emp Org2",
gap: 0.3,
},
{
name: "Volume",
type: "numericY",
title: "Count of ID"
}
],
series: [{
name: "Member_Only",
title: "Member Only",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Member Only",
valueMemberPath: "Member_Only"
}, {
name: "Leadership_Role",
title: "Leadership Role",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Leadership Role",
valueMemberPath: "Leadership_Role"
}, {
name: "<NAME>",
title: "Committee Member",
showTooltip: true,
tooltipTemplate: "Committee Member",
type: "stackedFragment",
valueMemberPath: "Committee_Member"
}],
name: "parent",
xAxis: "org2",
yAxis: "Volume",
legend: "legend0"
};
$scope.data2 = {
data: [{
"org2": "KEEP", //Chemical
"Member_Only": 2,
"Leadership_Role": 189,
"Committee_Member": 359,
},
{
"org2": "NEW", //Fuels_Lubricants
"Member_Only": 0,
"Leadership_Role": 56,
"Committee_Member": 87,
},
{
"org2": "NEW_ROLE", //Refining_Supply
"Member_Only": 0,
"Leadership_Role": 0,
"Committee_Member": 10,
},
{
"org2": "Reject", //Refining_Supply
"Member_Only": 0,
"Leadership_Role": 0,
"Committee_Member": 1,
},
{
"org2": "Remove", //Refining_Supply
"Member_Only": 0,
"Leadership_Role": 0,
"Committee_Member": 0,
},
{
"org2": "Review", //Refining_Supply
"Member_Only": 0,
"Leadership_Role": 0,
"Committee_Member": 0,
},
],
title: "Refining & Supply (Org Lvl 1) by Request Type",
subtitle: "(With Participation Type Detail)",
axes: [{
name: "org2",
type: "categoryX",
label: "org2",
title: "pAction",
gap: 1,
},
{
name: "Volume",
type: "numericY",
title: "Count of ID"
}
],
series: [{
name: "Member_Only",
title: "Member Only",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Member Only",
valueMemberPath: "Member_Only"
}, {
name: "Leadership_Role",
title: "Leadership Role",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Leadership Role",
valueMemberPath: "Leadership_Role"
}, {
name: "Committee Member",
title: "Committee Member",
showTooltip: true,
tooltipTemplate: "Committee Member",
type: "stackedFragment",
valueMemberPath: "Committee_Member"
}],
name: "parent",
xAxis: "org2",
yAxis: "Volume",
legend: "legend1"
};
$scope.data3 = {
data: [{
"country": "Norway",
"Member_Only": 0,
"Leadership_Role": 0,
"Committee_Member": 2,
},
{
"country": "Singapore",
"Member_Only": 2,
"Leadership_Role": 4,
"Committee_Member": 7,
},
{
"country": "Thailand",
"Committee_Member": 5,
"Leadership_Role": 0,
"Member_Only": 0,
},
{
"country": "The_Netherlands",
"Committee_Member": 2,
"Leadership_Role": 1,
"Member_Only": 0,
},
{
"country": "Turkey",
"Committee_Member": 3,
"Leadership_Role": 1,
"Member_Only": 0,
},
{
"country": "Egypt",
"Committee_Member": 20,
"Leadership_Role": 1,
"Member_Only": 0,
},
{
"country": "Europe",
"Committee_Member": 9,
"Leadership_Role": 0,
"Member_Only": 0,
},
{
"country": "France",
"Committee_Member": 4,
"Leadership_Role": 2,
"Member_Only": 0,
},
{
"country": "Germany",
"Committee_Member": 9,
"Leadership_Role": 0,
"Member_Only": 0,
},
{
"country": "Italy",
"Committee_Member": 1,
"Leadership_Role": 1,
"Member_Only": 0,
},
{
"country": "Netherlands",
"Committee_Member": 2,
"Leadership_Role": 1,
"Member_Only": 0,
},
{
"country": "New_Zealand",
"Committee_Member": 1,
"Leadership_Role": 0,
"Member_Only": 0,
},
{
"country": "Australia",
"Committee_Member": 2,
"Leadership_Role": 4,
"Member_Only": 0,
},
{
"country": "Belgium",
"Committee_Member": 7,
"Leadership_Role": 2,
"Member_Only": 0,
},
{
"country": "CA",
"Committee_Member": 0,
"Leadership_Role": 2,
"Member_Only": 0,
},
{
"country": "Canada",
"Committee_Member": 13,
"Leadership_Role": 12,
"Member_Only": 0,
},
{
"country": "China",
"Committee_Member": 1,
"Leadership_Role": 0,
"Member_Only": 0,
},
{
"country": "Dubai",
"Committee_Member": 1,
"Leadership_Role": 0,
"Member_Only": 0,
},
{
"country": "United_Kingdom",
"Member_Only": 0,
"Leadership_Role": 0,
"Committee_Member": 18,
},
{
"country": "United_States",
"Member_Only": 0,
"Leadership_Role": 22,
"Committee_Member": 63,
},
{
"country": "USA",
"Member_Only": 0,
"Leadership_Role": 2,
"Committee_Member": 3,
},
{
"country": "blank",
"Member_Only": 0,
"Leadership_Role": 188,
"Committee_Member": 293,
},
],
title: "Refining & Supply(Org Lvl 1) TAIG Geographical Profile",
// subtitle: "The top five Total Primary Energy producers",
axes: [{
name: "country",
type: "categoryY",
label: "country",
title: "Country",
gap: 1,
labelMargin: 0,
interval: 1
}, {
name: "Volume",
type: "numericX",
title: "pRole"
}],
// axes: [{
// name: "org2",
// type: "categoryX",
// label: "org2",
// title: "org2",
// gap: 1,
// },
// {
// name: "Volume",
// type: "numericY",
// title: "Quadrillion Btu"
// }
// ],
series: [{
name: "Member_Only",
title: "Member Only",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Member Only",
valueMemberPath: "Member_Only"
}, {
name: "Leadership_Role",
title: "Leadership Role",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Leadership Role",
valueMemberPath: "Leadership_Role"
}, {
name: "<NAME>",
title: "Committee Member",
showTooltip: true,
tooltipTemplate: "Committee Member",
type: "stackedFragment",
valueMemberPath: "Committee_Member"
}],
name: "parent",
xAxis: "Volume",
yAxis: "country",
legend: "legend2"
};
$scope.data4 = {
data: [{
"org2": "Chemical",
"Member_Only": 0,
"Leadership_Role": 1,
"Committee_Member": 10,
},
{
"org2": "Fuels_Lubricants",
"Member_Only": 0,
"Leadership_Role": 4,
"Committee_Member": 0,
},
{
"org2": "Refining_Supply",
"Member_Only": 2,
"Leadership_Role": 207,
"Committee_Member": 423,
},
{
"org2": "IOL",
"Member_Only": 0,
"Leadership_Role": 13,
"Committee_Member": 17,
},
],
title: "Energy Production Per Country",
subtitle: "The top five Total Primary Energy producers",
axes: [{
name: "org2",
type: "categoryX",
label: "org2",
title: "org2",
gap: 1,
},
{
name: "Volume",
type: "numericY",
title: "Quadrillion Btu"
}
],
series: [{
name: "Member_Only",
title: "Member_Only",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Member_Only",
valueMemberPath: "Member_Only"
}, {
name: "Leadership_Role",
title: "Leadership_Role",
type: "stackedFragment",
showTooltip: true,
tooltipTemplate: "Leadership_Role",
valueMemberPath: "Leadership_Role"
}, {
name: "Committee Member",
title: "Committee Member",
showTooltip: true,
tooltipTemplate: "Committee Member",
type: "stackedFragment",
valueMemberPath: "Committee_Member"
}],
name: "parent",
xAxis: "org2",
yAxis: "Volume",
legend: "legend3"
};
}); |
# Evaluate the 13th 512+512+512-common checkpoint on the WikiText-103 validation set,
# shuffling trigrams within sentences (first/third/sixth) and scoring the penultimate sixth.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-common/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-common/13-512+512+512-STWS-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_trigrams_within_sentences_first_third_sixth --eval_function penultimate_sixth_eval
# Give the server a moment to come up, then exercise the Go client twice.
echo "Wait 5s"
sleep 5
go run client.go
echo "Wait 5s"
sleep 5
go run client.go
# maintain container running (block forever so the container doesn't exit)
tail -f /dev/null
#!/usr/bin/env bash
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
checkpoints=${checkpoints:-"results/models/base/checkpoints"}

# $checkpoints is deliberately unquoted: it may hold a space-separated list of dirs.
for folder in $checkpoints; do
    ckpts_dir=${folder}
    output_dir=${folder}
    for f in "$ckpts_dir"/*.index; do
        # Strip the .index extension to get the checkpoint prefix TF expects.
        ckpt=${f%.*}
        echo "==================================== START $ckpt ===================================="
        # Quoted expansions (fix): unquoted $ckpt/$output_dir broke on paths with spaces.
        python postprocess_pretrained_ckpt.py --pretrained_checkpoint="$ckpt" --output_dir="$output_dir" --amp
        bash scripts/run_squad.sh $(source scripts/configs/squad_config.sh && rtx3090_1gpu_amp_local) train_eval;
        echo "==================================== END $ckpt ====================================";
    done
done
#bash scripts/run_squad.sh $(source scripts/configs/squad_config.sh && dgxa100_8gpu_amp) train_eval
# bash scripts/run_squad.sh results/models/base/checkpoints/discriminator; |
package net.synqg.qg.service;
import com.google.common.collect.ImmutableList;
import com.google.gson.Gson;
import lombok.extern.slf4j.Slf4j;
import net.synqg.qg.nlp.DependencyNode;
import net.synqg.qg.nlp.NamedEntitySpan;
import net.synqg.qg.nlp.SemanticRole;
import net.synqg.qg.nlp.SemanticRoleList;
import net.synqg.qg.nlp.SentenceParse;
import net.synqg.qg.nlp.labels.DependencyLabel;
import net.synqg.qg.nlp.labels.NamedEntityType;
import net.synqg.qg.nlp.labels.PosLabel;
import net.synqg.qg.nlp.labels.SemanticRoleLabel;
import net.synqg.qg.nlp.normalizer.TextNormalizer;
import net.synqg.qg.nlp.jsonresponse.DependencyResponse;
import net.synqg.qg.nlp.jsonresponse.LemmaResponse;
import net.synqg.qg.nlp.jsonresponse.NERResponse;
import net.synqg.qg.nlp.jsonresponse.SemanticResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.json.simple.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* @author viswa
*/
@Slf4j
public class DefaultQgAnalysisService implements QgAnalysisService {
static final List<String> verbsToDiscard = new ArrayList<>(Arrays.asList("believe", "thought"));
private static Function<String, String> normalizer;
private boolean printLogs = false;
public DefaultQgAnalysisService() {
normalizer = new TextNormalizer();
}
public DefaultQgAnalysisService(boolean printLogs) {
this.printLogs = printLogs;
normalizer = new TextNormalizer();
}
public List<IQgAnalysis> parse(List<String> sentences) {
return sentences
.parallelStream()
.map(normalizer)
.map(this::parse)
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
private IQgAnalysis parse(String text) {
QgAnalysis qgAnalysis = new QgAnalysis();
try {
String ner = getJson(text, "predictner");
String fineNer = getJson(text, "predictfinener");
SentenceParse sentenceParse = new SentenceParse()
.sentenceText(text)
.dependencyNodes(buildDependecyNodes(getJson(text, "predictdep"), ner,
getJson(text, "predictlemma")));
String predictSrl = getJson(text, "predictsrl");
List<SemanticRoleList> spanListList = getSemanticRoles(predictSrl, sentenceParse.dependencyNodes());
qgAnalysis.srlSpans(spanListList);
qgAnalysis.namedEntities(buildNamedEntityList(ner, sentenceParse));
qgAnalysis.sentenceParse(sentenceParse);
qgAnalysis.fineGrainedNamedEntities(buildFineGrainedNamedEntityList(fineNer, sentenceParse));
Set<NamedEntitySpan> namedEntitySpans = new HashSet<>();
namedEntitySpans.addAll(buildNamedEntitySpans(ner, sentenceParse));
namedEntitySpans.addAll(buildNamedEntitySpans(fineNer, sentenceParse));
qgAnalysis.namedEntitySpans(namedEntitySpans);
return qgAnalysis;
} catch (Exception e) {
System.out.println(e);
return null;
}
}
public static String getJson(String input, String apitype) throws ClientProtocolException, IOException {
CloseableHttpClient client = HttpClients.createDefault();
HttpPost httpPost = new HttpPost("http://127.0.0.1:8001/" + apitype);
Gson gson = new Gson();
JSONObject json = new JSONObject();
if (apitype.equalsIgnoreCase("predictcoref")) {
json.put("document", input);
} else if (apitype.equalsIgnoreCase("nounify")) {
json.put("word", input);
} else {
json.put("sentence", input);
}
StringEntity entity = new StringEntity(json.toJSONString());
httpPost.setEntity(entity);
httpPost.setHeader("Accept", "application/json");
httpPost.setHeader("Content-type", "application/json");
CloseableHttpResponse response = client.execute(httpPost);
BufferedReader br = new BufferedReader(
new InputStreamReader((response.getEntity().getContent())));
String output = br.lines().collect(Collectors.joining());
client.close();
return output;
}
private static List<String> getAllMatches(String text, String regex) {
List<String> matches = new ArrayList<String>();
for (int length = 1; length <= text.length(); length++) {
for (int index = 0; index <= text.length() - length; index++) {
String sub = text.substring(index, index + length);
if (sub.matches(regex) && !sub.substring(1).contains("[")) {
matches.add(sub);
}
}
}
return matches;
}
private List<Map<SemanticRoleLabel, String>> semanticRoleLabelFilters(List<Map<SemanticRoleLabel, String>> semanticRoles) {
List<String> filterSrlString = new ArrayList<>();
List<Map<SemanticRoleLabel, String>> newSrlList = new ArrayList<>();
for (Map<SemanticRoleLabel, String> semanticRoleLabelStringMap : semanticRoles) {
if (semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.V)) {
for (String verbTodiscard : verbsToDiscard) {
if (semanticRoleLabelStringMap.get(SemanticRoleLabel.V).toLowerCase().contains(verbTodiscard)) {
if (semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.ARG1)) {
filterSrlString.add(semanticRoleLabelStringMap.get(SemanticRoleLabel.ARG1));
}
if (semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.ARG2)) {
filterSrlString.add(semanticRoleLabelStringMap.get(SemanticRoleLabel.ARG2));
}
}
}
}
}
for (Map<SemanticRoleLabel, String> semanticRoleLabelStringMap : semanticRoles) {
String sentence = "";
if (!semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.ARG0)
|| !semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.V)) {
newSrlList.add(semanticRoleLabelStringMap);
continue;
}
sentence = semanticRoleLabelStringMap.get(SemanticRoleLabel.ARG0) +
semanticRoleLabelStringMap.get(SemanticRoleLabel.V);
if (semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.ARG1)) {
sentence = sentence + semanticRoleLabelStringMap.get(SemanticRoleLabel.ARG1);
for (String srlString : filterSrlString) {
if (!srlString.contains(sentence)) {
newSrlList.add(semanticRoleLabelStringMap);
}
}
}
if (!semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.ARG1) && semanticRoleLabelStringMap.containsKey(SemanticRoleLabel.ARG2)) {
sentence = sentence + " " + semanticRoleLabelStringMap.get(SemanticRoleLabel.ARG2);
for (String srlString : filterSrlString) {
if (!srlString.contains(sentence)) {
newSrlList.add(semanticRoleLabelStringMap);
}
}
}
}
return newSrlList;
}
private List<SemanticRoleList> getSemanticRoles(String input, List<DependencyNode> dependencyNodes) {
Gson gson = new Gson();
SemanticResponse result = gson.fromJson(input, SemanticResponse.class);
List<SemanticRoleList> semanticRoles = new ArrayList<>();
for (SemanticResponse.Verb verb : result.verbs()) {
String[] tags = verb.tags();
SemanticRoleList spanList = new SemanticRoleList();
SemanticRole span = null;
if (tags.length != dependencyNodes.size()) {
System.out.println("SRL and Dependency have been tokenized separately. The number of tokens do not match. ");
}
for (int i = 0; i < tags.length; i++) {
String tag = tags[i];
DependencyNode token = dependencyNodes.get(i);
if (tag.startsWith("B-")) {
if (span != null) {
spanList.add(span);
}
if (tag.equalsIgnoreCase("B-V")) {
spanList.verb(dependencyNodes.get(i));
}
span = new SemanticRole();
span.type(SemanticRoleLabel.fromString(tag.replace("B-", "")));
span.tokens().add(token);
} else if (tag.startsWith("I-")) {
span.tokens().add(token);
} else if (tag.equalsIgnoreCase("O") && span != null) {
spanList.add(span);
span = null;
// when there is no tag, don't create any span
}
if (i == (tags.length - 1) && span != null) { // add the last span
spanList.add(span);
}
}
semanticRoles.add(spanList);
}
return semanticRoles;
}
/**
* This makes an assumption that the tokens in the spans coming out of the SRL predictor are space concatenated.
*
* @param spanString
* @param dependencyNodes
* @return
*/
private List<DependencyNode> getTokens(String spanString, int spanStart, List<DependencyNode> dependencyNodes) {
String[] tokens = spanString.split(" ");
int spanEnd = tokens.length;
return dependencyNodes.subList(spanStart, spanEnd);
}
private NamedEntityType getNamedEntitytype(String s) {
if (s.equals("U-PER") || s.equalsIgnoreCase("B-PER") || s.equalsIgnoreCase("L-PER")) {
return NamedEntityType.PERSON;
}
if (s.equals("U-ORG")) {
return NamedEntityType.ORGANIZATION;
}
return NamedEntityType.MISC;
}
private PosLabel getPos(String s) {
return PosLabel.fromString(s);
}
private ImmutableList<DependencyNode> buildDependecyNodes(String depinput, String nerinput, String lemmainput) {
List<DependencyNode> dependencyNodes = new ArrayList<>();
Gson gson = new Gson();
DependencyResponse dependencyResponse = gson.fromJson(depinput, DependencyResponse.class);
LemmaResponse lemmaResponse = gson.fromJson(lemmainput, LemmaResponse.class);
NERResponse nerResponse = gson.fromJson(nerinput, NERResponse.class);
List<String> depLabels = dependencyResponse.depLabels();
List<String> words = dependencyResponse.words();
List<Integer> heads = dependencyResponse.nodeHeads();
List<String> pos = dependencyResponse.posLabels();
List<String> lemmas = lemmaResponse.lemmas();
List<String> ners = nerResponse.tags();
int numberOfNodes = depLabels.size();
if (words.size() != numberOfNodes || heads.size() != numberOfNodes || pos.size() != numberOfNodes
|| lemmas.size() != numberOfNodes || ners.size() != numberOfNodes) {
log.error("All parsers have not returned the same size of tokens. (One reason could be the number of extra spaces.)");
}
for (int i = 0; i < numberOfNodes; i++) {
DependencyNode dependencyNode = new DependencyNode();
dependencyNode.form(words.get(i));
dependencyNode.depLabel(DependencyLabel.fromString(depLabels.get(i)));
dependencyNode.lemma(lemmas.get(i));
dependencyNode.namedEntityType(NamedEntityType.fromString(ners.get(i)));
dependencyNode.pos(getPos(pos.get(i)));
dependencyNodes.add(dependencyNode);
}
for (int i = 0; i < depLabels.size(); i++) {
DependencyNode dependencyNode = dependencyNodes.get(i);
if (heads.get(i) == 0) {
dependencyNode.head(null);
} else {
dependencyNode.head(dependencyNodes.get(heads.get(i) - 1));
dependencyNodes.get(heads.get(i) - 1).children().add(dependencyNode);
}
}
if(printLogs) {
printDependencyLabels(dependencyNodes);
}
return ImmutableList.copyOf(dependencyNodes);
}
private void printDependencyLabels(List<DependencyNode> dependencyNodes) {
for (DependencyNode dependencyNode : dependencyNodes) {
System.out.println(dependencyNode + " <---- " + dependencyNode.head());
}
}
private Map<String, NamedEntityType> buildNamedEntityList(String input, SentenceParse sentenceParse) {
Gson gson = new Gson();
NERResponse nerResponse = gson.fromJson(input, NERResponse.class);
Map<String, NamedEntityType> map = new HashMap<>();
List<String> words = nerResponse.words();
List<String> ners = nerResponse.tags();
List<DependencyNode> nodes = sentenceParse.dependencyNodes();
for (int i = 0; i < words.size(); i++) {
String ner = ners.get(i);
if (!(ner.equals("O") || ner.equalsIgnoreCase("MISC"))) {
String word = words.get(i);
NamedEntityType namedEntityType = getNamedEntitytype(ner);
nodes.get(i).namedEntityType(namedEntityType);
map.put(word, namedEntityType);
}
}
return map;
}
private Map<String, NamedEntityType> buildFineGrainedNamedEntityList(String input, SentenceParse sentenceParse) {
Gson gson = new Gson();
NERResponse nerResponse = gson.fromJson(input, NERResponse.class);
Map<String, NamedEntityType> map = new HashMap<>();
List<String> words = nerResponse.words();
List<String> ners = nerResponse.tags();
List<DependencyNode> dependencyNodes = sentenceParse.dependencyNodes();
for (int i = 0; i < words.size(); i++) {
String ner = ners.get(i);
if (!(ner.equals("O") || ner.equalsIgnoreCase("MISC"))) {
String word = words.get(i);
NamedEntityType namedEntityType = NamedEntityType.fromString(ner);
dependencyNodes.get(i).namedEntityType(namedEntityType);
map.put(word, namedEntityType);
}
}
return map;
}
private Set<NamedEntitySpan> buildNamedEntitySpans(String ner, SentenceParse sentenceParse) {
Gson gson = new Gson();
NERResponse nerResponse = gson.fromJson(ner, NERResponse.class);
Set<NamedEntitySpan> spanList = new HashSet<>();
List<DependencyNode> dependencyNodes = sentenceParse.dependencyNodes();
NamedEntitySpan span = null;
List<String> nerTags = nerResponse.tags();
if (nerTags.size() != dependencyNodes.size()) {
System.out.println("NER and Dependency have been tokenized separately. The number of tokens do not match. ");
}
for (int i = 0; i < nerTags.size(); i++) {
String tag = nerTags.get(i);
DependencyNode token = dependencyNodes.get(i);
if (tag.startsWith("B-") || tag.startsWith("U-")) {
if (span != null) {
span.endIndex(i); // end index is exclusive
spanList.add(span);
}
span = new NamedEntitySpan();
span.type(NamedEntityType.fromString(tag.replace("B-", "")));
span.tokens().add(token);
span.startIndex(i);
} else if (tag.startsWith("I-")) {
span.tokens().add(token);
} else if (tag.startsWith("L-")) {
span.tokens().add(token);
span.endIndex(i + 1); // end index is exclusive
spanList.add(span);
span = null;
} else if (tag.equalsIgnoreCase("O") && span != null) {
span.endIndex(i); // end index is exclusive
spanList.add(span);
span = null;
// when there is no tag, don't create any span
}
if (i == (nerTags.size() - 1) && span != null) { // add the last span
spanList.add(span);
}
}
return spanList;
}
}
|
# ActiveRecord model backing the role dimension table.
# Intentionally empty: all behavior comes from ApplicationRecord conventions.
class RoleDimension < ApplicationRecord
end
|
"""
For my project, I am interested in seeing how the fitnesses of the mutants
affect the loss of the D allele. For example, let's see the distribution of how many
generations it takes for the D allele to reach 80% of its initial frequency with
neutral mutations (you could then choose different fitness values to see how the
distribution changes)
Ideally, I would choose 0% (allele loss). But with realistic parameters, like
mutation rates of order 10e-5, the simulation is very long.
"""
import wrightfisher as wf
N = 10000
murates = (1, 1, 1, 1, 1, 1)
fitnesses = (1, 1, 1, 1)
nSamples = 1000 #number of populations
nb_generations =[] #it will store how many generations it took for each population
populations = [wf.Population(N, murates, fitnesses) for i in range(nSamples)]
for sample in populations:
sample_generations = 0
while sample.pop['ND'] > N*0.8:
sample.evolve(1)
sample_generations += 1
nb_generations.append(sample_generations)
wf.plt.xlabel('Number of generations before D decays to 0.8N')
wf.plt.ylabel('Number of populations')
wf.plt.hist(nb_generations) #very basic histogram without any parameters
wf.plt.show() #to see the plot if you run this in a terminal
|
#!/bin/bash
# Kick off a vRealize Orchestrator "clone VM" workflow, poll it until it leaves
# the "running" state, and on success write the clone's IP to cloneip/cloneip.
# Expects VROENDPOINT, VROUSER, VROPASS, CLONEHOST, CLONECLUSTER, BASEVM in env.

# REST endpoint of the specific clone workflow (UUID is the workflow id).
VROAPI=$VROENDPOINT"/d9ad2397-ac07-444d-978e-5f86c07f09d5/executions"
echo "Starting clone workflow at "$VROAPI" with user "$VROUSER
echo "Clone settings:"
echo "Host: "$CLONEHOST
echo "Cluster: "$CLONECLUSTER
echo "Base Image: "$BASEVM
#TOKEN=$(curl -s -D - -u $VROUSER:$VROPASS -k -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d '{ "parameters": [{}] }' $VROAPI | grep Location: | cut -d' ' -f2 )
# POST the workflow inputs; the execution URL comes back in the Location header
# (-D - dumps headers so grep/cut can extract it).
TOKEN=$(curl -s -D - -u $VROUSER:$VROPASS -k -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d '{ "parameters": [{"description":"string","name":"clusterName","scope":"local","type":"string","value":{"string":{"value":"'$CLONECLUSTER'"}}},{"description":"string","name":"hostName","scope":"local","type":"string","value":{"string":{"value":"'$CLONEHOST'"}}},{"description":"string","name":"vmName","scope":"local","type":"string","value":{"string":{"value":"'$BASEVM'"}}}] }' $VROAPI | grep Location: | cut -d' ' -f2 )
#TR=$(curl -s -D - -u $VROUSER:$VROPASS -k -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d '{ "parameters": [{"description":"string","name":"clusterName","scope":"local","type":"string","value":{"string":{"value":"'$CLONECLUSTER'"}}},{"description":"string","name":"hostName","scope":"local","type":"string","value":{"string":{"value":"'$CLONEHOST'"}}},{"description":"string","name":"vmName","scope":"local","type":"string","value":{"string":{"value":"'$BASEVM'"}}}] }' $VROAPI)
#echo "Workflow start result: "$TR
#TOKEN =$($TR | grep Location: | cut -d' ' -f2)
echo "Status token: "$TOKEN
# eliminate CR from result.
# (HTTP headers end in CRLF; the stray \r would corrupt the URL built below.)
TEST=""
for data in $TOKEN ;
do
    data=$(echo "$data" | tr -d '\r')
    TEST+="$data"
done
# Poll the execution's "state" sub-resource until it stops reporting "running".
QUERY=$TEST"state"
echo "Querying execution state at: "$QUERY
STATE="running"
while [ "$STATE" == "running" ]; do
    # NOTE(review): the state is scraped out of the JSON with cut, which assumes
    # "state" is the first field of the response — fragile; jq would be safer.
    STATE=$(curl -u $VROUSER:$VROPASS -sS -k -X GET --header 'Content-Type: application/json' --header 'Accept: application/json' $QUERY | cut -d':' -f2 | cut -d"\"" -f2)
    echo The state is: $STATE
    sleep 5s
done
if [ "$STATE" == "completed" ]
then
    echo Clone ready.
    echo "Querying details for IP."
    # First output parameter of the workflow is the clone's IP address.
    IP=$(curl -u $VROUSER:$VROPASS -sS -k -X GET --header 'Content-Type: application/json' --header 'Accept: application/json' $TEST | jq -r '.["output-parameters"][0].value.string.value')
    echo $IP
    echo $IP > cloneip/cloneip
    exit 0
fi
echo Clone failed.
exit 1
|
#!/bin/bash
# Clone a curated list of security/OSINT repositories into a destination folder.
# Usage: $0 <destination folder>

dest_loc=$1

# ANSI colors for progress output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Exactly one argument is required.
if [ "$#" -ne 1 ]; then
    printf "\n${NC}usage: $0 <destination folder>\n\n"
    printf "${NC}example usage: $0 /opt/\n\n"
    # Fix: exit non-zero on usage errors (was a bare `exit`, i.e. status 0).
    exit 1
fi

if [ -z "$dest_loc" ]; then
    printf "${RED}No destination folder provided...\n"
    # Fix: these two messages were missing their trailing newlines and ran together.
    printf "${NC}usage: $0 <destination folder>\n"
    printf "${NC}example usage: $0 /opt/\n"
    exit 1
fi

if [ ! -d "$dest_loc" ]; then
    printf "${GREEN}Creating $dest_loc...\n"
    # Fix: -p creates intermediate directories and quoting survives spaces.
    mkdir -p "$dest_loc"
else
    printf "${YELLOW}$dest_loc exists, moving forward...\n"
fi
# GitHub repositories to clone, as "owner/repo", grouped by category.
git_tools=(
    # Trolling in wargames
    "jmhobbs/terminal-parrot"
    # Recon
    "Tib3rius/AutoRecon"
    # Networking
    "iphelix/dnschef"
    "bitbrute/evillimiter"
    # Hardware
    "jopohl/urh"
    "dwisiswant0/apkleaks"
    "attify/firmware-analysis-toolkit"
    "aircrack-ng/rtl8812au"
    # Web
    "ffuf/ffuf"
    "maurosoria/dirsearch"
    "s0md3v/Photon"
    "D35m0nd142/LFISuite"
    "kurobeats/fimap"
    "hakluke/hakrawler"
    "ChrisTruncer/EyeWitness"
    "aboul3la/Sublist3r"
    "s0md3v/XSStrike"
    "nccgroup/shocker"
    # Exploitation
    "trustedsec/unicorn"
    "pentestmonkey/php-reverse-shell"
    "swisskyrepo/PayloadsAllTheThings"
    # Post-Exploitaion
    "loseys/BlackMamba"
    "calebstewart/pwncat"
    "Screetsec/TheFatRat"
    "n1nj4sec/pupy"
    "jm33-m0/emp3r0r"
    "redcode-labs/Bashark"
    "bats3c/shad0w"
    # Active Directory
    "byt3bl33d3r/pth-toolkit"
    "galkan/crowbar"
    "cobbr/Covenant"
    "cobbr/SharpSploit"
    "lgandx/Responder-Windows"
    "EmpireProject/Empire"
    "SecureAuthCorp/impacket"
    "samratashok/nishang"
    "GhostPack/Rubeus"
    "GhostPack/Seatbelt"
    # Automation
    "JohnHammond/poor-mans-pentest"
    "izar/pytm"
    "Gallopsled/pwntools"
    "bee-san/pyWhat"
    "OWASP/Amass"
    "malwaredllc/byob"
    # Encryption
    "Ganapati/RsaCtfTool"
    # Databases
    "0dayCTF/reverse-shell-generator"
    # Reversing
    "mentebinaria/retoolkit"
    "InstinctEx/deobfuscatetools"
    "beurtschipper/Depix"
    "jtpereyda/boofuzz"
    "icsharpcode/ILSpy"
    "volatilityfoundation/volatility"
    # Misc
    "internetwache/GitTools"
    "danielmiessler/SecLists"
    "andrew-d/static-binaries"
    # OSINT
    "laramies/theHarvester"
    "alpkeskin/mosint"
    "sherlock-project/sherlock"
    "qeeqbox/social-analyzer"
    "twintproject/twint"
    "althonos/InstaLooter"
    "WebBreacher/WhatsMyName"
    "GuidoBartoli/sherloq"
    "lanmaster53/recon-ng"
    "smicallef/spiderfoot"
    "mikf/gallery-dl"
    "akamhy/waybackpy"
    "laramies/metagoofil"
    "aliparlakci/bulk-downloader-for-reddit"
    "streamlink/streamlink"
    "iojw/socialscan"
    "megadose/holehe"
    "ytdl-org/youtube-dl"
    "AmIJesse/Elasticsearch-Crawler"
    # Files
    "decalage2/oletools"
)
# Clone each listed repository unless it already exists in the destination.
prog=1
progt=${#git_tools[@]}
# Fix: quote the array expansion so entries survive word splitting/globbing.
for repo in "${git_tools[@]}"; do
    # "owner/repo" -> "repo" via parameter expansion (no echo|cut subshell).
    tool_name=${repo##*/}
    if [[ ! -d "${dest_loc}/${tool_name}" ]]; then
        printf "${GREEN}[${prog}/${progt}] ${NC}Cloning into ${BLUE}${tool_name}\n"
        git clone "https://github.com/${repo}.git" "${dest_loc}/${tool_name}" > /dev/null 2>&1
    else
        printf "${RED}[${prog}/${progt}] ${YELLOW}${tool_name}${NC} exists\n"
    fi
    prog=$((prog+1))
done
# Fix: reset the terminal color and terminate the line (was left green, no \n).
printf "${GREEN}DONE!!!${NC}\n"
|
<reponame>fleonasb/bootstrap-breadcrumbs
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Packaging script for django4_bootstrap_breadcrumbs.

:copyright: Copyright 2022 The American School of Barcelona
:contact: <EMAIL>
"""

from __future__ import unicode_literals

from setuptools import setup, find_packages

setup(
    name='django4_bootstrap_breadcrumbs',
    version='0.10.0',
    url='https://github.com/fleonasb/django4-bootstrap-breadcrumbs',
    license='MIT',
    description='Django breadcrumbs for Bootstrap 2, 3 or 4',
    long_description='Django template tags used to generate breadcrumbs html '
                     'using bootstrap css classes or custom template',
    author='<NAME>',
    author_email='<EMAIL>',
    # Ship every package except the test suite.
    packages=find_packages(exclude=['tests']),
    install_requires=[
        'six',
    ],
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    platforms=['any'],
    zip_safe=False,
    # Include non-Python files declared in MANIFEST.in (templates, etc.).
    include_package_data=True,
)
|
#!/bin/bash
# Pull the latest sources, build the project and its tests, then run the unit tests.
# Extra arguments (e.g. -j8) are forwarded to make.

# Work from the repository root (one level above this script's directory).
pushd $(dirname "${BASH_SOURCE[0]}")/..
git pull
mkdir -p build
pushd build
cmake ..
make $@
make all_tests $@
# Load the runtime environment produced by the build, then run the tests.
source ./activate_run.sh
./tests/unit_tests
|
def temperature_stats(temps):
    """Print the maximum, minimum, and average of a list of temperatures.

    Args:
        temps: iterable of numeric temperatures; must be non-empty.

    Raises:
        ValueError: if ``temps`` is empty (previously this surfaced as an
            opaque error from ``max()``/``len()``).
    """
    temps = list(temps)
    if not temps:
        raise ValueError("temps must contain at least one temperature")
    max_temp = max(temps)
    min_temp = min(temps)
    avg_temp = sum(temps) / len(temps)
    print("Maximum temperature is {}. Minimum temperature is {}. Average temperature is {}.".format(max_temp, min_temp, avg_temp))
if __name__ == '__main__':
    # Read whitespace-separated integer temperatures from stdin and report stats.
    temperatures = list(map(int, input('Please enter a list of temperatures: ').split()))
    temperature_stats(temperatures)
<gh_stars>1-10
/* yarn example */
// Minimal smoke test: import the package entry point and invoke it once.
import testPackage_7 from '../src'

(async () => {
    await testPackage_7()
})()
|
package counter_clock_wise;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 16491번: 대피소
*
* @see https://www.acmicpc.net/problem/16491/
*
*/
public class Boj16491 {

    private static final String NEW_LINE = "\n";

    // 10^(N-1): packed permutations below this value have a leading digit of 0
    // (see the unpacking logic in process()).
    private static long INF;

    // Candidate assignments, each packed as a base-10 number whose digits are
    // shelter indices. NOTE(review): digit packing only works for N <= 10.
    private static ArrayList<Long> seq = new ArrayList<>();

    // Scratch "visited" flags for the permutation backtracking.
    private static boolean[] isVisited;

    // Simple 2-D integer coordinate (a robot or shelter position).
    private static class Point {
        int x;
        int y;

        public Point(int x, int y) {
            this.x = x;
            this.y = y;
        }
    }

    // BOJ 16491: read N robots and N shelters, then print a robot->shelter
    // assignment whose straight-line paths do not cross.
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st;

        int N = Integer.parseInt(br.readLine());
        INF = (long) Math.pow(10, N - 1);

        Point[] robot = new Point[N];
        Point[] shelter = new Point[N];

        for(int i = 0; i < N; i++) {
            st = new StringTokenizer(br.readLine());
            robot[i] = new Point(Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()));
        }

        for(int i = 0; i < N; i++) {
            st = new StringTokenizer(br.readLine());
            shelter[i] = new Point(Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()));
        }

        System.out.println(process(N, robot, shelter));
    }

    // Enumerates all permutations (via backTracking), then returns the first
    // assignment with pairwise non-crossing segments as 1-based shelter numbers.
    private static String process(int n, Point[] robo, Point[] shel) {
        StringBuilder sb = new StringBuilder();

        for(int i = 0; i < n; i++) { // make sequence
            isVisited = new boolean[n];
            backTracking(n, i, 0, i);
        }

        int[] tmp = new int[n];
        for(long s: seq) {
            int loop = n;
            // A packed value below INF means the permutation's first digit is 0.
            if(s < INF){
                loop--;
                tmp[n - 1] = 0;
            }
            // Unpack digits least-significant first.
            // NOTE(review): this reads the digits in reverse generation order, so
            // tmp is a reversed permutation — acceptable here because any valid
            // (non-crossing) assignment is a correct answer, but worth confirming.
            for(int i = 0; i < loop; i++) {
                tmp[i] = (int) (s % 10);
                s /= 10;
            }

            if(judgement(n, robo, shel, tmp)) continue; // two lines are intersection?

            for(int idx = 0; idx < tmp.length; idx++) {
                sb.append(tmp[idx] + 1).append(NEW_LINE);
            }
            break;
        }
        return sb.toString();
    }

    // True if any two robot->shelter segments of this assignment intersect.
    private static boolean judgement(int n, Point[] r, Point[] s, int[] perm) {
        for(int i = 0; i < n; i++) {
            for(int j = i + 1; j < n; j++) {
                if (isIntersect(r[i], s[perm[i]], r[j], s[perm[j]])) return true;
            }
        }
        return false;
    }

    // Depth-first permutation generation starting at `current`, packing the
    // chosen indices into `value` as base-10 digits; complete permutations
    // (depth n-1) are appended to seq.
    private static void backTracking(int n, int current, int depth, long value) {
        if(depth == n - 1) {
            seq.add(value);
            return;
        }
        if(isVisited[current]) return;
        isVisited[current] = true;

        for(int next = 0; next < n; next++) {
            if(isVisited[next]) continue;
            backTracking(n, next, depth + 1, value * 10 + next);
            isVisited[next] = false;
        }
    }

    // Segment intersection test via CCW orientation signs; the fully collinear
    // case falls back to interval overlap after normalizing endpoint order.
    private static boolean isIntersect(Point a, Point b, Point c, Point d) {
        int ab = ccw(a, b, c) * ccw(a, b, d);
        int cd = ccw(c, d, a) * ccw(c, d, b);

        if (ab == 0 && cd == 0) { // judgement lines status by relative position
            Point[] p = swap(a, b);
            a = p[0];
            b = p[1];
            p = swap(c, d);
            c = p[0];
            d = p[1];
            return compare(c, b) && compare(a, d);
        }
        return ab <= 0 && cd <= 0;
    }

    // Returns the two points ordered ascending by x, then y.
    private static Point[] swap(Point p1, Point p2) {
        if (p1.x > p2.x) {
            Point tmp = p1;
            p1 = p2;
            p2 = tmp;
        }
        else {
            if(p1.x == p2.x) {
                if(p1.y > p2.y) {
                    Point tmp = p1;
                    p1 = p2;
                    p2 = tmp;
                }
            }
        }
        return new Point[]{p1, p2};
    }

    // Lexicographic (x, then y) "less than or equal" comparison.
    private static boolean compare(Point p1, Point p2) {
        if(p1.x < p2.x) return true;
        else if(p1.x > p2.x) return false;
        else return p1.y <= p2.y;
    }

    // Orientation of (p1, p2, p3): 1 = counter-clockwise, -1 = clockwise,
    // 0 = collinear (sign of the cross product).
    // NOTE(review): computed in int — could overflow for very large coordinates.
    private static int ccw(Point p1, Point p2, Point p3) {
        int cost = p1.x * p2.y + p2.x * p3.y + p3.x * p1.y;
        cost -= p1.y * p2.x + p2.y * p3.x + p3.y * p1.x;

        if(cost < 0) return -1;
        else if(cost > 0) return 1;
        else return 0;
    }
}
|
#include <stdio.h>

#define WND_WIDTH 1000
#define WND_HEIGHT 1000
/* Bug fix: PI was defined as 3.1415826... (wrong 5th decimal digit);
 * correct value is 3.14159265358979... */
#define PI 3.14159265358979323846
/* Stub: reports the circle's center and radius instead of rendering it. */
void drawCircle(int x, int y, int radius) {
    // Implementation for drawing a circle
    printf("Drawing circle at (%d, %d) with radius %d\n", x, y, radius);
}
/* Stub: reports the rectangle's origin and size instead of rendering it. */
void drawRectangle(int x, int y, int width, int height) {
    // Implementation for drawing a rectangle
    printf("Drawing rectangle at (%d, %d) with width %d and height %d\n", x, y, width, height);
}
/* Stub: reports the triangle's three vertices instead of rendering it. */
void drawTriangle(int x1, int y1, int x2, int y2, int x3, int y3) {
    // Implementation for drawing a triangle
    printf("Drawing triangle with vertices (%d, %d), (%d, %d), and (%d, %d)\n", x1, y1, x2, y2, x3, y3);
}
/* Demonstrates each drawing primitive once. */
int main() {
    // Example usage of the graphics library
    drawCircle(200, 200, 50);
    drawRectangle(300, 300, 100, 50);
    drawTriangle(500, 500, 600, 600, 550, 650);
    return 0;
}
#!/bin/bash
#
# Set permissions of files and directories
# Load the shared helper functions (script_filename, print_stage,
# set_permissions, abort_script) from the sibling .functions file; bail out
# if it is missing since nothing below can run without it.
if [[ -f "$(dirname "$(readlink -f "${0}")")/.functions" ]]; then
    # shellcheck disable=SC1090
    # shellcheck disable=SC1091
    source "$(dirname "$(readlink -f "${0}")")/.functions"
else
    echo "File does not exist!"
    echo "$(dirname "$(readlink -f "${0}")")/.functions"
    exit "1"
fi
# Temporary variables
# argument_flag records whether any CLI option was given; with no options,
# every section below runs. Each *_mode flag force-enables one section.
argument_flag="false"
alacritty_mode="disabled"
alias_mode="disabled"
fonts_mode="disabled"
git_mode="disabled"
gnome_mode="disabled"
hardware_mode="disabled"
htop_mode="disabled"
projects_mode="disabled"
scripts_mode="disabled"
term_mode="disabled"
tmux_mode="disabled"
vim_mode="disabled"
zsh_mode="disabled"
# Process arguments
# Each recognized option flips its section's mode to "enabled"; -?/--help
# prints usage and exits; anything else aborts via abort_script.
for argument in "${@}"; do
    argument_flag="true"
    if [[ "${argument}" == "-?" || "${argument}" == "--help" ]]; then
        echo "Usage:"
        echo "  $(script_filename) [options]"
        echo "  -?, --help          show list of command-line options"
        echo ""
        echo "OPTIONS"
        echo "  --alacritty         force enable alacritty mode"
        echo "  -a, --alias         force enable alias mode"
        echo "  --fonts             force enable fonts mode"
        echo "  -g, --git           force enable git mode"
        echo "  --gnome             force enable gnome mode"
        echo "  --hardware          force enable hardware mode"
        echo "  --htop              force enable htop mode"
        echo "  -p, --projects      force enable projects mode"
        echo "  -s, --scripts       force enable scripts mode"
        echo "  --term              force enable term mode"
        echo "  -t, --tmux          force enable tmux mode"
        echo "  -v, --vim           force enable vim mode"
        echo "  -z, --zsh           force enable zsh mode"
        exit 0
    elif [[ "${argument}" == "--alacritty" ]]; then
        alacritty_mode="enabled"
    elif [[ "${argument}" == "-a" || "${argument}" == "--alias" ]]; then
        alias_mode="enabled"
    elif [[ "${argument}" == "--fonts" ]]; then
        fonts_mode="enabled"
    elif [[ "${argument}" == "-g" || "${argument}" == "--git" ]]; then
        git_mode="enabled"
    elif [[ "${argument}" == "--gnome" ]]; then
        gnome_mode="enabled"
    elif [[ "${argument}" == "--hardware" ]]; then
        hardware_mode="enabled"
    elif [[ "${argument}" == "--htop" ]]; then
        htop_mode="enabled"
    elif [[ "${argument}" == "-p" || "${argument}" == "--projects" ]]; then
        projects_mode="enabled"
    elif [[ "${argument}" == "-s" || "${argument}" == "--scripts" ]]; then
        scripts_mode="enabled"
    elif [[ "${argument}" == "--term" ]]; then
        term_mode="enabled"
    elif [[ "${argument}" == "-t" || "${argument}" == "--tmux" ]]; then
        tmux_mode="enabled"
    elif [[ "${argument}" == "-v" || "${argument}" == "--vim" ]]; then
        vim_mode="enabled"
    elif [[ "${argument}" == "-z" || "${argument}" == "--zsh" ]]; then
        zsh_mode="enabled"
    else
        abort_script "Invalid Argument!" "" "Usage:" "  $(script_filename) [options]" "  -?, --help          show list of command-line options"
    fi
done
# Begin setting file permissions
# With no CLI arguments every section runs; otherwise only the enabled ones.
# NOTE(review): modes differ per directory (640/644/664/755) — presumably
# intentional (e.g. scripts executable), but worth confirming.
# NOTE(review): alias_mode is parsed above but has no section here.
print_stage "Setting file permissions"

# Alacritty
if [[ "${argument_flag}" == "false" || "${alacritty_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "640" "${HOME}/.dotfiles/alacritty"
fi

# Fonts
if [[ "${argument_flag}" == "false" || "${fonts_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "644" "${HOME}/.dotfiles/fonts"
fi

# Git
if [[ "${argument_flag}" == "false" || "${git_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "664" "${HOME}/.dotfiles/git"
fi

# Gnome
if [[ "${argument_flag}" == "false" || "${gnome_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "664" "${HOME}/.dotfiles/gnome"
fi

# Hardware
if [[ "${argument_flag}" == "false" || "${hardware_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "644" "${HOME}/.dotfiles/hardware"
fi

# Htop
if [[ "${argument_flag}" == "false" || "${htop_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "664" "${HOME}/.dotfiles/htop"
fi

# Projects
if [[ "${argument_flag}" == "false" || "${projects_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "644" "${HOME}/.dotfiles/projects"
fi

# Scripts
if [[ "${argument_flag}" == "false" || "${scripts_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory (755: scripts stay executable)
    set_permissions "755" "${HOME}/.dotfiles/scripts"
fi

# Term
if [[ "${argument_flag}" == "false" || "${term_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "664" "${HOME}/.dotfiles/term"
fi

# Tmux
if [[ "${argument_flag}" == "false" || "${tmux_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "644" "${HOME}/.dotfiles/tmux"
fi

# Vim
if [[ "${argument_flag}" == "false" || "${vim_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    set_permissions "644" "${HOME}/.dotfiles/vim"
fi
# Zsh
if [[ "${argument_flag}" == "false" || "${zsh_mode}" == "enabled" ]]; then
    # Set permissions of all files in directory
    # Bug fix: this section targeted ".dotfiles/vim" (copy-paste from the Vim
    # section), so zsh files were never touched and vim was chmodded twice.
    set_permissions "644" "${HOME}/.dotfiles/zsh"
fi
|
<reponame>shadowbq/ruby-auth-proxy
# Warden "password" strategy: form-based authentication using
# params['user']['username'] and params['user']['password'].
Warden::Strategies.add(:password) do
  # Run this strategy only when both credentials were submitted.
  def valid?
    params['user'] && params['user']['username'] && params['user']['password']
  end

  def authenticate!
    user = User.first(username: params['user']['username'])
    if user.nil?
      # NOTE(review): distinct messages for "unknown user" vs "wrong password"
      # allow username enumeration — consider one generic failure message.
      throw(:warden, message: "The username you entered does not exist.")
    elsif user.authenticate(params['user']['password'])
      success!(user)
    else
      throw(:warden, message: "The username and password combination is incorrect")
    end
  end
end
|
import Vuex from 'vuex';
import Vue from 'vue';
import Api from '@/services/api';
import _ from 'lodash';
Vue.use(Vuex);

/**
 * Central store for time-series chart data.
 * state.data holds points shaped { x: <date>, ... }, newest first.
 */
let store = new Vuex.Store({
    state: {
        data: [],
    },
    getters: {
        // Date of the most recent data point (items arrive newest-first), or null.
        lastDate(state) {
            return state.data.length > 0 ? state.data[0].x : null;
        },
    },
    actions: {
        /**
         * Fetch rows newer than the last known date and append them.
         * Fix: previously wrapped the request in `new Promise` with only a
         * resolve path, so API failures never rejected and were silently lost;
         * returning the chain propagates errors to the caller.
         */
        appendData: ({commit, getters}, options) => {
            const latestDate = getters.lastDate;
            return Api()
                .get(options.url, {params: {latestDate: latestDate}})
                .then((response) => {
                    commit('appendData', response.data);
                    return response.data;
                });
        },
        // Replace the whole dataset from the given URL (same error-propagation fix).
        loadData: function({commit}, options) {
            return Api()
                .get(options.url)
                .then((response) => {
                    commit('setData', response.data);
                    return response.data;
                });
        },
    },
    mutations: {
        setData(state, data) {
            state.data = data;
        },
        appendData(state, newData) {
            // Assign a fresh array so Vue's reactivity notices the change.
            // Fix: dropped the lodash cloneDeep of both arrays — the spread
            // already produces a new array, and the deep copies were pure
            // overhead for freshly fetched API payloads.
            state.data = [...state.data, ...newData];
        },
    },
});

export default store;
|
<filename>INFO/Books Codes/Oracle PLSQL Tips and Techniques/OutputChapter16/16_12.sql
-- ***************************************************************************
-- File: 16_12.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
SPOOL 16_12.lis

-- Database-level logon trigger: records one row per session logon.
CREATE OR REPLACE TRIGGER logon_log_trigger
AFTER LOGON
ON DATABASE
BEGIN
  -- NOTE(review): presumably a matching logoff trigger records end times
  -- in session_logon_statistics — confirm elsewhere in this chapter.
  INSERT INTO session_logon_statistics
    (user_logged, start_time)
  VALUES
    (USER, SYSDATE);
END;
/

SPOOL OFF
|
#!/bin/bash
# Run the BINDER IND-discovery algorithm via the Metanome CLI with a row limit.
# Usage: $0 <input-row-limit>
# Expects DEBUG, JVM_ARGS, ADP_LIB, ALGORITHMS and DB to be set in the environment.

# Fix: the row limit was spliced in as an unquoted $@, which word-splits and
# breaks both EXECUTION_ID and the comma-joined --algorithm-config string
# whenever more than one argument is passed; capture the single argument once.
ROW_LIMIT="$1"

export EXECUTION_ID="rowcount_binder${ROW_LIMIT}"

java $DEBUG $JVM_ARGS \
-cp "$ADP_LIB:$ALGORITHMS/binder/build/libs/binder-0.1.0-SNAPSHOT-database.jar" \
de.metanome.cli.App \
--algorithm de.metanome.algorithms.binder.BinderDatabaseAlgorithm \
$DB \
--table-key INPUT_DATABASE \
--tables tesmaexp \
--algorithm-config INPUT_ROW_LIMIT:${ROW_LIMIT},TEMP_FOLDER_PATH:binder_temp,CLEAN_TEMP:true \
--algorithm-config DETECT_NARY:false,MAX_NARY_LEVEL:-1,FILTER_KEY_FOREIGNKEYS:false \
--algorithm-config NUM_BUCKETS_PER_COLUMN:10,MEMORY_CHECK_FREQUENCY:1000,MAX_MEMORY_USAGE_PERCENTAGE:80 \
--output file:$EXECUTION_ID
<gh_stars>0
import { Component, OnInit } from '@angular/core';
import {ServerService} from '../../pages/services/user.service';
import { Router } from '@angular/router';
import {HttpClient,HttpHeaders,HttpErrorResponse} from '@angular/common/http';
import { NgxSpinnerService } from 'ngx-spinner';
@Component({
selector: 'app-trends',
templateUrl: './trends.component.html',
styleUrls: ['./trends.component.scss']
})
// Dashboard page that shows pickup/dropoff trend data per city for the
// brand stored in sessionStorage. Loads the city list on init, defaults to
// the first city, and refetches trends when the city or service changes.
export class TrendsComponent implements OnInit {
//variables
cityList:any = [];
user: any = {};
brandid:string;
type_of_service:any= [];
List:any = [];
defaultCity:any;
last_n_months :any = [];
is_default:boolean = true;
selectedCity:any;
constructor(private ServicingService: ServerService,private router: Router, private http: HttpClient,private spinner: NgxSpinnerService) {
// Hide the global theme spinner; this page manages its own ngx-spinner.
const el = document.getElementById('nb-global-spinner');
if (el) {
el.style['display'] = 'none';
}
}
ngOnInit() {
this.getCurrentMonth();
console.log(this.getLastMonths(10));
// brandid scopes every trend request; presumably set at login — TODO confirm.
this.brandid = sessionStorage.getItem('brandid');
this.getCity();
// this.getData("148");
// Static options for the service selector; ids match the backend's
// pickup(0)/dropoff(1) codes used elsewhere as `puord`.
this.type_of_service = [{
"id":0,
"service":"Pickup",
},
{
"id":1,
"service":"Dropoff",
}];
this.user.service = this.type_of_service[0].id;
}
// ngAfterViewInit(){
// console.log("After Init");
// this.getCity();
// }
// Fetches the city list, sorts it by cityname, selects the first city as
// default and triggers the initial trend load. A `login === 0` response is
// treated as an expired session and redirects to the login page.
getCity() {
const reqpara1 ={
requesttype: 'getcitylist',
}
const as1 = JSON.stringify(reqpara1);
this.ServicingService.webServiceCall(as1).subscribe
(res => {
if (res[0].login === 0) {
sessionStorage.removeItem('currentUser');
this.router.navigate(['/auth/login']);
}
else {
this.cityList = res[0].citylist;
this.cityList.sort();
// console.log("sort", this.cityList[0].cityName.sort());
let city_sort = this.cityList;
let fld = 'cityname';
// Sorts in place by cityname; null-safe via the `|| ""` fallback.
console.log(this.cityList.sort((a, b) => (a[fld] || "").toString().localeCompare((b[fld] || "").toString())));
console.log("trend",this.cityList);
this.defaultCity = this.cityList[0];
this.user.city = this.cityList[0].cityid;
console.log("default",this.defaultCity);
this.getData(this.defaultCity.cityid);
// Mirror the default selection into selectedCity for the template.
for(var i = 0; i < this.cityList.length;i++){
if(this.cityList[i].cityid == this.defaultCity.cityid ){
this.selectedCity = this.cityList[i];
console.log("selectedcity",this.selectedCity)
}
}
}
});
}
// Loads trend rows for one city. puord is hard-coded to "0" (pickup) here;
// NOTE(review): the selected service type is not forwarded — confirm intent.
getData(cityid){
this.List = [];
const reqpara2 ={
requesttype: 'gettrend',
cityid:cityid,
brandid:this.brandid,
puord:"0"
}
const as2 = JSON.stringify(reqpara2);
this.ServicingService.webServiceCall(as2).subscribe(res =>{
if (res[0].login === 0) {
sessionStorage.removeItem('currentUser');
this.router.navigate(['/auth/login']);
}
else {
this.List = res[0].trends;
this.spinner.hide();
console.log(this.List);
}
})
}
// Handler for the service-type <select>: re-fetches the current city.
onServiceType($event){
console.log($event.target.value);
this.spinner.show();
this.getData(this.user.city);
}
// Handler for the city <select>: re-fetches trends and syncs selectedCity.
onCityChange($event){
this.is_default = false;
console.log($event.target.value);
this.spinner.show();
this.getData($event.target.value);
for(var i = 0; i < this.cityList.length;i++){
if(this.cityList[i].cityid == $event.target.value ){
this.selectedCity = this.cityList[i];
console.log("selectedcity",this.selectedCity)
}
}
}
// Logs the current month name; result is not stored anywhere.
getCurrentMonth(){
const monthNames = ["January", "February", "March", "April", "May", "June",
"July", "August", "September", "October", "November", "December"];
var d = new Date();
var n = d.getMonth();
console.log(monthNames[d.getMonth()]);
}
// Fills last_n_months with "Month - Year" labels for the most recent n
// months (current month first) and returns the array.
getLastMonths(n) {
var m =['January','February','March','April','May','June','July','August','September','October','November','December'];
// var last_n_months =[]
var d = new Date()
for(var i=0;i<n;i++){
this.last_n_months[i] = m[d.getMonth()]+ ' - ' + d.getFullYear().toString()
d.setMonth(d.getMonth()-1)
}
return this.last_n_months
}
}
|
#!/bin/sh
# Open an interactive telnet session to the socket command listener.
# Point HOST at the remote machine unless testing locally.
HOST=localhost
PORT=8089
# Once connected you can issue commands directly on the listener.
telnet "$HOST" "$PORT"
|
import { HTMLAttributes, ReactNode } from 'react';
/**
 * Props for a three-row vertical layout: an optional header and footer
 * rendered around the main content, combined with the standard HTML
 * attributes of the host element.
 */
export type TripleVerticalLayoutPropsType = {
header?: ReactNode;
footer?: ReactNode;
} & HTMLAttributes<HTMLElement>;
|
#!/bin/bash
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# A simple script to configure the Cuda tree needed for the TensorFlow GPU
# build. We need both Cuda toolkit 7.0 and Cudnn 6.5.
# Usage:
# * Edit cuda.config to point both the Cuda toolkit and the Cudnn libraries to their local paths.
# * Run cuda_config.sh to generate symbolic links in the source tree that reflect
#   the file organization needed by TensorFlow.
# Prints command-line usage for this script to stdout.
print_usage() {
cat << EOF
Usage: $0 [--check]
Configure TensorFlow's canonical view of Cuda libraries using cuda.config.
Arguments:
--check: Only check that the proper Cuda dependencies has already been
properly configured in the source tree. It also creates symbolic links to
the files in the gen-tree to make bazel happy.
EOF
}
CHECK_ONLY=0
# Parse the arguments. Add more arguments as the "case" line when needed.
while [[ $# -gt 0 ]]; do
argument="$1"
shift
case $argument in
--check)
CHECK_ONLY=1
;;
*)
echo "Error: unknown arguments"
print_usage
# Fix: exit codes must be in 0-255; "exit -1" is non-portable (wraps to 255
# in bash, rejected outright by some shells).
exit 1
;;
esac
done
# cuda.config supplies CUDA_TOOLKIT_PATH / CUDNN_INSTALL_PATH overrides.
source cuda.config || exit 1
# Defaults when cuda.config leaves the variables unset.
OUTPUTDIR=${OUTPUTDIR:-../../..}
CUDA_TOOLKIT_PATH=${CUDA_TOOLKIT_PATH:-/usr/local/cuda}
CUDNN_INSTALL_PATH=${CUDNN_INSTALL_PATH:-/usr/local/cuda}
# An error message when the Cuda toolkit is not found
# Prints a fatal "Cuda toolkit missing" banner and aborts the script.
# $1: description of what could not be found.
function CudaError {
echo ERROR: $1
cat << EOF
##############################################################################
##############################################################################
Cuda 7.0 toolkit is missing.
1. Download and install the CUDA 7.0 toolkit and CUDNN 6.5 library;
2. Run configure from the root of the source tree, before rerunning bazel;
Please refer to README.md for more details.
##############################################################################
##############################################################################
EOF
# Fix: "exit -1" is non-portable; use a plain failure code.
exit 1
}
# An error message when CUDNN is not found
# Prints a fatal "Cudnn missing" banner and aborts the script.
# $1: description of what could not be found.
function CudnnError {
echo ERROR: $1
cat << EOF
##############################################################################
##############################################################################
Cudnn 6.5 is missing.
1. Download and install the CUDA 7.0 toolkit and CUDNN 6.5 library;
2. Run configure from the root of the source tree, before rerunning bazel;
Please refer to README.md for more details.
##############################################################################
##############################################################################
EOF
# Fix: "exit -1" is non-portable; use a plain failure code.
exit 1
}
# Check that Cuda libraries has already been properly configured in the source tree.
# We still need to create links to the gen-tree to make bazel happy.
# Verifies that $2 exists and mirrors it into the gen-tree so bazel sees it.
# $1: error handler to call when the file is missing (CudaError/CudnnError).
# $2: file path relative to the current directory.
# Fix: path expansions are now quoted so paths with spaces do not break.
function CheckAndLinkToSrcTree {
ERROR_FUNC=$1
FILE=$2
if test ! -e "$FILE"; then
$ERROR_FUNC "$PWD/$FILE cannot be found"
fi
# Link the output file to the source tree, avoiding self links if they are
# the same. This could happen if invoked from the source tree by accident.
if [ ! "$(readlink -f "$PWD")" == "$(readlink -f "$OUTPUTDIR/third_party/gpus/cuda")" ]; then
mkdir -p "$(dirname "$OUTPUTDIR/third_party/gpus/cuda/$FILE")"
ln -sf "$PWD/$FILE" "$OUTPUTDIR/third_party/gpus/cuda/$FILE"
fi
}
# --check mode: only validate the already-configured tree, then exit.
if [ "$CHECK_ONLY" == "1" ]; then
CheckAndLinkToSrcTree CudaError include/cuda.h
CheckAndLinkToSrcTree CudaError include/cublas.h
CheckAndLinkToSrcTree CudnnError include/cudnn.h
CheckAndLinkToSrcTree CudaError lib/libcudart_static.a
CheckAndLinkToSrcTree CudaError lib/libcublas.so.7.0
CheckAndLinkToSrcTree CudnnError lib/libcudnn.so.6.5
CheckAndLinkToSrcTree CudaError lib/libcudart.so.7.0
exit 0
fi
# Actually configure the source tree for TensorFlow's canonical view of Cuda
# libraries.
# Sanity-check the toolkit and cudnn install locations before linking.
if test ! -e ${CUDA_TOOLKIT_PATH}/lib/libcudart.so.7.0; then
CudaError "cannot find ${CUDA_TOOLKIT_PATH}/lib/libcudart.so.7.0"
fi
if test ! -d ${CUDNN_INSTALL_PATH}; then
CudnnError "cannot find dir: ${CUDNN_INSTALL_PATH}"
fi
# Locate cudnn.h: accept either a flat install or an include/ subdirectory.
if test -e ${CUDNN_INSTALL_PATH}/cudnn.h; then
CUDNN_HEADER_PATH=${CUDNN_INSTALL_PATH}
elif test -e ${CUDNN_INSTALL_PATH}/include/cudnn.h; then
CUDNN_HEADER_PATH=${CUDNN_INSTALL_PATH}/include
else
CudnnError "cannot find cudnn.h under: ${CUDNN_INSTALL_PATH}"
fi
# Locate libcudnn.so.6.5: same flat-or-lib/ layout handling as the header.
if test -e ${CUDNN_INSTALL_PATH}/libcudnn.so.6.5; then
CUDNN_LIB_PATH=${CUDNN_INSTALL_PATH}
elif test -e ${CUDNN_INSTALL_PATH}/lib/libcudnn.so.6.5; then
CUDNN_LIB_PATH=${CUDNN_INSTALL_PATH}/lib
else
CudnnError "cannot find libcudnn.so.6.5 under: ${CUDNN_INSTALL_PATH}"
fi
# Helper function to build symbolic links for all files under a directory.
# Mirrors the regular files of one directory ($3) from the source prefix ($1)
# into the destination prefix ($2) as symbolic links.
function LinkOneDir {
SRC_PREFIX=$1
DST_PREFIX=$2
SRC_DIR=$3
DST_DIR=$(echo $SRC_DIR | sed "s,^$SRC_PREFIX,$DST_PREFIX,")
mkdir -p "$DST_DIR"
FILE_LIST=$(find -L "$SRC_DIR" -maxdepth 1 -type f)
if test "$FILE_LIST" != ""; then
# FILE_LIST is intentionally unquoted: it expands to many file arguments.
# Fix: "exit -1" replaced with a portable failure code.
ln -sf $FILE_LIST "$DST_DIR"/ || exit 1
fi
}
# Exported so the xargs-spawned bash in LinkAllFiles can call it.
export -f LinkOneDir
# Builds links for all files under SRC_DIR ($1) into DST_DIR ($2),
# including subdirectories, preserving the directory structure.
function LinkAllFiles {
SRC_DIR=$1
DST_DIR=$2
# Fix: "exit -1" replaced with a portable failure code.
find -L $SRC_DIR -type d | xargs -I {} bash -c "LinkOneDir $SRC_DIR $DST_DIR {}" || exit 1
}
# Set up the symbolic links for cuda toolkit. We link at individual file
# level, not at the directory level, because the external library may have a
# different file layout from our desired structure.
mkdir -p $OUTPUTDIR/third_party/gpus/cuda
echo "Setting up Cuda include"
LinkAllFiles ${CUDA_TOOLKIT_PATH}/include $OUTPUTDIR/third_party/gpus/cuda/include || exit 1
echo "Setting up Cuda lib"
LinkAllFiles ${CUDA_TOOLKIT_PATH}/lib $OUTPUTDIR/third_party/gpus/cuda/lib || exit 1
echo "Setting up Cuda bin"
LinkAllFiles ${CUDA_TOOLKIT_PATH}/bin $OUTPUTDIR/third_party/gpus/cuda/bin || exit 1
echo "Setting up Cuda nvvm"
LinkAllFiles ${CUDA_TOOLKIT_PATH}/nvvm $OUTPUTDIR/third_party/gpus/cuda/nvvm || exit 1
# Set up symbolic links for the cudnn header and library located earlier.
# Fix: "exit -1" replaced with a portable failure code throughout.
ln -sf $CUDNN_HEADER_PATH/cudnn.h $OUTPUTDIR/third_party/gpus/cuda/include/cudnn.h || exit 1
ln -sf $CUDNN_LIB_PATH/libcudnn.so.6.5 $OUTPUTDIR/third_party/gpus/cuda/lib/libcudnn.so.6.5 || exit 1
|
-- Ten most recently active users, newest first.
-- Fix: the alias was single-quoted ('recent_activity'); in standard SQL that
-- is a string literal, not an identifier, and only MySQL tolerates it.
SELECT
user_id,
MAX(activity_timestamp) AS recent_activity
FROM users_activity
GROUP BY user_id
ORDER BY recent_activity DESC
LIMIT 10;
function transposeMatrix(matrix) {
let rows = matrix.length;
let columns = matrix[0].length;
let newMatrix = [];
// Outer loop to create columns
for (let i = 0; i < columns; i++) {
let subArray = [];
// inner loop to create rows
for (let j = 0; j < rows; j++) {
subArray.push(matrix[j][i]);
}
newMatrix.push(subArray);
}
return newMatrix;
}
let matrix = [[1, 2, 3], [4, 5, 6], [7, 8, 9]];
let transposedMatrix = transposeMatrix(matrix);
console.log(transposedMatrix);
// [ [ 1, 4, 7 ], [ 2, 5, 8 ], [ 3, 6, 9 ] ] |
<reponame>maksimandrianov/cdstructures<gh_stars>10-100
// The MIT License (MIT)
// Copyright (c) 2017 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
#define CDC_USE_SHORT_NAMES
#include "test-common.h"
#include "cdcontainers/casts.h"
#include "cdcontainers/common.h"
#include "cdcontainers/list.h"
#include <float.h>
#include <stdarg.h>
#include <CUnit/Basic.h>
/* Checks that list `l` holds exactly `count` elements whose int payloads
 * equal, in order, the ints passed as varargs.
 * Returns false on any element mismatch, failed access, or size difference.
 * Fix: the original called va_start() but returned without va_end() on every
 * path, which is undefined behavior (C11 7.16.1.3). */
static bool list_range_int_eq(list_t *l, size_t count, ...)
{
  va_list args;
  stat_t ret;
  bool matched = true;

  va_start(args, count);
  for (size_t i = 0; i < count; ++i) {
    void *val = NULL;
    if ((ret = list_at(l, i, &val)) != CDC_STATUS_OK) {
      matched = false;
      break;
    }

    int elem = va_arg(args, int);
    if (elem != CDC_TO_INT(val)) {
      matched = false;
      break;
    }
  }
  va_end(args);
  return matched && list_size(l) == count;
}
/* Debug helper: prints the int payload of every list element on one line,
 * framed by blank lines. Elements that cannot be read are skipped. */
static inline void list_range_int_print(list_t *l)
{
  const size_t len = list_size(l);
  printf("\n");
  for (size_t pos = 0; pos < len; ++pos) {
    void *val = NULL;
    if (list_at(l, pos, &val) == CDC_STATUS_OK) {
      printf("%d ", CDC_TO_INT(val));
    }
  }
  printf("\n");
}
/* Ordering predicate for cmerge/csort: nonzero when l's int payload < r's. */
static int lt(const void *l, const void *r)
{
return CDC_TO_INT(l) < CDC_TO_INT(r);
}
/* Equality predicate for punique: nonzero when both int payloads are equal. */
static int eq(const void *l, const void *r)
{
return CDC_TO_INT(l) == CDC_TO_INT(r);
}
/* Unary predicate for erase_if: nonzero when the int payload equals 2. */
static int is_eq_2(const void *v)
{
return CDC_TO_INT(v) == 2;
}
/* ctor: a default-constructed list must be empty. */
void test_list_ctor()
{
list_t *l = NULL;
CU_ASSERT_EQUAL(list_ctor(&l, NULL), CDC_STATUS_OK);
CU_ASSERT(list_empty(l));
list_dtor(l);
}
/* ctorl: variadic construction preserves element order and size. */
void test_list_ctorl()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 4);
CU_ASSERT(list_range_int_eq(l, 4, a, b, c, d));
list_dtor(l);
}
/* push_back: each append grows the list by one and keeps prior order. */
void test_list_push_back()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2;
CU_ASSERT_EQUAL(list_ctor(&l, NULL), CDC_STATUS_OK);
CU_ASSERT(list_empty(l));
CU_ASSERT_EQUAL(list_push_back(l, CDC_FROM_INT(a)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 1);
CU_ASSERT(list_range_int_eq(l, 1, a));
CU_ASSERT_EQUAL(list_push_back(l, CDC_FROM_INT(b)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 2);
CU_ASSERT(list_range_int_eq(l, 2, a, b));
CU_ASSERT_EQUAL(list_push_back(l, CDC_FROM_INT(c)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 3);
CU_ASSERT(list_range_int_eq(l, 3, a, b, c));
list_dtor(l);
}
/* push_front: each prepend grows the list by one; newest element is first. */
void test_list_push_front()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2;
CU_ASSERT_EQUAL(list_ctor(&l, NULL), CDC_STATUS_OK);
CU_ASSERT(list_empty(l));
CU_ASSERT_EQUAL(list_push_front(l, CDC_FROM_INT(a)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 1);
CU_ASSERT(list_range_int_eq(l, 1, a));
CU_ASSERT_EQUAL(list_push_front(l, CDC_FROM_INT(b)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 2);
CU_ASSERT(list_range_int_eq(l, 2, b, a));
CU_ASSERT_EQUAL(list_push_front(l, CDC_FROM_INT(c)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 3);
CU_ASSERT(list_range_int_eq(l, 3, c, b, a));
list_dtor(l);
}
/* at: positional access returns each element; out-of-range index fails. */
void test_list_at()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2;
void *elem = NULL;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_at(l, 0, &elem), CDC_STATUS_OK);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), a)
CU_ASSERT_EQUAL(list_at(l, 1, &elem), CDC_STATUS_OK);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), b);
CU_ASSERT_EQUAL(list_at(l, 2, &elem), CDC_STATUS_OK);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), c);
size_t index = list_size(l) + 1;
CU_ASSERT_EQUAL(list_at(l, index, &elem), CDC_STATUS_OUT_OF_RANGE);
list_dtor(l);
}
/* front: returns the first element without removing it. */
void test_list_front()
{
list_t *l = NULL;
int a = 1, b = 2;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_END), CDC_STATUS_OK);
CU_ASSERT_EQUAL(CDC_TO_INT(list_front(l)), a);
list_dtor(l);
}
/* back: returns the last element without removing it. */
void test_list_back()
{
list_t *l = NULL;
int a = 1, b = 2;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_END), CDC_STATUS_OK);
CU_ASSERT_EQUAL(CDC_TO_INT(list_back(l)), b);
list_dtor(l);
}
/* pop_back: removes elements from the tail in reverse insertion order. */
void test_list_pop_back()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
void *elem = list_back(l);
list_pop_back(l);
CU_ASSERT_EQUAL(list_size(l), 3);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), d);
elem = list_back(l);
list_pop_back(l);
CU_ASSERT_EQUAL(list_size(l), 2);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), c);
elem = list_back(l);
list_pop_back(l);
CU_ASSERT_EQUAL(list_size(l), 1);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), b);
elem = list_back(l);
list_pop_back(l);
CU_ASSERT(list_empty(l));
CU_ASSERT_EQUAL(CDC_TO_INT(elem), a);
list_dtor(l);
}
/* pop_front: removes elements from the head in insertion order. */
void test_list_pop_front()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
void *elem = list_front(l);
list_pop_front(l);
CU_ASSERT_EQUAL(list_size(l), 3);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), a);
elem = list_front(l);
list_pop_front(l);
CU_ASSERT_EQUAL(list_size(l), 2);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), b);
elem = list_front(l);
list_pop_front(l);
CU_ASSERT_EQUAL(list_size(l), 1);
CU_ASSERT_EQUAL(CDC_TO_INT(elem), c);
elem = list_front(l);
list_pop_front(l);
CU_ASSERT(list_empty(l));
CU_ASSERT_EQUAL(CDC_TO_INT(elem), d);
list_dtor(l);
}
/* swap: exchanges the full contents (and sizes) of two lists. */
void test_list_swap()
{
list_t *v = NULL;
list_t *w = NULL;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&v, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_ctorl(&w, NULL, CDC_FROM_INT(a), CDC_FROM_INT(d), CDC_END), CDC_STATUS_OK);
list_swap(v, w);
CU_ASSERT_EQUAL(list_size(v), 2);
CU_ASSERT(list_range_int_eq(v, 2, a, d));
CU_ASSERT_EQUAL(list_size(w), 4);
CU_ASSERT(list_range_int_eq(w, 4, a, b, c, d));
list_dtor(v);
list_dtor(w);
}
/* insert: positional insert works at the front, middle, and end. */
void test_list_insert()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2;
CU_ASSERT_EQUAL(list_ctor(&l, NULL), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_insert(l, 0, CDC_FROM_INT(a)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 1);
CU_ASSERT(list_range_int_eq(l, 1, a));
CU_ASSERT_EQUAL(list_insert(l, list_size(l), CDC_FROM_INT(c)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 2);
CU_ASSERT(list_range_int_eq(l, 2, a, c));
CU_ASSERT_EQUAL(list_insert(l, 1, CDC_FROM_INT(b)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 3);
CU_ASSERT(list_range_int_eq(l, 3, a, b, c));
CU_ASSERT_EQUAL(list_insert(l, 1, CDC_FROM_INT(c)), CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_size(l), 4);
CU_ASSERT(list_range_int_eq(l, 4, a, c, b, c));
list_dtor(l);
}
/* erase: positional erase works at the front, middle, and end. */
void test_list_erase()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_erase(l, 0);
CU_ASSERT_EQUAL(list_size(l), 3);
CU_ASSERT(list_range_int_eq(l, 3, b, c, d));
list_erase(l, 1);
CU_ASSERT_EQUAL(list_size(l), 2);
CU_ASSERT(list_range_int_eq(l, 2, b, d));
list_erase(l, list_size(l) - 1);
CU_ASSERT_EQUAL(list_size(l), 1);
CU_ASSERT(list_range_int_eq(l, 1, b));
list_dtor(l);
}
/* ierase: iterator-based erase at begin, middle, and last positions. */
void test_list_ierase()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2, d = 3;
list_iter_t it = CDC_INIT_STRUCT;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_begin(l, &it);
list_ierase(&it);
CU_ASSERT_EQUAL(list_size(l), 3);
CU_ASSERT(list_range_int_eq(l, 3, b, c, d));
list_begin(l, &it);
list_iter_next(&it);
list_ierase(&it);
CU_ASSERT_EQUAL(list_size(l), 2);
CU_ASSERT(list_range_int_eq(l, 2, b, d));
list_end(l, &it);
list_iter_prev(&it);
list_ierase(&it);
CU_ASSERT_EQUAL(list_size(l), 1);
CU_ASSERT(list_range_int_eq(l, 1, b));
list_dtor(l);
}
/* clear: removes every element, leaving an empty but valid list. */
void test_list_clear()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_clear(l);
CU_ASSERT(list_empty(l));
list_dtor(l);
}
/* Forward iterators: walks begin->end checking data, has_next/has_prev, and
 * the forward<->reverse iterator conversion at every step. */
void test_list_iterators()
{
list_t *l = NULL;
list_iter_t it = CDC_INIT_STRUCT;
list_iter_t ittmp = CDC_INIT_STRUCT;
list_riter_t rit = CDC_INIT_STRUCT;
list_riter_t rittmp = CDC_INIT_STRUCT;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_begin(l, &it);
CU_ASSERT_EQUAL(list_iter_has_next(&it), true);
CU_ASSERT_EQUAL(list_iter_has_prev(&it), false);
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), a);
list_riter_from(&it, &rit);
list_rend(l, &rittmp);
CU_ASSERT(list_riter_is_eq(&rittmp, &rit));
list_iter_next(&it);
CU_ASSERT(list_iter_has_next(&it));
CU_ASSERT(list_iter_has_prev(&it));
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), b);
list_riter_from(&it, &rit);
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), a);
list_iter_next(&it);
CU_ASSERT(list_iter_has_next(&it));
CU_ASSERT(list_iter_has_prev(&it));
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), c);
list_riter_from(&it, &rit);
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), b);
list_iter_next(&it);
CU_ASSERT(!list_iter_has_next(&it));
CU_ASSERT(list_iter_has_prev(&it));
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), d);
list_riter_from(&it, &rit);
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), c);
list_iter_next(&it);
list_end(l, &ittmp);
CU_ASSERT(list_iter_is_eq(&ittmp, &it));
list_riter_from(&it, &rit);
list_rbegin(l, &rittmp);
CU_ASSERT(list_riter_is_eq(&rittmp, &rit));
list_iter_prev(&it);
CU_ASSERT(!list_iter_has_next(&it));
CU_ASSERT(list_iter_has_prev(&it));
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), d);
list_dtor(l);
}
/* Reverse iterators: mirror of the forward test, walking rbegin->rend and
 * converting back to forward iterators at each step. */
void test_list_reverse_iterators()
{
list_t *l = NULL;
list_iter_t it = CDC_INIT_STRUCT;
list_iter_t ittmp = CDC_INIT_STRUCT;
list_riter_t rit = CDC_INIT_STRUCT;
list_riter_t rittmp = CDC_INIT_STRUCT;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_rbegin(l, &rit);
CU_ASSERT(list_riter_has_next(&rit));
CU_ASSERT(!list_riter_has_prev(&rit));
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), d);
list_iter_from(&rit, &it);
list_end(l, &ittmp);
CU_ASSERT(list_iter_is_eq(&ittmp, &it));
list_riter_next(&rit);
CU_ASSERT(list_riter_has_next(&rit));
CU_ASSERT(list_riter_has_prev(&rit));
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), c);
list_iter_from(&rit, &it);
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), d);
list_riter_next(&rit);
CU_ASSERT(list_riter_has_next(&rit));
CU_ASSERT(list_riter_has_prev(&rit));
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), b);
list_iter_from(&rit, &it);
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), c);
list_riter_next(&rit);
CU_ASSERT(!list_riter_has_next(&rit));
CU_ASSERT(list_riter_has_prev(&rit));
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), a);
list_iter_from(&rit, &it);
CU_ASSERT_EQUAL(CDC_TO_INT(list_iter_data(&it)), b);
list_riter_next(&rit);
list_rend(l, &rittmp);
CU_ASSERT(list_riter_is_eq(&rittmp, &rit));
list_iter_from(&rit, &it);
list_begin(l, &ittmp);
CU_ASSERT(list_iter_is_eq(&ittmp, &it));
list_riter_prev(&rit);
CU_ASSERT(!list_riter_has_next(&rit));
CU_ASSERT(list_riter_has_prev(&rit));
CU_ASSERT_EQUAL(CDC_TO_INT(list_riter_data(&rit)), a);
list_dtor(l);
}
/* splice: moves the [first, last) range from one list into another at
 * `current`, exercised across both directions and range positions.
 * Fix: every "&current" argument had been corrupted to the mojibake
 * sequence "¤t" ("&curren" was decoded as the HTML entity for the
 * currency sign); the code did not compile. Restored throughout. */
void test_list_splice()
{
list_t *l1 = NULL;
list_t *l2 = NULL;
list_iter_t current = CDC_INIT_STRUCT;
list_iter_t first = CDC_INIT_STRUCT;
list_iter_t last = CDC_INIT_STRUCT;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l1, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_ctorl(&l2, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_begin(l1, &current);
list_iter_next(&current);
list_begin(l2, &first);
list_iter_next(&first);
list_end(l2, &last);
list_iter_prev(&last);
list_splice(&current, &first, &last);
CU_ASSERT_EQUAL(list_size(l1), 6);
CU_ASSERT_EQUAL(list_size(l2), 2);
CU_ASSERT(list_range_int_eq(l1, 6, a, b, c, b, c, d));
CU_ASSERT(list_range_int_eq(l2, 2, a, d));
list_begin(l2, &current);
list_iter_next(&current);
list_begin(l1, &first);
list_end(l1, &last);
list_splice(&current, &first, &last);
CU_ASSERT_EQUAL(list_size(l1), 0);
CU_ASSERT_EQUAL(list_size(l2), 8);
CU_ASSERT(list_range_int_eq(l2, 8, a, a, b, c, b, c, d, d));
list_end(l2, &last);
list_iter_prev(&last);
list_iter_prev(&last);
list_iter_prev(&last);
list_iter_prev(&last);
list_begin(l1, &current);
list_begin(l2, &first);
list_splice(&current, &first, &last);
CU_ASSERT_EQUAL(list_size(l1), 4);
CU_ASSERT_EQUAL(list_size(l2), 4);
CU_ASSERT(list_range_int_eq(l1, 4, a, a, b, c));
CU_ASSERT(list_range_int_eq(l2, 4, b, c, d, d));
list_begin(l1, &current);
list_begin(l2, &first);
list_end(l2, &last);
list_splice(&current, &first, &last);
CU_ASSERT_EQUAL(list_size(l1), 8);
CU_ASSERT_EQUAL(list_size(l2), 0);
CU_ASSERT(list_range_int_eq(l1, 8, b, c, d, d, a, a, b, c));
list_end(l1, &last);
list_iter_prev(&last);
list_iter_prev(&last);
list_iter_prev(&last);
list_iter_prev(&last);
list_begin(l2, &current);
list_begin(l1, &first);
list_splice(&current, &first, &last);
CU_ASSERT_EQUAL(list_size(l1), 4);
CU_ASSERT_EQUAL(list_size(l2), 4);
CU_ASSERT(list_range_int_eq(l1, 4, a, a, b, c));
CU_ASSERT(list_range_int_eq(l2, 4, b, c, d, d));
list_end(l1, &current);
list_begin(l2, &first);
list_end(l2, &last);
list_splice(&current, &first, &last);
CU_ASSERT_EQUAL(list_size(l1), 8);
CU_ASSERT_EQUAL(list_size(l2), 0);
CU_ASSERT(list_range_int_eq(l1, 8, a, a, b, c, b, c, d, d));
list_begin(l1, &first);
list_iter_next(&first);
list_iter_next(&first);
list_iter_next(&first);
list_iter_next(&first);
list_begin(l2, &current);
list_end(l1, &last);
list_splice(&current, &first, &last);
CU_ASSERT_EQUAL(list_size(l1), 4);
CU_ASSERT_EQUAL(list_size(l2), 4);
CU_ASSERT(list_range_int_eq(l1, 4, a, a, b, c));
CU_ASSERT(list_range_int_eq(l2, 4, b, c, d, d));
list_dtor(l1);
list_dtor(l2);
}
/* ssplice: moves [first, end) of the source list to `current`.
 * Same "&current" mojibake fix as above. */
void test_list_ssplice()
{
list_t *l1 = NULL;
list_t *l2 = NULL;
list_iter_t current = CDC_INIT_STRUCT;
list_iter_t first = CDC_INIT_STRUCT;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l1, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_ctorl(&l2, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_begin(l1, &current);
list_iter_next(&current);
list_begin(l2, &first);
list_iter_next(&first);
list_ssplice(&current, &first);
CU_ASSERT_EQUAL(list_size(l1), 7);
CU_ASSERT_EQUAL(list_size(l2), 1);
CU_ASSERT(list_range_int_eq(l1, 7, a, b, c, d, b, c, d));
CU_ASSERT(list_range_int_eq(l2, 1, a));
list_begin(l1, &current);
list_begin(l2, &first);
list_ssplice(&current, &first);
CU_ASSERT_EQUAL(list_size(l1), 8);
CU_ASSERT_EQUAL(list_size(l2), 0);
CU_ASSERT(list_range_int_eq(l1, 8, a, a, b, c, d, b, c, d));
list_dtor(l1);
list_dtor(l2);
}
/* lsplice: moves an entire source list to `current` in one call.
 * Same "&current" mojibake fix as above. */
void test_list_lsplice()
{
list_t *l1 = NULL;
list_t *l2 = NULL;
list_iter_t current = CDC_INIT_STRUCT;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l1, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_ctorl(&l2, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_begin(l1, &current);
list_iter_next(&current);
list_lsplice(&current, l2);
CU_ASSERT_EQUAL(list_size(l1), 8);
CU_ASSERT_EQUAL(list_size(l2), 0);
CU_ASSERT(list_range_int_eq(l1, 8, a, a, b, c, d, b, c, d));
list_dtor(l1);
list_dtor(l2);
}
/* cmerge: merging two sorted lists (interleaved, back-to-back, and in
 * reverse list order) always yields one fully sorted list and empties l2. */
void test_list_merge()
{
list_t *l1 = NULL;
list_t *l2 = NULL;
int a = 0, b = 1, c = 2, d = 3, e = 4, f = 5, g = 6, h = 7;
CU_ASSERT_EQUAL(list_ctorl(&l1, NULL, CDC_FROM_INT(a), CDC_FROM_INT(c), CDC_FROM_INT(e),
CDC_FROM_INT(g), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_ctorl(&l2, NULL, CDC_FROM_INT(b), CDC_FROM_INT(d), CDC_FROM_INT(f),
CDC_FROM_INT(h), CDC_END),
CDC_STATUS_OK);
list_cmerge(l1, l2, lt);
CU_ASSERT_EQUAL(list_size(l1), 8);
CU_ASSERT_EQUAL(list_size(l2), 0);
CU_ASSERT(list_range_int_eq(l1, 8, a, b, c, d, e, f, g, h));
list_dtor(l1);
list_dtor(l2);
CU_ASSERT_EQUAL(list_ctorl(&l1, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_ctorl(&l2, NULL, CDC_FROM_INT(e), CDC_FROM_INT(f), CDC_FROM_INT(g),
CDC_FROM_INT(h), CDC_END),
CDC_STATUS_OK);
list_cmerge(l1, l2, lt);
CU_ASSERT_EQUAL(list_size(l1), 8);
CU_ASSERT_EQUAL(list_size(l2), 0);
CU_ASSERT(list_range_int_eq(l1, 8, a, b, c, d, e, f, g, h));
list_dtor(l1);
list_dtor(l2);
CU_ASSERT_EQUAL(list_ctorl(&l1, NULL, CDC_FROM_INT(e), CDC_FROM_INT(f), CDC_FROM_INT(g),
CDC_FROM_INT(h), CDC_END),
CDC_STATUS_OK);
CU_ASSERT_EQUAL(list_ctorl(&l2, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_cmerge(l1, l2, lt);
CU_ASSERT_EQUAL(list_size(l1), 8);
CU_ASSERT_EQUAL(list_size(l2), 0);
CU_ASSERT(list_range_int_eq(l1, 8, a, b, c, d, e, f, g, h));
list_dtor(l1);
list_dtor(l2);
}
/* erase_if: removes every element matching the predicate (value == 2). */
void test_list_erase_if()
{
list_t *l;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(
list_ctorl(&l, NULL, CDC_FROM_INT(c), CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(c), CDC_FROM_INT(c), CDC_FROM_INT(d), CDC_FROM_INT(c), CDC_END),
CDC_STATUS_OK);
list_erase_if(l, is_eq_2);
CU_ASSERT(list_range_int_eq(l, 3, a, b, d));
list_dtor(l);
}
/* reverse: element order is flipped in place. */
void test_list_reverse()
{
list_t *l = NULL;
int a = 0, b = 1, c = 2, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_reverse(l);
CU_ASSERT(list_range_int_eq(l, 4, d, c, b, a));
list_dtor(l);
}
/* punique: collapses consecutive equal elements only (non-adjacent
 * duplicates survive); also safe on an empty list. */
void test_list_unique()
{
list_t *l = NULL;
int a = 2, b = 1;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(a), CDC_FROM_INT(a),
CDC_FROM_INT(a), CDC_END),
CDC_STATUS_OK);
list_punique(l, eq);
CU_ASSERT(list_range_int_eq(l, 1, a));
list_clear(l);
list_punique(l, eq);
list_dtor(l);
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(b),
CDC_FROM_INT(a), CDC_END),
CDC_STATUS_OK);
list_punique(l, eq);
CU_ASSERT(list_range_int_eq(l, 3, a, b, a));
list_dtor(l);
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(a), CDC_FROM_INT(b),
CDC_FROM_INT(b), CDC_END),
CDC_STATUS_OK);
list_punique(l, eq);
CU_ASSERT(list_range_int_eq(l, 2, a, b));
list_dtor(l);
}
/* csort: sorts with the given comparator; covers empty, single-element,
 * two-element, and multi-element lists. */
void test_list_sort()
{
list_t *l = NULL;
int a = 2, b = 1, c = 4, d = 3;
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_FROM_INT(c),
CDC_FROM_INT(d), CDC_END),
CDC_STATUS_OK);
list_csort(l, lt);
CU_ASSERT(list_range_int_eq(l, 4, b, a, d, c));
list_clear(l);
list_csort(l, lt);
list_dtor(l);
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_END), CDC_STATUS_OK);
list_csort(l, lt);
CU_ASSERT(list_range_int_eq(l, 1, a));
list_dtor(l);
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_END), CDC_STATUS_OK);
list_csort(l, lt);
CU_ASSERT(list_range_int_eq(l, 2, b, a));
list_dtor(l);
CU_ASSERT_EQUAL(list_ctorl(&l, NULL, CDC_FROM_INT(c), CDC_FROM_INT(a), CDC_FROM_INT(b), CDC_END),
CDC_STATUS_OK);
list_csort(l, lt);
CU_ASSERT(list_range_int_eq(l, 3, b, a, c));
list_dtor(l);
}
|
<filename>src/index.ts
// Package entry point: re-exports the full strom API plus the pump and
// pumpify stream helpers as named exports.
export * from 'strom';
import pump from 'pump';
export { pump };
import pumpify from 'pumpify';
export { pumpify };
|
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package com.amazon.dataprepper.plugins.prepper.oteltrace.model;
import java.util.Random;
/**
 * Test-only helpers for generating random data.
 */
public class TestUtils {
    private static final Random RANDOM = new Random();

    /** Utility class; not instantiable. */
    private TestUtils() {
    }

    /**
     * Returns a freshly allocated array of {@code len} random bytes.
     *
     * @param len number of bytes to generate; must be non-negative
     * @return a new array of length {@code len} filled with random values
     */
    public static byte[] getRandomBytes(int len) {
        byte[] bytes = new byte[len];
        RANDOM.nextBytes(bytes);
        return bytes;
    }
}
|
The bug in the code is that there is no colon (':') at the end of the 'for value in arr' line. Python requires the colon to recognize the line as the header of a loop statement, so running the code raises a SyntaxError. (The original print call was also malformed: its string literal was never closed.) The corrected code looks like this:
arr = [1, 2, 3, 4, 5]
for value in arr:
    print("Value is:", value)
#!/usr/bin/env bats
# vim: ft=sh:sw=2:et
# Smoke tests for a Foreman/puppet installation: smart-proxy registration,
# puppet agent runs, and ENC/class assignment.
# Fail a pipeline when any stage fails, not just the last one.
set -o pipefail
# Helper libraries providing the tIs*/tPackage*/tForemanVersion functions.
load os_helper
load foreman_helper
# AIO puppet-agent installs add puppet to PATH via this profile script.
if [[ -e /etc/profile.d/puppet-agent.sh ]] ; then
  . /etc/profile.d/puppet-agent.sh
fi
@test "check smart proxy is registered" {
  # The smart proxy must be visible in Foreman under this host's FQDN.
  hammer proxy info --name=$(hostname -f)
}
@test "assert puppet version" {
  # Determine whether puppet came from the vendor repos or from the distro
  # by checking for puppetlabs/puppet.com entries in the package sources.
  # grep success (status 0) means a vendor repo is configured.
  if tIsRedHatCompatible ; then
    run grep -q puppetlabs /etc/yum.repos.d/*.repo
    IS_NATIVE=$status
  elif tIsDebianCompatible ; then
    run grep -q puppet\.com -R /etc/apt/sources.list*
    IS_NATIVE=$status
  else
    IS_NATIVE=1
  fi
  # AIO installs ship the "puppet-agent" package; older setups just "puppet".
  if tPackageExists puppet-agent ; then
    PACKAGE=puppet-agent
  else
    PACKAGE=puppet
  fi
  tPackageExists $PACKAGE
  # IS_NATIVE=1 (no vendor repo found): vendor must NOT be Puppet Labs;
  # otherwise the package must come from Puppet Labs.
  if [[ $IS_NATIVE == 1 ]] ; then
    tPackageVendor $PACKAGE | grep -v "Puppet Labs"
  else
    tPackageVendor $PACKAGE | grep "Puppet Labs"
  fi
}
@test "wake up puppet agent" {
  # Trigger an immediate one-shot agent run so the host reports in.
  puppet agent -t -v
}
@test "check host is registered" {
  # A non-empty "Last report" timestamp proves the agent checked in.
  hammer host info --name $(hostname -f) | egrep "Last report:.*[[:alnum:]]+"
}
# ENC / Puppet class apply tests
@test "install puppet module" {
  # Prefer the AIO (puppetlabs) module path, falling back to the old layout.
  # Use two separate test invocations instead of the obsolescent "-a"
  # conjunction, and quote paths throughout.
  modpath=/etc/puppetlabs/code/environments/production/modules
  if [ ! -d "$modpath" ] && [ -e /etc/puppet/environments/production/modules ]; then
    modpath=/etc/puppet/environments/production/modules
  fi
  # Install the motd module only once so re-runs stay idempotent.
  if [ ! -d "$modpath/motd" ]; then
    puppet module install -i "$modpath" -v 0.1.0 theforeman/motd
  fi
  # The module is usable only if its main manifest is present.
  [ -e "$modpath/motd/manifests/init.pp" ]
}
@test "import motd puppet class" {
  # Importing the proxy's classes should make "motd" known to Foreman.
  # The CSV output includes a header row, hence the "> 1" check.
  hammer proxy import-classes --name $(hostname -f)
  count=$(hammer --csv puppet-class list --search 'name = motd' | wc -l)
  [ $count -gt 1 ]
}
@test "Assign environment to default taxonomies" {
  hammer environment update --name=production --locations "Default Location" --organizations "Default Organization"
}
@test "Assign proxy to default taxonomies" {
  # Foreman 1.20 and earlier didn't test with taxonomies on
  # Foreman 1.23 is expected to have a fix for this via https://projects.theforeman.org/issues/26092
  FOREMAN_VERSION=$(tForemanVersion)
  [[ $FOREMAN_VERSION != '1.21' ]] && [[ $FOREMAN_VERSION != '1.22' ]] && skip "Assignment not needed"
  hammer proxy update --name=$(hostname -f) --locations "Default Location" --organizations "Default Organization"
}
@test "assign puppet class to host" {
  # Look up the motd class id (tail skips the CSV header) and attach it.
  id=$(hammer --csv puppet-class list --search 'name = motd' | tail -n1 | cut -d, -f1)
  hammer host update --puppet-class-ids $id --name $(hostname -f)
}
@test "apply class with puppet agent" {
  # A one-shot agent run should now manage /etc/motd via the motd class.
  puppet agent -v -o --no-daemonize
  grep -i "property of the Foreman project" /etc/motd
}
|
#!/bin/bash
# Renew Let's Encrypt certificates via the certbot compose service, then
# restart the nginx proxy so it serves the refreshed certificates.

# Absolute directory containing this script, so the compose files resolve
# regardless of the caller's working directory.
SCRIPTPATH=$( cd "$(dirname "$0")" ; pwd -P )

# Paths are quoted so the script also works when its directory contains
# spaces (the original expansions were unquoted).
sudo docker-compose -f "$SCRIPTPATH/docker-compose-certbot.yml" \
    --env-file "$SCRIPTPATH/.env" \
    run --rm certbot-renew

sudo docker-compose -f "$SCRIPTPATH/docker-compose.yml" restart nginx-proxy
#!/bin/sh
#
# BASH script to generate training and validation sets using the synthetic dataset generator
# as well as a .json file containing the annotations in COCO format
# Arguments: ycb_video_data_path selected.txt
# NOTE(review): the header says BASH but the shebang is /bin/sh - confirm
# which shell is intended (the active commands below are POSIX-compatible).
#source ~/.virtualenvs/ycb_data_gen/bin/activate
# Generate training dataset
#echo "Generating training set without augmentation"
#python dataset_generator.py --dontocclude --selected --scale --rotation --num 8 /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/demo_data_dir/YCB_objects_split_20k/train/ /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/dataset_20k_train_noOcclusions defaults_20k_train_noOcclusions.py
#echo "Generating training set with augmentation"
#python dataset_generator.py --dontocclude --selected --scale --rotation --num 8 /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/demo_data_dir/YCB_objects_split_20k/train/ /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/dataset_20k_train_noOcclusions_smallScale defaults_20k_train_noOcclusions_smallScale.py
# Convert training to binary
#echo "Convert training to binary"
#python convert_dataset_to_binary.py dataset_20k_train_noOcclusions_smallScale
# Convert training binary to COCO
#echo "Convert training binary to COCO"
#python ycb_to_coco_parallel_py2.py dataset_20k_train_noOcclusions_smallScale
# Generate validation dataset (only the validation pipeline is active;
# the training steps above are intentionally commented out).
#echo "Generating validation set without augmentation"
#python dataset_generator.py --dontocclude --selected --scale --rotation --num 8 /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/demo_data_dir/YCB_objects_split_20k/val/ /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/dataset_20k_val_noOcclusions defaults_20k_val_noOcclusions.py
echo "Generating validation set with augmentation"
python dataset_generator.py --dontocclude --selected --scale --rotation --num 8 /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/demo_data_dir/YCB_objects_split_20k/val/ /home/IIT.LOCAL/ebunz/mask_rcnn/syndata-generation/dataset_20k_val_noOcclusions_smallScale defaults_20k_val_noOcclusions_smallScale.py
# Convert validation to binary
echo "Convert validation to binary"
python convert_dataset_to_binary.py dataset_20k_val_noOcclusions_smallScale
# Convert validation binary to COCO
echo "Convert validation binary to COCO"
python ycb_to_coco_parallel_py2.py dataset_20k_val_noOcclusions_smallScale
|
#include <iostream>
#include <string>
// Abstract base class for all bank transaction types.
// A virtual destructor is declared so that derived instances owned through a
// Transaction* (as TransactionFactory returns) can be deleted safely; without
// it, deleting through the base pointer is undefined behavior.
class Transaction {
public:
    virtual ~Transaction() = default;
    // Performs the concrete transaction's work.
    virtual void process() = 0;
};
// Concrete transaction: deposit (simulated by logging to stdout).
class DepositTransaction : public Transaction {
public:
    void process() override {
        std::cout << "Processing deposit transaction" << std::endl;
    }
};
// Concrete transaction: withdrawal (simulated by logging to stdout).
class WithdrawalTransaction : public Transaction {
public:
    void process() override {
        std::cout << "Processing withdrawal transaction" << std::endl;
    }
};
// Concrete transaction: transfer (simulated by logging to stdout).
class TransferTransaction : public Transaction {
public:
    void process() override {
        std::cout << "Processing transfer transaction" << std::endl;
    }
};
// Simple factory mapping a textual entry type to a transaction object.
class TransactionFactory {
public:
    // Returns a newly allocated transaction for "deposit", "withdrawal" or
    // "transfer", or nullptr for any other string. Ownership transfers to
    // the caller, who is responsible for deleting the returned object.
    static Transaction* create_transaction(const std::string& entry) {
        if (entry == "deposit") {
            return new DepositTransaction();
        } else if (entry == "withdrawal") {
            return new WithdrawalTransaction();
        } else if (entry == "transfer") {
            return new TransferTransaction();
        } else {
            return nullptr;
        }
    }
};
int main() {
Transaction* deposit = TransactionFactory::create_transaction("deposit");
deposit->process();
Transaction* withdrawal = TransactionFactory::create_transaction("withdrawal");
withdrawal->process();
Transaction* transfer = TransactionFactory::create_transaction("transfer");
transfer->process();
return 0;
} |
#!/bin/sh
#SBATCH --time=4:00:00
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=28
#SBATCH --exclusive
#SBATCH --partition=broadwell
#SBATCH --mem-per-cpu=2200M
#SBATCH --comment="cpufreqchown"
#SBATCH -J "lulesh_sacct"
#SBATCH -A p_readex
#SBATCH --reservation=p_readex_56
#SBATCH --comment="no_monitoring"
#SBATCH --output=static_average_1_nodes_new_1.out
#SBATCH --error=static_average_1_nodes_new_1.out
# Run from the project root (the job scripts live one level down).
cd ..
# Number of measurement repetitions of the plain (uninstrumented) binary.
REPEAT_COUNT=3
module purge
#source ./readex_env/set_env_plain.source
source ./readex_env/set_env_saf.source
# Make the cpufrequtils library available to the frequency helpers below.
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/sw/global/libraries/cpufrequtils/gcc5.3.0/lib/
#module load scorep-hdeem/2016-12-20-hdeem-2.2.20ms
# Logical CPUs to pin per node; matches --cpus-per-task in the SBATCH header.
NUM_CPUS=28
#export SCOREP_METRIC_PLUGINS="hdeem_plugin"
#export SCOREP_METRIC_HDEEM_PLUGIN=*
#export SCOREP_METRIC_HDEEM_PLUGIN_VERBOSE=WARN
#export SCOREP_METRIC_HDEEM_PLUGIN_CONNECTION=INBAND
#export SCOREP_METRIC_HDEEM_PLUGIN_TIMER=BMC
# Pin every core (0 .. NUM_CPUS-1) to the fixed frequency given as $1 in GHz,
# e.g. "change_frequency 2.4".
change_frequency() {
  for ((i = 0; i<$NUM_CPUS; i++))
  do
    /sw/global/libraries/cpufrequtils/gcc5.3.0/bin/cpufreq-set -c $i -f $1GHz
  done
}
# Diagnostic: print the configured uncore frequency limits and current value.
check_uncore_frequency() {
  x86a_read -n -i Intel_UNCORE_MIN_RATIO
  x86a_read -n -i Intel_UNCORE_MAX_RATIO
  x86a_read -n -i Intel_UNCORE_CURRENT_RATIO
}
#change_frequency 2.4
#x86a_write -n -c 0 -i Intel_UNCORE_MAX_RATIO -V 27
#x86a_write -n -c 1 -i Intel_UNCORE_MAX_RATIO -V 27
#x86a_write -n -c 0 -i Intel_UNCORE_MIN_RATIO -V 27
#x86a_write -n -c 1 -i Intel_UNCORE_MIN_RATIO -V 27
# Run the plain lulesh binary REPEAT_COUNT times; each run writes its
# energy samples into its own PLAIN_<n> directory via measure-rapl.
i=1
rm -rf PLAIN_*
while [ $i -le $REPEAT_COUNT ]; do
  mkdir PLAIN_$i
  # measure-rapl writes its output files into this directory.
  export MEASURE_RAPL_TARGET="PLAIN_$i"
  #srun --cpu_bind=verbose,sockets measure-rapl ./test/amg2013_plain -P 2 2 2 -r 40 40 40
  #srun --ntasks 8 --ntasks-per-node 1 --cpus-per-task 24 ./lulesh2.0_plain -i 500 -s 75
  srun measure-rapl ./lulesh2.0_plain -i 100 -s 150
  i=$(echo "$i + 1" | bc)
done
#export SCOREP_ENABLE_TRACING=true
#export SCOREP_ENABLE_PROFILING=false
#export SCOREP_TOTAL_MEMORY=3G
#export SCOREP_MPI_ENABLE_GROUPS=EXT
#cpu_freq_list=(1.2 2.0 2.4 2.5)
#uncore_freq_list=(14 22 26 30)
#for i in "${cpu_freq_list[@]}"
#do
# change_frequency $i
# for j in "${uncore_freq_list[@]}"
# do
#export MEASURE_RAPL_TARGET="TUNED_$sum"
# cpufreq-info
# x86a_write -n -c 0 -i Intel_UNCORE_MAX_RATIO -V $j
# x86a_write -n -c 1 -i Intel_UNCORE_MAX_RATIO -V $j
# x86a_write -n -c 0 -i Intel_UNCORE_MIN_RATIO -V $j
# x86a_write -n -c 1 -i Intel_UNCORE_MIN_RATIO -V $j
# check_uncore_frequency
#srun measure-rapl ./lulesh2.0_plain -i 250 -s 75
#srun -n 1 -c 24 --exclusive --mem-per-cpu 2500M -p haswell --reservation=READEX ./lulesh2.0_plain -i 50 -s 75
#sum=$sum + 1
# mpiexec --np 1 --npernode 1 --cpus-per-proc 24 ./lulesh2.0_saf -i 350 -s 75
#((sum++))
#echo $sum
# done
#done
# Accumulate wall time and energy for the measurement job steps via sacct,
# plus the per-file energy counters recorded by measure-rapl.
# NOTE(review): the loop runs for i = 1 .. REPEAT_COUNT-1 and increments i
# BEFORE reading PLAIN_$i, so sacct step $i is paired with directory
# PLAIN_$((i+1)), and step 0 / PLAIN_1 is never counted. This is consistent
# with the later division by (REPEAT_COUNT-1), suggesting the first run is
# deliberately discarded as warm-up - confirm this is intentional.
i=1
total_time_plain=0
total_energy_plain=0
total_cpu_energy_plain=0
while [ $i -lt $REPEAT_COUNT ]; do
  # echo "command sacct -j $SLURM_JOBID.$i --format="JobID,CPUTimeRAW,ConsumedEnergyRaw""
  times_energys=$(sacct -j $SLURM_JOBID.$i --format="JobID,CPUTimeRAW,ConsumedEnergyRaw")
  i=$(echo "$i + 1" | bc)
  # Split the sacct output on whitespace; indices 7 and 8 land on the data
  # row's CPUTimeRAW and ConsumedEnergyRaw fields (after the header lines).
  times_energys_array=(${times_energys[@]})
  time_step=${times_energys_array[7]}
  energy_step=${times_energys_array[8]}
  echo "Job Time: $time_step"
  echo "Job Energy: $energy_step"
  total_time_plain=$(echo "${total_time_plain} + ${time_step}" | bc)
  total_energy_plain=$(echo "${total_energy_plain} + ${energy_step}" | bc)
  # Sum the two energy columns from the last line of each measure-rapl file.
  for file in PLAIN_$i/*
  do
    values=$( tail -1 $file | awk -F'[ ,]' '{print int($1)" "int($2)}' )
    values=(${values[@]})
    total_cpu_energy_plain=$[ total_cpu_energy_plain + ${values[0]} + ${values[1]} ]
  done
done
#i=1
#total_time_rrl=0
#total_energy_rrl=0
#total_cpu_energy_rrl=0
#while [ $i -lt $sum ]; do
# echo "command sacct -j $SLURM_JOBID.$((i)) --format="JobID,CPUTimeRAW,ConsumedEnergyRaw""
# times_energys=$(sacct -j $SLURM_JOBID.$((i)) --format="JobID,CPUTimeRAW,ConsumedEnergyRaw")
# i=$(echo "$i + 1" | bc)
# times_energys_array=(${times_energys[@]})
# time_step=${times_energys_array[7]}
# energy_step=${times_energys_array[8]}
#total_time_rrl=$(echo "${total_time_rrl} + ${time_step}" | bc)
#total_energy_rrl=$(echo "${total_energy_rrl} + ${energy_step}" | bc)
# echo "Time for job ${time_step}"
# echo "Energy for job ${energy_step}"
#for file in TUNED_$i/*
#do
# values=$( tail -1 $file | awk -F'[ ,]' '{print int($1)" "int($2)}' )
#values=(${values[@]})
#total_cpu_energy_rrl=$[ total_cpu_energy_rrl + ${values[0]} + ${values[1]} ]
#total_cpu_energy_rrl=${values[0]} + ${values[1]}
#echo "Total Cpu Energy for one configuration : ${total_cpu_energy_rrl}"
#done
#done
echo "Total Plain Time = $total_time_plain, Total Plain Energy = $total_energy_plain"
# Average over the REPEAT_COUNT-1 job steps that were summed above.
avg_time_plain=$(echo "$total_time_plain/$((REPEAT_COUNT-1))" | bc)
avg_energy_plain=$(echo "$total_energy_plain/$((REPEAT_COUNT-1))" | bc)
echo "Average Plain Time=$avg_time_plain"
echo "Average Plain Energy=$avg_energy_plain"
# Clean up the per-run measurement directories.
rm -rf PLAIN_*
#rm -rf PLAIN_*
#rm -rf TUNED_*
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.permissions;
import org.apache.jena.graph.FrontsTriple;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.Triple;
import org.apache.jena.shared.AuthenticationRequiredException;
/**
 * The secured item interface is mixed into instances of secured objects by the
 * proxy. It provides the security context for the security checks as well as
 * several useful shorthand methods for common checks.
 */
public interface SecuredItem {
    /**
     * Utilities for SecuredItem implementations.
     */
    public static class Util {
        /**
         * Secured items are equivalent if their security evaluators and
         * modelIRIs are equal.
         *
         * @param si1
         *            A secured item to check
         * @param si2
         *            A second secured item to check
         * @return true if si1 is equivalent to si2.
         */
        public static boolean isEquivalent(final SecuredItem si1,
                final SecuredItem si2) {
            return si1.getSecurityEvaluator()
                    .equals(si2.getSecurityEvaluator())
                    && si1.getModelIRI().equals(si2.getModelIRI());
        }

        /**
         * Standard message for a permissions violation at the model level.
         *
         * @param modelURI the model the user may not access.
         * @return the formatted violation message.
         */
        public static String modelPermissionMsg(final Node modelURI) {
            return String.format("Model permissions violation: %s", modelURI);
        }

        /**
         * Standard message for a permissions violation at the triple level.
         *
         * @param modelURI the model containing the restricted triple.
         * @return the formatted violation message.
         */
        public static String triplePermissionMsg(final Node modelURI) {
            return String.format("Triple permissions violation: %s", modelURI);
        }
    }

    /**
     * @return true if the securedModel allows items to be created.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canCreate() throws AuthenticationRequiredException;

    /**
     * Return true if the triple can be created. If any s,p or o is SecNode.ANY
     * then this method must return false if there are any restrictions where
     * the remaining nodes are held constant and the ANY node is allowed to
     * vary.
     *
     * See canRead(Triple t)
     *
     * @param t
     *            The triple to check
     * @return true if the triple can be created.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canCreate(Triple t) throws AuthenticationRequiredException;

    /**
     * Return true if the fronted triple can be created.
     *
     * See canRead(Triple t)
     *
     * @param t
     *            The fronted triple to check
     * @return true if the triple can be created.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canCreate(FrontsTriple t)
            throws AuthenticationRequiredException;

    /**
     * @return true if the securedModel allows items to be deleted.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canDelete() throws AuthenticationRequiredException;

    /**
     * Return true if the triple can be deleted. If any s,p or o is SecNode.ANY
     * then this method must return false if there are any restrictions where
     * the remaining nodes are held constant and the ANY node is allowed to
     * vary.
     *
     * See canRead(Triple t)
     *
     * @param t
     *            The triple to check
     * @return true if the triple can be deleted.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canDelete(Triple t) throws AuthenticationRequiredException;

    /**
     * Return true if the fronted triple can be deleted.
     *
     * See canRead(Triple t)
     *
     * @param t
     *            The fronted triple to check
     * @return true if the triple can be deleted.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canDelete(FrontsTriple t)
            throws AuthenticationRequiredException;

    /**
     * @return true if the securedModel allows items to be read.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canRead() throws AuthenticationRequiredException;

    /**
     * Return true if the triple can be read. If any s,p or o is SecNode.ANY
     * then this method must return false if there are any restrictions where
     * the remaining nodes are held constant and the ANY node is allowed to
     * vary.
     *
     * (S, P, O) check if S,P,O can be read. (S, P, ANY) check if there are any
     * S,P,x restrictions. (S, ANY, P) check if there are any S,x,P
     * restrictions. (ANY, ANY, ANY) check if there are any restrictions on
     * reading.
     *
     * @param t
     *            The triple to check
     * @return true if the triple can be read.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canRead(Triple t) throws AuthenticationRequiredException;

    /**
     * Return true if the fronted triple can be read.
     *
     * @param t
     *            The fronted triple to check
     * @return true if the triple can be read.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canRead(FrontsTriple t)
            throws AuthenticationRequiredException;

    /**
     * @return true if the securedModel allows items to be updated.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canUpdate() throws AuthenticationRequiredException;

    /**
     * Return true if the triple can be updated. If any s,p or o is SecNode.ANY
     * then this method must return false if there are any restrictions where
     * the remaining nodes are held constant and the ANY node is allowed to
     * vary.
     *
     * See canRead(Triple t)
     *
     * @param from
     *            The triple that will be changed
     * @param to
     *            The resulting triple.
     * @return true if the from triple can be updated as the to triple.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canUpdate(Triple from, Triple to)
            throws AuthenticationRequiredException;

    /**
     * Return true if the fronted triple can be updated.
     *
     *
     * See canUpdate(Triple from, Triple to)
     *
     * @param from
     *            The fronted triple that will be changed
     * @param to
     *            The resulting fronted triple.
     * @return true if the from triple can be updated as the to triple.
     * @throws AuthenticationRequiredException
     *             if user is not authenticated and is required to be.
     */
    public boolean canUpdate(FrontsTriple from, FrontsTriple to)
            throws AuthenticationRequiredException;

    @Override
    public boolean equals(Object o);

    /**
     * @return the base item that is being secured.
     */
    public Object getBaseItem();

    /**
     * @return The IRI of the securedModel that the item belongs to.
     */
    public String getModelIRI();

    /**
     * @return The node representation of the securedModel IRI.
     */
    public Node getModelNode();

    /**
     * The SecurityEvaluator implementation that is being used to determine
     * access.
     *
     * @return The SecurityEvaluator implementation.
     */
    public SecurityEvaluator getSecurityEvaluator();

    /**
     * Return true if this secured item is equivalent to another secured item.
     * Generally implemented by calling SecuredItem.Util.isEquivalent
     *
     * @param securedItem
     *            the other secured item.
     * @return True if they are equivalent, false otherwise.
     */
    public boolean isEquivalent(SecuredItem securedItem);
}
import time
class GPIOEventSystem:
    """In-memory stand-in for a GPIO event interface.

    Tracks one callback per channel and lets callers fire simulated edge
    events against it.
    """

    def __init__(self):
        # channel -> zero-argument callable invoked when an event fires
        self.event_callbacks = {}

    def add_event_detect(self, channel, edge, bouncetime):
        """Pretend to arm edge detection on *channel* (logging only)."""
        print(f"Event detection added for channel {channel} on edge {edge} with bouncetime {bouncetime}ms")

    def add_event_callback(self, channel, callback):
        """Register *callback* to run whenever *channel* fires."""
        self.event_callbacks[channel] = callback

    def simulate_event(self, channel, edge):
        """Fire a simulated event on *channel*, invoking its callback if any."""
        handler = self.event_callbacks.get(channel)
        if handler is None:
            print(f"No callback associated with channel {channel}")
            return
        print(f"Event detected on channel {channel} with edge {edge}. Executing callback...")
        handler()
# Example usage: register a callback on channel 17 and fire two fake events.
def example_callback():
    # Trivial handler used to demonstrate callback dispatch.
    print("Example callback executed")

gpio_system = GPIOEventSystem()
gpio_system.add_event_detect(17, "BOTH", 300)
gpio_system.add_event_callback(17, example_callback)

# Simulate event detection
# NOTE(review): the sleeps only pace the demo output; simulate_event does not
# enforce the 300 ms bouncetime passed to add_event_detect above.
time.sleep(1)  # Simulating some time passing
gpio_system.simulate_event(17, "RISING")
time.sleep(0.5)  # Simulating bouncetime
gpio_system.simulate_event(17, "FALLING")
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. For additional information regarding
* copyright in this work, please see the NOTICE file in the top level
* directory of this distribution.
*
* Source file modified from the original ASF source; all changes made
* are also under Apache License.
*/
package org.tightblog.bloggerui.controller;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.context.request.RequestContextHolder;
import org.tightblog.bloggerui.model.*;
import org.tightblog.dao.*;
import org.tightblog.domain.*;
import org.tightblog.service.EmailService;
import org.tightblog.service.URLService;
import org.tightblog.service.UserManager;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.RollbackException;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import java.security.Principal;
import java.time.Instant;
import java.util.*;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@RestController
public class UserController {
private static final Pattern PWD_PATTERN =
Pattern.compile("^(?=.*[0-9])(?=.*[a-z])(?=.*[A-Z])(?=.*[@#$%^&+=])(?=\\S+$).{8,20}$");
private WeblogDao weblogDao;
private UserManager userManager;
private UserWeblogRoleDao userWeblogRoleDao;
private UserDao userDao;
private UserCredentialsDao userCredentialsDao;
private EmailService emailService;
private MessageSource messages;
private WebloggerPropertiesDao webloggerPropertiesDao;
private WeblogEntryCommentDao weblogEntryCommentDao;
private URLService urlService;
private Environment environment;
private StaticProperties staticProperties;
@PersistenceContext
private EntityManager entityManager;
    /**
     * Constructor injection of the DAOs and services this controller uses.
     * (entityManager is field-injected via @PersistenceContext, and
     * staticProperties is built in init() after construction.)
     */
    @Autowired
    public UserController(WeblogDao weblogDao, UserManager userManager,
                          UserWeblogRoleDao userWeblogRoleDao, MessageSource messageSource,
                          EmailService emailService, UserDao userDao,
                          UserCredentialsDao userCredentialsDao, URLService urlService,
                          WeblogEntryCommentDao weblogEntryCommentDao,
                          WebloggerPropertiesDao webloggerPropertiesDao,
                          Environment environment) {
        this.weblogDao = weblogDao;
        this.webloggerPropertiesDao = webloggerPropertiesDao;
        this.userManager = userManager;
        this.userWeblogRoleDao = userWeblogRoleDao;
        this.userDao = userDao;
        this.userCredentialsDao = userCredentialsDao;
        this.weblogEntryCommentDao = weblogEntryCommentDao;
        this.urlService = urlService;
        this.emailService = emailService;
        this.messages = messageSource;
        this.environment = environment;
    }
    @PostConstruct
    public void init() {
        // Cache static configuration for the UI; MFA defaults to off when
        // the "mfa.enabled" property is absent.
        staticProperties = new StaticProperties();
        staticProperties.setMfaEnabled(environment.getProperty("mfa.enabled", Boolean.class, false));
    }
    /**
     * Admin view: every user keyed by id, formatted for selection lists.
     */
    @GetMapping(value = "/tb-ui/admin/rest/useradmin/userlist")
    public Map<String, String> getUserEditList() {
        return createUserMap(userDao.findAll());
    }
    /**
     * Admin view: accounts still awaiting registration approval.
     */
    @GetMapping(value = "/tb-ui/admin/rest/useradmin/registrationapproval")
    public List<User> getRegistrationsNeedingApproval() {
        return userDao.findUsersToApprove();
    }
@PostMapping(value = "/tb-ui/admin/rest/useradmin/registrationapproval/{id}/approve")
public void approveRegistration(@PathVariable String id, HttpServletResponse response) {
User acceptedUser = userDao.findByIdOrNull(id);
if (acceptedUser != null) {
if (!UserStatus.ENABLED.equals(acceptedUser.getStatus())) {
acceptedUser.setStatus(UserStatus.ENABLED);
userDao.saveAndFlush(acceptedUser);
userDao.evictUser(acceptedUser);
emailService.sendRegistrationApprovedNotice(acceptedUser);
}
response.setStatus(HttpServletResponse.SC_OK);
} else {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
}
@PostMapping(value = "/tb-ui/admin/rest/useradmin/registrationapproval/{id}/reject")
public void rejectRegistration(@PathVariable String id, HttpServletResponse response) {
User rejectedUser = userDao.findByIdOrNull(id);
if (rejectedUser != null) {
emailService.sendRegistrationRejectedNotice(rejectedUser);
userManager.removeUser(rejectedUser);
response.setStatus(HttpServletResponse.SC_OK);
} else {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
}
@GetMapping(value = "/tb-ui/authoring/rest/weblog/{weblogId}/potentialmembers")
@PreAuthorize("@securityService.hasAccess(#p.name, T(org.tightblog.domain.Weblog), #weblogId, 'OWNER')")
public Map<String, String> getPotentialNewBlogMembers(@PathVariable String weblogId, Principal p) {
Weblog weblog = weblogDao.getOne(weblogId);
// member list excludes inactive accounts
List<User> potentialUsers = userDao.findByStatusEnabled();
// filter out people already members
ListIterator<User> potentialIter = potentialUsers.listIterator();
List<UserWeblogRole> currentUserList = userWeblogRoleDao.findByWeblog(weblog);
while (potentialIter.hasNext() && !currentUserList.isEmpty()) {
User su = potentialIter.next();
ListIterator<UserWeblogRole> alreadyIter = currentUserList.listIterator();
while (alreadyIter.hasNext()) {
UserWeblogRole au = alreadyIter.next();
if (su.getId().equals(au.getUser().getId())) {
potentialIter.remove();
alreadyIter.remove();
break;
}
}
}
return createUserMap(potentialUsers);
}
private Map<String, String> createUserMap(List<User> users) {
Map<String, String> userMap = new TreeMap<>();
for (User user : users) {
userMap.put(user.getId(), user.getScreenName() + " (" + user.getEmailAddress() + ")");
}
return userMap.entrySet().stream().sorted(Map.Entry.comparingByValue())
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue,
(e1, e2) -> e2, LinkedHashMap::new));
}
    /**
     * Admin fetch of a user together with login credentials; responds 404
     * (and returns null) when the id is unknown.
     */
    @GetMapping(value = "/tb-ui/admin/rest/useradmin/user/{id}")
    public UserData getUserData(@PathVariable String id, HttpServletResponse response) {
        User user = userDao.findByIdOrNull(id);
        if (user != null) {
            UserData data = new UserData();
            UserCredentials creds = userCredentialsDao.findByUserName(user.getUserName());
            data.setUser(user);
            data.setCredentials(creds);
            return data;
        } else {
            response.setStatus(HttpServletResponse.SC_NOT_FOUND);
            return null;
        }
    }
@GetMapping(value = "/tb-ui/authoring/rest/userprofile/{id}")
public User getProfileData(@PathVariable String id, Principal p, HttpServletResponse response) {
User user = userDao.findByIdOrNull(id);
User authenticatedUser = userDao.findEnabledByUserName(p.getName());
if (user != null && user.getId().equals(authenticatedUser.getId())) {
return user;
} else {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
return null;
}
}
    /**
     * Self-service registration endpoint.
     *
     * Validation failures return 400 with the violations; a disabled
     * registration policy returns 403 (except for the very first account,
     * which is always allowed). Any account after the first is created in
     * REGISTERED state with an activation code and sent an activation email;
     * the first account becomes the enabled admin immediately.
     */
    @PostMapping(value = "/tb-ui/register/rest/registeruser")
    public ResponseEntity<?> registerUser(@Valid @RequestBody UserData newData, Locale locale, HttpServletResponse response) {
        List<Violation> errors = validateUser(null, newData, true, locale);
        if (errors.size() > 0) {
            return ValidationErrorResponse.badRequest(errors);
        }
        long userCount = userDao.count();
        WebloggerProperties.RegistrationPolicy option = webloggerPropertiesDao.findOrNull().getRegistrationPolicy();
        if (userCount == 0 || !WebloggerProperties.RegistrationPolicy.DISABLED.equals(option)) {
            boolean mustActivate = userCount > 0;
            if (mustActivate) {
                // Non-first accounts must confirm via emailed activation code.
                newData.getUser().setActivationCode(UUID.randomUUID().toString());
                newData.getUser().setStatus(UserStatus.REGISTERED);
            } else {
                // initial user is the Admin, is automatically enabled.
                newData.getUser().setStatus(UserStatus.ENABLED);
            }
            User user = new User();
            user.setUserName(newData.getUser().getUserName());
            user.setDateCreated(Instant.now());
            ResponseEntity re = saveUser(user, newData, null, response, true);
            // Only send the activation mail if the save actually succeeded.
            if (re.getStatusCode() == HttpStatus.OK && mustActivate) {
                UserData data = (UserData) re.getBody();
                if (data != null) {
                    emailService.sendUserActivationEmail(data.getUser());
                }
            }
            return re;
        } else {
            return new ResponseEntity<>(HttpStatus.FORBIDDEN);
        }
    }
    /**
     * Returns the current HTTP session id (forces session creation on first
     * contact).
     */
    @GetMapping(value = "/tb-ui/bootstrap")
    public @ResponseBody String bootstrap() {
        return RequestContextHolder.currentRequestAttributes().getSessionId();
    }
    /**
     * Returns the current HTTP session id for the login flow.
     */
    @GetMapping(value = "/tb-ui/login/rest/sessionid")
    public @ResponseBody String getSessionId() {
        return RequestContextHolder.currentRequestAttributes().getSessionId();
    }
@PostMapping(value = "/tb-ui/authoring/rest/userprofile/{id}")
public ResponseEntity<?> updateUserProfile(@PathVariable String id, @Valid @RequestBody UserData newData, Principal p,
Locale locale, HttpServletResponse response) {
User user = userDao.findByIdOrNull(id);
User authenticatedUser = userDao.findEnabledByUserName(p.getName());
if (user != null && user.getId().equals(authenticatedUser.getId())) {
List<Violation> errors = validateUser(null, newData, false, locale);
if (errors.size() > 0) {
return ValidationErrorResponse.badRequest(errors);
}
return saveUser(user, newData, p, response, false);
} else {
return new ResponseEntity<>(HttpStatus.NOT_FOUND);
}
}
    /**
     * Admin update of an arbitrary user; validation failures respond 400.
     * NOTE(review): user may be null when the id is unknown; validateUser's
     * handling of a null first argument is not visible here - confirm it
     * reports a violation rather than throwing.
     */
    @PutMapping(value = "/tb-ui/admin/rest/useradmin/user/{id}")
    public ResponseEntity<?> updateUser(@PathVariable String id, @Valid @RequestBody UserData newData, Principal p,
                                        Locale locale, HttpServletResponse response) {
        User user = userDao.findByIdOrNull(id);
        List<Violation> errors = validateUser(user, newData, false, locale);
        if (errors.size() > 0) {
            return ValidationErrorResponse.badRequest(errors);
        }
        return saveUser(user, newData, p, response, false);
    }
    /**
     * List all membership roles for a weblog (owner-only endpoint).
     */
    @GetMapping(value = "/tb-ui/authoring/rest/weblog/{weblogId}/members")
    @PreAuthorize("@securityService.hasAccess(#p.name, T(org.tightblog.domain.Weblog), #weblogId, 'OWNER')")
    public List<UserWeblogRole> getWeblogMembers(@PathVariable String weblogId, Principal p) {
        Weblog weblog = weblogDao.getOne(weblogId);
        return userWeblogRoleDao.findByWeblog(weblog);
    }
    /**
     * Grant a weblog role to a user. Only a global admin may do this; any
     * missing entity or insufficient privilege responds 403.
     */
    @PostMapping(value = "/tb-ui/authoring/rest/weblog/{weblogId}/user/{userId}/role/{role}/attach")
    public ResponseEntity<String> addUserToWeblog(@PathVariable String weblogId, @PathVariable String userId,
                                                  @PathVariable WeblogRole role, Principal p, Locale locale) {
        User requestor = userDao.findEnabledByUserName(p.getName());
        User newMember = userDao.findByIdOrNull(userId);
        Weblog weblog = weblogDao.findById(weblogId).orElse(null);
        if (weblog != null && newMember != null && requestor != null &&
                requestor.hasEffectiveGlobalRole(GlobalRole.ADMIN)) {
            userManager.grantWeblogRole(newMember, weblog, role);
            return SuccessResponse.textMessage(messages.getMessage("members.userAdded", null, locale));
        } else {
            return ResponseEntity.status(HttpServletResponse.SC_FORBIDDEN).build();
        }
    }
    /**
     * Bulk-update membership roles for a weblog (admin-only). Rejects the
     * change with 400 if it would leave the weblog without any OWNER.
     * NOTE(review): the error message key is "oneAdminRequired" while the
     * check is for OWNER roles - confirm the wording matches the intent.
     */
    @PostMapping(value = "/tb-ui/authoring/rest/weblog/{weblogId}/memberupdate")
    public ResponseEntity<?> updateWeblogMembership(@PathVariable String weblogId, Principal p, Locale locale,
                                                    @RequestBody List<UserWeblogRole> uwrs) {
        Weblog weblog = weblogDao.findById(weblogId).orElse(null);
        User user = userDao.findEnabledByUserName(p.getName());
        if (user != null && weblog != null && user.hasEffectiveGlobalRole(GlobalRole.ADMIN)) {
            // must remain at least one admin
            List<UserWeblogRole> owners = uwrs.stream()
                    .filter(r -> r.getWeblogRole().equals(WeblogRole.OWNER))
                    .collect(Collectors.toList());
            if (owners.size() < 1) {
                return ValidationErrorResponse.badRequest(
                        messages.getMessage("members.oneAdminRequired", null, locale));
            }
            // one iteration for each line (user) in the members table
            for (UserWeblogRole uwr : uwrs) {
                // NOBLOGNEEDED acts as the "remove membership" sentinel.
                if (WeblogRole.NOBLOGNEEDED.equals(uwr.getWeblogRole())) {
                    userManager.deleteUserWeblogRole(uwr);
                } else {
                    userManager.grantWeblogRole(
                            uwr.getUser(), uwr.getWeblog(), uwr.getWeblogRole());
                }
            }
            return SuccessResponse.textMessage(
                    messages.getMessage("members.membersChanged", null, locale));
        } else {
            return new ResponseEntity<>(HttpStatus.FORBIDDEN);
        }
    }
/**
 * Persist profile/credential changes for a user and return the updated data.
 * Shared by the add and update endpoints.
 *
 * Rules enforced here:
 * - activation code only settable while the account is not yet ENABLED
 * - on add: the very first account becomes ADMIN, later ones BLOGCREATOR or
 *   BLOGGER depending on the "users create blogs" site setting
 * - on update: users cannot change their own role or status
 *
 * @param user     user to save (null yields 404)
 * @param newData  incoming profile and optional credentials
 * @param p        authenticated caller
 * @param response receives SC_OK on successful persistence
 * @param add      true when creating a new account, false when updating
 * @return 200 with the refreshed UserData, 404 if user is null,
 *         409 on a persistence conflict
 */
private ResponseEntity<?> saveUser(User user, UserData newData, Principal p, HttpServletResponse response, boolean add) {
    if (user != null) {
        user.setScreenName(newData.getUser().getScreenName().trim());
        user.setEmailAddress(newData.getUser().getEmailAddress().trim());
        if (!UserStatus.ENABLED.equals(user.getStatus()) && StringUtils.isNotEmpty(
                newData.getUser().getActivationCode())) {
            user.setActivationCode(newData.getUser().getActivationCode());
        }
        if (add) {
            user.setStatus(newData.getUser().getStatus());
            if (userDao.count() == 0) {
                // first person in is always an admin
                user.setGlobalRole(GlobalRole.ADMIN);
            } else {
                user.setGlobalRole(webloggerPropertiesDao.findOrNull().isUsersCreateBlogs() ?
                        GlobalRole.BLOGCREATOR : GlobalRole.BLOGGER);
            }
        } else {
            // users can't alter own roles or status
            if (!user.getUserName().equals(p.getName())) {
                user.setGlobalRole(newData.getUser().getGlobalRole());
                user.setStatus(newData.getUser().getStatus());
            }
        }
        try {
            userDao.saveAndFlush(user);
            // evict the cached copy so later reads see the new values
            userDao.evictUser(user);
            // reset password if set
            if (newData.getCredentials() != null) {
                UserCredentials credentials = newData.getCredentials();
                if (!StringUtils.isEmpty(credentials.getPasswordText())) {
                    userManager.updateCredentials(user.getId(), credentials.getPasswordText());
                }
                // reset MFA secret if requested
                if (credentials.isEraseMfaSecret()) {
                    userCredentialsDao.eraseMfaCode(user.getId());
                }
            }
            response.setStatus(HttpServletResponse.SC_OK);
        } catch (RollbackException e) {
            return ResponseEntity.status(HttpServletResponse.SC_CONFLICT).body("Persistence Problem");
        }
    } else {
        return new ResponseEntity<>(HttpStatus.NOT_FOUND);
    }
    UserData data = new UserData();
    data.setUser(user);
    UserCredentials creds = userCredentialsDao.findByUserName(user.getUserName());
    data.setCredentials(creds);
    return ResponseEntity.ok(data);
}
/**
 * Validate a user add/update request and collect all violations.
 *
 * Checks, in order: uniqueness of username, screen name and email (a match
 * belonging to the same user id is allowed); legality of the status
 * transition relative to the user's current status; and password rules
 * (confirmation match, complexity pattern, and presence when adding).
 *
 * @param currentUser existing user being updated, or null on add
 * @param data        incoming profile and optional credentials
 * @param isAdd       true when creating a new account
 * @param locale      locale for violation messages
 * @return all violations found (empty list when valid)
 */
private List<Violation> validateUser(User currentUser, UserData data, boolean isAdd, Locale locale) {
    List<Violation> errors = new ArrayList<>();
    // uniqueness checks: a hit is OK only if it is this same user
    User testHasUserName = userDao.findByUserName(data.getUser().getUserName());
    if (testHasUserName != null && !testHasUserName.getId().equals(data.getUser().getId())) {
        errors.add(new Violation(messages.getMessage("error.add.user.userNameInUse",
                null, locale)));
    }
    User testHasScreenName = userDao.findByScreenName(data.getUser().getScreenName());
    if (testHasScreenName != null && !testHasScreenName.getId().equals(data.getUser().getId())) {
        errors.add(new Violation(messages.getMessage("error.add.user.screenNameInUse",
                null, locale)));
    }
    User testHasEmail = userDao.findByEmailAddress(data.getUser().getEmailAddress());
    if (testHasEmail != null && !testHasEmail.getId().equals(data.getUser().getId())) {
        errors.add(new Violation(messages.getMessage("error.add.user.emailAddressInUse",
                null, locale)));
    }
    if (currentUser != null) {
        UserStatus currentStatus = currentUser.getStatus();
        // allowed transitions: ENABLED->DISABLED, DISABLED->ENABLED,
        // REGISTERED/EMAILVERIFIED->ENABLED; everything else is rejected
        if (currentStatus != data.getUser().getStatus()) {
            switch (currentStatus) {
                case ENABLED:
                    if (data.getUser().getStatus() != UserStatus.DISABLED) {
                        errors.add(new Violation(messages.getMessage(
                                "error.useradmin.enabled.only.disabled", null, locale)));
                    }
                    break;
                case DISABLED:
                    if (data.getUser().getStatus() != UserStatus.ENABLED) {
                        errors.add(new Violation(messages.getMessage(
                                "error.useradmin.disabled.only.enabled", null, locale)));
                    }
                    break;
                case REGISTERED:
                case EMAILVERIFIED:
                    if (data.getUser().getStatus() != UserStatus.ENABLED) {
                        errors.add(new Violation(messages.getMessage(
                                "error.useradmin.nonenabled.only.enabled", null, locale)));
                    }
                    break;
                default:
            }
        }
    }
    if (data.getCredentials() != null) {
        UserCredentials credentials = data.getCredentials();
        String maybePassword = credentials.getPasswordText();
        if (!StringUtils.isEmpty(maybePassword)) {
            if (!maybePassword.equals(credentials.getPasswordConfirm())) {
                errors.add(new Violation(messages.getMessage(
                        "error.add.user.passwordConfirmFail", null, locale)));
            } else {
                if (!PWD_PATTERN.matcher(maybePassword).matches()) {
                    errors.add(new Violation(messages.getMessage(
                            "error.add.user.passwordComplexityFail", null, locale)));
                }
            }
        } else {
            if (!StringUtils.isEmpty(credentials.getPasswordConfirm())) {
                // confirm provided but password field itself not filled out
                errors.add(new Violation(
                        messages.getMessage("error.add.user.passwordConfirmFail", null, locale)));
            }
        }
        // new accounts must supply a password
        if (isAdd && StringUtils.isEmpty(credentials.getPasswordText())) {
            errors.add(new Violation(
                    messages.getMessage("error.add.user.missingPassword", null, locale)));
        }
    }
    return errors;
}
/**
 * Admin endpoint: list the weblog roles held by a given user.
 * Each role is detached from the persistence context so it can be mutated
 * into a DTO (absolute weblog URL added, back-reference to the user nulled
 * to keep the JSON payload small and acyclic).
 *
 * @param id       id of the user whose weblogs are requested
 * @param response receives 404 when the user does not exist
 * @return the user's weblog roles, or null after a 404
 */
@GetMapping(value = "/tb-ui/admin/rest/useradmin/user/{id}/weblogs")
public List<UserWeblogRole> getUsersWeblogs(@PathVariable String id, HttpServletResponse response) {
    User user = userDao.findByIdOrNull(id);
    if (user == null) {
        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
        return null;
    }
    List<UserWeblogRole> uwrs = userWeblogRoleDao.findByUser(user);
    for (UserWeblogRole uwr : uwrs) {
        entityManager.detach(uwr); // uwr now a DTO
        uwr.getWeblog().setAbsoluteURL(urlService.getWeblogURL(uwr.getWeblog()));
        uwr.setUser(null);
    }
    return uwrs;
}
/**
 * List the weblog roles of the currently authenticated user.
 * Same DTO treatment as getUsersWeblogs(), plus the count of unapproved
 * comments per weblog so the UI can show a moderation badge.
 *
 * @param p        authenticated caller
 * @param response receives 404 when no enabled account matches the principal
 * @return the caller's weblog roles, or null after a 404
 */
@GetMapping(value = "/tb-ui/authoring/rest/loggedinuser/weblogs")
public List<UserWeblogRole> getLoggedInUsersWeblogs(Principal p, HttpServletResponse response) {
    User user = userDao.findEnabledByUserName(p.getName());
    if (user == null) {
        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
        return null;
    }
    List<UserWeblogRole> uwrs = userWeblogRoleDao.findByUser(user);
    for (UserWeblogRole uwr : uwrs) {
        entityManager.detach(uwr); // uwr now a DTO
        uwr.getWeblog().setAbsoluteURL(urlService.getWeblogURL(uwr.getWeblog()));
        uwr.getWeblog().setUnapprovedComments(
                weblogEntryCommentDao.countByWeblogAndStatusUnapproved(uwr.getWeblog()));
        uwr.setUser(null);
    }
    return uwrs;
}
/**
 * Toggle comment-notification emails on one of the caller's weblog roles.
 * Responds 200 on success, 404 when the role doesn't exist or doesn't
 * belong to the caller.
 *
 * @param id            id of the UserWeblogRole to modify
 * @param emailComments desired notification setting
 * @param p             authenticated caller (must own the role)
 * @param response      carries the resulting HTTP status
 */
@PostMapping(value = "/tb-ui/authoring/rest/weblogrole/{id}/emails/{emailComments}")
public void setEmailCommentsForWeblog(@PathVariable String id, @PathVariable boolean emailComments, Principal p,
                                      HttpServletResponse response) {
    UserWeblogRole role = userWeblogRoleDao.findByIdOrNull(id);
    // guard clause: unknown role or not owned by the caller -> 404
    if (role == null || !role.getUser().getUserName().equals(p.getName())) {
        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
        return;
    }
    role.setEmailComments(emailComments);
    userWeblogRoleDao.saveAndFlush(role);
    response.setStatus(HttpServletResponse.SC_OK);
}
/**
 * Let the caller resign from a weblog by deleting one of their own roles.
 * Responds 200 on success, 404 when the role doesn't exist or doesn't
 * belong to the caller.
 *
 * @param id       id of the UserWeblogRole to delete
 * @param p        authenticated caller (must own the role)
 * @param response carries the resulting HTTP status
 */
@PostMapping(value = "/tb-ui/authoring/rest/weblogrole/{id}/detach")
public void resignFromWeblog(@PathVariable String id, Principal p, HttpServletResponse response) {
    UserWeblogRole role = userWeblogRoleDao.findByIdOrNull(id);
    // guard clause: unknown role or not owned by the caller -> 404
    if (role == null || !role.getUser().getUserName().equals(p.getName())) {
        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
        return;
    }
    userManager.deleteUserWeblogRole(role);
    response.setStatus(HttpServletResponse.SC_OK);
}
/**
 * Expose static server properties to the registration UI.
 */
@GetMapping(value = "/tb-ui/register/rest/useradminmetadata")
public StaticProperties getStaticProperties() {
    return staticProperties;
}
/**
 * Same payload as getStaticProperties(), exposed under the authoring path
 * so the authoring UI can fetch it from behind its own security rules.
 */
@GetMapping(value = "/tb-ui/authoring/rest/server/staticproperties")
public StaticProperties getStaticProperties2() {
    return staticProperties;
}
}
|
import React from 'react';
// Root component: holds the article and comment collections in state and
// composes the read-only lists with the two creation forms.
class App extends React.Component {
  constructor(props) {
    super(props);
    // NOTE(review): state is initialized empty and never updated in this
    // file — presumably populated elsewhere or still to be wired up; confirm.
    this.state = {
      articles: [],
      comments: []
    };
  }
  render() {
    return (
      <div className="app">
        <h1>My Blog</h1>
        <Articles articles={this.state.articles} />
        <Comments comments={this.state.comments} />
        <ArticleForm />
        <CommentForm />
      </div>
    );
  }
}
const Articles = (props) => {
return (
<div className="articles">
{props.articles.map((article) => (
<div key={article.id} className="article">
<h2>{article.title}</h2>
<p>{article.text}</p>
</div>
))}
</div>
);
};
const ArticleForm = () => (
<form>
<h2>Create Article</h2>
<input type="text" name="title" placeholder="Article Title" />
<textarea name="text" placeholder="Article Text" />
<input type="submit" value="Submit" />
</form>
);
const Comments = (props) => {
return (
<div className="comments">
{props.comments.map((comment) => (
<div key={comment.id} className="comment">
<p>{comment.text}</p>
<p>Commenter: {comment.commenter}</p>
</div>
))}
</div>
);
};
const CommentForm = () => (
<form>
<h2>Create Comment</h2>
<textarea name="text" placeholder="Comment Text" />
<input type="text" name="commenter" placeholder="Commenter Name" />
<input type="submit" value="Submit" />
</form>
);
export default App; |
package com.peony.demo.config.core;
/**
 * Contract for configuration entries that expose a unique identifier.
 * Created by jiangmin.wu on 17/7/20.
 *
 * @param <K> type of the entry's unique id
 */
public interface IConfig<K> {
    /** @return the unique identifier of this configuration entry */
    K getId();
}
|
#ifndef INCLUDED_CORE_PROGRAM_STATE_H
#define INCLUDED_CORE_PROGRAM_STATE_H
#include "platform/singleton.h"
#include "platform/i_platform.h"
#include "actor.h"
#include "soldier_properties.h"
#include "platform/export.h"
#include "game_modes.h"
namespace core {
// Per-client bookkeeping shared between server and client program states.
struct ClientData
{
    int32_t mClientId;                // client's id (obtained from the server, see ProgramState::mClientId)
    std::string mClientName;          // display name supplied by the client
    int32_t mControlledLocalPlayerId; // local player this client controls
    int32_t mClientActorGUID;         // GUID of the actor owned by this client
    SoldierProperties mSoldierProperties;
    int32_t mKill;                    // score counters
    int32_t mDeath;
    int32_t mAssist; // not used yet
    int32_t mScore;
    bool mReady;
    bool mConnected;
    ClientData();
    ClientData( int32_t clientId, std::string const& clientName, int32_t controlledLocalPlayerId );
    // Archive-based serialization hook; body below streams every member.
    template<class Archive>
    void serialize( Archive& ar, const unsigned int version );
};
// Serialization hook (Boost-style archive, judging from the
// REAPING2_CLASS_EXPORT_KEY2 registration below — TODO confirm).
// Streams every member in declaration order; keep this list in sync with the
// struct, since reordering breaks compatibility with archived data.
template<class Archive>
void ClientData::serialize( Archive& ar, const unsigned int version )
{
    ar& mClientId;
    ar& mClientName;
    ar& mControlledLocalPlayerId;
    ar& mClientActorGUID;
    ar& mSoldierProperties;
    ar& mKill;
    ar& mDeath;
    ar& mAssist;
    ar& mScore;
    ar& mReady;
    ar& mConnected;
}
// Global (singleton) snapshot of the running program's networking/game state.
class ProgramState : public platform::Singleton<ProgramState>
{
    //TODO: should be split into serverstate clientstate
    friend class platform::Singleton<ProgramState>;
public:
    // model values exposed to the platform layer (UI/script bindings — TODO confirm)
    platform::ModelValue mProgramStateModel;
    platform::ModelValue mIsClientModel;
    platform::ModelValue mIsHostModel;
    int32_t mIsClient;
    ProgramState();
    enum Mode
    {
        Local,
        Client,
        Server
    };
    // main type of this instance (local,client,server)
    Mode mMode;
    enum GameState
    {
        NotRunning,
        Running
    };
    // which mode is the game at currently. (running etc.)
    GameState mGameState;
    void SetMode( ProgramState::Mode mode );
    // is this client connected to server
    bool mClientConnected;
    // current client's name
    std::string mClientName;
    // current client's id (got from server in exchange for name)
    int32_t mClientId;
    // currently controlled actor for client //TODO: need to find a better place
    int32_t mControlledActorGUID;
    SoldierProperties mSoldierProperties;
    // target servers ip
    std::string mServerIp;
    // a representation of game mode. ("ctf" or "ffa" or any later extension)
    GameModes::Type mGameMode;
    typedef std::vector<ClientData> ClientDatas_t;
    // currently connected clients to server
    ClientDatas_t mClientDatas;
    // this client is the host
    int32_t mIsHost;
    // linear lookups over mClientDatas by the given key
    Opt<ClientData> FindClientDataByClientId( int32_t clientId );
    Opt<ClientData> FindClientDataByActorGUID( int32_t actorGUID );
    Opt<ClientData> FindClientDataByClientName( std::string clientName );
};
} // namespace core
REAPING2_CLASS_EXPORT_KEY2( __core__ClientData, ::core::ClientData, "client_data" );
#endif//INCLUDED_CORE_PROGRAM_STATE_H
|
<filename>src/js/collections.js
import '../scss/collections.scss';
import Header from '../components/header/index';
import NoContentTip from '../components/no_content_tip/index';
import NewsItem from '../components/news_item/index';
import tools from '../utils/tools';
// Shared page components.
const header = new Header(),
    noContentTip = new NoContentTip(),
    newsItem = new NewsItem();
// "My collections" page: renders news items saved in localStorage under
// the 'collections' key and navigates to the detail page on click.
const App = ($) => {
    const $app = $('#app'),
        $list = $app.children('.list'),
        // collections is an object keyed by uniquekey (see _arrangeDatas)
        collections = JSON.parse(localStorage.getItem('collections'));
    const init = () => {
        render().then(bindEvent);
    }
    // Render header, then either the empty-state tip or the saved items.
    const render = () => {
        return new Promise((resolve, reject) => {
            _renderHeader();
            if (!collections || Object.keys(collections).length === 0) {
                _renderNoContentTip('没有收藏新闻');
            } else {
                _renderList(collections);
            }
            resolve();
        });
    }
    // Delegate clicks on list items (list is built after bind, so delegate).
    const bindEvent = () => {
        $list.on('click', '.news-item', toDetailPage);
    }
    const _renderHeader = () => {
        $app.append(header.tpl({
            title: '我的收藏',
            showLeftIcon: true,
            showRightIcon: false
        }));
    }
    const _renderNoContentTip = (text) => {
        $app.append(noContentTip.tpl(text));
    }
    const _renderList = (data) => {
        $list.append(newsItem.tpl(_arrangeDatas(data)));
        tools.thumbShow($('.news-thumb'));
    }
    // Stash the clicked item as 'target' and navigate to the detail page.
    function toDetailPage () {
        const $this = $(this),
            url = $this.attr('data-url'),
            uniquekey = $this.attr('data-uniquekey');
        localStorage.setItem('target', JSON.stringify(collections[uniquekey]));
        window.location.href = `detail.html?news_url=${url}&uniquekey=${uniquekey}`;
    }
    // Flatten the keyed collections object into an array for the template.
    function _arrangeDatas (data) {
        let _arr = [];
        for (let key in data) {
            _arr.push(data[key]);
        }
        return _arr;
    }
    init();
}
App(Zepto);
|
<filename>src/main/scala/com/github/kright/habrareader/utils/DateUtils.scala<gh_stars>0
package com.github.kright.habrareader.utils
import java.text.SimpleDateFormat
import java.util.{Calendar, Date}
import io.circe.syntax._
import io.circe.{Decoder, Encoder}
/**
 * Date helpers plus circe codecs that (de)serialize Date as a formatted string.
 * A fresh SimpleDateFormat is built per call, which sidesteps its lack of
 * thread safety at the cost of a small allocation.
 */
object DateUtils {
  //todo may be rm this, store date as simple Long number in milliseconds
  def now: Date = Calendar.getInstance().getTime
  def convertToStr(date: Date, fmt: String = "yyyy-MM-dd HH:mm:ss.SSS Z"): String = new SimpleDateFormat(fmt).format(date)
  def currentDateStr(fmt: String = "yyyy-MM-dd HH:mm:ss.SSS Z"): String = convertToStr(now, fmt)
  def convertToDate(date: String, fmt: String = "yyyy-MM-dd HH:mm:ss.SSS Z"): Date = new SimpleDateFormat(fmt).parse(date)
  // Returns the later of the two dates (ties return `right`).
  def getLast(left: Date, right: Date): Date =
    if (left.after(right))
      left
    else
      right
  // circe codecs: encode via convertToStr's default format, decode likewise.
  implicit val dateEncoder: Encoder[Date] = (date: Date) => DateUtils.convertToStr(date).asJson
  implicit val dateDecoder: Decoder[Date] = Decoder[String].map(DateUtils.convertToDate(_))
}
|
from flask import Flask

# Minimal single-route Flask application.
app = Flask(__name__)

@app.route('/')
def index():
    # Root endpoint: plain-text greeting.
    return 'Hello, World!'

if __name__ == '__main__':
    # Start the built-in development server (not intended for production).
    app.run()
<reponame>coderextreme/XREngine
import { $indexBytes, $indexType, $serializeShadow, $storeBase, $storeFlattened, $tagStore, createShadow } from "./Storage.js"
import { $componentMap, addComponent, hasComponent } from "./Component.js"
import { $entityArray, $entitySparseSet, addEntity, eidToWorld } from "./Entity.js"
import { $localEntities } from "./World.js"
// Deserialization strategies understood by defineDeserializer:
//  REPLACE - write values onto the packet's original entity ids
//  APPEND  - always create fresh local entities for incoming ids
//  MAP     - translate packet eids to local eids (creating on first sight)
export const DESERIALIZE_MODE = {
  REPLACE: 0,
  APPEND: 1,
  MAP: 2
}
// Set when component storage is resized; (de)serializers re-canonicalize
// their cached property lists on the next call.
let resized = false
export const setSerializationResized = v => { resized = v }
// Normalize a serialization target (array of components, flattened property
// stores, or QueryChanged wrappers) into a flat property list plus a map of
// shadow symbols used later for change detection.
const canonicalize = (target) => {
  let componentProps = []
  let changedProps = new Map()
  if (Array.isArray(target)) {
    componentProps = target
      .map(p => {
        if (!p) throw new Error('bitECS - Cannot serialize undefined component')
        if (typeof p === 'function' && p.name === 'QueryChanged') {
          // QueryChanged wrapper: give each flattened prop a shadow copy so
          // the serializer can diff against the last serialized values.
          p()[$storeFlattened].forEach(prop => {
            const $ = Symbol()
            createShadow(prop, $)
            changedProps.set(prop, $)
          })
          return p()[$storeFlattened]
        }
        if (Object.getOwnPropertySymbols(p).includes($storeFlattened)) {
          return p[$storeFlattened]
        }
        if (Object.getOwnPropertySymbols(p).includes($storeBase)) {
          return p
        }
        // NOTE(review): any other shape maps to undefined and is concat'd
        // into the list — confirm this is intended.
      })
      .reduce((a,v) => a.concat(v), [])
  }
  return [componentProps, changedProps]
}
/**
 * Defines a new serializer which targets the given components to serialize the data of when called on a world or array of EIDs.
 *
 * Packet layout, repeated per property:
 *   [u8 pid][u32 entityCount] then per entity [u32 eid][payload]
 * where payload is empty for tag stores, [idxType count]([index][value])* for
 * array-valued props, and a single typed value for scalar props.
 *
 * @param {object|array} target
 * @param {number} [maxBytes=20000000]
 * @returns {function} serializer
 */
export const defineSerializer = (target, maxBytes = 20000000) => {
  const isWorld = Object.getOwnPropertySymbols(target).includes($componentMap)
  let [componentProps, changedProps] = canonicalize(target)
  // TODO: calculate max bytes based on target & recalc upon resize
  const buffer = new ArrayBuffer(maxBytes)
  const view = new DataView(buffer)
  return (ents) => {
    // stores were resized since last call: refresh cached prop lists
    if (resized) {
      [componentProps, changedProps] = canonicalize(target)
      resized = false
    }
    if (isWorld) {
      // world target: serialize every registered component's props
      componentProps = []
      target[$componentMap].forEach((c, component) => {
        if (component[$storeFlattened])
          componentProps.push(...component[$storeFlattened])
        else componentProps.push(component)
      })
    }
    // accept either a world (serialize all its entities) or an eid array
    let world
    if (Object.getOwnPropertySymbols(ents).includes($componentMap)) {
      world = ents
      ents = ents[$entityArray]
    } else {
      world = eidToWorld.get(ents[0])
    }
    if (!ents.length) return
    let where = 0
    // iterate over component props
    for (let pid = 0; pid < componentProps.length; pid++) {
      const prop = componentProps[pid]
      const $diff = changedProps.get(prop)
      // write pid
      view.setUint8(where, pid)
      where += 1
      // save space for entity count
      const countWhere = where
      where += 4
      let count = 0
      // write eid,val
      for (let i = 0; i < ents.length; i++) {
        const eid = ents[i]
        // skip if entity doesn't have this component
        if (!hasComponent(world, prop[$storeBase](), eid)) {
          continue
        }
        // skip if diffing and no change
        // TODO: optimize array diff
        if ($diff) {
          if (ArrayBuffer.isView(prop[eid])) {
            let dirty = false
            for (let i = 0; i < prop[eid].length; i++) {
              if(prop[eid][i] !== prop[eid][$diff][i]) {
                dirty = true
                break
              }
            }
            // NOTE(review): this skips when the array IS dirty — upstream
            // intent looks like `if (!dirty) continue`; confirm.
            if (dirty) continue
          } else if (prop[eid] === prop[$diff][eid]) continue
        }
        count++
        // write eid
        view.setUint32(where, eid)
        where += 4
        // tag components carry no data beyond the eid
        if (prop[$tagStore]) {
          continue
        }
        // if property is an array
        if (ArrayBuffer.isView(prop[eid])) {
          const type = prop[eid].constructor.name.replace('Array', '')
          const indexType = prop[eid][$indexType]
          const indexBytes = prop[eid][$indexBytes]
          // add space for count of dirty array elements
          const countWhere2 = where
          where += 1
          let count2 = 0
          // write index,value
          for (let i = 0; i < prop[eid].length; i++) {
            const value = prop[eid][i]
            if ($diff && prop[eid][i] === prop[eid][$diff][i]) {
              continue
            }
            // write array index
            view[`set${indexType}`](where, i)
            where += indexBytes
            // write value at that index
            view[`set${type}`](where, value)
            where += prop[eid].BYTES_PER_ELEMENT
            count2++
          }
          // write total element count
          view[`set${indexType}`](countWhere2, count2)
        } else {
          // regular property values
          const type = prop.constructor.name.replace('Array', '')
          // set value next [type] bytes
          view[`set${type}`](where, prop[eid])
          where += prop.BYTES_PER_ELEMENT
          // sync shadow state
          if (prop[$diff]) prop[$diff][eid] = prop[eid]
        }
      }
      // backfill the entity count reserved above
      view.setUint32(countWhere, count)
    }
    return buffer.slice(0, where)
  }
}
// packet-eid -> newly created local eid, reset at the start of every
// deserialization pass so APPEND/MAP reuse entities within one packet
const newEntities = new Map()
/**
 * Defines a new deserializer which targets the given components to deserialize onto a given world.
 *
 * Reads packets produced by defineSerializer (see its layout comment).
 *
 * @param {object|array} target
 * @returns {function} deserializer
 */
export const defineDeserializer = (target) => {
  const isWorld = Object.getOwnPropertySymbols(target).includes($componentMap)
  let [componentProps] = canonicalize(target)
  return (world, packet, mode=0) => {
    newEntities.clear()
    // stores were resized since last call: refresh cached prop list
    if (resized) {
      [componentProps] = canonicalize(target)
      resized = false
    }
    if (isWorld) {
      componentProps = []
      target[$componentMap].forEach((c, component) => {
        if (component[$storeFlattened])
          componentProps.push(...component[$storeFlattened])
        else componentProps.push(component)
      })
    }
    const localEntities = world[$localEntities]
    const view = new DataView(packet)
    let where = 0
    while (where < packet.byteLength) {
      // pid
      const pid = view.getUint8(where)
      where += 1
      // entity count
      const entityCount = view.getUint32(where)
      where += 4
      // component property
      const prop = componentProps[pid]
      // Get the entities and set their prop values
      for (let i = 0; i < entityCount; i++) {
        let eid = view.getUint32(where)
        where += 4
        if (mode === DESERIALIZE_MODE.MAP) {
          // translate the packet eid through the persistent local mapping,
          // creating a local entity the first time an eid is seen
          if (localEntities.has(eid)) {
            eid = localEntities.get(eid)
          } else if (newEntities.has(eid)) {
            eid = newEntities.get(eid)
          } else {
            const newEid = addEntity(world)
            localEntities.set(eid, newEid)
            newEntities.set(eid, newEid)
            eid = newEid
          }
        }
        // APPEND always creates; REPLACE creates only when the eid doesn't
        // exist locally (&& binds tighter than ||, which matches that intent)
        if (mode === DESERIALIZE_MODE.APPEND ||
          mode === DESERIALIZE_MODE.REPLACE && !world[$entitySparseSet].has(eid)
        ) {
          const newEid = newEntities.get(eid) || addEntity(world)
          newEntities.set(eid, newEid)
          eid = newEid
        }
        const component = prop[$storeBase]()
        if (!hasComponent(world, component, eid)) {
          addComponent(world, component, eid)
        }
        // tag components carry no payload
        if (component[$tagStore]) {
          continue
        }
        if (ArrayBuffer.isView(prop[eid])) {
          // array prop: [count]([index][value])*
          const array = prop[eid]
          const count = view[`get${array[$indexType]}`](where)
          where += array[$indexBytes]
          // iterate over count
          for (let i = 0; i < count; i++) {
            const index = view[`get${array[$indexType]}`](where)
            where += array[$indexBytes]
            const value = view[`get${array.constructor.name.replace('Array', '')}`](where)
            where += array.BYTES_PER_ELEMENT
            prop[eid][index] = value
          }
        } else {
          // scalar prop: single typed value
          const value = view[`get${prop.constructor.name.replace('Array', '')}`](where)
          where += prop.BYTES_PER_ELEMENT
          prop[eid] = value
        }
      }
    }
  }
}
} |
#include "../../includes/bonus/cub3d_bonus.h"
/*
** Draw the currently held item's texture into the screen image, anchored to
** the right edge (x from width-153 to width) between y=80 and y=160.
** Pure black (0x000000) texels are treated as transparent and skipped.
** NOTE(review): texture sampling starts at item_x=163 / item_y=0 —
** presumably an offset into a sprite sheet; confirm against the asset.
*/
void render_item(t_game *game)
{
	int color;

	game->pos.x = game->file.width - 153;
	game->pos_item.x = 163;
	while (game->pos.x < game->file.width)
	{
		game->pos.y = 80;
		game->pos_item.y = 0;
		while (game->pos.y < 160)
		{
			color = get_color_item(&game->item_tex[game->item],
					game->pos_item.x, game->pos_item.y);
			if (color != 0x000000)
				image_put_pixel(game, game->pos.x, game->pos.y, color);
			game->pos.y++;
			game->pos_item.y++;
		}
		game->pos_item.x++;
		game->pos.x++;
	}
}
/*
** Fetch the pixel colour at (item_x, item_y) from the item texture by
** indexing into its raw image buffer (row stride l_len, bpp bits per pixel).
*/
int	get_color_item(t_tex *item_tex, int item_x, int item_y)
{
	const int	offset = item_y * item_tex->l_len
		+ item_x * (item_tex->bpp / 8);

	return (*(unsigned int *)(item_tex->addr + offset));
}
|
package simplesettings
import (
"fmt"
"sync"
)
// SettingsSection is a structure to hold key-value pairs and process them as settings values.
// All access to Values is guarded by lock, making the section safe for
// concurrent use through the methods below.
type SettingsSection struct {
	lock   sync.RWMutex              // guards Values
	Values map[string]*settingsValue // settings keyed by value name
}
// newSettingsSection allocates an empty section ready to accept values.
func newSettingsSection() *SettingsSection {
	return &SettingsSection{
		Values: make(map[string]*settingsValue),
	}
}
// addValue stores (or replaces) a value under its own Name, write-locked.
func (ss *SettingsSection) addValue(sv *settingsValue) {
	ss.lock.Lock()
	defer ss.lock.Unlock()
	ss.Values[sv.Name] = sv
}
// DeleteValue with given name from this section.
// Deleting a missing key is a no-op.
func (ss *SettingsSection) DeleteValue(name string) {
	ss.lock.Lock()
	defer ss.lock.Unlock()
	delete(ss.Values, name)
}
// String renders every value in the section, one per line, in map
// iteration order (unordered).
func (ss *SettingsSection) String() string {
	ss.lock.RLock()
	defer ss.lock.RUnlock()
	var out string
	for _, val := range ss.Values {
		out += fmt.Sprintf("%v\n", val)
	}
	return out
}
// getVal returns a copy of the value stored under key, read-locked.
// Panics when the key is absent — callers (Get/GetInt/...) treat a missing
// setting as a programming error rather than returning a zero value.
func (ss *SettingsSection) getVal(key string) settingsValue {
	ss.lock.RLock()
	defer ss.lock.RUnlock()
	val := ss.Values[key]
	if val == nil {
		panic(fmt.Sprintf("Key %v not found in this settings section", key))
	}
	return *val
}
// Get string value from SettingsSection object.
// Panics (via getVal) if the key is absent.
func (ss *SettingsSection) Get(key string) string {
	return ss.getVal(key).ParseString()
}

// GetInt - get integer value from SettingsSection object.
// Panics (via getVal) if the key is absent.
func (ss *SettingsSection) GetInt(key string) int {
	return ss.getVal(key).ParseInt()
}

// GetBool - get boolean value from SettingsSection object.
// Panics (via getVal) if the key is absent.
func (ss *SettingsSection) GetBool(key string) bool {
	return ss.getVal(key).ParseBool()
}

// GetArray - get []string value from SettingsSection object.
// Panics (via getVal) if the key is absent.
func (ss *SettingsSection) GetArray(key string) []string {
	return ss.getVal(key).ParseArray()
}
// Set string, int, bool, slice value to SettingsSection object.
// Returns an error when newValue cannot represent the given value's type.
func (ss *SettingsSection) Set(key string, value interface{}) error {
	val := newValue(key, value)
	if val == nil {
		return fmt.Errorf("can't save as value: %v", val)
	}
	ss.addValue(val)
	return nil
}
|
from typing import List


class Node:
    """Graph vertex holding a value, an adjacency list, and a BFS parent link."""

    def __init__(self, value):
        self.value = value
        self.neighbour_list = []   # adjacent Node objects
        self.parent_node = None    # set by traverse_graph to the BFS predecessor


def traverse_graph(curr_node: Node) -> List[Node]:
    """Breadth-first traversal starting at curr_node.

    Sets each discovered node's parent_node to its BFS predecessor and
    returns all reachable nodes in visiting order.

    Bug fix: the original returned open_nodes, which is always empty once
    the while-loop exits; the visited list (closed_nodes) is the result.
    """
    open_nodes = []
    closed_nodes = []
    open_nodes.append(curr_node)
    while open_nodes:
        # FIFO pop -> breadth-first order
        curr_node = open_nodes.pop(0)
        closed_nodes.append(curr_node)
        for node in curr_node.neighbour_list:
            if node not in closed_nodes and node not in open_nodes:
                node.parent_node = curr_node
                open_nodes.append(node)
    return closed_nodes
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.one = void 0;
var one = {
"viewBox": "0 0 20 20",
"children": [{
"name": "path",
"attribs": {
"d": "M18,5H2C0.9,5,0,5.9,0,7v6c0,1.1,0.9,2,2,2h16c1.1,0,2-0.9,2-2V7C20,5.9,19.1,5,18,5z M18,13H2V7h16V13z M7,8H3v4h4V8z"
}
}]
};
exports.one = one; |
package com.honyum.elevatorMan.hb;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.hanbang.ydtsdk.AlarmInformation;
import com.hanbang.ydtsdk.AlarmParam;
import com.hanbang.ydtsdk.YdtNetSDK;
import com.honyum.elevatorMan.R;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Fragment demonstrating the Hanbang "Ydt" SDK alarm APIs: bind/unbind a
 * device's alarm upload, page through its alarm history, and fetch a single
 * alarm record. All SDK calls are pushed onto a single-threaded executor to
 * keep them off the UI thread (and serialized with respect to each other).
 */
public class AlarmFragment extends Fragment implements View.OnClickListener
{
    /**
     * Single-threaded executor for all SDK network calls.
     */
    private ExecutorService threadPoll = Executors.newSingleThreadExecutor();
    /**
     * Ydt network SDK handle (shared via AccountInfo).
     */
    YdtNetSDK mYdtNetSDK;
    /**
     * Serial number of the currently targeted device.
     */
    String mDeviceSn;
    /**
     * Alarm query window start (epoch millis).
     */
    long beginTime;
    /**
     * Alarm query window end (epoch millis).
     */
    long endTime;
    /**
     * One day expressed in milliseconds.
     */
    final static long TIME_MILLIS_OF_DAY = 1L * 24 * 60 * 60 * 1000;
    /**
     * Paging offset for the alarm-list query.
     */
    int mStartNo = 0;
    /**
     * Number of records fetched per page.
     */
    int mCount = 20;
    /**
     * Device serial-number input field (currently unused).
     */
    // EditText mEditText;
    /**
     * Accumulated alarm records.
     */
    List<AlarmParam> mAlarmParams;
    /**
     * ListView showing the alarm records.
     */
    ListView alarmList;
    ProgressDialog dialog;
    /**
     * Device password. NOTE(review): "<PASSWORD>" looks like a scrubbed
     * placeholder — confirm how the real credential is supplied.
     */
    String DEVICE_PASSWORD = "<PASSWORD>";
    /**
     * Device share type: 0 - own device, 1 - device shared to this user, 2 - public device.
     */
    int SHARE_TYPE = 0;
    /**
     * Bind flag: 1 - bind, 0 - unbind, -1 - disable alarm upload from the device.
     */
    int BIND_FLAG = 1;
    Spinner deviceSpinner; // device selector
    List<DeviceInfo> deviceList = new ArrayList<DeviceInfo>();
    // currently selected device
    DeviceInfo mSelectDevice;
    public void onCreate( Bundle savedInstanceState )
    {
        super.onCreate( savedInstanceState );
    }
    @Override
    public View onCreateView( LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState )
    {
        // Inflate the layout for this fragment
        mYdtNetSDK = AccountInfo.getInstance().getYdtNetSDK();
        View view = inflater.inflate( R.layout.fragment_alarm_hb, container, false );
        // default query window: the last 24 hours
        beginTime = System.currentTimeMillis() - TIME_MILLIS_OF_DAY;
        endTime = System.currentTimeMillis();
        mAlarmParams = new ArrayList<AlarmParam>( );
        initView( view );
        return view;
    }
    @Override
    public void onHiddenChanged( boolean hidden )
    {
        super.onHiddenChanged( hidden );
        if ( hidden )
        {
        }
        else
        {
            // fragment became visible again: reload the device list into the spinner
            deviceList.clear();
            deviceList.addAll( AccountInfo.getInstance().getYdtDeviceInfos() );
            ArrayAdapter<DeviceInfo> adapter = new ArrayAdapter<DeviceInfo>( getActivity(), android.R.layout.simple_spinner_item, deviceList );
            adapter.setDropDownViewResource( android.R.layout.simple_spinner_dropdown_item );
            deviceSpinner.setAdapter( adapter );
        }
    }
    private void initView( View view )
    {
        // mEditText = (EditText) view.findViewById( R.id.deicesn_text );
        // bind device alarm upload
        view.findViewById( R.id.bind_alarm ).setOnClickListener( this );
        // fetch multiple alarm records
        view.findViewById( R.id.get_sum_alarm ).setOnClickListener( this );
        // unbind device
        view.findViewById( R.id.unbind ).setOnClickListener( this );
        deviceSpinner = (Spinner) view.findViewById( R.id.device_spinner );
        deviceSpinner.setSelection( 0 );
        alarmList = (ListView) view.findViewById( R.id.list_alarm );
        dialog = new ProgressDialog( getActivity() );
        dialog.setProgressStyle( ProgressDialog.STYLE_SPINNER );
        dialog.setMessage( "请稍等。。。" );
        dialog.setCancelable( false );
        /**
         * device selection
         */
        deviceSpinner.setOnItemSelectedListener( new AdapterView.OnItemSelectedListener()
        {
            @Override
            public void onItemSelected( AdapterView<?> parent, View view, int position, long id )
            {
                deviceSpinner.setSelection( position );
                mSelectDevice = deviceList.get( position );
            }
            @Override
            public void onNothingSelected( AdapterView<?> parent )
            {
            }
        } );
        // list item click: fetch the single alarm record behind that row by alarmId
        alarmList.setOnItemClickListener( new AdapterView.OnItemClickListener()
        {
            @Override
            public void onItemClick( AdapterView<?> parent, View view, final int position, long id )
            {
                threadPoll.execute( new Runnable()
                {
                    @Override
                    public void run()
                    {
                        AlarmInformation information = mYdtNetSDK.getSingleAlarmInfo( mAlarmParams.get( position ).alarmId );
                        if ( information.nErrorCode == 0 )
                        {
                            // fetch succeeded (result currently unused)
                        }
                    }
                } );
            }
        } );
    }
    @Override
    public void onClick( View v )
    {
        switch ( v.getId() )
        {
            // bind device alarm upload
            case R.id.bind_alarm:
                dialog.show();
                threadPoll.execute( new Runnable()
                {
                    @Override
                    public void run()
                    {
                        BIND_FLAG = 1; // bind
                        mDeviceSn = mSelectDevice.deviceSn;
                        int error = mYdtNetSDK.bindDeviceAlarm( mDeviceSn, DEVICE_PASSWORD, SHARE_TYPE, BIND_FLAG );
                        if ( error == 0 )
                        {
                            deviceSpinner.post( new Runnable()
                            {
                                @Override
                                public void run()
                                {
                                    Toast.makeText( getActivity(),"绑定成功",Toast.LENGTH_SHORT ).show();
                                }
                            } );
                        }
                        dialog.dismiss();
                    }
                } );
                break;
            // fetch multiple alarm records
            case R.id.get_sum_alarm:
                dialog.show();
                threadPoll.execute( new Runnable()
                {
                    @Override
                    public void run()
                    {
                        /**
                         * mDeviceSn: device serial number
                         * beginTime: alarm window start (here: one day ago; adjust as needed)
                         * endTime:   alarm window end (here: now)
                         * mStartNo:  paging offset
                         * mCount:    records fetched per iteration
                         */
                        // page through the device's alarms until a short/empty page is returned
                        while ( true )
                        {
                            AlarmInformation alarmInf = mYdtNetSDK.getAlarmList( mDeviceSn, beginTime, endTime, mStartNo, mCount );
                            if ( null == alarmInf.alarmList || alarmInf.alarmList.size() == 0 )
                            {
                                break;
                            }
                            mAlarmParams.addAll( alarmInf.alarmList );
                            if ( mCount > alarmInf.alarmList.size() )
                            {
                                break;
                            }
                            mStartNo = mStartNo + mCount;
                        }
                        alarmList.post( new Runnable()
                        {
                            @Override
                            public void run()
                            {
                                dialog.dismiss();
                                AlarmAdapter adapter = new AlarmAdapter( getActivity(), mAlarmParams );
                                alarmList.setAdapter( adapter );
                            }
                        } );
                    }
                } );
                break;
            case R.id.unbind:
                dialog.show();
                threadPoll.execute( new Runnable()
                {
                    @Override
                    public void run()
                    {
                        BIND_FLAG = 0; // unbind
                        mDeviceSn = mSelectDevice.deviceSn;
                        int error = mYdtNetSDK.bindDeviceAlarm( mDeviceSn, DEVICE_PASSWORD, SHARE_TYPE, BIND_FLAG );
                        if ( error == 0 )
                        {
                            deviceSpinner.post( new Runnable()
                            {
                                @Override
                                public void run()
                                {
                                    Toast.makeText( getActivity(),"解绑成功",Toast.LENGTH_SHORT ).show();
                                }
                            } );
                        }
                        dialog.dismiss();
                    }
                } );
                break;
        }
    }
    /**
     * Simple ListView adapter rendering each alarm's raw JSON payload.
     */
    class AlarmAdapter extends BaseAdapter
    {
        private LayoutInflater mInflater;
        private List<AlarmParam> alarms;
        public AlarmAdapter( Context context,List<AlarmParam> alarmParams )
        {
            mInflater = LayoutInflater.from( context );
            this.alarms = alarmParams;
        }
        @Override
        public int getCount()
        {
            return alarms.size();
        }
        @Override
        public Object getItem( int position )
        {
            return alarms.get( position );
        }
        @Override
        public long getItemId( int position )
        {
            return 0;
        }
        @Override
        public View getView( int position, View convertView, ViewGroup parent )
        {
            // standard convertView/ViewHolder row recycling
            ViewHolder viewHolder = null;
            if ( convertView == null )
            {
                viewHolder = new ViewHolder();
                convertView = mInflater.inflate( R.layout.item_alarm, null );
                viewHolder.alarmJson = (TextView) convertView.findViewById( R.id.alarm_json );
                convertView.setTag( viewHolder );
            }
            else
            {
                viewHolder = (ViewHolder) convertView.getTag();
            }
            viewHolder.alarmJson.setText( alarms.get( position ).alarmJson );
            return convertView;
        }
    }
    /**
     * Row-view cache for AlarmAdapter.
     */
    class ViewHolder
    {
        TextView alarmJson;
    }
}
|
cd /home/container

# Optionally clone the bot's source, or pull updates if it's already present.
if [ "${GIT_CLONE}" == "true" ] || [ "${GIT_CLONE}" == "1" ]; then
    if [ "$(ls -A /home/container)" ]; then
        echo "Pulling Updates"
        git pull
    else
        echo -e "/home/container is empty.\nCloning files into the directory."
        git clone https://github.com/1tzemerald/SupportBot.git
    fi
fi

# Inject the bot token into settings.json.
# Fixed: the original `sed -i '/s/BOT_TOKEN_HERE/${BOT_TOKEN}/g'` had a
# leading slash (turning the "s" into part of a line address, a sed syntax
# error) and used single quotes, which prevented ${BOT_TOKEN} from expanding —
# so the token was never written.
sed -i "s/BOT_TOKEN_HERE/${BOT_TOKEN}/g" settings.json

# Expand {{VAR}} placeholders in the panel-supplied startup command, then run it.
MODIFIED_STARTUP=`eval echo $(echo ${STARTUP} | sed -e 's/{{/${/g' -e 's/}}/}/g')`
${MODIFIED_STARTUP}
|
package com.springmvc.utils;
import java.sql.Date;
/**
 * Small date helper for the GLCP module.
 */
public class GLCPDateUtils {

    /**
     * @return the current moment as a {@link java.sql.Date}
     */
    public static Date getNowDate() {
        return new Date(System.currentTimeMillis());
    }

    /** Manual smoke test: prints the current date. */
    public static void main(String[] args) {
        System.out.println(GLCPDateUtils.getNowDate());
    }
}
|
package org.anchorer.giraffe;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.webkit.WebView;
import android.webkit.WebViewClient;
/**
 * An Activity hosting a single WebView that loads the web page whose URL
 * arrives via the {@link #FIELD_URL} Intent extra.
 * Created by Anchorer on 16/9/14.
 */
public class WebActivity extends AppCompatActivity {
    /** Intent extra key carrying the URL to load. */
    public static final String FIELD_URL = "url";

    private WebView mWebView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_web);

        // Configure the WebView: JavaScript on, in-app navigation, drag support.
        mWebView = (WebView) findViewById(R.id.wv);
        mWebView.getSettings().setJavaScriptEnabled(true);
        mWebView.setWebViewClient(new WebViewClient());
        mWebView.setOnDragListener(new GiraffeDragEventListener(this));

        // Load the URL handed over by the launching Intent.
        String targetUrl = getIntent().getStringExtra(FIELD_URL);
        mWebView.loadUrl(targetUrl);
    }
}
|
<filename>schema/schema-definition.go<gh_stars>0
package schema
// SchemaDefinition represents a valid textual schema definition.
type SchemaDefinition struct {
// RootColumn is the root of the column-definition tree.
RootColumn *ColumnDefinition
}
// ParseSchemaDefinition parses schemaText into a SchemaDefinition.
// NOTE(review): not yet implemented — currently always panics.
func ParseSchemaDefinition(schemaText string) (*SchemaDefinition, error) {
panic("implement me")
}
// String returns a textual representation of the schema definition. This textual representation
// adheres to the format accepted by the ParseSchemaDefinition function. A textual schema definition
// parsed by ParseSchemaDefinition and turned back into a string by this method repeatedly will
// always remain the same, save for differences in the emitted whitespaces.
// NOTE(review): not yet implemented — currently always panics.
func (d *SchemaDefinition) String() string {
panic("implement me")
}
|
<filename>libs/lib-Twitter.js
// Module-level flag: true while Twitter's "/users?" endpoint answers HTTP 429
// (rate limited); maintained by interceptHttpResponse below.
let RATE_LIMIT_REACHED = false
// Browser-side helper (evaluate callback): number of grid chunks loaded so far.
const _getDivsNb = (arg, cb) => cb(null, document.querySelectorAll("div.GridTimeline-items > div.Grid").length)
// Browser-side helper (evaluate callback): number of follower cards in the timeline.
const _getFollowersNb = (arg, cb) => cb(null, document.querySelectorAll("div.GridTimeline div[data-test-selector=\"ProfileTimelineUser\"]").length)
// Browser-side scraper (evaluate callback): collect profileUrl, name, bio and
// handle for every follower card currently present in the grid.
const _scrapeFollowers = (arg, cb) => {
	const cards = document.querySelectorAll("div.Grid-cell[data-test-selector=\"ProfileTimelineUser\"]")
	const scraped = []
	for (const card of cards) {
		const entry = {}
		const profileLink = card.querySelector("div.ProfileCard > a")
		const nameEl = card.querySelector("a.fullname")
		const bioEl = card.querySelector("p.ProfileCard-bio")
		const handleEl = card.querySelector("a.ProfileCard-screennameLink.u-linkComplex")
		if (profileLink) {
			entry.profileUrl = profileLink.href
		}
		if (nameEl) {
			entry.name = nameEl.textContent.trim()
		}
		if (bioEl) {
			entry.bio = bioEl.textContent.trim()
		}
		if (handleEl) {
			entry.handle = handleEl.textContent.trim()
		}
		scraped.push(entry)
	}
	cb(null, scraped)
}
// Network listener: track Twitter's rate-limit state by watching every
// "/users?" response — HTTP 429 sets the flag, anything else clears it.
const interceptHttpResponse = e => {
	if (e.response.url.indexOf("/users?") > -1) {
		RATE_LIMIT_REACHED = (e.response.status === 429)
	}
}
// Poll until Twitter's rate limit resets: every 30s, scroll to retrigger the
// "/users?" XHR (so interceptHttpResponse can clear RATE_LIMIT_REACHED), and
// bail out early when the agent's time budget runs out.
const waitWhileHttpErrors = async (utils, tab) => {
const slowDownStart = Date.now()
let tries = 1
utils.log("Slowing down the API due to Twitter rate limit", "warning")
while (RATE_LIMIT_REACHED) {
const timeLeft = await utils.checkTimeLeft()
if (!timeLeft.timeLeft) {
return
}
// Scroll to the top then back to the bottom to force a fresh timeline request.
await tab.scroll(0, 0)
await tab.scrollToBottom()
await tab.wait(30000)
utils.log(`Twitter Rate limit isn't reset (retry counter: ${tries})`, "loading")
tries++
}
utils.log(`Resuming the API scraping process (Rate limit duration ${Math.round((Date.now() - slowDownStart) / 60000)} minutes)`, "info")
}
/**
 * @param {Nick.Tab|Puppeteer.Page} tab - Nickjs Tab instance (with a twitter page opened)
 * @return {boolean} true when the tab comes from NickJS (exposes a driver), false for Puppeteer
 */
const isUsingNick = tab => Boolean(tab.driver)
// Helper library wrapping Twitter browsing/scraping primitives for both
// NickJS and Puppeteer drivers.
class Twitter {
/**
 * @constructs Twitter
 * NOTE: when using puppeteer buster & utils are only required
 * @param {Nick} [nick]
 * @param {Buster} buster
 * @param {StoreUtilities} utils
 */
constructor(nick, buster, utils) {
// Two-argument form (buster, utils): Puppeteer mode, no NickJS instance.
if (arguments.length < 3) {
this.buster = arguments[0] // buster
this.utils = arguments[1] // utils
} else {
this.nick = nick
this.buster = buster
this.utils = utils
}
}
/**
 * @async
 * @description Check whether the session loaded in the tab is authenticated.
 * @param {Nick.Tab|Puppeteer.Page} tab - Nickjs Tab instance (with a twitter page opened)
 * @param {Boolean} [printErrors] - when true, log the selector-wait failure as a warning
 * @return {Promise<boolean>} true if logged otherwise false
 */
async isLogged(tab, printErrors = false) {
const selectors = ["ul > li.me.dropdown.session.js-session > a.settings", "div#session h2.current-user"]
try {
// The selector represents the top right dropdown button used, it has a with an href /settings which require to logged on
if (isUsingNick(tab)) {
await tab.waitUntilVisible(selectors, "or", 15000)
} else {
// Puppeteer has no "or" mode: race one waitForSelector per candidate.
await Promise.race(selectors.map(sel => tab.waitForSelector(sel, { timeout: 15000 })))
}
return true
} catch (err) {
printErrors && this.utils.log(err.message || err, "warning")
return false
}
}
/**
 * @async
 * @description Method used to be log as a valid Twitter user
 * @param {Nick.Tab|Puppeteer.Page} tab - Nickjs Tab / Puppeteer Page instance
 * @param {String} cookie - Twitter auth_token cookie
 * @throws if there were an error during the login process
 */
async login(tab, cookie) {
const isNick = isUsingNick(tab)
// Browser-side: read the logged-in display name from the dashboard card.
const _scrapeTwitterUsername = (arg, cb) => {
const sel = document.querySelector(".DashboardProfileCard-name a")
const val = sel ? sel.textContent.trim() : null
return cb ? cb(null, val) : val
}
// Reject missing or placeholder cookies early, each with its own exit code.
if ((typeof cookie !== "string") || (cookie.trim().length < 1)) {
this.utils.log("Invalid Twitter session cookie. Did you specify one?", "error")
process.exit(this.utils.ERROR_CODES.TWITTER_INVALID_COOKIE)
}
if (cookie === "your_session_cookie") {
this.utils.log("You didn't enter your Twitter session cookie into the API Configuration.", "error")
process.exit(this.utils.ERROR_CODES.TWITTER_DEFAULT_COOKIE)
}
// "from-global-object:<path>" lets callers store the cookie in the agent's global object.
if (cookie.indexOf("from-global-object:") === 0) {
try {
const path = cookie.replace("from-global-object:", "")
this.utils.log(`Fetching session cookie from global object at "${path}"`, "info")
cookie = require("lodash").get(await this.buster.getGlobalObject(), path)
if ((typeof(cookie) !== "string") || (cookie.length <= 0)) {
throw `Could not find a non empty string at path ${path}`
}
} catch (e) {
this.utils.log(`Could not get session cookie from global object: ${e.toString()}`, "error")
process.exit(this.utils.ERROR_CODES.GO_NOT_ACCESSIBLE)
}
}
this.utils.log("Connecting to Twitter...", "loading")
try {
const _cookie = { name: "auth_token", value: cookie, domain: ".twitter.com", httpOnly: true, secure: true }
const url = "https://twitter.com"
const initialSelector = ".DashboardProfileCard"
if (isNick) {
if (!this.nick) {
this.utils.log("You can't use the library without providing a NickJS object", "error")
process.exit(1)
}
await this.nick.setCookie(_cookie)
await tab.open(url)
await tab.waitUntilVisible(initialSelector)
} else {
await tab.setCookie(_cookie)
await tab.goto(url)
await tab.waitForSelector(initialSelector, { visible: true })
}
this.utils.log(`Connected as ${await tab.evaluate(_scrapeTwitterUsername)}`, "done")
} catch (error) {
// On failure keep a screenshot for debugging, then abort the whole agent.
const imgPath = `Tok${Date.now()}.png`
const opts = isNick ? imgPath : { path: imgPath, type: "png", fullPage: true }
await tab.screenshot(opts)
this.utils.log("Could not connect to Twitter with this sessionCookie.", "error")
process.exit(this.utils.ERROR_CODES.TWITTER_BAD_COOKIE)
}
}
/**
 * @async
 * @description Load a given Twitter profile
 * Handled URLs:
 * https://twitter.com/(@)user
 * https://twitter.com/intent/user?(user_id,screen_name)=(@)xxx
 * @param {Nick.Tab|Puppeteer.Page} tab - Nickjs Tab / Puppeteer Page instance
 * @param {String} url - URL to open
 * @throws on CSS exception / 404 HTTP code
 */
async openProfile(tab, url) {
const isNick = isUsingNick(tab)
const loadingErr = `Can't open URL: ${url}`
const selectors = [ ".ProfileCanopy" , ".ProfileHeading", "div.footer a.alternate-context" ]
let contextSelector = ""
if (isNick) {
const [ httpCode ] = await tab.open(url)
if (httpCode === 404) {
throw loadingErr
}
} else {
const response = await tab.goto(url)
if (response.status() === 404) {
throw loadingErr
}
}
contextSelector = isNick ? await tab.waitUntilVisible(selectors, "or", 15000) : await Promise.race(selectors.map(sel => tab.waitForSelector(sel, { timeout: 15000 })))
// Puppeteer's race returns an element handle, not a selector string:
// recover a class-based selector from the handle so the check below works.
if (typeof contextSelector !== "string" && !isNick) {
contextSelector = "." + await (await contextSelector.getProperty("className")).jsonValue()
}
// Intent URL: you need to click the redirection link to open the profile
if (contextSelector.indexOf(".alternate-context") > -1) {
await tab.click(selectors[2])
isNick ? await tab.waitUntilVisible(selectors[0], 15000) : await tab.waitForSelector(selectors[0], { timeout: 15000 })
}
}
/**
 * @async
 * @description Scrape a given Twitter profile
 * @param {Nick.Tab|Puppeteer.Page} tab - Nickjs Tab / Puppeteer Page instance
 * @param {String} url - Twitter profile URL to open
 * @param {Boolean} [verbose] - show/hide logs (default: hide)
 * @throws scraping failures / 404 HTTP code
 * @return {Promise<Object>}
 */
async scrapeProfile(tab, url, verbose = false) {
// Browser-side scraper: pull counters from the nav bar and identity fields
// from the sidebar card; every field is null when its selector is absent.
const _scrapeProfile = (arg, cb) => {
const res = { name: null, twitterProfile: null, handle: null, bio: null, location: null, website: null, joinDate: null }
const descriptionSelector = document.querySelector("div.ProfileSidebar")
const activitySelector = document.querySelector("div.ProfileNav")
const avatarSelector = document.querySelector("img.ProfileAvatar-image")
res.profilePicture = avatarSelector ? avatarSelector.src : null
if (activitySelector) {
const tweetCountSelector = activitySelector.querySelector("li.ProfileNav-item--tweets span.ProfileNav-value")
const followersSelector = activitySelector.querySelector("li.ProfileNav-item--followers span.ProfileNav-value")
const followingSelector = activitySelector.querySelector("li.ProfileNav-item--following span.ProfileNav-value")
const likesSelector = activitySelector.querySelector("li.ProfileNav-item--favorites span.ProfileNav-value")
const listsSelector = activitySelector.querySelector("li.ProfileNav-item--lists span.ProfileNav-value")
res.twitterId = activitySelector.dataset.userId
res.alternativeProfileUrl = `https://www.twitter.com/intent/user?user_id=${res.twitterId}`
res.tweetsCount = tweetCountSelector ? tweetCountSelector.dataset.count : null
res.followers = followersSelector ? followersSelector.dataset.count : null
res.following = followingSelector ? followingSelector.dataset.count : null
res.likes = likesSelector ? likesSelector.dataset.count : null
res.lists = listsSelector ? listsSelector.dataset.count : null
}
if (descriptionSelector) {
const screenNameSelector = descriptionSelector.querySelector("a.ProfileHeaderCard-nameLink")
const handleSelector = descriptionSelector.querySelector("a.ProfileHeaderCard-screennameLink")
const bioSelector = descriptionSelector.querySelector("p.ProfileHeaderCard-bio")
const locationSelector = descriptionSelector.querySelector("div.ProfileHeaderCard-location span.ProfileHeaderCard-locationText a[data-place-id]")
const websiteSelector = descriptionSelector.querySelector("div.ProfileHeaderCard-url span.ProfileHeaderCard-urlText a:first-of-type")
const joinDateSelector = descriptionSelector.querySelector("div.ProfileHeaderCard-joinDate span.js-tooltip")
const birthdaySelector = descriptionSelector.querySelector("div.ProfileHeaderCard-birthdate span.ProfileHeaderCard-birthdateText")
const followBackSelector = descriptionSelector.querySelector("span.FollowStatus")
const protectedSelector = descriptionSelector.querySelector("span.Icon--protected:not(.hidden)")
res.name = screenNameSelector ? screenNameSelector.textContent.trim() : null
res.twitterProfile = screenNameSelector ? screenNameSelector.href : null
res.handle = handleSelector ? handleSelector.textContent.trim() : null
res.bio = bioSelector ? bioSelector.textContent.trim() : null
res.location = locationSelector ? locationSelector.textContent.trim() : null
res.website = websiteSelector ? websiteSelector.title : null
res.joinDate = null
res.protectedAccount = protectedSelector !== null
res.followback = followBackSelector !== null
// The join-date tooltip text lives either in title or data-original-title.
if (joinDateSelector) {
if (joinDateSelector.title) {
res.joinDate = joinDateSelector.title
}
if (joinDateSelector.dataset.originalTitle) {
res.joinDate = joinDateSelector.dataset.originalTitle
}
}
res.birthday = birthdaySelector ? birthdaySelector.textContent.trim() : null
}
return typeof cb !== "undefined" ? cb(null, res) : Promise.resolve(res)
}
verbose && this.utils.log(`Loading profile: ${url}...`, "loading")
try {
await this.openProfile(tab, url)
} catch (err) {
let loadingErr = `Error while loading ${url}: `
const _url = isUsingNick(tab) ? await tab.getUrl() : tab.url()
loadingErr += _url.indexOf("suspended") > -1 ? "account suspended" : `${err.message || err}`
this.utils.log(loadingErr, "warning")
throw loadingErr
}
verbose && this.utils.log(`${url} loaded`, "done")
return tab.evaluate(_scrapeProfile)
}
/**
 * @async
 * @description Method used to collects followers from a given page: allowed pages: /followers /following
 * @throws if an uncatchable error occurs
 * @param {Nick.Tab} tab - Nickjs Tab instance
 * @param {String} url - URL to open
 * @param {Number} [limit] - Max of followers to collect from the page (if not present: collect all followers)
 * @param {Boolean} [isNetworkCleaner] - when true, abort on rate limit instead of waiting it out
 * @return {Promise<Array<Any>>} Array containing Followers
 */
async collectFollowers(tab, url, limit = -1, isNetworkCleaner = false) {
// Watch network traffic so interceptHttpResponse can flag HTTP 429 responses.
tab.driver.client.on("Network.responseReceived", interceptHttpResponse)
await tab.open(url)
await tab.waitUntilVisible("div.GridTimeline", 10000)
let n = await tab.evaluate(_getDivsNb)
// Infinite-scroll loop: keep scrolling until the limit, the time budget,
// or the end of the timeline is reached.
while (true) {
const timeLeft = await this.utils.checkTimeLeft()
if (!timeLeft.timeLeft) {
this.utils.log(`Stopped getting accounts at URL ${url}: ${timeLeft.message}`, "warning")
break
}
if (limit > 0) {
if (await tab.evaluate(_getFollowersNb) >= limit) {
this.utils.log(`Loaded ${await tab.evaluate(_getFollowersNb)} accounts.`, "done")
break
}
}
await tab.scrollToBottom()
try {
// Wait for the next grid chunk to appear; a timeout here means either a
// rate limit (handled below) or the end of the list.
await tab.waitUntilVisible(`div.GridTimeline-items > div.Grid:nth-child(${n + 1})`)
n = await tab.evaluate(_getDivsNb)
this.utils.log(`Loaded ${await tab.evaluate(_getFollowersNb)} accounts`, "info")
} catch (error) {
if (RATE_LIMIT_REACHED) {
if (!isNetworkCleaner) {
await waitWhileHttpErrors(this.utils, tab)
} else {
this.utils.log("Twitter rate limit reached, you should try again later.", "warning")
process.exit(this.utils.ERROR_CODES.TWITTER_RATE_LIMIT)
}
} else {
this.utils.log(`Loaded ${await tab.evaluate(_getFollowersNb)} accounts.`, "done")
break
}
}
}
let followers = await tab.evaluate(_scrapeFollowers)
if (limit > 0) {
if (limit < followers.length) {
followers = followers.splice(0, limit)
this.utils.log(`Scraped ${limit} accounts at ${url}`, "done")
} else {
this.utils.log(`Scraped ${followers.length} accounts at ${url}`, "done")
}
} else {
this.utils.log(`Scraped all accounts found at ${url}`, "done")
}
tab.driver.client.removeListener("Network.responseReceived", interceptHttpResponse)
return followers
}
/**
* @async
* @description Method used to check if an email account exists on Twitter, and gives some part of the email
* @throws if an uncatchable error occurs
* @param {Nick.Tab} tab - Nickjs Tab instance
* @param {String} input - username/mail/phone number to check
* @return {String} partialEmail
*/
async checkEmail(tab, input) {
console.log("checking Email with input=", input)
try {
await tab.open("https://twitter.com/account/begin_password_reset")
try {
await tab.waitUntilVisible("form")
await tab.sendKeys("form input", input, { reset: true })
await tab.click(".Button")
let selector
try {
selector = await tab.waitUntilVisible(["strong", ".is-errored"], "or", 10000)
} catch (err) {
return null
}
await tab.screenshot(`${Date.now()}-selector".png`)
await this.buster.saveText(await tab.getContent(), `${Date.now()}- selector".html`)
console.log("selector=", selector)
if (selector === "strong") {
// const emailFound = await tab.evaluate((arg, cb) => cb(null, Array.from(document.querySelectorAll("strong")).filter(el => el.textContent.includes("@"))[0].textContent))
const twitterDataArray = await tab.evaluate((arg, cb) => cb(null, Array.from(document.querySelectorAll("strong")).map(el => el.textContent)))
const twitterData = {}
twitterDataArray.map(el => {
if (el.includes("@")) {
twitterData.email = el
} else {
twitterData.phoneNumber = el
}
})
console.log("twitterData", twitterData)
return twitterData
} else if (await tab.evaluate((arg, cb) => cb(null, document.querySelector("div.Section > a")))) {
return "Too many attemps"
} else {
return null
}
} catch (err) {
console.log("err1", err)
await tab.screenshot(`${Date.now()}-err1".png`)
await this.buster.saveText(await tab.getContent(), `${Date.now()}- err1".html`)
}
await tab.screenshot(`${Date.now()}-.png`)
await this.buster.saveText(await tab.getContent(), `${Date.now()}-$.html`)
} catch (err) {
console.log("err2", err)
}
return null
}
/**
* @description Method used to check if a partial email (like gu****@g****.***) matches with another email
* @param {String} email1
* @param {String} email2
* @return {Boolean}
*/
matchEmail(email1, email2) {
if (email1 && email2 && email1.length === email2.length) {
for (let i = 0; i < email1.length; i++) {
if (email1.charAt(i) !== email2.charAt(i) && email1.charAt(i) !== "*" && email2.charAt(i) !== "*") {
return false
}
}
return true
}
return false
}
/**
 * @description Method used to load Tweets from a Twitter page
 * @param {Nick.Tab} tab - Nickjs tab with an Twitter listing page loaded
 * @param {Number} [count] - Amount of items to load (default all)
 * @param {Boolean} [verbose] - printing logs (default yes)
 * @return {Promise<Number>} Loaded count
 */
async loadList(tab, count = Infinity, verbose = true) {
const isNick = isUsingNick(tab)
let loadedContent = 0
let lastCount = 0
// Browser-side: number of tweets currently rendered.
const getContentCount = (arg, cb) => {
const val = document.querySelectorAll("div.tweet.js-actionable-tweet").length
return typeof cb !== "undefined" ? cb(null, val) : val
}
// NickJS variant: poll until more tweets than arg.prevCount appear, the
// timeline reports no more items ("DONE"), or 30s pass with no progress.
// NOTE(review): the trailing cb(null) runs unconditionally on the first idle()
// pass, so this callback appears to resolve immediately — confirm intent.
const waitWhileLoading = (arg, cb) => {
const idleStart = Date.now()
const idle = () => {
const loadedTweets = document.querySelectorAll("div.tweet.js-actionable-tweet").length
if (!document.querySelector(".timeline-end").classList.contains("has-more-items")) {
cb(null, "DONE")
} else if (loadedTweets <= arg.prevCount) {
if (Date.now() - idleStart >= 30000) {
cb("No content loaded after 30s")
}
setTimeout(idle, 100)
}
cb(null)
}
idle()
}
// Puppeteer variant: predicate for tab.waitFor — "DONE" at timeline end,
// false while no new tweets have appeared, true once progress is made.
const loadingIdle = previousCount => {
const loadedTweets = document.querySelectorAll("div.tweet.js-actionable-tweet").length
if (!document.querySelector(".timeline-end").classList.contains("has-more-items")) {
return "DONE"
} else if (loadedTweets <= previousCount) {
return false
}
return true
}
while (loadedContent <= count) {
const timeLeft = await this.utils.checkTimeLeft()
if (!timeLeft.timeLeft) {
break
}
loadedContent = await tab.evaluate(getContentCount)
// Only log every 100 newly loaded items to keep the output readable.
if (verbose && (loadedContent - lastCount >= 100)) {
this.utils.log(`${loadedContent} content loaded`, "info")
lastCount = loadedContent
}
isNick ? await tab.scrollToBottom() : await tab.evaluate(() => window.scrollBy(0, document.body.scrollHeight))
try {
const state = isNick ? await tab.evaluate(waitWhileLoading, { prevCount: loadedContent }) : await tab.waitFor(loadingIdle, loadedContent)
if (state === "DONE") {
break
}
} catch (err) {
this.utils.log(`Error while loading content: ${err.message || err}`, "warning")
break
}
}
return tab.evaluate(getContentCount)
}
}
module.exports = Twitter
|
# install the config files for a component
#export COMPONENTS="vim tmux screen bash aws git sp3 gdb "
# Components managed by this script (sp3 is currently disabled; see line above).
export COMPONENTS="vim tmux screen bash aws git gdb "
# Directory the script is run from; config sources are copied from here.
export THIS_DIR=`pwd`
# Kernel name, used by pkginstall() to pick the package manager.
export OS_TYPE=`uname -s`
# Base packages installed by the "packages" command.
export PLIST="vim tmux git gcc python"
# create <component>|all - install the config files from this repo into $HOME.
create()
{
	echo "create $1"
	if [ $# -eq 0 ]; then
		echo "no <args>"
		# Fixed: "exit -1" is not a portable exit status; use 1.
		exit 1
	fi
	case "$1" in
	all)
		for i in $COMPONENTS; do
			create $i
		done
		;;
	vim)
		mkdir -p ~/.vim
		cp ${THIS_DIR}/vim/vimrc ~/.vimrc
		;;
	tmux)
		cp ${THIS_DIR}/tmux/tmux.conf ~/.tmux.conf
		;;
	screen)
		cp ${THIS_DIR}/screen/screenrc ~/.screenrc
		;;
	bash)
		# Only seed ~/.bash_profile on first install; never clobber an existing one.
		if [ ! -f ~/.bash_profile ]; then
			cp ${THIS_DIR}/bash/bash_profile ~/.bash_profile
			echo "~/.xbash/xbashrc please add the following to ~/.bash_profile"
			echo ""
			echo "if [ -f ~/.xbash/xbashrc ]; then"
			echo " . ~/.xbash/xbashrc"
			echo "fi"
		fi
		mkdir -p ~/.xbash
		cp ${THIS_DIR}/bash/xbash/xbashrc ~/.xbash/xbashrc
		cp ${THIS_DIR}/bash/xbash/*env ~/.xbash/
		cp ${THIS_DIR}/bash/xbash/aliases ~/.xbash/
		;;
	aws)
		mkdir -p ~/.aws
		cp ${THIS_DIR}/aws/config ~/.aws/config
		cp ${THIS_DIR}/aws/credentials ~/.aws/credentials
		;;
	git)
		cp ${THIS_DIR}/gitconfig/gitconfig ~/.gitconfig
		;;
	sp3)
		cp ${THIS_DIR}/sp3/sp3 ~/.sp3
		;;
	gdb)
		cp ${THIS_DIR}/gdb/gdbinit ~/.gdbinit
		;;
	esac
}
# backup <component>|all - copy the currently installed dotfiles into ~/.backup.
backup()
{
	echo "backup $1"
	if [ $# -eq 0 ]; then
		echo "no <args>"
		exit -1
	fi
	if [ ! -d ~/.backup ]; then
		mkdir -p ~/.backup
	fi
	case $1 in
	all)
		for i in $COMPONENTS; do
			backup $i
		done
		;;
	vim)
		cp ~/.vimrc ~/.backup/.vimrc
		;;
	tmux)
		cp ~/.tmux.conf ~/.backup/.tmux.conf
		;;
	screen)
		cp ~/.screenrc ~/.backup/.screenrc
		;;
	bash)
		cp ~/.bash_profile ~/.backup
		cp -r ~/.xbash ~/.backup
		;;
	aws)
		cp -r ~/.aws ~/.backup
		;;
	git)
		cp ~/.gitconfig ~/.backup/.gitconfig
		;;
	sp3)
		# NOTE(review): create() installs ~/.sp3 but backup/restore/cleanup use
		# ~/.sp3e — confirm which path is correct; behavior kept as-is.
		cp ~/.sp3e ~/.backup/.sp3e
		;;
	gdb)
		cp ~/.gdbinit ~/.backup/.gdbinit
		;;
	esac
}
# restore <component>|all - copy dotfiles back from ~/.backup into $HOME.
restore()
{
	echo "restore $1"
	if [ $# -eq 0 ]; then
		echo "no <args>"
		exit -1
	fi
	if [ ! -d ~/.backup ]; then
		echo "~/.backup not found"
		exit -1
	fi
	case $1 in
	all)
		for i in $COMPONENTS; do
			restore $i
		done
		exit 0
		;;
	vim)
		cp ~/.backup/.vimrc ~/.vimrc
		;;
	tmux)
		cp ~/.backup/.tmux.conf ~/.tmux.conf
		;;
	screen)
		cp ~/.backup/.screenrc ~/.screenrc
		;;
	bash)
		cp ~/.backup/.bash_profile ~/
		cp -r ~/.backup/.xbash ~/
		;;
	aws)
		cp -r ~/.backup/.aws ~/
		;;
	git)
		cp ~/.backup/.gitconfig ~/.gitconfig
		;;
	sp3)
		cp ~/.backup/.sp3e ~/.sp3e
		;;
	gdb)
		cp ~/.backup/.gdbinit ~/.gdbinit
		;;
	esac
}
# cleanup <component>|all - delete the installed dotfiles from $HOME.
cleanup()
{
	echo "cleanup $1"
	if [ $# -eq 0 ]; then
		echo "no <args>"
		exit -1
	fi
	case $1 in
	all)
		for i in $COMPONENTS; do
			cleanup $i
		done
		exit 0
		;;
	vim)
		rm -f ~/.vimrc
		;;
	tmux)
		rm -f ~/.tmux.conf
		;;
	screen)
		rm -f ~/.screenrc
		;;
	bash)
		rm -f ~/.bash_profile
		rm -rf ~/.xbash
		;;
	aws)
		rm -rf ~/.aws
		;;
	git)
		rm -f ~/.gitconfig
		;;
	sp3)
		rm -f ~/.sp3e
		;;
	gdb)
		rm -f ~/.gdbinit
		;;
	esac
}
# clobber - remove the ~/.backup directory (and every backed-up dotfile) entirely.
clobber()
{
echo "clobber"
if [ -d ~/.backup ]; then
rm -rf ~/.backup
fi
}
# pkginstall - install the base packages in $PLIST with the platform's package manager.
pkginstall()
{
	echo "Package Install"
	# Generalized: match any MSYS2 environment instead of the single
	# hard-coded build string "MSYS_NT-10.0-21390" (which broke on every
	# other Windows build). A case pattern also avoids the non-portable
	# `==` inside `[ ]`.
	case "$OS_TYPE" in
	MSYS_NT-*)
		pacman --noconfirm -S $PLIST
		;;
	esac
}
# usage - print the command summary (the heredoc body must stay unindented).
usage() {
cat << EOF
$UTIL COMMAND
Commands are:
create <args> - install the files from here
backup <args> - backup the existing dotfiles
cleanup <args> - cleanup the dotfiles
restore <args> - restore the dotfiles from previous backup
clobber <args> - remove the backup files
packages <args> - installed the required base packages
-h, --help - Show this help screen
EOF
echo "<args> : "
echo " all | $COMPONENTS"
}
# Entry point: dispatch the first CLI word to the matching command function,
# forwarding the remaining arguments.
UTIL=$(basename $0)
if [ $# -eq 0 ]; then
	usage
	exit 0
fi
case $1 in
"create")
	shift
	create "$@"
	exit 0
	;;
"cleanup")
	shift
	cleanup "$@"
	exit 0
	;;
"restore")
	shift
	restore "$@"
	exit 0
	;;
"clobber")
	shift
	clobber "$@"
	exit 0
	;;
"backup")
	shift
	backup "$@"
	exit 0
	;;
"packages")
	shift
	# Fixed: was `pkginstall "@"`, which passed the literal string "@"
	# instead of the remaining positional arguments.
	pkginstall "$@"
	exit 0
	;;
-h | --help)
	usage
	exit 0
	;;
*)
	echo >&2 "$UTIL: unknown command \"$1\" (use --help for help)"
	exit 1
	;;
esac
|
/* Base typography for the #text1 element. */
#text1 {
font-size: 18px;
font-family: Arial;
}
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2018.2 (64-bit)
#
# Filename : ddr3_clk_gen.sh
# Simulator : Aldec Active-HDL Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Tue Oct 29 16:19:31 +0800 2019
# SW Build 2258646 on Thu Jun 14 20:03:12 MDT 2018
#
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
# usage: ddr3_clk_gen.sh [-help]
# usage: ddr3_clk_gen.sh [-lib_map_path]
# usage: ddr3_clk_gen.sh [-noclean_files]
# usage: ddr3_clk_gen.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'ddr3_clk_gen.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "ddr3_clk_gen.sh - Script generated by export_simulation (Vivado v2018.2 (64-bit)-id)\n"
# Main steps
# run: validate the CLI arguments, set up the library mappings, then
# compile the design and launch the simulation.
run()
{
check_args $# $1
setup $1 $2
compile
simulate
}
# RUN_STEP: <compile>
compile()
{
# Compile design files
# Output (stdout and stderr) is mirrored into compile.log for later inspection.
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <simulate>
simulate()
{
# Launch the Active-HDL batch simulator with the generated do-file,
# logging to simulate.log.
runvsimsa -l simulate.log -do "do {simulate.do}"
}
# STEP: setup
# Handle the recognized options; any other (or no) option falls through to
# mapping the compiled library path.
setup()
{
case $1 in
"-lib_map_path" )
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./ddr3_clk_gen.sh -help\" for more information)\n"
exit 1
fi
map_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
map_setup_file $2
esac
# Add any setup/initialization commands here:-
# <user specific commands>
}
# Map library.cfg file
# Link the design libraries against the precompiled simulation libraries found
# in $1 (falls back to the <SPECIFY_COMPILED_LIB_PATH> placeholder).
map_setup_file()
{
file="library.cfg"
lib_map_path="<SPECIFY_COMPILED_LIB_PATH>"
if [[ ($1 != "" && -e $1) ]]; then
lib_map_path="$1"
else
# Fixed: the error message referenced the wrong script name (top.sh) and its
# inner quotes were unescaped, so they terminated the string instead of printing.
echo -e "ERROR: Compiled simulation library directory path not specified or does not exist (type \"./ddr3_clk_gen.sh -help\" for more information)\n"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
if [[ -e $src_file ]]; then
vmap -link $lib_map_path
fi
fi
}
# Delete generated data from the previous run
reset_run()
{
# Remove every known simulator artifact that exists in the run directory.
files_to_remove=(compile.log elaboration.log simulate.log dataset.asdb work activehdl)
for file in "${files_to_remove[@]}"; do
if [[ -e $file ]]; then
rm -rf $file
fi
done
}
# Check command line arguments
# Reject a single unknown option; print usage (and exit) on -help/-h.
check_args()
{
if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
echo -e "ERROR: Unknown option specified '$2' (type \"./ddr3_clk_gen.sh -help\" for more information)\n"
exit 1
fi
if [[ ($2 == "-help" || $2 == "-h") ]]; then
usage
fi
}
# Script usage
# Print the option reference and exit with a non-zero status.
usage()
{
msg="Usage: ddr3_clk_gen.sh [-help]\n\
Usage: ddr3_clk_gen.sh [-lib_map_path]\n\
Usage: ddr3_clk_gen.sh [-reset_run]\n\
Usage: ddr3_clk_gen.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script
# Forward the first two CLI arguments (option and its value) to run().
run $1 $2
|
#!/usr/bin/env bash
# Publish the build to Bintray — but only from the long-lived branches.
if [[ "$CI_BRANCH" == "master" || "$CI_BRANCH" == "2.x" ]]; then
PUBLISH=publish
mkdir -p ~/.bintray
# Write the sbt-bintray credentials file from the CI secret variables
# (heredoc body is the exact format sbt-bintray expects).
cat > ~/.bintray/.credentials <<EOF
realm = Bintray API Realm
host = api.bintray.com
user = $BINTRAY_USERNAME
password = $BINTRAY_API_KEY
EOF
sbt ++$SCALA_VERSION "$PUBLISH"
fi
|
#include "duckdb/optimizer/join_order/relation.hpp"
#include "duckdb/common/string_util.hpp"
#include <algorithm>
#include <string>
using namespace duckdb;
using namespace std;
using RelationTreeNode = RelationSetManager::RelationTreeNode;
// Render the relation set as "[r1, r2, ...]" for debugging/EXPLAIN output.
string RelationSet::ToString() const {
string result = "[";
result += StringUtil::Join(relations, count, ", ", [](const idx_t &relation) { return to_string(relation); });
result += "]";
return result;
}
//! Returns true if sub is a subset of super
// Single linear scan over both arrays; this relies on each RelationSet
// storing its relation indices in sorted order (RelationSetManager sorts
// before canonicalizing — see GetRelation).
bool RelationSet::IsSubset(RelationSet *super, RelationSet *sub) {
// The empty set is deliberately NOT treated as a subset here.
if (sub->count == 0) {
return false;
}
if (sub->count > super->count) {
return false;
}
idx_t j = 0;
for (idx_t i = 0; i < super->count; i++) {
if (sub->relations[j] == super->relations[i]) {
j++;
if (j == sub->count) {
return true;
}
}
}
return false;
}
// Canonicalize a sorted relation-index array: walk (and extend) the prefix
// tree, and return the unique RelationSet instance for that exact set. The
// caller must pass the indices already sorted ascending.
RelationSet *RelationSetManager::GetRelation(unique_ptr<idx_t[]> relations, idx_t count) {
// now look it up in the tree
RelationTreeNode *info = &root;
for (idx_t i = 0; i < count; i++) {
auto entry = info->children.find(relations[i]);
if (entry == info->children.end()) {
// node not found, create it
auto insert_it = info->children.insert(make_pair(relations[i], make_unique<RelationTreeNode>()));
entry = insert_it.first;
}
// move to the next node
info = entry->second.get();
}
// now check if the RelationSet has already been created
if (!info->relation) {
// if it hasn't we need to create it
info->relation = make_unique<RelationSet>(move(relations), count);
}
return info->relation.get();
}
//! Create or get a RelationSet from a single node with the given index
RelationSet *RelationSetManager::GetRelation(idx_t index) {
	// Wrap the single index in a one-element array (trivially sorted) and
	// delegate to the canonicalizing overload.
	auto relations = unique_ptr<idx_t[]>(new idx_t[1]);
	relations[0] = index;
	return GetRelation(move(relations), 1);
}
// Create or get the canonical RelationSet for an arbitrary set of bindings.
RelationSet *RelationSetManager::GetRelation(unordered_set<idx_t> &bindings) {
	// Copy the bindings into an array, sort it, then canonicalize via the tree.
	idx_t count = 0;
	unique_ptr<idx_t[]> relations;
	if (!bindings.empty()) {
		relations = unique_ptr<idx_t[]>(new idx_t[bindings.size()]);
		for (auto &entry : bindings) {
			relations[count++] = entry;
		}
		sort(relations.get(), relations.get() + count);
	}
	// An empty binding set yields a null array with count == 0.
	return GetRelation(move(relations), count);
}
// Set union of two sorted relation sets via a linear two-pointer merge that
// drops duplicates; the result is canonicalized through GetRelation.
RelationSet *RelationSetManager::Union(RelationSet *left, RelationSet *right) {
auto relations = unique_ptr<idx_t[]>(new idx_t[left->count + right->count]);
idx_t count = 0;
// move through the left and right relations, eliminating duplicates
idx_t i = 0, j = 0;
while (true) {
if (i == left->count) {
// exhausted left relation, add remaining of right relation
for (; j < right->count; j++) {
relations[count++] = right->relations[j];
}
break;
} else if (j == right->count) {
// exhausted right relation, add remaining of left
for (; i < left->count; i++) {
relations[count++] = left->relations[i];
}
break;
} else if (left->relations[i] == right->relations[j]) {
// equivalent, add only one of the two pairs
relations[count++] = left->relations[i];
i++;
j++;
} else if (left->relations[i] < right->relations[j]) {
// left is smaller, progress left and add it to the set
relations[count++] = left->relations[i];
i++;
} else {
// right is smaller, progress right and add it to the set
relations[count++] = right->relations[j];
j++;
}
}
return GetRelation(move(relations), count);
}
// Set difference (left \ right) of two sorted relation sets via a linear
// two-pointer scan; the result is canonicalized through GetRelation.
RelationSet *RelationSetManager::Difference(RelationSet *left, RelationSet *right) {
auto relations = unique_ptr<idx_t[]>(new idx_t[left->count]);
idx_t count = 0;
// move through the left and right relations
idx_t i = 0, j = 0;
while (true) {
if (i == left->count) {
// exhausted left relation, we are done
break;
} else if (j == right->count) {
// exhausted right relation, add remaining of left
for (; i < left->count; i++) {
relations[count++] = left->relations[i];
}
break;
} else if (left->relations[i] == right->relations[j]) {
// equivalent, add nothing
i++;
j++;
} else if (left->relations[i] < right->relations[j]) {
// left is smaller, progress left and add it to the set
relations[count++] = left->relations[i];
i++;
} else {
// right is smaller, progress right
j++;
}
}
return GetRelation(move(relations), count);
}
|
use bytesize::ByteSize;
use std::fmt::Write;
/// Render the combined size of all `(name, size)` pairs as a single
/// human-readable `TOTAL: …` line (names are not included in the output).
fn format_file_sizes(sizes: Vec<(&str, u64)>) -> String {
    let mut total_size: u64 = 0;
    for (_name, size) in &sizes {
        total_size += *size;
    }
    format!("TOTAL: {}", ByteSize::b(total_size))
}
fn main() {
let file_sizes = vec![("file1.txt", 1024), ("file2.txt", 2048), ("file3.txt", 3072)];
let output = format_file_sizes(file_sizes);
println!("{}", output);
} |
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { Field } from 'redux-form';
import { Hovedknapp } from 'nav-frontend-knapper';
import { getLedetekst, Utvidbar, SoknadOppsummering, VaerKlarOverAt } from '@navikt/digisyfo-npm';
import reduxFormSetup from '../utils/reduxFormSetup';
import SykepengerSkjema from '../SykepengerSkjema';
import Knapperad from '../../components/skjema/Knapperad';
import mapSkjemasoknadToBackendsoknad from '../mappers/mapSkjemasoknadToBackendsoknad';
import CheckboxSelvstendig from '../../components/skjema/CheckboxSelvstendig';
import validate from './validerOppsummering';
import { sykepengesoknad as sykepengesoknadPt, oppsummeringsoknad as oppsummeringsoknadPt } from '../../propTypes/index';
import ForskuttererArbeidsgiver from './ForskuttererArbeidsgiver';
import AvbrytSoknadContainer from '../avbryt-soknad/AvbrytSoknadContainer';
import Feilstripe from '../../components/Feilstripe';
import FeiloppsummeringContainer from '../../components/skjema/feiloppsummering/FeiloppsummeringContainer';
import { getSoknadSkjemanavn } from '../../enums/skjemanavn';
import { ARBEIDSGIVER, ARBEIDSGIVER_OG_NAV, NAV } from '../../sykepengesoknad/enums/soknadmottakertyper';
// Resolve the human-readable recipient text for the given destination
// (NAV, the employer, or both); undefined for unknown destinations.
const mottaker = (sendesTil, sykepengesoknad) => {
    if (sendesTil === NAV) {
        return getLedetekst('sykepengesoknad.oppsummering.nav-som-mottaker');
    }
    if (sendesTil === ARBEIDSGIVER) {
        return getLedetekst('sykepengesoknad.oppsummering.arbeidsgiver-som-mottaker', { '%ARBEIDSGIVER%': sykepengesoknad.arbeidsgiver.navn });
    }
    if (sendesTil === ARBEIDSGIVER_OG_NAV) {
        return getLedetekst('sykepengesoknad.oppsummering.nav-og-arbeidsgiver-som-mottaker', { '%ARBEIDSGIVER%': sykepengesoknad.arbeidsgiver.navn });
    }
    return undefined;
};
// Final step of the application form: shows a summary of the answers and lets
// the user confirm and submit the application.
export class OppsummeringForm extends Component {
    componentDidMount() {
        // Move keyboard focus to the form container (made focusable via tabIndex="-1").
        if (this.form) {
            this.form.focus();
        }
    }
    render() {
        const { sykepengesoknad, oppsummeringsoknad, handleSubmit, actions, sender, sendingFeilet, visForskutteringssporsmal, sendesTil } = this.props;
        const label = getLedetekst('sykepengesoknad.oppsummering.bekreft-korrekt-informasjon.label');
        // Map the redux-form values to the backend payload and dispatch the send action.
        const onSubmit = (values) => {
            const soknad = mapSkjemasoknadToBackendsoknad(values, {
                visForskutteringssporsmal: visForskutteringssporsmal === true,
            });
            soknad.oppsummering = oppsummeringsoknad;
            const soknadObjekt = JSON.parse(JSON.stringify(soknad)); // Hack to ensure the correct date format in the serialized payload
            actions.sendSykepengesoknad(soknadObjekt);
        };
        return (<form
            className="soknadskjema"
            ref={(c) => {
                this.form = c;
            }}
            tabIndex="-1"
            id="oppsummering-skjema"
            onSubmit={handleSubmit(onSubmit)}>
            <Utvidbar tittel="Oppsummering" erApen={false} className="blokk">
                <SoknadOppsummering oppsummeringsoknad={oppsummeringsoknad} />
            </Utvidbar>
            <div className="redaksjonelt-innhold oppsummering__vaerKlarOverAt blokk">
                <VaerKlarOverAt oppsummeringsoknad={oppsummeringsoknad} />
            </div>
            <div className="blokk">
                <Field component={CheckboxSelvstendig} name="bekreftetKorrektInformasjon" id="bekreftetKorrektInformasjon" label={label} />
            </div>
            { visForskutteringssporsmal && <ForskuttererArbeidsgiver /> }
            <Feilstripe vis={sendingFeilet} className="blokk" />
            { !visForskutteringssporsmal && <p className="js-mottaker sykepengerskjema__sendesTil">{mottaker(sendesTil, sykepengesoknad)}</p> }
            <Knapperad variant="knapperad--medAvbryt">
                <Hovedknapp
                    className="js-send"
                    spinner={sender}
                    disabled={sender}>{getLedetekst('sykepengesoknad.send')}
                </Hovedknapp>
            </Knapperad>
            <AvbrytSoknadContainer sykepengesoknad={sykepengesoknad} />
        </form>);
    }
}
// Runtime prop contracts. `handleSubmit` is injected by redux-form;
// `actions.sendSykepengesoknad` dispatches the actual submission.
OppsummeringForm.propTypes = {
    sykepengesoknad: sykepengesoknadPt,
    handleSubmit: PropTypes.func,
    oppsummeringsoknad: oppsummeringsoknadPt,
    actions: PropTypes.shape({
        sendSykepengesoknad: PropTypes.func,
    }),
    sender: PropTypes.bool,
    sendingFeilet: PropTypes.bool,
    visForskutteringssporsmal: PropTypes.bool,
    sendesTil: PropTypes.string,
};
// Wire the summary form up to redux-form with the shared validation rules.
const OppsummeringSkjema = reduxFormSetup(validate, OppsummeringForm);
// Page wrapper: renders step 4 of the flow with the error summary above the form.
const OppsummeringSide = (props) => {
    const { sykepengesoknad } = props;
    return (<SykepengerSkjema aktivtSteg="4" sykepengesoknad={sykepengesoknad}>
        <FeiloppsummeringContainer skjemanavn={getSoknadSkjemanavn(sykepengesoknad.id)} />
        <OppsummeringSkjema {...props} />
    </SykepengerSkjema>);
};
OppsummeringSide.propTypes = {
    sykepengesoknad: sykepengesoknadPt,
};
export default OppsummeringSide;
|
public List<Book> searchBooks(String query) {
List<Book> results = new ArrayList<>();
String queryLower = query.toLowerCase();
for (Book book : books) {
if (book.getTitle().toLowerCase().contains(queryLower) ||
book.getAuthor().toLowerCase().contains(queryLower) ||
book.getCategory().toLowerCase().contains(queryLower)) {
results.add(book);
}
}
results.sort(Comparator.comparing(Book::getTitle));
return results;
} |
const { Client } = require('discord.js');
const WOKCommands = require('wokcommands');
require('dotenv').config();
// MESSAGE/REACTION partials let the bot receive reaction events for messages
// that are not in its local cache.
const client = new Client({
    partials: ["MESSAGE", "REACTION"]
});
client.on('ready', () => {
    console.log('Ready!');
    // Presence text shows the guild count as observed at startup.
    client.user.setActivity(`${client.guilds.cache.size} servers`, { type: 'WATCHING' });
    // Initialise the command/feature framework from the commands/ and
    // features/ directories once the client is ready.
    new WOKCommands(client, 'commands', 'features')
    .setDefaultPrefix('mcb!')
    .setCategoryEmoji('Minecraft Skins', '🙋♂️')
    .setCategoryEmoji('Utilities', '🔧')
    .setCategoryEmoji('Minecraft Servers', '🌐')
    .setCategoryEmoji('Minecraft API', '🟩')
    .setBotOwner('468093150217371649')
});
// Log rate-limit events from discord.js for visibility.
client.on('rateLimit', rldata => {
    console.log(
        'I\'m being ratelimited, and this proves that I\'m doing so much work for you. Please pay me. I want some RAM.\n\nRate Limit Timeout:\n' +
        rldata.timeout
    );
});
client.login(process.env.DISCORD_TOKEN);
<reponame>TivonJJ/umi-plugin-better-theme
const hash = require('hash.js');
// Compute the hex-encoded SHA-256 digest of `content`.
function genHashCode(content) {
    return hash.sha256().update(content).digest('hex');
}
exports.genHashCode = genHashCode;
|
/*
* Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
* Licensed under the MIT License.
*/
package ai.onnxruntime.providers;
/** Flags for the NNAPI provider. */
public enum NNAPIFlags implements OrtFlags {
  /** Corresponds to the native flag {@code NNAPI_FLAG_USE_FP16} (0x001). */
  USE_FP16(1), // NNAPI_FLAG_USE_FP16(0x001)
  /** Corresponds to the native flag {@code NNAPI_FLAG_USE_NCHW} (0x002). */
  USE_NCHW(2), // NNAPI_FLAG_USE_NCHW(0x002)
  /** Corresponds to the native flag {@code NNAPI_FLAG_CPU_DISABLED} (0x004). */
  CPU_DISABLED(4); // NNAPI_FLAG_CPU_DISABLED(0x004)

  /** The native bit value for this flag. */
  public final int value;

  /**
   * Creates a flag wrapping the given native bit value.
   *
   * @param value the native flag value
   */
  NNAPIFlags(int value) {
    this.value = value;
  }

  /** {@inheritDoc} */
  @Override
  public int getValue() {
    return value;
  }
}
|
<reponame>smagill/opensphere-desktop<gh_stars>10-100
package io.opensphere.wfs.config;
import io.opensphere.core.common.connection.ServerConfiguration;
import io.opensphere.core.util.PausingTimeBudget;
import io.opensphere.server.customization.ServerCustomization;
import io.opensphere.server.services.ServerConnectionParams;
import io.opensphere.server.source.AuthenticationHelper;
import io.opensphere.server.source.OGCServerSource;
/**
 * The Default implementation of the WFSConnectionParams interface.
 */
public class DefaultWfsConnectionParams implements ServerConnectionParams
{
    /** The parameters needed to connect to the server. */
    private final ServerConfiguration myServerConfig;
    /** Special rules for how to configure/format requests to the server. */
    private final ServerCustomization myServerCustomization;
    /** The ID that uniquely identifies this server. */
    private final String myServerId;
    /** The Server title. */
    private final String myServerTitle;
    /** My WFS URL. */
    private final String myWfsUrl;
    /**
     * Copy constructor for a new default WFS connection params.
     *
     * <p>Note: the server ID is captured once, for the WFS service only.
     *
     * @param other the object to copy from
     */
    public DefaultWfsConnectionParams(ServerConnectionParams other)
    {
        myWfsUrl = other.getWfsUrl();
        myServerId = other.getServerId(OGCServerSource.WFS_SERVICE);
        myServerTitle = other.getServerTitle();
        myServerConfig = other.getServerConfiguration();
        myServerCustomization = other.getServerCustomization();
    }
    /** Delegates failed-authentication handling to {@link AuthenticationHelper}. */
    @Override
    public void failedAuthentication()
    {
        AuthenticationHelper.failedAuthentication(myServerConfig);
    }
    @Override
    public ServerConfiguration getServerConfiguration()
    {
        return myServerConfig;
    }
    @Override
    public ServerCustomization getServerCustomization()
    {
        return myServerCustomization;
    }
    /**
     * {@inheritDoc}
     *
     * <p>The {@code service} argument is ignored: this implementation always
     * returns the WFS server ID captured at construction time.
     */
    @Override
    public String getServerId(String service)
    {
        return myServerId;
    }
    @Override
    public String getServerTitle()
    {
        return myServerTitle;
    }
    /** Always {@code null}: this implementation does not track a time budget. */
    @Override
    public PausingTimeBudget getTimeBudget()
    {
        return null;
    }
    @Override
    public String getWfsUrl()
    {
        return myWfsUrl;
    }
    @Override
    public String getWmsGetMapOverride()
    {
        // Don't care about WMS in this plugin
        return null;
    }
    @Override
    public String getWmsUrl()
    {
        // Don't care about WMS in this plugin
        return null;
    }
    @Override
    public String getWpsUrl()
    {
        // Don't care about WPS in this plugin
        return null;
    }
    /** No-op: time budgets are not supported by this implementation. */
    @Override
    public void setTimeBudget(PausingTimeBudget timeBudget)
    {
    }
}
|
#!/bin/bash
set -e
# Run FILENAME through JSON.sh in bracket mode (-b) and grep for the key-path
# pattern MATCH. The `echo $(...)` deliberately re-joins the matched line with
# whitespace collapsed to single spaces; the substring offset math in the
# callers depends on that normalization — do not quote the substitution.
# NOTE(review): $FILENAME and $MATCH are expanded unquoted in the pipeline, so
# values containing spaces would word-split — confirm inputs are space-free.
function _base_json_grep {
    local FILENAME="$1"
    local MATCH="$2"
    echo $(cat $FILENAME | bash ./scripts/JSON.sh -b | grep $MATCH)
}
# Extract the string value for the key path MATCH from the JSON file FILENAME.
# Strips the leading key-path text and the trailing quote via substring
# offsets on the grep output.
# NOTE(review): the offsets use the literal length of MATCH (which contains
# regex backslashes) against the matched output line (which does not) — the
# arithmetic appears tuned to JSON.sh's exact output; verify before changing.
function json_grep {
    local FILENAME="$1"
    local MATCH="$2"
    local MATCHED=$(_base_json_grep $FILENAME $MATCH)
    echo ${MATCHED:${#MATCH}:${#MATCHED}-${#MATCH}-1}
}
# Same as json_grep but for unquoted numeric values: the start offset is
# shifted by one because the value is not wrapped in quote characters.
function json_grep_int {
    local FILENAME="$1"
    local MATCH="$2"
    local MATCHED=$(_base_json_grep $FILENAME $MATCH)
    echo ${MATCHED:${#MATCH}-1:${#MATCHED}-${#MATCH}-1}
}
# For every Reddit post URL passed as an argument: download its JSON form,
# extract title/permalink/author/body/created time, and write a dated
# Jekyll-style markdown post into _posts/.
for f in "$@"
do
echo "Fetching $f"
# NOTE(review): the temp file is never removed — consider trap-based cleanup.
TFILE=`mktemp`
curl --fail "$f.json" > $TFILE
TITLE=$(json_grep $TFILE '\[0,"data","children",0,"data","title"\]')
PERMALINK=$(json_grep $TFILE '\[0,"data","children",0,"data","permalink"\]')
# The slug is the next-to-last path component of the permalink.
SLUG=$(echo "$PERMALINK" | awk -F"/" '{print $(NF-1)}')
CREATED_AT=$(json_grep_int $TFILE '\[0,"data","children",0,"data","created"\]')
echo $CREATED_AT
CREATED_BY=$(json_grep $TFILE '\[0,"data","children",0,"data","author"\]')
POST=$(json_grep $TFILE '\[0,"data","children",0,"data","selftext"\]')
# NOTE(review): `date -r <epoch>` is the BSD/macOS form; GNU date needs
# `date -d @<epoch>` — confirm the intended platform.
DATE=$(date -r "$CREATED_AT" +%Y-%m-%d)
TARGET="_posts/$DATE-$SLUG.md"
cat > $TARGET <<EOF
+++
title = "$TITLE"
date = $DATE
source:
author: $CREATED_BY
link:
url: "$f"
name: "Rust Users Forum"
+++
$POST
EOF
echo "$TARGET written."
done
|
require 'active_support/inflector'
module JsonApi::Parameters
  include ActiveSupport::Inflector

  # Public entry point: translate a JSON:API formatted params payload into a
  # flat params hash keyed by the (singularized) resource type.
  def jsonapify(params, naming_convention: :snake)
    jsonapi_translate(params, naming_convention: naming_convention)
  end

  private

  def jsonapi_translate(params, naming_convention:)
    params = params.to_unsafe_h if params.is_a?(ActionController::Parameters)
    return params if params.nil? || params.empty?

    # Reset memoized state so repeated calls on the same includer (e.g. two
    # jsonapify calls within one controller instance) do not silently return
    # the result of the first call.
    @formed_parameters = nil
    @jsonapi_unsafe_params = nil
    @jsonapi_included = nil
    @jsonapi_relationships = nil

    @jsonapi_unsafe_hash = if naming_convention != :snake || JsonApi::Parameters.ensure_underscore_translation
      params.deep_transform_keys { |key| key.to_s.underscore.to_sym }
    else
      params.deep_symbolize_keys
    end
    formed_parameters
  end

  # { <resource type> => <translated attributes and relationships> }
  def formed_parameters
    @formed_parameters ||= {}.tap do |param|
      param[jsonapi_main_key.to_sym] = jsonapi_main_body
    end
  end

  def jsonapi_main_key
    @jsonapi_unsafe_hash.dig(:data, :type)&.singularize || ''
  end

  # Attributes merged with the translated relationships.
  def jsonapi_main_body
    jsonapi_unsafe_params.tap do |param|
      jsonapi_relationships.each do |relationship_key, relationship_value|
        relationship_value = relationship_value[:data]
        handler_args = [relationship_key, relationship_value, jsonapi_included]
        # Prefer an explicitly registered handler for this resource; otherwise
        # dispatch on the resource linkage shape (Array / Hash / nil).
        handler = if Handlers.resource_handlers.key?(relationship_key)
          Handlers.handlers[Handlers.resource_handlers[relationship_key]]
        else
          case relationship_value
          when Array
            Handlers.handlers[:to_many]
          when Hash
            Handlers.handlers[:to_one]
          when nil
            Handlers.handlers[:nil]
          else
            raise NotImplementedError.new('relationship resource linkage has to be a type of Array, Hash or nil')
          end
        end
        key, val = handler.call(*handler_args)
        param[key] = val
      end
    end
  end

  # data.attributes plus the resource id, when present.
  def jsonapi_unsafe_params
    @jsonapi_unsafe_params ||= (@jsonapi_unsafe_hash.dig(:data, :attributes) || {}).tap do |param|
      id = @jsonapi_unsafe_hash.dig(:data, :id)
      param[:id] = id if id.present?
    end
  end

  def jsonapi_included
    @jsonapi_included ||= @jsonapi_unsafe_hash[:included] || []
  end

  def jsonapi_relationships
    @jsonapi_relationships ||= @jsonapi_unsafe_hash.dig(:data, :relationships) || []
  end
end
|
<reponame>J-env/pmr<gh_stars>0
import { Node } from '../prosemirror-model'
import { Selection } from './selection'
import { Transaction } from './transaction'
// Bind `f` to `self` when both are provided; otherwise return `f` untouched.
function bind(f, self) {
    if (f && self) {
        return f.bind(self);
    }
    return f;
}
// Describes one state field: a name plus init/apply functions, optionally
// bound to `self` (the plugin that owns the field).
class FieldDesc {
    constructor(name, desc, self) {
        this.name = name;
        this.init = bind(desc.init, self);
        this.apply = bind(desc.apply, self);
    }
}
// Built-in state fields present in every EditorState. Each FieldDesc supplies
// an initial value (init) and a per-transaction updater (apply).
const baseFields = [
    new FieldDesc('doc', {
        init(config) { return config.doc || config.schema.topNodeType.createAndFill() },
        apply(tr) { return tr.doc }
    }),
    new FieldDesc('selection', {
        init(config, instance) { return config.selection || Selection.atStart(instance.doc) },
        apply(tr) { return tr.selection }
    }),
    new FieldDesc('storedMarks', {
        // Stored marks are kept only while the selection is a cursor.
        init(config) { return config.storedMarks || null },
        apply(tr, _marks, _old, state) { return state.selection.$cursor ? tr.storedMarks : null }
    }),
    new FieldDesc('scrollToSelection', {
        // Counter bumped whenever a transaction requests scrolling into view.
        init() { return 0 },
        apply(tr, prev) { return tr.scrolledIntoView ? prev + 1 : prev }
    })
];
// Holds a schema plus the resolved plugin list and the full set of state
// fields (base fields followed by one field per plugin with a state spec).
class Configuration {
    constructor(schema, plugins) {
        this.schema = schema;
        this.fields = baseFields.concat();
        this.plugins = [];
        this.pluginsByKey = Object.create(null);
        if (!plugins) {
            return;
        }
        for (const plugin of plugins) {
            // Two plugin instances sharing a key cannot coexist.
            if (this.pluginsByKey[plugin.key]) {
                throw new RangeError('Adding different instances of a keyed plugin (' + plugin.key + ')');
            }
            this.plugins.push(plugin);
            this.pluginsByKey[plugin.key] = plugin;
            if (plugin.spec.state) {
                this.fields.push(new FieldDesc(plugin.key, plugin.spec.state, plugin));
            }
        }
    }
}
// Immutable editor state: a Configuration plus one value per registered field
// (doc, selection, storedMarks, scrollToSelection and plugin state fields).
export class EditorState {
    constructor(config) {
        this.config = config;
    }
    get schema() { return this.config.schema; }
    get plugins() { return this.config.plugins; }
    // Apply a single transaction and return only the resulting state.
    apply(tr) { return this.applyTransaction(tr).state; }
    // Ask every plugin (except the one at index `ignore`) whether `tr` may be
    // applied; a false result vetoes the transaction.
    filterTransaction(tr, ignore = -1) {
        let len = this.config.plugins.length;
        for (let i = 0; i < len; i++) {
            if (i !== ignore) {
                let plugin = this.config.plugins[i];
                if (plugin.spec.filterTransaction && !plugin.spec.filterTransaction.call(plugin, tr, this)) {
                    return false;
                }
            }
        }
        return true;
    }
    // Apply `rootTr`, then repeatedly let plugins with appendTransaction react
    // to the transactions applied so far, until a full pass adds nothing new.
    // Returns the final state plus every transaction that was applied.
    applyTransaction(rootTr) {
        if (!this.filterTransaction(rootTr)) {
            return {state: this, transactions: []};
        }
        let trs = [rootTr], newState = this.applyInner(rootTr), seen = null;
        outer: for (;;) {
            let haveNew = false;
            for (let i = 0; i < this.config.plugins.length; i++) {
                let plugin = this.config.plugins[i];
                if (plugin.spec.appendTransaction) {
                    // `seen` tracks, per plugin, how many transactions it has
                    // already been shown and against which state.
                    let n = seen ? seen[i].n : 0, oldState = seen ? seen[i].state : this;
                    let tr = n < trs.length && plugin.spec.appendTransaction.call(plugin, n ? trs.slice(n) : trs, oldState, newState);
                    if (tr && newState.filterTransaction(tr, i)) {
                        tr.setMeta('appendedTransaction', rootTr);
                        if (!seen) {
                            seen = [];
                            for (let j = 0; j < this.config.plugins.length; j++) {
                                seen.push(j < i ? {state: newState, n: trs.length} : {state: this, n: 0});
                            }
                        }
                        trs.push(tr);
                        newState = newState.applyInner(tr);
                        haveNew = true;
                    }
                    if (seen) { seen[i] = {state: newState, n: trs.length}; }
                }
            }
            if (!haveNew) { return {state: newState, transactions: trs} }
        }
    }
    // Produce the successor state for one transaction by running every field's
    // apply function, then notify registered apply listeners.
    applyInner(tr) {
        if (!tr.before.eq(this.doc)) {
            throw new RangeError('Applying a mismatched transaction');
        }
        let newInstance = new EditorState(this.config), fields = this.config.fields;
        for (let i = 0, len = fields.length; i < len; i++) {
            let field = fields[i];
            newInstance[field.name] = field.apply(tr, this[field.name], this, newInstance);
        }
        for (let i = 0, len1 = applyListeners.length; i < len1; i++) {
            applyListeners[i](this, tr, newInstance);
        }
        return newInstance;
    }
    // A fresh transaction based on this state.
    get tr() { return new Transaction(this); }
    // Build an initial state from a config object (schema or doc required).
    static create(config) {
        let $config = new Configuration(config.schema || config.doc.type.schema, config.plugins);
        let instance = new EditorState($config);
        for (let i = 0, len = $config.fields.length; i < len; i++) {
            instance[$config.fields[i].name] = $config.fields[i].init(config, instance);
        }
        return instance;
    }
    // Create a state with a new plugin set, carrying over the values of any
    // field that also exists on this state.
    reconfigure(config) {
        let $config = new Configuration(config.schema || this.schema, config.plugins);
        let fields = $config.fields, instance = new EditorState($config);
        for (let i = 0, len = fields.length; i < len; i++) {
            let name = fields[i].name;
            instance[name] = this.hasOwnProperty(name) ? this[name] : fields[i].init(config, instance);
        }
        return instance;
    }
    // Serialize doc/selection/storedMarks; `pluginFields` maps JSON property
    // names to plugins whose state should be serialized alongside them.
    toJSON(pluginFields) {
        let result = {
            doc: this.doc.toJSON(),
            selection: this.selection.toJSON()
        };
        if (this.storedMarks) result.storedMarks = this.storedMarks.map(m => m.toJSON());
        if (pluginFields && typeof pluginFields === 'object') {
            for (let prop in pluginFields) {
                if (prop === 'doc' || prop === 'selection') {
                    throw new RangeError('The JSON fields `doc` and `selection` are reserved')
                }
                let plugin = pluginFields[prop], state = plugin.spec.state;
                if (state && state.toJSON) result[prop] = state.toJSON.call(plugin, this[plugin.key]);
            }
        }
        return result;
    }
    // Inverse of toJSON: rebuild a state from serialized JSON, delegating
    // plugin fields to their fromJSON hooks when mapped in `pluginFields`.
    static fromJSON(config, json, pluginFields) {
        if (!json) throw new RangeError('Invalid input for EditorState.fromJSON');
        if (!config.schema) throw new RangeError('Required config field "schema" missing');
        let $config = new Configuration(config.schema, config.plugins);
        let instance = new EditorState($config);
        $config.fields.forEach(field => {
            if (field.name === 'doc') {
                instance.doc = Node.fromJSON(config.schema, json.doc);
            } else if (field.name === 'selection') {
                instance.selection = Selection.fromJSON(instance.doc, json.selection);
            } else if (field.name === 'storedMarks') {
                if (json.storedMarks) instance.storedMarks = json.storedMarks.map(config.schema.markFromJSON);
            } else {
                if (pluginFields) {
                    for (let prop in pluginFields) {
                        let plugin = pluginFields[prop], state = plugin.spec.state
                        if (plugin.key == field.name && state && state.fromJSON && Object.prototype.hasOwnProperty.call(json, prop)) {
                            // This field belongs to a plugin mapped to a JSON field, read it from there.
                            instance[field.name] = state.fromJSON.call(plugin, config, json[prop], instance);
                            return;
                        }
                    }
                }
                instance[field.name] = field.init(config, instance);
            }
        })
        return instance
    }
    // Global hooks invoked from applyInner on every state transition.
    static addApplyListener(f) {
        applyListeners.push(f);
    }
    static removeApplyListener(f) {
        let found = applyListeners.indexOf(f);
        if (found > -1) applyListeners.splice(found, 1);
    }
}
// Callbacks registered through EditorState.addApplyListener; each is invoked
// from applyInner after a transaction produces a new state.
const applyListeners = [];
|
<gh_stars>0
package dev.vankka.dependencydownload.dependency;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * A maven dependency.
 */
@SuppressWarnings("unused") // API
public interface Dependency {
    /** Format string joining group path, artifact id, version and file name with slashes. */
    String MAVEN_PATH_FORMAT = "%s/%s/%s/%s";
    /**
     * The group id of the dependency.
     * @return the dependency's group id
     */
    @NotNull
    String getGroupId();
    /**
     * The artifact id of the dependency.
     * @return the dependency's artifact id
     */
    @NotNull
    String getArtifactId();
    /**
     * The version of the dependency.
     * @return the dependency's version
     */
    @NotNull
    String getVersion();
    /**
     * The classifier for the dependency artifact, if any.
     * @return the dependency artifact's classifier or {@code null}.
     */
    @Nullable
    String getClassifier();
    /**
     * The timestamped snapshot version.
     * @return the timestamped snapshot version or {@code null} if this isn't a snapshot dependency.
     * @see #isSnapshot()
     */
    @Nullable
    String getSnapshotVersion();
    /**
     * The hash of the dependency, this is checked against the downloaded file.
     * @return the hash of the dependency archive
     * @see #getHashingAlgorithm()
     */
    @NotNull
    String getHash();
    /**
     * The hashing algorithm used for the {@link #getHash()}.
     * @return the hashing algorithm used for the dependency archive's hash
     * @see #getHash()
     */
    @NotNull
    String getHashingAlgorithm();
    /**
     * If this is a snapshot dependency.
     * @return true if this dependency is a snapshot
     */
    boolean isSnapshot();
    /**
     * Returns the file name for the end of the maven path.
     * @return the file name for the dependency
     */
    @NotNull
    default String getFileName() {
        String classifier = getClassifier();
        return getArtifactId() + '-' + getVersion() + (classifier != null ? '-' + classifier : "") + ".jar";
    }
    /**
     * Returns the file name when stored to disk.
     * Unlike {@link #getFileName()}, this prefixes the group id to avoid collisions.
     * @return the file name for storing the dependency
     */
    @NotNull
    default String getStoredFileName() {
        String classifier = getClassifier();
        return getGroupId()
                + '-' + getArtifactId()
                + '-' + getVersion()
                + (classifier != null ? '-' + classifier : "")
                + ".jar";
    }
    /**
     * The path to this dependency on a maven repository, without the protocol, domain or slash at the beginning.
     * @return the path to this dependency's jar file on a maven repository
     */
    @NotNull
    default String getMavenPath() {
        return String.format(
                MAVEN_PATH_FORMAT,
                getGroupId().replace('.', '/'),
                getArtifactId(),
                getVersion(),
                getFileName()
        );
    }
    /**
     * Gets the group id, artifact id, version and classifier (if specified) separated by semicolons.
     * @return the maven artifact's GAV and classifier parameters separated by semicolons (<code>:</code>)
     */
    @NotNull
    default String getMavenArtifact() {
        String classifier = getClassifier();
        return getGroupId() + ":" + getArtifactId() + ":" + getVersion() + (classifier != null ? ":" + classifier : "");
    }
}
|
import { Injectable } from '@angular/core';
import {Http,Headers} from "@angular/http";
import {Observable} from "rxjs";
@Injectable()
export class ValidationServiceService {
    constructor (private http: Http) {}
    // private instance variable to hold base url
    private validationServerUrl = 'http://localhost:8080/server/rest/engine/data';
    // Form-encode a flat object into "key=value&..." form.
    // NOTE(review): URLSearchParams here is the browser global, not an Angular
    // import — confirm this is intended for the target environment.
    private urlEncode(obj: Object): string {
        let urlSearchParams = new URLSearchParams();
        for (let key in obj) {
            urlSearchParams.append(key, obj[key]);
        }
        return urlSearchParams.toString();
    }
    // POST the product code to the validation endpoint and emit the parsed
    // JSON response.
    // NOTE(review): relies on the legacy @angular/http module and
    // prototype-patched rxjs operators (.map/.do/.catch), which must be
    // registered elsewhere in the app.
    public validateCode(code:string) : Observable<any>{
        // form-encode the product code for the POST body
        let body=this.urlEncode({productcode:code});
        let headers = new Headers();
        headers.append('Content-Type','application/x-www-form-urlencoded');
        return this.http.post(this.validationServerUrl,body,{headers:headers})
        // ...and calling .json() on the response to return data
        .map(res => <any>res.json())
        .do(data => console.log(data))
        //...errors if any
        .catch((error:any) => Observable.throw(error.json().error || 'Server error'));
    }
}
|
#!/bin/bash
# Prepend repo-local tool and shared-helper directories to PATH, and the local
# source trees to PYTHONPATH — presumably so in-repo versions are found before
# any system-installed ones (paths are relative to the invocation directory).
export PATH="./local:../common:$PATH"
export PYTHONPATH="./src:../common/src:../../../src:$PYTHONPATH"
<reponame>khaled-11/Botai
// Function to get the Messenger "greeting" profile data for a Page
const rp = require('request-promise');
module.exports = async (token) => {
var results;
try{
var options = {
method: 'GET',
uri: `https://graph.facebook.com/v9.0/me/messenger_profile?access_token=${token}&fields=greeting`,
json: true
};
results = await rp(options);
}
catch (e){
return;
}
return results;
}; |
<reponame>rbg001/WxJava<filename>starters/wx-java-mp-starter/src/main/java/com/binarywang/spring/starter/wxjava/mp/WxMpServiceAutoConfiguration.java<gh_stars>10-100
package com.binarywang.spring.starter.wxjava.mp;
import me.chanjar.weixin.mp.api.WxMpConfigStorage;
import me.chanjar.weixin.mp.api.WxMpService;
import me.chanjar.weixin.mp.api.impl.WxMpServiceImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Auto-configuration that registers the WeChat Official Account (MP) service
 * and its sub-services as Spring beans.
 */
@Configuration
public class WxMpServiceAutoConfiguration {
    @Autowired
    private ApplicationContext ctx;
    /**
     * Builds the main {@link WxMpService} backed by the given config storage,
     * then registers all of its sub-services as singleton beans.
     *
     * @param configStorage the MP configuration storage to back the service
     * @return the configured service
     */
    @Bean
    @ConditionalOnMissingBean
    public WxMpService wxMpService(WxMpConfigStorage configStorage) {
        WxMpService wxMpService = new WxMpServiceImpl();
        wxMpService.setWxMpConfigStorage(configStorage);
        registerWxMpSubService(wxMpService);
        return wxMpService;
    }
    // Registers each sub-service of the main service directly into the bean
    // factory under a fixed name.
    // NOTE(review): @ConditionalOnBean has no effect on a method that is not a
    // @Bean factory; this method is invoked directly from wxMpService() above —
    // confirm whether the annotation is leftover.
    @ConditionalOnBean(WxMpService.class)
    public Object registerWxMpSubService(WxMpService wxMpService) {
        ConfigurableListableBeanFactory factory = (ConfigurableListableBeanFactory) ctx.getAutowireCapableBeanFactory();
        factory.registerSingleton("wxMpKefuService", wxMpService.getKefuService());
        factory.registerSingleton("wxMpMaterialService", wxMpService.getMaterialService());
        factory.registerSingleton("wxMpMenuService", wxMpService.getMenuService());
        factory.registerSingleton("wxMpUserService", wxMpService.getUserService());
        factory.registerSingleton("wxMpUserTagService", wxMpService.getUserTagService());
        factory.registerSingleton("wxMpQrcodeService", wxMpService.getQrcodeService());
        factory.registerSingleton("wxMpCardService", wxMpService.getCardService());
        factory.registerSingleton("wxMpDataCubeService", wxMpService.getDataCubeService());
        factory.registerSingleton("wxMpUserBlacklistService", wxMpService.getBlackListService());
        factory.registerSingleton("wxMpStoreService", wxMpService.getStoreService());
        factory.registerSingleton("wxMpTemplateMsgService", wxMpService.getTemplateMsgService());
        factory.registerSingleton("wxMpSubscribeMsgService", wxMpService.getSubscribeMsgService());
        factory.registerSingleton("wxMpDeviceService", wxMpService.getDeviceService());
        factory.registerSingleton("wxMpShakeService", wxMpService.getShakeService());
        factory.registerSingleton("wxMpMemberCardService", wxMpService.getMemberCardService());
        factory.registerSingleton("wxMpMassMessageService", wxMpService.getMassMessageService());
        return Boolean.TRUE;
    }
}
|
<gh_stars>1-10
'''
The functions used by the build_download_data command to extract car
park data and store it in CSV files.
'''
import json
import logging
from .util import epoch_to_text
logger = logging.getLogger(__name__)
# Data extractors receive a list of file names and a CSV writer object.
# They are expected to write appropriate headers to the CSV writer and
# then extract relevant fields from each file, format them as necessary
# and write the result CSV writer.
def cam_park_rss_extractor(files, writer):
    """Extract car-park occupancy rows from line-delimited JSON files.

    Writes a header row, then one CSV row per parseable line in each input
    file. ``ts_text`` is derived from the ``ts`` epoch via ``epoch_to_text``.

    Args:
        files: iterable of file paths containing one JSON object per line.
        writer: a csv.writer-like object receiving the rows.
    """
    logger.debug('In cam_park_rss_extractor')
    fields = ('parking_id', 'ts', 'ts_text', 'spaces_capacity', 'spaces_occupied', 'spaces_free')
    writer.writerow(fields)
    for file in files:
        try:
            logger.debug('Processing %s', file)
            with open(file) as reader:
                for line in reader:
                    try:
                        data = json.loads(line)
                    except json.decoder.JSONDecodeError as e:
                        # Previously a single malformed line aborted the rest
                        # of the file; now it is logged and skipped.
                        logger.error('Error decoding %s: %s', file, e)
                        continue
                    data['ts_text'] = epoch_to_text(data['ts'])
                    writer.writerow([data.get(f) for f in fields])
        except OSError as e:
            logger.error('Error opening %s: %s', file, e)
# Metadata extractors for each storage type. They receive a single filename
# in 'files' and a CSV writer object.
def cam_park_rss_metadata_extractor(files, writer):
    """Write static car-park metadata from a single JSON file as CSV rows.

    Args:
        files: list containing exactly one metadata file path.
        writer: a csv.writer-like object receiving the rows.
    """
    logger.debug('In cam_park_rss_metadata_extractor')
    fields = ('parking_id', 'parking_name', 'parking_type', 'latitude', 'longitude')
    writer.writerow(fields)
    assert len(files) == 1, 'Expecting exactly one file'
    source = files[0]
    try:
        logger.debug('Processing %s', source)
        with open(source) as reader:
            metadata = json.load(reader)
        # One CSV row per car park; missing keys become empty cells.
        for carpark in metadata['parking_list']:
            writer.writerow([carpark.get(field) for field in fields])
    except OSError as e:
        logger.error('Error opening %s: %s', source, e)
    except json.decoder.JSONDecodeError as e:
        logger.error('Error decoding %s: %s', source, e)
|
// Re-export the ES5 client build of fetch-mock as this module's default export.
export {default} from 'fetch-mock/es5/client';
|
# https://github.com/mattjj/my-oh-my-zsh/blob/master/history.zsh
#
# Sets history options.
#
# Authors:
# Robby Russell <robby@planetargon.com>
# Sorin Ionescu <sorin.ionescu@gmail.com>
#
# History file configuration
[ -z "$HISTFILE" ] && HISTFILE="$HOME/.zsh_history"
# In-memory and on-disk limits are kept equal so nothing is trimmed on save.
HISTSIZE=10000000
SAVEHIST=10000000
setopt BANG_HIST # Treat the '!' character specially during expansion.
setopt EXTENDED_HISTORY # Write the history file in the ":start:elapsed;command" format.
setopt INC_APPEND_HISTORY # Write to the history file immediately, not when the shell exits.
# NOTE(review): SHARE_HISTORY already implies incremental writing; having both
# INC_APPEND_HISTORY and SHARE_HISTORY set is typically redundant — confirm.
setopt SHARE_HISTORY # Share history between all sessions.
setopt HIST_EXPIRE_DUPS_FIRST # Expire duplicate entries first when trimming history.
setopt HIST_IGNORE_DUPS # Don't record an entry that was just recorded again.
setopt HIST_IGNORE_ALL_DUPS # Delete old recorded entry if new entry is a duplicate.
setopt HIST_FIND_NO_DUPS # Do not display a line previously found.
setopt HIST_IGNORE_SPACE # Don't record an entry starting with a space.
setopt HIST_SAVE_NO_DUPS # Don't write duplicate entries in the history file.
setopt HIST_REDUCE_BLANKS # Remove superfluous blanks before recording entry.
setopt HIST_VERIFY # Don't execute immediately upon history expansion.
setopt HIST_BEEP # Beep when accessing nonexistent history.
# advanced history ignore configuration
# credit: https://stackoverflow.com/a/6936301/9979122
#
# any command matches one of the following pattern will not be added to the
# history file
#
# - starts with "rm"
# - has "--password" in it
# - ends with "fl"
#
# NOTE(review): the `.*fl` alternative actually matches "fl" ANYWHERE in the
# command (the =~ match is unanchored), not only at the end; also $1 carries a
# trailing newline in this hook, so a naive anchored `fl$` would not match —
# confirm the intended semantics before tightening the pattern.
function zshaddhistory() {
    emulate -L zsh
    # Returning non-zero tells zsh to drop this line from history.
    if [[ "$1" =~ "(^rm|--password|.*fl)" ]]; then
        return 1
    fi
}
|
"""
Code illustration: 4.04
@ Tkinter GUI Application Development Blueprints
"""
class ChessError(Exception):
    """Base exception type for chess-related errors."""
|
# platform = Mozilla Firefox
{{{ bash_firefox_cfg_setting("stig.cfg", "extensions.update.enabled", "false") }}}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.