text stringlengths 1 1.05M |
|---|
import { Styles } from '@0xproject/react-shared';
import * as _ from 'lodash';
import * as React from 'react';
// Static visual style of the badge pill. The per-instance backgroundColor
// and the hover opacity are merged on top of this in Badge.render().
const styles: Styles = {
    badge: {
        width: 50,
        fontSize: 11,
        height: 10,
        borderRadius: 5,
        lineHeight: 0.9,
        fontFamily: 'Roboto Mono',
        marginLeft: 3,
        marginRight: 3,
    },
};
export interface BadgeProps {
    // Text rendered inside the badge.
    title: string;
    // CSS color value applied as the badge background.
    backgroundColor: string;
}
export interface BadgeState {
    // True while the pointer is over the badge; drives the dimmed opacity.
    isHovering: boolean;
}
/**
 * Small colored pill that displays a title and dims slightly on hover.
 */
export class Badge extends React.Component<BadgeProps, BadgeState> {
    // Handlers are bound once here instead of calling `.bind(this, …)` inside
    // render(), which allocated two new functions on every render and forced
    // the <div>'s props to change identity each time.
    private readonly _onMouseOver: () => void;
    private readonly _onMouseOut: () => void;
    constructor(props: BadgeProps) {
        super(props);
        this.state = {
            isHovering: false,
        };
        this._onMouseOver = this._setHoverState.bind(this, true);
        this._onMouseOut = this._setHoverState.bind(this, false);
    }
    public render() {
        const badgeStyle = {
            ...styles.badge,
            backgroundColor: this.props.backgroundColor,
            opacity: this.state.isHovering ? 0.7 : 1,
        };
        return (
            <div
                className="p1 center"
                style={badgeStyle}
                onMouseOver={this._onMouseOver}
                onMouseOut={this._onMouseOut}
            >
                {this.props.title}
            </div>
        );
    }
    private _setHoverState(isHovering: boolean) {
        this.setState({
            isHovering,
        });
    }
}
|
/*
---
description: Markdown language
license: MIT-style
authors:
- <NAME>
- <NAME>
requires:
- Core/1.4.5
provides: [EnlighterJS.Language.markdown]
...
*/
// Minimal Markdown token rules for the EnlighterJS syntax highlighter.
// Each entry maps a regex to a generic theme alias (st*/kw*) used for styling.
EJS.Language.markdown = new Class ({
    Extends: EJS.Language.generic,
    setupLanguage: function(code){
        this.patterns = {
            // Setext headers: a text line underlined with ='s (h1) or -'s (h2).
            'header1': { pattern: /^(.+)\n=+\n/gim, alias: 'st1' },
            'header2': { pattern: /^(.+)\n-+\n/gim, alias: 'st2' },
            // ATX headers: 1-6 leading '#' characters.
            'header3': { pattern: /[#]{1,6}.*/gim, alias: 'st0' },
            // Unordered ('* item') and ordered ('1. item') list markers.
            'ul': { pattern: /^\*\s*.*/gim, alias: 'kw1' },
            'ol': { pattern: /^\d+\..*/gim, alias: 'kw1' },
            // NOTE(review): the italics rule can also match the opening of
            // '**bold**'; whether bold wins depends on how the generic engine
            // resolves overlapping matches — verify against EnlighterJS core.
            'italics': { pattern: /\*.*?\*/g, alias: 'kw3' },
            'bold': { pattern: /\*\*.*?\*\*/g, alias: 'kw3' },
            // Inline links: [label](target).
            'url': { pattern: /\[[^\]]*\]\([^\)]*\)/g, alias: 'kw4' }
        };
    }
});
|
#!/usr/bin/env bash
# Post-install user setup: creates the configured user, sets its password,
# and copies the installer tree into the new home directory.
# Expects USERNAME and PASSWORD to be defined in setup.conf.
source "${HOME}/arch-linux/configs/setup.conf"
# BUG FIX: original read `echo -ne"` (no space), fusing the banner into the
# option word so the banner was never printed as intended.
echo -ne "
-------------------------------------------------------------------------
Adding User
-------------------------------------------------------------------------
"
if [ "$(whoami)" = "root" ]; then
    useradd -m -G wheel -s /bin/bash "$USERNAME"
    echo "$USERNAME created, home directory created, added to wheel group, default shell set to /bin/bash"
    # use chpasswd to enter $USERNAME:$password
    echo "$USERNAME:$PASSWORD" | chpasswd
    echo "$USERNAME password set"
    cp -R "$HOME/arch-linux" "/home/$USERNAME/"
    chown -R "$USERNAME": "/home/$USERNAME/arch-linux"
    echo "arch-linux copied to home directory"
else
    echo "You are already a user proceed with installs"
fi
echo -ne "
-------------------------------------------------------------------------
CONFIGURING UFW FIREWALL
-------------------------------------------------------------------------
"
# Start from a clean, deny-by-default policy.
ufw disable
ufw default deny incoming
ufw default allow outgoing
# Open DNS-over-TLS (853) and HTTPS (443) for both TCP and UDP.
for port in 853 443; do
    ufw allow "${port}/tcp"
    ufw allow "${port}/udp"
done
ufw enable
echo -ne "
-------------------------------------------------------------------------
Enabling Essential Services
-------------------------------------------------------------------------
"
# The original repeated the echo+enable pair fifteen times; drive it from a
# single list of unit names instead. `systemctl enable foo.service` is the
# explicit form of `systemctl enable foo`, so each unit enables as before.
units=(
    acpid.service
    apparmor.service
    auditd.service
    bluetooth.service
    cpupower.service
    fail2ban.service
    fstrim.timer
    NetworkManager.service
    reflector.timer
    systemd-remount-fs.service
    systemd-resolved.service
    systemd-timesyncd.service
    thermald.service
    tlp.service
    ufw.service
)
for unit in "${units[@]}"; do
    echo -ne "
enabling ${unit}
"
    systemctl enable "$unit"
done
# Apply the configured timezone and turn on NTP synchronization.
timedatectl --no-ask-password set-timezone "${TIME_ZONE}"
timedatectl --no-ask-password set-ntp 1
echo -ne "
-------------------------------------------------------------------------
Creating Snapper Config
-------------------------------------------------------------------------
"
# Install the prepared snapper root config and its service defaults.
# Paths are now quoted to survive word splitting / globbing.
mkdir -p /etc/snapper/configs/
cp -rfv "$HOME/arch-linux/configs/root" /etc/snapper/configs/
mkdir -p /etc/conf.d/
cp -rfv "$HOME/arch-linux/configs/snapper" /etc/conf.d/
echo -ne "
-------------------------------------------------------------------------
Configuring Sudo Rights
-------------------------------------------------------------------------
"
# Uncomment the wheel-group rule so members of wheel can use sudo.
sed -i 's/^# %wheel ALL=(ALL) ALL/%wheel ALL=(ALL) ALL/' /etc/sudoers
echo -ne "
-------------------------------------------------------------------------
Removing Arch Linux Setup Files
-------------------------------------------------------------------------
"
# Remove root's copy of the installer; the user copy made earlier remains.
rm -r /root/arch-linux
|
#!/bin/bash
# Single-quoted command string (Codefresh step) that polls the Kubernetes
# service until its load-balancer ingress IP is assigned, then exports it.
# BUG FIX: original began `$external_ip=""`, which is not an assignment in
# shell (after expansion it tries to run the command `=""`); the `$` prefix
# is dropped. Test expansions are also quoted against word splitting.
'external_ip=""; \
while [ -z "$external_ip" ]; \
do echo "Waiting for end point..."; \
external_ip=$(kubectl get svc staging-voting-app-vote --template="{{range .status.loadBalancer.ingress}}{{.ip}}{{end}}"); \
[ -z "$external_ip" ] && sleep 10; done; echo "End point ready-" && echo "$external_ip"; cf_export endpoint=$external_ip'
|
package DAO;
import models.SightingEndangeredSpecies;

import org.sql2o.Connection;
import org.sql2o.Sql2o;
import org.sql2o.Sql2oException;

import java.util.Collections;
import java.util.List;
/**
 * Sql2o-backed DAO for endangered-species sightings.
 */
public class Sql2oSightingEndangeredSpeciesDAO implements SightingEndangeredSpeciesDAO {

    private final Sql2o sql2o;

    public Sql2oSightingEndangeredSpeciesDAO(Sql2o sql2o) {
        this.sql2o = sql2o;
    }

    /**
     * Fetches all sightings whose type column is 'Endangered'.
     *
     * @return the matching sightings; an empty list on database error
     *         (the original returned null, forcing null checks on callers
     *         and risking NullPointerExceptions)
     */
    @Override
    public List<SightingEndangeredSpecies> getAllEndangered() {
        String sql = "SELECT * FROM sightings WHERE type = 'Endangered'";
        try (Connection con = sql2o.open()) {
            return con.createQuery(sql)
                    .executeAndFetch(SightingEndangeredSpecies.class);
        } catch (Sql2oException ex) {
            System.out.println(ex);
            return Collections.emptyList();
        }
    }

    /**
     * Inserts a sighting row bound from the given bean and writes the
     * generated key back into it via setId.
     */
    @Override
    public void addEndangeredSpecies(SightingEndangeredSpecies sightingEndangeredSpecies) {
        String sql = "INSERT INTO sightings(speciesName, speciesAge, speciesHealth, rangerId, type, locationId) values (:speciesName, :speciesAge, :speciesHealth, :rangerId, :type, :locationId)";
        try (Connection conn = sql2o.open()) {
            // createQuery(sql, true) requests the generated key.
            int id = (int) conn.createQuery(sql, true)
                    .bind(sightingEndangeredSpecies)
                    .executeUpdate()
                    .getKey();
            sightingEndangeredSpecies.setId(id);
        } catch (Sql2oException ex) {
            System.out.println(ex);
        }
    }
}
|
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.curriculum.ui;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.form.flexible.FormItem;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.FlexiTableElement;
import org.olat.core.gui.components.form.flexible.elements.FormLink;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormEvent;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableDataModelFactory;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SelectionEvent;
import org.olat.core.gui.components.form.flexible.impl.elements.table.StaticFlexiCellRenderer;
import org.olat.core.gui.components.form.flexible.impl.elements.table.StickyActionColumnModel;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.gui.control.generic.closablewrapper.CloseableCalloutWindowController;
import org.olat.core.gui.control.generic.closablewrapper.CloseableModalController;
import org.olat.core.gui.control.generic.dtabs.Activateable2;
import org.olat.core.gui.control.generic.modal.DialogBoxController;
import org.olat.core.gui.control.generic.modal.DialogBoxUIFactory;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.StateEntry;
import org.olat.core.util.StringHelper;
import org.olat.modules.curriculum.CurriculumElementType;
import org.olat.modules.curriculum.CurriculumElementTypeManagedFlag;
import org.olat.modules.curriculum.CurriculumElementTypeRef;
import org.olat.modules.curriculum.CurriculumService;
import org.olat.modules.curriculum.ui.CurriculumElementTypesTableModel.TypesCols;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
* Initial date: 11 mai 2018<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Administration screen listing curriculum element types in a flexi table,
 * with add / edit actions and a per-row tools callout offering copy and
 * delete (each guarded by the corresponding managed flag).
 */
public class CurriculumElementTypesEditController extends FormBasicController implements Activateable2 {

	private FormLink addRootTypeButton;
	private FlexiTableElement tableEl;
	private CurriculumElementTypesTableModel model;

	private ToolsController toolsCtrl;
	private CloseableModalController cmc;
	private DialogBoxController confirmDeleteDialog;
	private EditCurriculumElementTypeController rootElementTypeCtrl;
	private EditCurriculumElementTypeController editElementTypeCtrl;
	protected CloseableCalloutWindowController toolsCalloutCtrl;

	// Monotonic counter used to build unique component names for row tools links.
	private int counter = 1;

	@Autowired
	private CurriculumService curriculumService;

	public CurriculumElementTypesEditController(UserRequest ureq, WindowControl wControl) {
		super(ureq, wControl, "admin_types");
		initForm(ureq);
		loadModel();
	}

	@Override
	protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
		addRootTypeButton = uifactory.addFormLink("add.root.type", formLayout, Link.BUTTON);
		addRootTypeButton.setIconLeftCSS("o_icon o_icon-fw o_icon_add");

		FlexiTableColumnModel columnsModel = FlexiTableDataModelFactory.createFlexiTableColumnModel();
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(false, TypesCols.key));
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(TypesCols.identifier));
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(TypesCols.displayName));
		columnsModel.addFlexiColumnModel(new DefaultFlexiColumnModel(false, TypesCols.externalId));
		DefaultFlexiColumnModel editColumn = new DefaultFlexiColumnModel("table.header.edit", -1, "edit",
				new StaticFlexiCellRenderer("", "edit", "o_icon o_icon-lg o_icon_edit", translate("edit")));
		editColumn.setExportable(false);
		columnsModel.addFlexiColumnModel(editColumn);
		StickyActionColumnModel toolsColumn = new StickyActionColumnModel(TypesCols.tools);
		toolsColumn.setExportable(false);
		columnsModel.addFlexiColumnModel(toolsColumn);

		model = new CurriculumElementTypesTableModel(columnsModel);
		tableEl = uifactory.addTableElement(getWindowControl(), "types", model, 25, false, getTranslator(), formLayout);
		tableEl.setEmptyTableMessageKey("table.type.empty");
		tableEl.setAndLoadPersistedPreferences(ureq, "cur-el-types");
	}

	private void loadModel() {
		List<CurriculumElementType> types = curriculumService.getCurriculumElementTypes();
		List<CurriculumElementTypeRow> rows = types
				.stream().map(t -> forgeRow(t))
				.collect(Collectors.toList());
		model.setObjects(rows);
		tableEl.reset(false, true, true);
	}

	private CurriculumElementTypeRow forgeRow(CurriculumElementType type) {
		CurriculumElementTypeRow row = new CurriculumElementTypeRow(type);
		if(isToolsEnable(type)) {
			FormLink toolsLink = uifactory.addFormLink("tools_" + (++counter), "tools", "", null, null, Link.NONTRANSLATED);
			toolsLink.setIconLeftCSS("o_icon o_icon_actions o_icon-fws o_icon-lg");
			toolsLink.setUserObject(row);
			row.setToolsLink(toolsLink);
		}
		return row;
	}

	// Tools link is shown if at least one of copy / delete is not managed.
	private boolean isToolsEnable(CurriculumElementType type) {
		return !CurriculumElementTypeManagedFlag.isManaged(type.getManagedFlags(), CurriculumElementTypeManagedFlag.copy)
				|| !CurriculumElementTypeManagedFlag.isManaged(type.getManagedFlags(), CurriculumElementTypeManagedFlag.delete);
	}

	@Override
	public void activate(UserRequest ureq, List<ContextEntry> entries, StateEntry state) {
		if(entries == null || entries.isEmpty()) return;
	}

	@Override
	protected void event(UserRequest ureq, Controller source, Event event) {
		if(rootElementTypeCtrl == source || editElementTypeCtrl == source) {
			if(event == Event.DONE_EVENT) {
				loadModel();
			}
			cmc.deactivate();
			cleanUp();
		} else if(confirmDeleteDialog == source) {
			if (DialogBoxUIFactory.isOkEvent(event) || DialogBoxUIFactory.isYesEvent(event)) {
				CurriculumElementTypeRow row = (CurriculumElementTypeRow)confirmDeleteDialog.getUserObject();
				doDelete(row);
			}
			cleanUp();
		} else if(cmc == source) {
			cleanUp();
		}
		super.event(ureq, source, event);
	}

	private void cleanUp() {
		// BUG FIX: the original only disposed rootElementTypeCtrl, so
		// editElementTypeCtrl (set in doEditCurriculElementType and routed
		// through this same path) stayed registered as a listener and leaked.
		removeAsListenerAndDispose(rootElementTypeCtrl);
		removeAsListenerAndDispose(editElementTypeCtrl);
		removeAsListenerAndDispose(cmc);
		rootElementTypeCtrl = null;
		editElementTypeCtrl = null;
		cmc = null;
	}

	@Override
	protected void formInnerEvent(UserRequest ureq, FormItem source, FormEvent event) {
		if(addRootTypeButton == source) {
			doAddRootType(ureq);
		} else if (source instanceof FormLink) {
			FormLink link = (FormLink)source;
			String cmd = link.getCmd();
			if("tools".equals(cmd)) {
				CurriculumElementTypeRow row = (CurriculumElementTypeRow)link.getUserObject();
				doOpenTools(ureq, row, link);
			}
		} else if(tableEl == source) {
			if(event instanceof SelectionEvent) {
				SelectionEvent se = (SelectionEvent)event;
				String cmd = se.getCommand();
				if("edit".equals(cmd)) {
					CurriculumElementTypeRow row = model.getObject(se.getIndex());
					doEditCurriculElementType(ureq, row.getType());
				}
			}
		}
		super.formInnerEvent(ureq, source, event);
	}

	@Override
	protected void formOK(UserRequest ureq) {
		//
	}

	private void doOpenTools(UserRequest ureq, CurriculumElementTypeRow row, FormLink link) {
		removeAsListenerAndDispose(toolsCtrl);
		removeAsListenerAndDispose(toolsCalloutCtrl);

		// Reload the type; it may have been deleted by another session.
		CurriculumElementType type = curriculumService.getCurriculumElementType(row);
		if(type == null) {
			tableEl.reloadData();
			showWarning("warning.curriculum.element.type.deleted");
		} else {
			toolsCtrl = new ToolsController(ureq, getWindowControl(), row, type);
			listenTo(toolsCtrl);

			toolsCalloutCtrl = new CloseableCalloutWindowController(ureq, getWindowControl(),
					toolsCtrl.getInitialComponent(), link.getFormDispatchId(), "", true, "");
			listenTo(toolsCalloutCtrl);
			toolsCalloutCtrl.activate();
		}
	}

	private void doAddRootType(UserRequest ureq) {
		rootElementTypeCtrl = new EditCurriculumElementTypeController(ureq, getWindowControl(), null);
		listenTo(rootElementTypeCtrl);

		cmc = new CloseableModalController(getWindowControl(), "close", rootElementTypeCtrl.getInitialComponent(), true, translate("add.root.type"));
		listenTo(cmc);
		cmc.activate();
	}

	private void doEditCurriculElementType(UserRequest ureq, CurriculumElementTypeRef type) {
		CurriculumElementType reloadedType = curriculumService.getCurriculumElementType(type);
		editElementTypeCtrl = new EditCurriculumElementTypeController(ureq, getWindowControl(), reloadedType);
		listenTo(editElementTypeCtrl);

		cmc = new CloseableModalController(getWindowControl(), "close", editElementTypeCtrl.getInitialComponent(), true, translate("edit"));
		listenTo(cmc);
		cmc.activate();
	}

	private void doCopy(CurriculumElementTypeRow row) {
		curriculumService.cloneCurriculumElementType(row);
		loadModel();
		showInfo("info.copy.element.type.sucessfull", row.getDisplayName());
	}

	private void doConfirmDelete(UserRequest ureq, CurriculumElementTypeRow row) {
		String[] args = new String[] { StringHelper.escapeHtml(row.getDisplayName()) };
		String title = translate("confirmation.delete.type.title", args);
		String text = translate("confirmation.delete.type", args);
		confirmDeleteDialog = activateOkCancelDialog(ureq, title, text, confirmDeleteDialog);
		confirmDeleteDialog.setUserObject(row);
	}

	private void doDelete(CurriculumElementTypeRow row) {
		if(curriculumService.deleteCurriculumElementType(row)) {
			showInfo("confirm.delete.element.type.sucessfull", row.getDisplayName());
			loadModel();
			tableEl.reset(true, true, true);
		} else {
			showWarning("warning.delete.element.type", row.getDisplayName());
		}
	}

	/**
	 * Small callout menu with the copy / delete links for one table row.
	 */
	private class ToolsController extends BasicController {

		private final CurriculumElementTypeRow row;
		private final VelocityContainer mainVC;

		public ToolsController(UserRequest ureq, WindowControl wControl, CurriculumElementTypeRow row, CurriculumElementType type) {
			super(ureq, wControl);
			setTranslator(CurriculumElementTypesEditController.this.getTranslator());
			this.row = row;

			mainVC = createVelocityContainer("tools");

			List<String> links = new ArrayList<>();
			if(!CurriculumElementTypeManagedFlag.isManaged(type.getManagedFlags(), CurriculumElementTypeManagedFlag.copy)) {
				addLink("details.copy", "copy", "o_icon o_icon-fw o_icon_copy", links);
			}
			if(!CurriculumElementTypeManagedFlag.isManaged(type.getManagedFlags(), CurriculumElementTypeManagedFlag.delete)) {
				addLink("details.delete", "delete", "o_icon o_icon-fw o_icon_delete_item", links);
			}
			mainVC.contextPut("links", links);
			putInitialPanel(mainVC);
		}

		private void addLink(String name, String cmd, String iconCSS, List<String> links) {
			Link link = LinkFactory.createLink(name, cmd, getTranslator(), mainVC, this, Link.LINK);
			if(iconCSS != null) {
				link.setIconLeftCSS(iconCSS);
			}
			mainVC.put(name, link);
			links.add(name);
		}

		@Override
		protected void event(UserRequest ureq, Component source, Event event) {
			fireEvent(ureq, Event.DONE_EVENT);
			if(source instanceof Link) {
				Link link = (Link)source;
				String cmd = link.getCommand();
				if("copy".equals(cmd)) {
					close();
					doCopy(row);
				} else if("delete".equals(cmd)) {
					close();
					doConfirmDelete(ureq, row);
				}
			}
		}

		private void close() {
			toolsCalloutCtrl.deactivate();
			cleanUp();
		}
	}
}
|
package com.ibm.socialcrm.notesintegration.ui.actions;
/****************************************************************
* IBM OpenSource
*
* (C) Copyright IBM Corp. 2012
*
* Licensed under the Apache License v2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
***************************************************************/
// Empty action shell. NOTE(review): presumably registered/invoked by the
// surrounding Notes-integration UI framework — verify before removing.
public class CreateOpportunityAction {
	public CreateOpportunityAction() {
	}
}
|
#!/bin/bash
# Container/workspace launch wrapper: logs start/end markers around the work.
# First argument selects the command, defaulting to an interactive shell.
CMD=${1:-/bin/bash}
# NOTE(review): CMD is captured but never executed in this script —
# confirm whether "$CMD" was meant to run between the markers below.
echo "-- launch started --"
cd /workspace
# Build and run?
echo "-- launch ended --"
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.coprocessor;
import static org.apache.phoenix.hbase.index.IndexRegionObserver.VERIFIED_BYTES;
import static org.apache.phoenix.hbase.index.IndexRegionObserver.removeEmptyColumn;
import static org.apache.phoenix.hbase.index.write.AbstractParallelWriterIndexCommitter.INDEX_WRITER_KEEP_ALIVE_TIME_CONF_KEY;
import static org.apache.phoenix.mapreduce.index.IndexTool.AFTER_REBUILD_EXPIRED_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.AFTER_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.AFTER_REBUILD_MISSING_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.AFTER_REBUILD_VALID_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.BEFORE_REBUILD_EXPIRED_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.BEFORE_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.BEFORE_REBUILD_MISSING_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.BEFORE_REBUILD_VALID_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.REBUILT_INDEX_ROW_COUNT_BYTES;
import static org.apache.phoenix.mapreduce.index.IndexTool.RESULT_TABLE_COLUMN_FAMILY;
import static org.apache.phoenix.mapreduce.index.IndexTool.SCANNED_DATA_ROW_COUNT_BYTES;
import static org.apache.phoenix.query.QueryConstants.AGG_TIMESTAMP;
import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN;
import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN_FAMILY;
import static org.apache.phoenix.query.QueryConstants.UNGROUPED_AGG_ROW_KEY;
import static org.apache.phoenix.query.QueryServices.INDEX_REBUILD_PAGE_SIZE_IN_ROWS;
import static org.apache.phoenix.query.QueryServices.MUTATE_BATCH_SIZE_ATTRIB;
import static org.apache.phoenix.query.QueryServices.MUTATE_BATCH_SIZE_BYTES_ATTRIB;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.phoenix.cache.ServerCacheClient;
import org.apache.phoenix.compile.ScanRanges;
import org.apache.phoenix.filter.SkipScanFilter;
import org.apache.phoenix.hbase.index.ValueGetter;
import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
import org.apache.phoenix.hbase.index.parallel.EarlyExitFailure;
import org.apache.phoenix.hbase.index.parallel.Task;
import org.apache.phoenix.hbase.index.parallel.TaskBatch;
import org.apache.phoenix.hbase.index.parallel.TaskRunner;
import org.apache.phoenix.hbase.index.parallel.ThreadPoolBuilder;
import org.apache.phoenix.hbase.index.parallel.ThreadPoolManager;
import org.apache.phoenix.hbase.index.parallel.WaitForCompletionTaskRunner;
import org.apache.phoenix.hbase.index.util.GenericKeyValueBuilder;
import org.apache.phoenix.index.GlobalIndexChecker;
import org.apache.phoenix.index.IndexMaintainer;
import org.apache.phoenix.index.PhoenixIndexCodec;
import org.apache.phoenix.mapreduce.index.IndexTool;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PVarbinary;
import org.apache.phoenix.util.PhoenixKeyValueUtil;
import org.apache.phoenix.util.ServerUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
public class IndexRebuildRegionScanner extends BaseRegionScanner {
public static class VerificationResult {
public static class PhaseResult {
private long validIndexRowCount = 0;
private long expiredIndexRowCount = 0;
private long missingIndexRowCount = 0;
private long invalidIndexRowCount = 0;
public void add(PhaseResult phaseResult) {
validIndexRowCount += phaseResult.validIndexRowCount;
expiredIndexRowCount += phaseResult.expiredIndexRowCount;
missingIndexRowCount += phaseResult.missingIndexRowCount;
invalidIndexRowCount += phaseResult.invalidIndexRowCount;
}
public long getTotalCount() {
return validIndexRowCount + expiredIndexRowCount + missingIndexRowCount + invalidIndexRowCount;
}
@Override
public String toString() {
return "PhaseResult{" +
"validIndexRowCount=" + validIndexRowCount +
", expiredIndexRowCount=" + expiredIndexRowCount +
", missingIndexRowCount=" + missingIndexRowCount +
", invalidIndexRowCount=" + invalidIndexRowCount +
'}';
}
}
private long scannedDataRowCount = 0;
private long rebuiltIndexRowCount = 0;
private PhaseResult before = new PhaseResult();
private PhaseResult after = new PhaseResult();
@Override
public String toString() {
return "VerificationResult{" +
"scannedDataRowCount=" + scannedDataRowCount +
", rebuiltIndexRowCount=" + rebuiltIndexRowCount +
", before=" + before +
", after=" + after +
'}';
}
public long getScannedDataRowCount() {
return scannedDataRowCount;
}
public long getRebuiltIndexRowCount() {
return rebuiltIndexRowCount;
}
public long getBeforeRebuildValidIndexRowCount() {
return before.validIndexRowCount;
}
public long getBeforeRebuildExpiredIndexRowCount() {
return before.expiredIndexRowCount;
}
public long getBeforeRebuildInvalidIndexRowCount() {
return before.invalidIndexRowCount;
}
public long getBeforeRebuildMissingIndexRowCount() {
return before.missingIndexRowCount;
}
public long getAfterRebuildValidIndexRowCount() {
return after.validIndexRowCount;
}
public long getAfterRebuildExpiredIndexRowCount() {
return after.expiredIndexRowCount;
}
public long getAfterRebuildInvalidIndexRowCount() {
return after.invalidIndexRowCount;
}
public long getAfterRebuildMissingIndexRowCount() {
return after.missingIndexRowCount;
}
private void addScannedDataRowCount(long count) {
this.scannedDataRowCount += count;
}
private void addRebuiltIndexRowCount(long count) {
this.rebuiltIndexRowCount += count;
}
private void addBeforeRebuildValidIndexRowCount(long count) {
before.validIndexRowCount += count;
}
private void addBeforeRebuildExpiredIndexRowCount(long count) {
before.expiredIndexRowCount += count;
}
private void addBeforeRebuildMissingIndexRowCount(long count) {
before.missingIndexRowCount += count;
}
private void addBeforeRebuildInvalidIndexRowCount(long count) {
before.invalidIndexRowCount += count;
}
private void addAfterRebuildValidIndexRowCount(long count) {
after.validIndexRowCount += count;
}
private void addAfterRebuildExpiredIndexRowCount(long count) {
after.expiredIndexRowCount += count;
}
private void addAfterRebuildMissingIndexRowCount(long count) {
after.missingIndexRowCount += count;
}
private void addAfterRebuildInvalidIndexRowCount(long count) {
after.invalidIndexRowCount += count;
}
private static boolean isAfterRebuildInvalidIndexRowCount(Cell cell) {
if (Bytes.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
AFTER_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES, 0,
AFTER_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES.length) == 0) {
return true;
}
return false;
}
private long getValue(Cell cell) {
return Long.parseLong(Bytes.toString(cell.getValueArray(),
cell.getValueOffset(), cell.getValueLength()));
}
private void update(Cell cell) {
if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, SCANNED_DATA_ROW_COUNT_BYTES)) {
addScannedDataRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, REBUILT_INDEX_ROW_COUNT_BYTES)) {
addRebuiltIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_VALID_INDEX_ROW_COUNT_BYTES)) {
addBeforeRebuildValidIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_EXPIRED_INDEX_ROW_COUNT_BYTES)) {
addBeforeRebuildExpiredIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_MISSING_INDEX_ROW_COUNT_BYTES)) {
addBeforeRebuildMissingIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES)) {
addBeforeRebuildInvalidIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_VALID_INDEX_ROW_COUNT_BYTES)) {
addAfterRebuildValidIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_EXPIRED_INDEX_ROW_COUNT_BYTES)) {
addAfterRebuildExpiredIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_MISSING_INDEX_ROW_COUNT_BYTES)) {
addAfterRebuildMissingIndexRowCount(getValue(cell));
} else if (CellUtil.matchingColumn(cell, RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES)) {
addAfterRebuildInvalidIndexRowCount(getValue(cell));
}
}
public static byte[] calculateTheClosestNextRowKeyForPrefix(byte[] rowKeyPrefix) {
// Essentially we are treating it like an 'unsigned very very long' and doing +1 manually.
// Search for the place where the trailing 0xFFs start
int offset = rowKeyPrefix.length;
while (offset > 0) {
if (rowKeyPrefix[offset - 1] != (byte) 0xFF) {
break;
}
offset--;
}
if (offset == 0) {
// We got an 0xFFFF... (only FFs) stopRow value which is
// the last possible prefix before the end of the table.
// So set it to stop at the 'end of the table'
return HConstants.EMPTY_END_ROW;
}
// Copy the right length of the original
byte[] newStopRow = Arrays.copyOfRange(rowKeyPrefix, 0, offset);
// And increment the last one
newStopRow[newStopRow.length - 1]++;
return newStopRow;
}
public static VerificationResult getVerificationResult(Table hTable, long ts)
throws IOException {
VerificationResult verificationResult = new VerificationResult();
byte[] startRowKey = Bytes.toBytes(Long.toString(ts));
byte[] stopRowKey = calculateTheClosestNextRowKeyForPrefix(startRowKey);
Scan scan = new Scan();
scan.setStartRow(startRowKey);
scan.setStopRow(stopRowKey);
ResultScanner scanner = hTable.getScanner(scan);
for (Result result = scanner.next(); result != null; result = scanner.next()) {
for (Cell cell : result.rawCells()) {
verificationResult.update(cell);
}
}
return verificationResult;
}
public boolean isVerificationFailed(IndexTool.IndexVerifyType verifyType) {
if (verifyType == IndexTool.IndexVerifyType.BEFORE || verifyType == IndexTool.IndexVerifyType.NONE) {
return false;
}
if (verifyType == IndexTool.IndexVerifyType.ONLY) {
if (before.validIndexRowCount + before.expiredIndexRowCount != scannedDataRowCount) {
return true;
}
}
if (verifyType == IndexTool.IndexVerifyType.BOTH || verifyType == IndexTool.IndexVerifyType.AFTER) {
if (after.invalidIndexRowCount + after.missingIndexRowCount > 0) {
return true;
}
if (before.validIndexRowCount + before.expiredIndexRowCount +
after.expiredIndexRowCount + after.validIndexRowCount != scannedDataRowCount) {
return true;
}
}
return false;
}
// Folds the counters of another (per-region or per-page) result into this one.
public void add(VerificationResult other) {
    before.add(other.before);
    after.add(other.after);
    scannedDataRowCount += other.scannedDataRowCount;
    rebuiltIndexRowCount += other.rebuiltIndexRowCount;
}
}
private static final Logger LOGGER = LoggerFactory.getLogger(IndexRebuildRegionScanner.class);
public static final String NUM_CONCURRENT_INDEX_VERIFY_THREADS_CONF_KEY = "index.verify.threads.max";
private static final int DEFAULT_CONCURRENT_INDEX_VERIFY_THREADS = 17;
// Fix: this key was a copy-paste duplicate of
// NUM_CONCURRENT_INDEX_VERIFY_THREADS_CONF_KEY ("index.verify.threads.max"),
// so the thread count and the per-task row count could never be configured
// independently (setting either key changed both). Give it its own key.
public static final String INDEX_VERIFY_ROW_COUNTS_PER_TASK_CONF_KEY = "index.verify.row.count.per.task";
private static final int DEFAULT_INDEX_VERIFY_ROW_COUNTS_PER_TASK = 2048;
// Rows per call to next(); Long.MAX_VALUE means paging is disabled.
private long pageSizeInRows = Long.MAX_VALUE;
private int rowCountPerTask;
private boolean hasMore;
private final int maxBatchSize;
private UngroupedAggregateRegionObserver.MutationList mutations;
private final long maxBatchSizeBytes;
private final long blockingMemstoreSize;
private final byte[] clientVersionBytes;
private byte[] indexMetaData;
private boolean useProto = true;
private Scan scan;
private RegionScanner innerScanner;
private Region region;
private IndexMaintainer indexMaintainer;
// Non-null only when GlobalIndexChecker requested a single-row rebuild.
private byte[] indexRowKey = null;
// The tables, maps, pool and tasks below are created only when verification
// is enabled (verify == true); they stay null otherwise.
private Table indexHTable = null;
private Table outputHTable = null;
private Table resultHTable = null;
private IndexTool.IndexVerifyType verifyType = IndexTool.IndexVerifyType.NONE;
private boolean verify = false;
private Map<byte[], Put> indexKeyToDataPutMap;
private Map<byte[], Put> dataKeyToDataPutMap;
private TaskRunner pool;
private TaskBatch<Boolean> tasks;
private String exceptionMessage;
private UngroupedAggregateRegionObserver ungroupedAggregateRegionObserver;
private RegionCoprocessorEnvironment env;
private int indexTableTTL;
private VerificationResult verificationResult;
// Tracks which verification phase output-table rows belong to (BEFORE/AFTER).
private boolean isBeforeRebuilt = true;
// Wraps a data-table region scanner to rebuild (and optionally verify) global
// index rows. All configuration is read once here; per-page state is driven
// by next().
IndexRebuildRegionScanner (final RegionScanner innerScanner, final Region region, final Scan scan,
                           final RegionCoprocessorEnvironment env,
                           UngroupedAggregateRegionObserver ungroupedAggregateRegionObserver) throws IOException {
    super(innerScanner);
    final Configuration config = env.getConfiguration();
    // Paging is enabled only when the client explicitly set the paging attribute.
    if (scan.getAttribute(BaseScannerRegionObserver.INDEX_REBUILD_PAGING) != null) {
        pageSizeInRows = config.getLong(INDEX_REBUILD_PAGE_SIZE_IN_ROWS,
                QueryServicesOptions.DEFAULT_INDEX_REBUILD_PAGE_SIZE_IN_ROWS);
    }
    maxBatchSize = config.getInt(MUTATE_BATCH_SIZE_ATTRIB, QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE);
    mutations = new UngroupedAggregateRegionObserver.MutationList(maxBatchSize);
    maxBatchSizeBytes = config.getLong(MUTATE_BATCH_SIZE_BYTES_ATTRIB,
            QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE_BYTES);
    blockingMemstoreSize = UngroupedAggregateRegionObserver.getBlockingMemstoreSize(region, config);
    clientVersionBytes = scan.getAttribute(BaseScannerRegionObserver.CLIENT_VERSION);
    // Index metadata may arrive in proto form or in the legacy attribute;
    // remember which one so setMutationAttributes() replays it the same way.
    indexMetaData = scan.getAttribute(PhoenixIndexCodec.INDEX_PROTO_MD);
    if (indexMetaData == null) {
        useProto = false;
        indexMetaData = scan.getAttribute(PhoenixIndexCodec.INDEX_MD);
    }
    if (!scan.isRaw()) {
        // No need to deserialize index maintainers when the scan is raw. Raw scan is used by partial rebuilds
        List<IndexMaintainer> maintainers = IndexMaintainer.deserialize(indexMetaData, true);
        indexMaintainer = maintainers.get(0);
    }
    this.scan = scan;
    this.innerScanner = innerScanner;
    this.region = region;
    this.env = env;
    this.ungroupedAggregateRegionObserver = ungroupedAggregateRegionObserver;
    // Non-null only when GlobalIndexChecker asked for a single index row rebuild.
    indexRowKey = scan.getAttribute(BaseScannerRegionObserver.INDEX_ROW_KEY);
    byte[] valueBytes = scan.getAttribute(BaseScannerRegionObserver.INDEX_REBUILD_VERIFY_TYPE);
    if (valueBytes != null) {
        verificationResult = new VerificationResult();
        verifyType = IndexTool.IndexVerifyType.fromValue(valueBytes);
        if (verifyType != IndexTool.IndexVerifyType.NONE) {
            verify = true;
            // Create the following objects only for rebuilds by IndexTool
            indexHTable = ServerUtil.ConnectionFactory.getConnection(ServerUtil.ConnectionType.INDEX_WRITER_CONNECTION,
                    env).getTable(TableName.valueOf(indexMaintainer.getIndexTableName()));
            // NOTE(review): TTL is taken from the first column family only —
            // assumes all families share the same TTL; confirm for multi-CF indexes.
            indexTableTTL = indexHTable.getDescriptor().getColumnFamilies()[0].getTimeToLive();
            outputHTable = ServerUtil.ConnectionFactory.getConnection(ServerUtil.ConnectionType.INDEX_WRITER_CONNECTION,
                    env).getTable(TableName.valueOf(IndexTool.OUTPUT_TABLE_NAME_BYTES));
            resultHTable = ServerUtil.ConnectionFactory.getConnection(ServerUtil.ConnectionType.INDEX_WRITER_CONNECTION,
                    env).getTable(TableName.valueOf(IndexTool.RESULT_TABLE_NAME_BYTES));
            indexKeyToDataPutMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
            dataKeyToDataPutMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
            pool = new WaitForCompletionTaskRunner(ThreadPoolManager.getExecutor(
                    new ThreadPoolBuilder("IndexVerify",
                            env.getConfiguration()).setMaxThread(NUM_CONCURRENT_INDEX_VERIFY_THREADS_CONF_KEY,
                            DEFAULT_CONCURRENT_INDEX_VERIFY_THREADS).setCoreTimeout(
                            INDEX_WRITER_KEEP_ALIVE_TIME_CONF_KEY), env));
            rowCountPerTask = config.getInt(INDEX_VERIFY_ROW_COUNTS_PER_TASK_CONF_KEY,
                    DEFAULT_INDEX_VERIFY_ROW_COUNTS_PER_TASK);
        }
    }
}
// RegionScanner contract: report the region this scanner operates on.
@Override
public RegionInfo getRegionInfo() {
    return region.getRegionInfo();
}
@Override
public boolean isFilterDone() {
    // This scanner never signals early completion via a filter.
    return false;
}
// Persists this region's aggregated verification counters to the IndexTool
// result table. The row key is scanMaxTs + region name + scan start/stop rows,
// so all per-region results of one IndexTool run share a timestamp prefix
// (see getVerificationResult, which scans by that prefix).
private void logToIndexToolResultTable() throws IOException {
    long scanMaxTs = scan.getTimeRange().getMax();
    byte[] keyPrefix = Bytes.toBytes(Long.toString(scanMaxTs));
    byte[] regionName = Bytes.toBytes(region.getRegionInfo().getRegionNameAsString());
    // The row key for the result table is the max timestamp of the scan + the table region name + scan start row
    // + scan stop row
    byte[] rowKey = new byte[keyPrefix.length + regionName.length + scan.getStartRow().length +
            scan.getStopRow().length];
    Bytes.putBytes(rowKey, 0, keyPrefix, 0, keyPrefix.length);
    Bytes.putBytes(rowKey, keyPrefix.length, regionName, 0, regionName.length);
    Bytes.putBytes(rowKey, keyPrefix.length + regionName.length, scan.getStartRow(), 0,
            scan.getStartRow().length);
    Bytes.putBytes(rowKey, keyPrefix.length + regionName.length + scan.getStartRow().length,
            scan.getStopRow(), 0, scan.getStopRow().length);
    Put put = new Put(rowKey);
    // Counters are stored as decimal strings, all at the scan max timestamp.
    put.addColumn(RESULT_TABLE_COLUMN_FAMILY, SCANNED_DATA_ROW_COUNT_BYTES,
            scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.scannedDataRowCount)));
    put.addColumn(RESULT_TABLE_COLUMN_FAMILY, REBUILT_INDEX_ROW_COUNT_BYTES,
            scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.rebuiltIndexRowCount)));
    // "Before" columns are written for the modes that verify before rebuilding.
    if (verifyType == IndexTool.IndexVerifyType.BEFORE || verifyType == IndexTool.IndexVerifyType.BOTH ||
            verifyType == IndexTool.IndexVerifyType.ONLY) {
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_VALID_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.before.validIndexRowCount)));
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_EXPIRED_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.before.expiredIndexRowCount)));
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_MISSING_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.before.missingIndexRowCount)));
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, BEFORE_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.before.invalidIndexRowCount)));
    }
    // "After" columns are written for the modes that verify after rebuilding.
    if (verifyType == IndexTool.IndexVerifyType.AFTER || verifyType == IndexTool.IndexVerifyType.BOTH) {
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_VALID_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.after.validIndexRowCount)));
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_EXPIRED_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.after.expiredIndexRowCount)));
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_MISSING_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.after.missingIndexRowCount)));
        put.addColumn(RESULT_TABLE_COLUMN_FAMILY, AFTER_REBUILD_INVALID_INDEX_ROW_COUNT_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(verificationResult.after.invalidIndexRowCount)));
    }
    resultHTable.put(put);
}
// Closes the wrapped scanner and, when verification was enabled, writes the
// final result row and releases the verification resources.
@Override
public void close() throws IOException {
    innerScanner.close();
    if (verify) {
        try {
            logToIndexToolResultTable();
        } finally {
            this.pool.stop("IndexRebuildRegionScanner is closing");
            // Fix: close each table independently so an exception from one
            // close does not leak the remaining tables (the original stopped
            // at the first failure). The first exception still propagates.
            try {
                indexHTable.close();
            } finally {
                try {
                    outputHTable.close();
                } finally {
                    resultHTable.close();
                }
            }
        }
    }
}
// Tags a mutation so the index codec treats it as an index-rebuild replay
// rather than a regular client write.
private void setMutationAttributes(Mutation m, byte[] uuidValue) {
    // Replay the metadata under the same attribute form it arrived in.
    byte[] indexMdAttribute = useProto ? PhoenixIndexCodec.INDEX_PROTO_MD : PhoenixIndexCodec.INDEX_MD;
    m.setAttribute(indexMdAttribute, indexMetaData);
    m.setAttribute(PhoenixIndexCodec.INDEX_UUID, uuidValue);
    m.setAttribute(BaseScannerRegionObserver.REPLAY_WRITES,
            BaseScannerRegionObserver.REPLAY_INDEX_REBUILD_WRITES);
    m.setAttribute(BaseScannerRegionObserver.CLIENT_VERSION, clientVersionBytes);
    // Since we're replaying existing mutations, it makes no sense to write them to the wal
    m.setDurability(Durability.SKIP_WAL);
}
// Builds a Delete covering every indexed column the given row does NOT set,
// stamped with the row's max cell timestamp. Returns null when the row
// already supplies all columns (the +1 accounts for the empty column).
private Delete generateDeleteMarkers(Put put) {
    Set<ColumnReference> allColumns = indexMaintainer.getAllColumns();
    int cellCount = put.size();
    if (cellCount == allColumns.size() + 1) {
        // We have all the columns for the index table. So, no delete marker is needed
        return null;
    }
    Set<ColumnReference> presentColumns = Sets.newLinkedHashSetWithExpectedSize(cellCount);
    long maxTs = 0;
    for (List<Cell> cells : put.getFamilyCellMap().values()) {
        if (cells == null) {
            break;
        }
        for (Cell cell : cells) {
            presentColumns.add(new ColumnReference(CellUtil.cloneFamily(cell),
                    CellUtil.cloneQualifier(cell)));
            maxTs = Math.max(maxTs, cell.getTimestamp());
        }
    }
    Delete delete = null;
    for (ColumnReference column : allColumns) {
        if (presentColumns.contains(column)) {
            continue;
        }
        if (delete == null) {
            delete = new Delete(put.getRow());
        }
        delete.addColumns(column.getFamily(), column.getQualifier(), maxTs);
    }
    return delete;
}
// Flushes the batch once it reaches the configured row/byte thresholds and
// returns a fresh server-cache id for the next batch; otherwise returns the
// current id unchanged.
private byte[] commitIfReady(byte[] uuidValue, UngroupedAggregateRegionObserver.MutationList mutationList) throws IOException {
    if (!ServerUtil.readyToCommit(mutationList.size(), mutationList.byteSize(), maxBatchSize, maxBatchSizeBytes)) {
        return uuidValue;
    }
    ungroupedAggregateRegionObserver.checkForRegionClosingOrSplitting();
    ungroupedAggregateRegionObserver.commitBatchWithRetries(region, mutationList, blockingMemstoreSize);
    mutationList.clear();
    return ServerCacheClient.generateId();
}
// Minimal ValueGetter backed by a single data-row Put; used to rebuild index
// row keys and mutations from a scanned data row without re-reading HBase.
private class SimpleValueGetter implements ValueGetter {
    // Reused pointer to avoid allocating a new wrapper per lookup.
    final ImmutableBytesWritable valuePtr = new ImmutableBytesWritable();
    final Put put;
    SimpleValueGetter (final Put put) {
        this.put = put;
    }
    // NOTE: the ts argument is ignored here; the first cell returned by
    // Put.get() for the column is always used.
    @Override
    public ImmutableBytesWritable getLatestValue(ColumnReference ref, long ts) throws IOException {
        List<Cell> cellList = put.get(ref.getFamily(), ref.getQualifier());
        if (cellList == null || cellList.isEmpty()) {
            return null;
        }
        Cell cell = cellList.get(0);
        valuePtr.set(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
        return valuePtr;
    }
    @Override
    public byte[] getRowKey() {
        return put.getRow();
    }
}
// Derives the index row key that the given data row should map to.
private byte[] getIndexRowKey(final Put dataRow) throws IOException {
    return indexMaintainer.buildRowKey(new SimpleValueGetter(dataRow),
            new ImmutableBytesWritable(dataRow.getRow()), null, null,
            HConstants.LATEST_TIMESTAMP);
}
// Returns true when the data row rebuilds to exactly the requested index row key.
private boolean checkIndexRow(final byte[] indexRowKey, final Put put) throws IOException {
    byte[] builtIndexRowKey = getIndexRowKey(put);
    return Bytes.compareTo(builtIndexRowKey, 0, builtIndexRowKey.length,
            indexRowKey, 0, indexRowKey.length) == 0;
}
// Convenience overload for failures that carry no expected/actual value payload.
private void logToIndexToolOutputTable(byte[] dataRowKey, byte[] indexRowKey, long dataRowTs, long indexRowTs,
                                       String errorMsg) throws IOException {
    logToIndexToolOutputTable(dataRowKey, indexRowKey, dataRowTs, indexRowTs,
            errorMsg, null, null);
}
// Logs one verification failure to the IndexTool output table. Row key layout:
// scan-max-timestamp + data row key. When expectedValue is supplied (actualValue
// must then be supplied too), both are appended to the message as
// " E:<expected> A:<actual>".
private void logToIndexToolOutputTable(byte[] dataRowKey, byte[] indexRowKey, long dataRowTs, long indexRowTs,
                                       String errorMsg, byte[] expectedValue, byte[] actualValue) throws IOException {
    final byte[] E_VALUE_PREFIX_BYTES = Bytes.toBytes(" E:");
    final byte[] A_VALUE_PREFIX_BYTES = Bytes.toBytes(" A:");
    final byte[] PHASE_BEFORE_VALUE = Bytes.toBytes("BEFORE");
    final byte[] PHASE_AFTER_VALUE = Bytes.toBytes("AFTER");
    long scanMaxTs = scan.getTimeRange().getMax();
    byte[] keyPrefix = Bytes.toBytes(Long.toString(scanMaxTs));
    byte[] rowKey;
    // The row key for the output table is the max timestamp of the scan + data row key
    if (dataRowKey != null) {
        rowKey = new byte[keyPrefix.length + dataRowKey.length];
        Bytes.putBytes(rowKey, 0, keyPrefix, 0, keyPrefix.length);
        Bytes.putBytes(rowKey, keyPrefix.length, dataRowKey, 0, dataRowKey.length);
    } else {
        rowKey = new byte[keyPrefix.length];
        Bytes.putBytes(rowKey, 0, keyPrefix, 0, keyPrefix.length);
    }
    Put put = new Put(rowKey);
    put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.DATA_TABLE_NAME_BYTES,
            scanMaxTs, region.getRegionInfo().getTable().getName());
    put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.INDEX_TABLE_NAME_BYTES,
            scanMaxTs, indexMaintainer.getIndexTableName());
    if (dataRowKey != null) {
        put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.DATA_TABLE_TS_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(dataRowTs)));
    }
    if (indexRowKey != null) {
        put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.INDEX_TABLE_ROW_KEY_BYTES,
                scanMaxTs, indexRowKey);
        put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.INDEX_TABLE_TS_BYTES,
                scanMaxTs, Bytes.toBytes(Long.toString(indexRowTs)));
    }
    // Fix: the original sized and filled the buffer with errorMsg.length()
    // (a character count) while copying Bytes.toBytes(errorMsg) (an encoded
    // byte array). For any message containing multi-byte characters the two
    // differ, truncating the message or corrupting the layout. Use the
    // encoded byte lengths throughout instead of character counts.
    byte[] errorMsgBytes = Bytes.toBytes(errorMsg);
    byte[] errorMessageBytes;
    if (expectedValue != null) {
        errorMessageBytes = new byte[errorMsgBytes.length + expectedValue.length + actualValue.length +
                E_VALUE_PREFIX_BYTES.length + A_VALUE_PREFIX_BYTES.length];
        int length = 0;
        Bytes.putBytes(errorMessageBytes, length, errorMsgBytes, 0, errorMsgBytes.length);
        length += errorMsgBytes.length;
        Bytes.putBytes(errorMessageBytes, length, E_VALUE_PREFIX_BYTES, 0, E_VALUE_PREFIX_BYTES.length);
        length += E_VALUE_PREFIX_BYTES.length;
        Bytes.putBytes(errorMessageBytes, length, expectedValue, 0, expectedValue.length);
        length += expectedValue.length;
        Bytes.putBytes(errorMessageBytes, length, A_VALUE_PREFIX_BYTES, 0, A_VALUE_PREFIX_BYTES.length);
        length += A_VALUE_PREFIX_BYTES.length;
        Bytes.putBytes(errorMessageBytes, length, actualValue, 0, actualValue.length);
    } else {
        errorMessageBytes = errorMsgBytes;
    }
    put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.ERROR_MESSAGE_BYTES, scanMaxTs, errorMessageBytes);
    // Record which verification phase (before or after rebuild) this row is for.
    if (isBeforeRebuilt) {
        put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.VERIFICATION_PHASE_BYTES, scanMaxTs, PHASE_BEFORE_VALUE);
    } else {
        put.addColumn(IndexTool.OUTPUT_TABLE_COLUMN_FAMILY, IndexTool.VERIFICATION_PHASE_BYTES, scanMaxTs, PHASE_AFTER_VALUE);
    }
    outputHTable.put(put);
}
// Largest cell timestamp in the result; 0 for an empty row.
private long getMaxTimestamp(Result result) {
    long maxTs = 0;
    for (Cell cell : result.rawCells()) {
        maxTs = Math.max(maxTs, cell.getTimestamp());
    }
    return maxTs;
}
// Largest cell timestamp across all column families of the put; 0 if empty.
private long getMaxTimestamp(Put put) {
    long maxTs = 0;
    for (List<Cell> cells : put.getFamilyCellMap().values()) {
        if (cells == null) {
            // Preserves original behavior: stop scanning at a null cell list.
            break;
        }
        for (Cell cell : cells) {
            maxTs = Math.max(maxTs, cell.getTimestamp());
        }
    }
    return maxTs;
}
// Rebuilds the expected index mutation from the data row and compares it
// cell-by-cell (value and timestamp) against the actual index row. Any
// mismatch is logged to the IndexTool output table and false is returned.
private boolean verifySingleIndexRow(Result indexRow, final Put dataRow) throws IOException {
    ValueGetter valueGetter = new SimpleValueGetter(dataRow);
    long ts = getMaxTimestamp(dataRow);
    Put indexPut = indexMaintainer.buildUpdateMutation(GenericKeyValueBuilder.INSTANCE,
            valueGetter, new ImmutableBytesWritable(dataRow.getRow()), ts, null, null);
    if (indexPut == null) {
        // This means the data row does not have any covered column values
        indexPut = new Put(indexRow.getRow());
    }
    else {
        // Remove the empty column prepared by Index codec as we need to change its value
        removeEmptyColumn(indexPut, indexMaintainer.getEmptyKeyValueFamily().copyBytesIfNecessary(),
                indexMaintainer.getEmptyKeyValueQualifier());
    }
    // Add the empty column
    indexPut.addColumn(indexMaintainer.getEmptyKeyValueFamily().copyBytesIfNecessary(),
            indexMaintainer.getEmptyKeyValueQualifier(), ts, VERIFIED_BYTES);
    int cellCount = 0;
    long currentTime = EnvironmentEdgeManager.currentTime();
    for (List<Cell> cells : indexPut.getFamilyCellMap().values()) {
        if (cells == null) {
            break;
        }
        for (Cell expectedCell : cells) {
            byte[] family = CellUtil.cloneFamily(expectedCell);
            byte[] qualifier = CellUtil.cloneQualifier(expectedCell);
            Cell actualCell = indexRow.getColumnLatestCell(family, qualifier);
            if (actualCell == null) {
                // Check if cell expired as per the current server's time and data table ttl
                // Index table should have the same ttl as the data table, hence we might not
                // get a value back from index if it has already expired between our rebuild and
                // verify
                // TODO: have a metric to update for these cases
                if (isTimestampBeforeTTL(currentTime, expectedCell.getTimestamp())) {
                    continue;
                }
                String errorMsg = " Missing cell " + Bytes.toString(family) + ":" +
                        Bytes.toString(qualifier);
                logToIndexToolOutputTable(dataRow.getRow(), indexRow.getRow(), ts, getMaxTimestamp(indexRow), errorMsg);
                return false;
            }
            if (actualCell.getTimestamp() < ts) {
                // Skip older cells since a Phoenix index row is composed of cells with the same timestamp
                continue;
            }
            // Check all columns
            if (!CellUtil.matchingValue(actualCell, expectedCell)) {
                String errorMsg = "Not matching value for " + Bytes.toString(family) + ":" +
                        Bytes.toString(qualifier);
                logToIndexToolOutputTable(dataRow.getRow(), indexRow.getRow(), ts, getMaxTimestamp(indexRow),
                        errorMsg, CellUtil.cloneValue(expectedCell), CellUtil.cloneValue(actualCell));
                return false;
            } else if (actualCell.getTimestamp() != ts) {
                String errorMsg = "Not matching timestamp for " + Bytes.toString(family) + ":" +
                        Bytes.toString(qualifier) + " E: " + ts + " A: " +
                        actualCell.getTimestamp();
                logToIndexToolOutputTable(dataRow.getRow(), indexRow.getRow(), ts, getMaxTimestamp(indexRow),
                        errorMsg, null, null);
                return false;
            }
            cellCount++;
        }
    }
    // Finally the actual row must not contain extra cells beyond the expected ones.
    if (cellCount != indexRow.rawCells().length) {
        String errorMsg = "Expected to find " + cellCount + " cells but got "
                + indexRow.rawCells().length + " cells";
        logToIndexToolOutputTable(dataRow.getRow(), indexRow.getRow(), ts, getMaxTimestamp(indexRow), errorMsg);
        return false;
    }
    return true;
}
// Point-looks-up the given index row keys and verifies each returned row
// against its data row. Rows verified valid (or deemed expired per TTL) are
// removed from perTaskDataKeyToDataPutMap; whatever remains unaccounted for
// is reported as missing. Counters accumulate into verificationPhaseResult.
private void verifyIndexRows(List<KeyRange> keys, Map<byte[], Put> perTaskDataKeyToDataPutMap,
                             VerificationResult.PhaseResult verificationPhaseResult) throws IOException {
    int expectedRowCount = keys.size();
    ScanRanges scanRanges = ScanRanges.createPointLookup(keys);
    Scan indexScan = new Scan();
    // Verify within the same time range as the data scan.
    indexScan.setTimeRange(scan.getTimeRange().getMin(), scan.getTimeRange().getMax());
    scanRanges.initializeScan(indexScan);
    SkipScanFilter skipScanFilter = scanRanges.getSkipScanFilter();
    indexScan.setFilter(skipScanFilter);
    int rowCount = 0;
    try (ResultScanner resultScanner = indexHTable.getScanner(indexScan)) {
        for (Result result = resultScanner.next(); (result != null); result = resultScanner.next()) {
            Put dataPut = indexKeyToDataPutMap.get(result.getRow());
            if (dataPut == null) {
                // This should never happen
                String errorMsg = "Missing data row";
                logToIndexToolOutputTable(null, result.getRow(), 0, getMaxTimestamp(result), errorMsg);
                exceptionMessage = "Index verify failed - Missing data row - " + indexHTable.getName();
                throw new IOException(exceptionMessage);
            }
            if (verifySingleIndexRow(result, dataPut)) {
                verificationPhaseResult.validIndexRowCount++;
                // Valid rows are removed so only failed/missing rows remain.
                perTaskDataKeyToDataPutMap.remove(dataPut.getRow());
            } else {
                verificationPhaseResult.invalidIndexRowCount++;
            }
            rowCount++;
        }
    } catch (Throwable t) {
        ServerUtil.throwIOException(indexHTable.getName().toString(), t);
    }
    // Check if any expected rows from index(which we didn't get) are already expired due to TTL
    // TODO: metrics for expired rows
    if (!perTaskDataKeyToDataPutMap.isEmpty()) {
        Iterator<Entry<byte[], Put>> itr = perTaskDataKeyToDataPutMap.entrySet().iterator();
        long currentTime = EnvironmentEdgeManager.currentTime();
        while(itr.hasNext()) {
            Entry<byte[], Put> entry = itr.next();
            long ts = getMaxTimestamp(entry.getValue());
            if (isTimestampBeforeTTL(currentTime, ts)) {
                itr.remove();
                rowCount++;
                verificationPhaseResult.expiredIndexRowCount++;
            }
        }
    }
    // Anything still short of the expected count has no index row at all.
    if (rowCount != expectedRowCount) {
        for (Map.Entry<byte[], Put> entry : perTaskDataKeyToDataPutMap.entrySet()) {
            String errorMsg = "Missing index row";
            logToIndexToolOutputTable(entry.getKey(), null, getMaxTimestamp(entry.getValue()),
                    0, errorMsg);
        }
        verificationPhaseResult.missingIndexRowCount += expectedRowCount - rowCount;
    }
}
// True when the timestamp falls outside the index table's TTL window. The TTL
// is stored in seconds while timestamps are in milliseconds, hence the *1000.
private boolean isTimestampBeforeTTL(long currentTime, long tsToCheck) {
    return indexTableTTL != HConstants.FOREVER
            && tsToCheck < currentTime - (long) indexTableTTL * 1000;
}
// Queues one verification work unit; tasks are executed later when the batch
// is submitted to the pool by parallelizeIndexVerify().
private void addVerifyTask(final List<KeyRange> keys, final Map<byte[], Put> perTaskDataKeyToDataPutMap,
                           final VerificationResult.PhaseResult verificationPhaseResult) {
    tasks.add(new Task<Boolean>() {
        @Override
        public Boolean call() throws Exception {
            // Fix: removed the original no-op catch (Exception e) { throw e; }
            // wrapper, which added nothing but noise.
            if (Thread.currentThread().isInterrupted()) {
                // The pool was shut down while this task waited to run.
                exceptionMessage = "Pool closed, not attempting to verify index rows! " + indexHTable.getName();
                throw new IOException(exceptionMessage);
            }
            verifyIndexRows(keys, perTaskDataKeyToDataPutMap, verificationPhaseResult);
            return Boolean.TRUE;
        }
    });
}
// Splits the current page of data rows into chunks of rowCountPerTask index
// keys and verifies the chunks in parallel on the thread pool. Per-task
// counters are folded into verificationPhaseResult; for BEFORE/BOTH the rows
// that failed verification are collected into dataKeyToDataPutMap so the
// caller can rebuild just those.
private void parallelizeIndexVerify(VerificationResult.PhaseResult verificationPhaseResult) throws IOException {
    // Map each data row to the index row key it should produce.
    for (Mutation mutation : mutations) {
        indexKeyToDataPutMap.put(getIndexRowKey((Put)mutation), (Put)mutation);
    }
    // One task per rowCountPerTask keys, rounded up.
    int taskCount = (indexKeyToDataPutMap.size() + rowCountPerTask - 1) / rowCountPerTask;
    tasks = new TaskBatch<>(taskCount);
    List<Map<byte[], Put>> dataPutMapList = new ArrayList<>(taskCount);
    List<VerificationResult.PhaseResult> verificationPhaseResultList = new ArrayList<>(taskCount);
    List<KeyRange> keys = new ArrayList<>(rowCountPerTask);
    // Each task gets its own map and result object so no synchronization is
    // needed between tasks.
    Map<byte[], Put> perTaskDataKeyToDataPutMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    dataPutMapList.add(perTaskDataKeyToDataPutMap);
    VerificationResult.PhaseResult perTaskVerificationPhaseResult = new VerificationResult.PhaseResult();
    verificationPhaseResultList.add(perTaskVerificationPhaseResult);
    for (Map.Entry<byte[], Put> entry: indexKeyToDataPutMap.entrySet()) {
        keys.add(PVarbinary.INSTANCE.getKeyRange(entry.getKey()));
        perTaskDataKeyToDataPutMap.put(entry.getValue().getRow(), entry.getValue());
        if (keys.size() == rowCountPerTask) {
            addVerifyTask(keys, perTaskDataKeyToDataPutMap, perTaskVerificationPhaseResult);
            keys = new ArrayList<>(rowCountPerTask);
            perTaskDataKeyToDataPutMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
            dataPutMapList.add(perTaskDataKeyToDataPutMap);
            perTaskVerificationPhaseResult = new VerificationResult.PhaseResult();
            verificationPhaseResultList.add(perTaskVerificationPhaseResult);
        }
    }
    // Queue the final partial chunk, if any.
    if (keys.size() > 0) {
        addVerifyTask(keys, perTaskDataKeyToDataPutMap, perTaskVerificationPhaseResult);
    }
    List<Boolean> taskResultList = null;
    try {
        LOGGER.debug("Waiting on index verify tasks to complete...");
        taskResultList = this.pool.submitUninterruptible(tasks);
    } catch (ExecutionException e) {
        throw new RuntimeException("Should not fail on the results while using a WaitForCompletionTaskRunner", e);
    } catch (EarlyExitFailure e) {
        throw new RuntimeException("Stopped while waiting for batch, quitting!", e);
    }
    for (Boolean result : taskResultList) {
        if (result == null) {
            // there was a failure
            throw new IOException(exceptionMessage);
        }
    }
    // After verification, each per-task map holds only the rows that failed;
    // collect them for the subsequent rebuild in BEFORE/BOTH modes.
    if (verifyType == IndexTool.IndexVerifyType.BEFORE || verifyType == IndexTool.IndexVerifyType.BOTH) {
        for (Map<byte[], Put> dataPutMap : dataPutMapList) {
            dataKeyToDataPutMap.putAll(dataPutMap);
        }
    }
    for (VerificationResult.PhaseResult result : verificationPhaseResultList) {
        verificationPhaseResult.add(result);
    }
}
// Replays the given data-table mutations with index-rebuild attributes so the
// index coprocessor regenerates the corresponding index rows. Mutations are
// committed in size/byte-bounded batches; each committed batch gets a fresh
// server-cache id (see commitIfReady).
private void rebuildIndexRows(UngroupedAggregateRegionObserver.MutationList mutationList) throws IOException {
    byte[] uuidValue = ServerCacheClient.generateId();
    UngroupedAggregateRegionObserver.MutationList currentMutationList =
            new UngroupedAggregateRegionObserver.MutationList(maxBatchSize);
    for (Mutation mutation : mutationList) {
        Put put = (Put) mutation;
        currentMutationList.add(mutation);
        setMutationAttributes(put, uuidValue);
        uuidValue = commitIfReady(uuidValue, currentMutationList);
        // Indexed columns absent from this row must be explicitly deleted so
        // stale index cells do not survive the rebuild.
        Delete deleteMarkers = generateDeleteMarkers(put);
        if (deleteMarkers != null) {
            setMutationAttributes(deleteMarkers, uuidValue);
            currentMutationList.add(deleteMarkers);
            uuidValue = commitIfReady(uuidValue, currentMutationList);
        }
    }
    // Flush whatever is left below the batch thresholds.
    if (!currentMutationList.isEmpty()) {
        ungroupedAggregateRegionObserver.checkForRegionClosingOrSplitting();
        ungroupedAggregateRegionObserver.commitBatchWithRetries(region, currentMutationList, blockingMemstoreSize);
    }
}
// Orchestrates one page of verification/rebuild according to verifyType:
//   NONE / AFTER : rebuild every row first; AFTER then verifies the result.
//   BEFORE / BOTH: verify first, rebuild only the rows that failed; BOTH
//                  verifies again afterwards.
//   ONLY         : verify without rebuilding anything.
private void verifyAndOrRebuildIndex() throws IOException {
    VerificationResult nextVerificationResult = new VerificationResult();
    nextVerificationResult.scannedDataRowCount = mutations.size();
    if (verifyType == IndexTool.IndexVerifyType.AFTER || verifyType == IndexTool.IndexVerifyType.NONE) {
        // For these options we start with rebuilding index rows
        rebuildIndexRows(mutations);
        nextVerificationResult.rebuiltIndexRowCount = mutations.size();
        isBeforeRebuilt = false;
    }
    if (verifyType == IndexTool.IndexVerifyType.NONE) {
        // No verification requested; counters are not accumulated in this mode.
        return;
    }
    if (verifyType == IndexTool.IndexVerifyType.BEFORE || verifyType == IndexTool.IndexVerifyType.BOTH ||
            verifyType == IndexTool.IndexVerifyType.ONLY) {
        VerificationResult.PhaseResult verificationPhaseResult = new VerificationResult.PhaseResult();
        // For these options we start with verifying index rows
        parallelizeIndexVerify(verificationPhaseResult);
        nextVerificationResult.before.add(verificationPhaseResult);
        // Sanity check: every scanned row must be counted in exactly one bucket.
        if (mutations.size() != verificationPhaseResult.getTotalCount()) {
            throw new DoNotRetryIOException(
                    "mutations.size() != verificationPhaseResult.getTotalCount() at the before phase " +
                            nextVerificationResult + " dataKeyToDataPutMap.size() = " + dataKeyToDataPutMap.size());
        }
    }
    if (verifyType == IndexTool.IndexVerifyType.BEFORE || verifyType == IndexTool.IndexVerifyType.BOTH) {
        // For these options, we have identified the rows to be rebuilt and now need to rebuild them
        // At this point, dataKeyToDataPutMap includes mapping only for the rows to be rebuilt
        mutations.clear();
        for (Map.Entry<byte[], Put> entry: dataKeyToDataPutMap.entrySet()) {
            mutations.add(entry.getValue());
        }
        rebuildIndexRows(mutations);
        nextVerificationResult.rebuiltIndexRowCount += mutations.size();
        isBeforeRebuilt = false;
    }
    if (verifyType == IndexTool.IndexVerifyType.AFTER || verifyType == IndexTool.IndexVerifyType.BOTH) {
        // We have rebuilt index row and now we need to verify them
        indexKeyToDataPutMap.clear();
        VerificationResult.PhaseResult verificationPhaseResult = new VerificationResult.PhaseResult();
        parallelizeIndexVerify(verificationPhaseResult);
        nextVerificationResult.after.add(verificationPhaseResult);
        if (mutations.size() != verificationPhaseResult.getTotalCount()) {
            throw new DoNotRetryIOException(
                    "mutations.size() != verificationPhaseResult.getTotalCount() at the after phase " +
                            nextVerificationResult + " dataKeyToDataPutMap.size() = " + dataKeyToDataPutMap.size());
        }
    }
    indexKeyToDataPutMap.clear();
    // Fold this page's counters into the scanner-lifetime total.
    verificationResult.add(nextVerificationResult);
}
// Drives one "page" of the rebuild: scans data rows from the wrapped scanner,
// converts them into Put/Delete mutations, and either commits them directly
// (partial or single-row rebuild) or hands them to verifyAndOrRebuildIndex().
// Emits a single aggregate cell carrying the row count, per the ungrouped
// aggregation protocol.
@Override
public boolean next(List<Cell> results) throws IOException {
    Cell lastCell = null;
    int rowCount = 0;
    region.startRegionOperation();
    try {
        // Partial rebuilds by MetadataRegionObserver use raw scan. Inline verification is not supported for them
        boolean partialRebuild = scan.isRaw();
        byte[] uuidValue = ServerCacheClient.generateId();
        synchronized (innerScanner) {
            do {
                List<Cell> row = new ArrayList<Cell>();
                hasMore = innerScanner.nextRaw(row);
                if (!row.isEmpty()) {
                    lastCell = row.get(0);
                    Put put = null;
                    Delete del = null;
                    // Split this data row's raw cells into a Put (regular
                    // cells) and a Delete (delete markers).
                    for (Cell cell : row) {
                        if (KeyValue.Type.codeToType(cell.getTypeByte()) == KeyValue.Type.Put) {
                            if (put == null) {
                                put = new Put(CellUtil.cloneRow(cell));
                                mutations.add(put);
                            }
                            put.add(cell);
                        } else {
                            if (del == null) {
                                del = new Delete(CellUtil.cloneRow(cell));
                                mutations.add(del);
                            }
                            del.addDeleteMarker(cell);
                        }
                    }
                    if (partialRebuild) {
                        // Partial rebuild replays mutations directly, in batches.
                        if (put != null) {
                            setMutationAttributes(put, uuidValue);
                        }
                        if (del != null) {
                            setMutationAttributes(del, uuidValue);
                        }
                        uuidValue = commitIfReady(uuidValue, mutations);
                    }
                    if (indexRowKey != null) {
                        if (put != null) {
                            setMutationAttributes(put, uuidValue);
                        }
                        Delete deleteMarkers = generateDeleteMarkers(put);
                        if (deleteMarkers != null) {
                            setMutationAttributes(deleteMarkers, uuidValue);
                            mutations.add(deleteMarkers);
                            uuidValue = commitIfReady(uuidValue, mutations);
                        }
                        // GlobalIndexChecker passed the index row key. This is to build a single index row.
                        // Check if the data table row we have just scanned matches with the index row key.
                        // If not, there is no need to build the index row from this data table row,
                        // and just return zero row count.
                        if (checkIndexRow(indexRowKey, put)) {
                            rowCount = GlobalIndexChecker.RebuildReturnCode.INDEX_ROW_EXISTS.getValue();
                        } else {
                            rowCount = GlobalIndexChecker.RebuildReturnCode.NO_INDEX_ROW.getValue();
                        }
                        break;
                    }
                    rowCount++;
                }
            } while (hasMore && rowCount < pageSizeInRows);
            if (!partialRebuild && indexRowKey == null) {
                // Full rebuild by IndexTool: verify and/or rebuild the page
                // collected in 'mutations'.
                verifyAndOrRebuildIndex();
            } else {
                // Partial or single-row rebuild: flush whatever is still batched.
                if (!mutations.isEmpty()) {
                    ungroupedAggregateRegionObserver.checkForRegionClosingOrSplitting();
                    ungroupedAggregateRegionObserver.commitBatchWithRetries(region, mutations, blockingMemstoreSize);
                }
            }
        }
    } catch (IOException e) {
        LOGGER.error("IOException during rebuilding: " + Throwables.getStackTraceAsString(e));
        throw e;
    } finally {
        region.closeRegionOperation();
        // Per-page state must not leak into the next call.
        mutations.clear();
        if (verify) {
            indexKeyToDataPutMap.clear();
            dataKeyToDataPutMap.clear();
        }
    }
    // Emit the single aggregate cell carrying the row count for this page.
    byte[] rowCountBytes = PLong.INSTANCE.toBytes(Long.valueOf(rowCount));
    final Cell aggKeyValue;
    if (lastCell == null) {
        aggKeyValue = PhoenixKeyValueUtil.newKeyValue(UNGROUPED_AGG_ROW_KEY, SINGLE_COLUMN_FAMILY,
                SINGLE_COLUMN, AGG_TIMESTAMP, rowCountBytes, 0, rowCountBytes.length);
    } else {
        aggKeyValue = PhoenixKeyValueUtil.newKeyValue(CellUtil.cloneRow(lastCell), SINGLE_COLUMN_FAMILY,
                SINGLE_COLUMN, AGG_TIMESTAMP, rowCountBytes, 0, rowCountBytes.length);
    }
    results.add(aggKeyValue);
    return hasMore;
}
// RegionScanner contract: delegate the result-size limit to the client scan.
@Override
public long getMaxResultSize() {
    return scan.getMaxResultSize();
}
} |
package io.github.apace100.origins.power.factory;
import com.google.gson.JsonObject;
import io.github.apace100.origins.power.Power;
import io.github.apace100.origins.power.PowerType;
import io.github.apace100.origins.power.factory.condition.ConditionFactory;
import io.github.apace100.origins.util.SerializableData;
import io.github.apace100.origins.util.SerializableDataType;
import me.shedaniel.architectury.core.RegistryEntry;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.network.PacketByteBuf;
import net.minecraft.util.Identifier;
import java.util.function.BiFunction;
import java.util.function.Function;
/**
 * Registry entry describing how to construct a specific kind of {@link Power}
 * from JSON data or from a network buffer. {@code P} is the concrete Power
 * subclass this factory produces.
 */
public class PowerFactory<P extends Power> extends RegistryEntry<PowerFactory<?>> {
    private final Identifier id;
    // Whether a "condition" field has been registered on this factory's data schema.
    private boolean hasConditions = false;
    protected SerializableData data;
    // Given parsed data, yields the constructor used to instantiate the power
    // for a particular (type, player) pair.
    protected Function<SerializableData.Instance, BiFunction<PowerType<P>, PlayerEntity, P>> factoryConstructor;
    public PowerFactory(Identifier id, SerializableData data, Function<SerializableData.Instance, BiFunction<PowerType<P>, PlayerEntity, P>> factoryConstructor) {
        this.id = id;
        this.data = data;
        this.factoryConstructor = factoryConstructor;
    }
    // Registers an optional "condition" field (default null) on the data
    // schema; idempotent, so calling it twice does not add the field twice.
    public PowerFactory<P> allowCondition() {
        if(!hasConditions) {
            hasConditions = true;
            data.add("condition", SerializableDataType.ENTITY_CONDITION, null);
        }
        return this;
    }
    public Identifier getSerializerId() {
        return id;
    }
    // One parsed occurrence of this factory's data; applying it to a
    // (PowerType, player) pair produces the actual Power instance.
    public class Instance implements BiFunction<PowerType<P>, PlayerEntity, P> {
        private final SerializableData.Instance dataInstance;
        private Instance(SerializableData.Instance data) {
            this.dataInstance = data;
        }
        // Serializes the factory id followed by the data payload, mirroring
        // read(PacketByteBuf) below.
        public void write(PacketByteBuf buf) {
            buf.writeIdentifier(id);
            data.write(buf, dataInstance);
        }
        @Override
        public P apply(PowerType<P> pPowerType, PlayerEntity playerEntity) {
            BiFunction<PowerType<P>, PlayerEntity, P> powerFactory = factoryConstructor.apply(dataInstance);
            P p = powerFactory.apply(pPowerType, playerEntity);
            // Attach the optional entity condition when one was parsed.
            if(hasConditions && dataInstance.isPresent("condition")) {
                p.addCondition(dataInstance.<ConditionFactory<PlayerEntity>.Instance>get("condition"));
            }
            return p;
        }
    }
    // Deserializes an Instance from datapack JSON.
    public Instance read(JsonObject json) {
        return new Instance(data.read(json));
    }
    // Deserializes an Instance from the network (inverse of Instance.write).
    public Instance read(PacketByteBuf buffer) {
        return new Instance(data.read(buffer));
    }
}
|
<reponame>lexfaraday/hamburgo<gh_stars>0
package test.backend.www.controllers;
import java.util.List;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.extern.slf4j.Slf4j;
import test.backend.www.model.Airport;
import test.backend.www.model.AirportLocator;
import test.backend.www.model.GeoPoint;
import test.backend.www.model.RelativeDistance;
@Data
@EqualsAndHashCode(callSuper = false)
@Slf4j
@Controller
// Ej: http://localhost:8080/locate/39.577251/2.633764/
public class LocatorsController
{
    @Autowired
    AirportLocator airportLocator;

    /**
     * Finds the airports closest to the given coordinate.
     *
     * @param latitude latitude of the reference point, in decimal degrees.
     * @param longitude longitude of the reference point, in decimal degrees.
     * @param limitKm maximum search radius in kilometres (default 300).
     * @param max maximum number of airports to return (default 5).
     * @param withIataCode whether to restrict results to airports with an IATA code (default true).
     * @return the closest airports together with their relative distances.
     */
    @ResponseBody
    @RequestMapping(value = "/locate/{latitude}/{longitude}/", produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseStatus(HttpStatus.OK)
    public ResponseEntity<List<RelativeDistance>> locate(@PathVariable(value = "latitude") double latitude,
            @PathVariable(value = "longitude") double longitude, @RequestParam(value = "limitKm", required = false, defaultValue = "300") long limitKm,
            @RequestParam(value = "max", required = false, defaultValue = "5") int max,
            @RequestParam(value = "withIataCode", required = false, defaultValue = "true") boolean withIataCode)
    {
        log.info("Locating airports...");
        return new ResponseEntity<>(airportLocator.getClosestAirports(new GeoPoint(latitude, longitude), max, limitKm, withIataCode), HttpStatus.OK);
    }

    /**
     * Lists airports in pages of at most 25 entries.
     *
     * @param from zero-based index of the first airport (negative values are clamped to 0).
     * @param to end index of the window; the window size is capped at 25.
     * @return the requested page of airports, sorted.
     */
    @ResponseBody
    @RequestMapping(value = "/list", produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseStatus(HttpStatus.OK)
    public ResponseEntity<List<Airport>> list(@RequestParam(value = "from", required = false, defaultValue = "0") int from,
            @RequestParam(value = "to", required = false, defaultValue = "25") int to)
    {
        log.info("Listing airports...");
        int realFrom = Math.max(0, from);
        // Clamp the window size to [0, 25]. Previously, when to < from the computed
        // limit went negative and Stream.limit() throws IllegalArgumentException
        // for negative values, turning a bad query into a 500.
        int limit = Math.max(0, Math.min(Math.abs(to) - realFrom, 25));
        List<Airport> result = airportLocator.getAirports().values().stream().sorted().skip(realFrom).limit(limit).collect(Collectors.toList());
        return new ResponseEntity<>(result, HttpStatus.OK);
    }
}
|
/**
* @license Copyright (c) 2017, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md.
*/
'use strict';
const path = require( 'path' );
/**
 * Prepends `src/asserts.js` and `chai.js` to the Karma file list so that both
 * end up included in the test context before any other files.
 *
 * @param {Array} files Array of files loaded by Karma.
 * @returns {undefined} undefined
 */
function loadAsserts( files ) {
	// All injected files share the same flags; only the pattern differs.
	const prepend = ( filePattern ) => {
		files.unshift( {
			pattern: filePattern,
			included: true,
			served: true,
			watched: false,
			nocache: false
		} );
	};

	prepend( path.join( __dirname, 'src/asserts.js' ) );
	prepend( require.resolve( 'chai/chai.js' ) );
}
// Karma dependency injection: supply the config's `files` array to the factory.
loadAsserts.$inject = [ 'config.files' ];

// Register the plugin under the `ckeditor4-yui-to-chai` framework name.
module.exports = {
	'framework:ckeditor4-yui-to-chai': [ 'factory', loadAsserts ]
};
|
<reponame>muthukumaravel7/armnn
// Auto-generated Doxygen navigation data for the DebugQueueDescriptor struct page:
// each entry is [ member name, page/anchor it links to, null ]. Do not edit by hand.
var structarmnn_1_1_debug_queue_descriptor =
[
    [ "DebugQueueDescriptor", "structarmnn_1_1_debug_queue_descriptor.xhtml#aa3a1f59f5a8e8ab4ca6dc4044fc776df", null ],
    [ "Validate", "structarmnn_1_1_debug_queue_descriptor.xhtml#a041e495449e22774a34d92b0904c10bf", null ],
    [ "m_Guid", "structarmnn_1_1_debug_queue_descriptor.xhtml#a04f1026633579df1913f80db2877ce8c", null ],
    [ "m_LayerName", "structarmnn_1_1_debug_queue_descriptor.xhtml#a2b4595e9b6c840810995558801be6f6c", null ],
    [ "m_SlotIndex", "structarmnn_1_1_debug_queue_descriptor.xhtml#ab53d42063d018ffebfceb019b26bf65a", null ]
];
<reponame>DhananjayMukhedkar/feature-store-api
package com.logicalclocks.hsfs;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.Maps;
import com.logicalclocks.hsfs.constructor.Query;
import com.logicalclocks.hsfs.engine.FeatureGroupUtils;
import com.logicalclocks.hsfs.engine.FeatureViewEngine;
import com.logicalclocks.hsfs.engine.VectorServer;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.Setter;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * A feature view: a logical, versioned selection of features (built from a
 * {@link Query}) on top of a feature store. Exposes serving-vector retrieval,
 * batch reads, tag management and training-dataset lifecycle operations, all
 * delegated to {@link FeatureViewEngine} and {@link VectorServer}.
 */
@NoArgsConstructor
public class FeatureView {
  // Backend identifier; assigned when the feature view is saved.
  @Getter
  @Setter
  @JsonIgnore
  private Integer id;

  @Getter
  @Setter
  private String name;

  @Getter
  @Setter
  private Integer version;

  @Getter
  @Setter
  private String description;

  @Getter
  @Setter
  private List<TrainingDatasetFeature> features;

  @Getter
  @Setter
  @JsonIgnore
  private FeatureStore featureStore;

  // The query this view is defined by.
  @Getter
  @Setter
  private Query query;

  // Feature names used as prediction targets; stored lower-cased (see constructor).
  @Getter
  @Setter
  @JsonIgnore
  private List<String> labels;

  private static FeatureViewEngine featureViewEngine = new FeatureViewEngine();
  private static VectorServer vectorServer = new VectorServer();

  /** Fluent builder for feature views; {@link #build()} also persists the view. */
  public static class FeatureViewBuilder {
    private String name;
    private Integer version;
    private String description;
    private FeatureStore featureStore;
    private Query query;
    private List<String> labels;

    public FeatureViewBuilder(FeatureStore featureStore) {
      this.featureStore = featureStore;
    }

    public FeatureViewBuilder name(String name) {
      this.name = name;
      return this;
    }

    public FeatureViewBuilder version(Integer version) {
      this.version = version;
      return this;
    }

    public FeatureViewBuilder description(String description) {
      this.description = description;
      return this;
    }

    public FeatureViewBuilder query(Query query) {
      this.query = query;
      return this;
    }

    public FeatureViewBuilder labels(List<String> labels) {
      this.labels = labels;
      return this;
    }

    /** Builds the feature view and saves it to the backend. */
    public FeatureView build() throws FeatureStoreException, IOException {
      FeatureView featureView = new FeatureView(name, version, query, description, featureStore, labels);
      featureViewEngine.save(featureView);
      return featureView;
    }
  }

  public FeatureView(@NonNull String name, Integer version, @NonNull Query query, String description,
                     @NonNull FeatureStore featureStore, List<String> labels) {
    this.name = name;
    this.version = version;
    this.query = query;
    this.description = description;
    this.featureStore = featureStore;
    // Labels are normalized to lower case so lookups are case-insensitive.
    this.labels = labels != null ? labels.stream().map(String::toLowerCase).collect(Collectors.toList()) : null;
  }

  /** Deletes this feature view (by name and version) from the backend. */
  public void delete() throws FeatureStoreException, IOException {
    featureViewEngine.delete(this.featureStore, this.name, this.version);
  }

  /** Pushes the metadata of {@code other} to the backend and returns the updated view. */
  public FeatureView update(FeatureView other) throws FeatureStoreException, IOException {
    return featureViewEngine.update(other);
  }

  /** Initializes online serving for this view (non-batch mode). */
  public void initServing() throws FeatureStoreException, IOException, SQLException, ClassNotFoundException {
    vectorServer.initServing(this, false);
  }

  /**
   * Initializes serving for this view.
   *
   * @param batch whether vectors will be fetched in batches
   * @param external whether to connect through the external connection endpoint
   */
  public void initServing(Boolean batch, Boolean external)
      throws FeatureStoreException, IOException, SQLException, ClassNotFoundException {
    vectorServer.initServing(this, batch, external);
  }

  /** Returns one serving vector for the given primary-key entry. */
  @JsonIgnore
  public List<Object> getFeatureVector(Map<String, Object> entry)
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    return vectorServer.getFeatureVector(this, entry);
  }

  /** Returns one serving vector, optionally via the external endpoint. */
  @JsonIgnore
  public List<Object> getFeatureVector(Map<String, Object> entry, boolean external)
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    return vectorServer.getFeatureVector(this, entry, external);
  }

  /** Returns serving vectors for multiple primary-key values per key. */
  @JsonIgnore
  public List<List<Object>> getFeatureVectors(Map<String, List<Object>> entry)
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    return vectorServer.getFeatureVectors(this, entry);
  }

  /** Returns serving vectors for multiple entries, optionally via the external endpoint. */
  @JsonIgnore
  public List<List<Object>> getFeatureVectors(Map<String, List<Object>> entry, boolean external)
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    return vectorServer.getFeatureVectors(this, entry, external);
  }

  /** Returns a single sample serving vector. */
  public List<Object> previewFeatureVector()
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    return previewFeatureVectors(1).get(0);
  }

  /** Returns a single sample serving vector, optionally via the external endpoint. */
  public List<Object> previewFeatureVector(boolean external)
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    return previewFeatureVectors(1, external).get(0);
  }

  /** Returns {@code n} sample serving vectors. */
  public List<List<Object>> previewFeatureVectors(Integer n)
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    return vectorServer.previewFeatureVectors(this, n);
  }

  /** Returns {@code n} sample serving vectors, optionally via the external endpoint. */
  public List<List<Object>> previewFeatureVectors(Integer n, boolean external)
      throws SQLException, FeatureStoreException, IOException, ClassNotFoundException {
    // NOTE(review): argument order here is (view, external, n) while the
    // single-argument overload passes (view, n) — presumably this matches a
    // distinct VectorServer overload; confirm against VectorServer's signatures.
    return vectorServer.previewFeatureVectors(this, external, n);
  }

  /** Returns the batch query string over the full time range. */
  @JsonIgnore
  public String getBatchQuery() throws FeatureStoreException, IOException, ParseException {
    return getBatchQuery(null, null);
  }

  /**
   * Returns the batch query string restricted to the given event-time range.
   * Null bounds are passed through as open-ended.
   */
  @JsonIgnore
  public String getBatchQuery(String startTime, String endTime)
      throws FeatureStoreException, IOException, ParseException {
    return featureViewEngine.getBatchQueryString(
        this,
        startTime != null ? FeatureGroupUtils.getDateFromDateString(startTime) : null,
        endTime != null ? FeatureGroupUtils.getDateFromDateString(endTime) : null);
  }

  /** Reads batch data for the given event-time range with default read options. */
  @JsonIgnore
  public Dataset<Row> getBatchData(String startTime, String endTime)
      throws FeatureStoreException, IOException, ParseException {
    return getBatchData(startTime, endTime, Maps.newHashMap());
  }

  /** Reads batch data for the given event-time range with custom read options. */
  public Dataset<Row> getBatchData(String startTime, String endTime, Map<String, String> readOptions)
      throws FeatureStoreException, IOException, ParseException {
    return featureViewEngine.getBatchData(
        this,
        startTime != null ? FeatureGroupUtils.getDateFromDateString(startTime) : null,
        endTime != null ? FeatureGroupUtils.getDateFromDateString(endTime) : null,
        readOptions);
  }

  /**
   * Add name/value tag to the feature view.
   *
   * @param name
   *     name of the tag
   * @param value
   *     value of the tag. The value of a tag can be any valid json - primitives, arrays or json objects
   * @throws FeatureStoreException
   * @throws IOException
   */
  public void addTag(String name, Object value) throws FeatureStoreException, IOException {
    featureViewEngine.addTag(this, name, value);
  }

  /**
   * Get all tags of the feature view.
   *
   * @return a map of tag name and values. The value of a tag can be any valid json - primitives, arrays or json objects
   * @throws FeatureStoreException
   * @throws IOException
   */
  @JsonIgnore
  public Map<String, Object> getTags() throws FeatureStoreException, IOException {
    return featureViewEngine.getTags(this);
  }

  /**
   * Get a single tag value of the feature view.
   *
   * @param name
   *     name of the tag
   * @return The value of a tag can be any valid json - primitives, arrays or json objects
   * @throws FeatureStoreException
   * @throws IOException
   */
  @JsonIgnore
  public Object getTag(String name) throws FeatureStoreException, IOException {
    return featureViewEngine.getTag(this, name);
  }

  /**
   * Delete a tag of the feature view.
   *
   * @param name
   *     name of the tag to be deleted
   * @throws FeatureStoreException
   * @throws IOException
   */
  public void deleteTag(String name) throws FeatureStoreException, IOException {
    featureViewEngine.deleteTag(this, name);
  }

  /** Fetches an existing (materialized) training dataset of this view by version. */
  public TrainingDatasetBundle getTrainingDataset(Integer version)
      throws IOException, FeatureStoreException, ParseException {
    TrainingDataset trainingDataset =
        this.featureStore
            .createTrainingDataset()
            .name("") // name is set in the backend
            .version(version)
            .build();
    return featureViewEngine.getTrainingDataset(this, trainingDataset, Maps.newHashMap());
  }

  /** Creates an in-memory training dataset over the given event-time range. */
  public TrainingDatasetBundle getTrainingDataset(String startTime, String endTime)
      throws IOException, FeatureStoreException, ParseException {
    TrainingDataset trainingDataset =
        this.featureStore
            .createTrainingDataset()
            .name("") // name is set in the backend
            .trainingDatasetType(TrainingDatasetType.IN_MEMORY_TRAINING_DATASET)
            .eventStartTime(startTime)
            .eventEndTime(endTime)
            .build();
    return featureViewEngine.getTrainingDataset(this, trainingDataset, Maps.newHashMap());
  }

  /** Creates an in-memory training dataset with named splits (e.g. train/test). */
  public TrainingDatasetBundle getTrainingDataset(
      String startTime, String endTime, Map<String, Float> splits, String trainSplit
  ) throws IOException, FeatureStoreException, ParseException {
    TrainingDataset trainingDataset =
        this.featureStore
            .createTrainingDataset()
            .name("") // name is set in the backend
            .trainingDatasetType(TrainingDatasetType.IN_MEMORY_TRAINING_DATASET)
            .eventStartTime(startTime)
            .eventEndTime(endTime)
            .splits(splits != null
                ? splits.entrySet().stream().map(entry -> new Split(entry.getKey(), entry.getValue())).collect(
                Collectors.toList()) : null)
            .trainSplit(trainSplit)
            .build();
    return featureViewEngine.getTrainingDataset(this, trainingDataset, Maps.newHashMap());
  }

  /** Creates an in-memory training dataset with full control over its options. */
  public TrainingDatasetBundle getTrainingDataset(
      Integer version, String startTime, String endTime, String description, Map<String, Float> splits,
      String trainSplit, StatisticsConfig statisticsConfig, Map<String, String> readOptions
  ) throws IOException, FeatureStoreException, ParseException {
    TrainingDataset trainingDataset =
        this.featureStore
            .createTrainingDataset()
            .name("") // name is set in the backend
            .trainingDatasetType(TrainingDatasetType.IN_MEMORY_TRAINING_DATASET)
            .version(version)
            .eventStartTime(startTime)
            .eventEndTime(endTime)
            .description(description)
            .splits(splits != null
                ? splits.entrySet().stream().map(entry -> new Split(entry.getKey(), entry.getValue())).collect(
                Collectors.toList()) : null)
            .trainSplit(trainSplit)
            .statisticsConfig(statisticsConfig)
            .build();
    return featureViewEngine.getTrainingDataset(this, trainingDataset, readOptions);
  }

  /** Materializes a training dataset to storage in the given format. */
  public TrainingDatasetBundle createTrainingDataset(
      String startTime, String endTime, DataFormat dataFormat, StorageConnector storageConnector
  ) throws IOException, FeatureStoreException, ParseException {
    TrainingDataset trainingDataset =
        this.featureStore
            .createTrainingDataset()
            .name("") // name is set in the backend
            .eventStartTime(startTime)
            .eventEndTime(endTime)
            .dataFormat(dataFormat)
            .storageConnector(storageConnector)
            .build();
    return featureViewEngine.createTrainingDataset(this, trainingDataset, Maps.newHashMap());
  }

  /** Materializes a training dataset with named splits. Note: splits must be non-null here. */
  public TrainingDatasetBundle createTrainingDataset(
      String startTime, String endTime, DataFormat dataFormat, StorageConnector storageConnector,
      Map<String, Float> splits, String trainSplit
  ) throws IOException, FeatureStoreException, ParseException {
    TrainingDataset trainingDataset =
        this.featureStore
            .createTrainingDataset()
            .name("") // name is set in the backend
            .eventStartTime(startTime)
            .eventEndTime(endTime)
            .dataFormat(dataFormat)
            .storageConnector(storageConnector)
            .splits(splits.entrySet().stream().map(entry -> new Split(entry.getKey(), entry.getValue())).collect(
                Collectors.toList()))
            .trainSplit(trainSplit)
            .build();
    return featureViewEngine.createTrainingDataset(this, trainingDataset, Maps.newHashMap());
  }

  /** Materializes a training dataset with full control over its options. */
  public TrainingDatasetBundle createTrainingDataset(
      Integer version, String startTime, String endTime, String description, DataFormat dataFormat,
      Boolean coalesce, StorageConnector storageConnector, String location, Map<String, Float> splits,
      String trainSplit, Long seed, StatisticsConfig statisticsConfig, Map<String, String> writeOptions
  ) throws IOException, FeatureStoreException, ParseException {
    TrainingDataset trainingDataset =
        this.featureStore
            .createTrainingDataset()
            .name("") // name is set in the backend
            .version(version)
            .eventStartTime(startTime)
            .eventEndTime(endTime)
            .description(description)
            .dataFormat(dataFormat)
            .coalesce(coalesce)
            .storageConnector(storageConnector)
            .location(location)
            .splits(splits != null
                ? splits.entrySet().stream().map(entry -> new Split(entry.getKey(), entry.getValue())).collect(
                Collectors.toList()) : null)
            .trainSplit(trainSplit)
            .seed(seed)
            .statisticsConfig(statisticsConfig)
            .build();
    return featureViewEngine.createTrainingDataset(this, trainingDataset, writeOptions);
  }

  /** Recomputes the data of an existing training dataset version. */
  public void recreateTrainingDataset(Integer version, Map<String, String> writeOptions)
      throws FeatureStoreException, IOException {
    featureViewEngine.recreateTrainingDataset(this, version, writeOptions);
  }

  /** Deletes only the data of a training dataset version; metadata is kept. */
  public void purgeTrainingData(Integer version) throws FeatureStoreException, IOException {
    featureViewEngine.deleteTrainingDatasetOnly(this, version);
  }

  /** Deletes the data of all training dataset versions; metadata is kept. */
  public void purgeAllTrainingData() throws FeatureStoreException, IOException {
    featureViewEngine.deleteTrainingDatasetOnly(this);
  }

  /** Deletes a training dataset version, data and metadata. */
  public void deleteTrainingDataset(Integer version) throws FeatureStoreException, IOException {
    featureViewEngine.deleteTrainingData(this, version);
  }

  /** Deletes all training dataset versions, data and metadata. */
  public void deleteAllTrainingDatasets() throws FeatureStoreException, IOException {
    featureViewEngine.deleteTrainingData(this);
  }

  /**
   * Add name/value tag to the training dataset.
   *
   * @param name
   *     name of the tag
   * @param value
   *     value of the tag. The value of a tag can be any valid json - primitives, arrays or json objects
   * @throws FeatureStoreException
   * @throws IOException
   */
  public void addTrainingDatasetTag(Integer version, String name, Object value) throws FeatureStoreException,
      IOException {
    featureViewEngine.addTag(this, name, value, version);
  }

  /**
   * Get all tags of the training dataset.
   *
   * @return a map of tag name and values. The value of a tag can be any valid json - primitives, arrays or json objects
   * @throws FeatureStoreException
   * @throws IOException
   */
  @JsonIgnore
  public Map<String, Object> getTrainingDatasetTags(Integer version) throws FeatureStoreException, IOException {
    return featureViewEngine.getTags(this, version);
  }

  /**
   * Get a single tag value of the training dataset.
   *
   * @param name
   *     name of the tag
   * @return The value of a tag can be any valid json - primitives, arrays or json objects
   * @throws FeatureStoreException
   * @throws IOException
   */
  @JsonIgnore
  public Object getTrainingDatasetTag(Integer version, String name) throws FeatureStoreException, IOException {
    return featureViewEngine.getTag(this, name, version);
  }

  /**
   * Delete a tag of the training dataset.
   *
   * @param name
   *     name of the tag to be deleted
   * @throws FeatureStoreException
   * @throws IOException
   */
  public void deleteTrainingDatasetTag(Integer version, String name) throws FeatureStoreException, IOException {
    featureViewEngine.deleteTag(this, name, version);
  }
}
|
<gh_stars>1-10
#include <iostream>
#include <unistd.h>
#include <fcntl.h>
#include <sys/wait.h>
#include <string>
using namespace std;
// Dispatcher: compiles (if needed) and runs a helper program named after the
// requested command ("max", "min" or "average"), forwarding the number arguments.
int main(int argc, char* argv[]){
    if (argc < 2) {
        cout << "number of arguments < 2!" << endl;
        return -1;
    }
    string action = argv[1];
    // Pass argv[1..] straight through as the helper's argv: argv[1] (the command
    // name) doubles as the helper's argv[0] and the numbers follow. The original
    // `new char*[argc]` allocation was leaked — it was immediately overwritten.
    char** nums = argv + 1;
    if (!(action == "max" or action == "min" or action == "average")) {
        cout << "command " << action << " is not defined!" << endl;
        return -1;
    }
    if (fork() == 0) {
        string out_path = "./" + action + ".out";
        string src_path = action + ".c";
        const char* out_p = out_path.data();
        const char* src_p = src_path.data();
        if (access(out_p, F_OK) == -1) {
            cout << "program " << action << " is not exist, trying to compile..." << endl;
            if (fork() == 0) {
                execl("/usr/bin/gcc", "gcc", src_p, "-o", out_p, NULL);
                // Only reached when gcc itself could not be started.
                cout << "failed to start gcc" << endl;
                _exit(1);
            }
            else {
                wait(NULL);
                cout << "successful" << endl;
            }
        }
        execve(out_p, nums, NULL);
        // Only reached when the helper failed to start; without this the child
        // would silently fall through into the parent's code path.
        cout << "failed to execute " << out_path << endl;
        _exit(1);
    }
    else {
        wait(NULL);
    }
    return 0;
}
|
#!/usr/bin/env python
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-0.x/blob/master/LICENSE
import struct
import unittest
import numpy
from awkward0 import *
import awkward0.type
class Test(unittest.TestCase):
    """Tests for VirtualArray: lazy materialization and caching behavior."""

    def runTest(self):
        # Placeholder so the TestCase can be instantiated without a method name.
        pass

    def test_virtual_nbytes(self):
        assert isinstance(VirtualArray(lambda: [1, 2, 3]).nbytes, int)
        assert VirtualArray(lambda: [1, 2, 3], nbytes=12345).nbytes == 12345

    def test_virtual_nocache(self):
        # Each indexing style (slice, stride, fancy, boolean mask) must trigger
        # materialization exactly when the array is first accessed.
        a = VirtualArray(lambda: [1, 2, 3])
        assert not a.ismaterialized
        assert numpy.array_equal(a[:], numpy.array([1, 2, 3]))
        assert a.ismaterialized

        a = VirtualArray(lambda: range(10))
        assert not a.ismaterialized
        assert numpy.array_equal(a[::2], numpy.array([0, 2, 4, 6, 8]))
        assert a.ismaterialized

        a = VirtualArray(lambda: range(10))
        assert not a.ismaterialized
        assert numpy.array_equal(a[[5, 3, 6, 0, 6]], numpy.array([5, 3, 6, 0, 6]))
        assert a.ismaterialized

        a = VirtualArray(lambda: range(10))
        assert not a.ismaterialized
        assert numpy.array_equal(a[[True, False, True, False, True, False, True, False, True, False]], numpy.array([0, 2, 4, 6, 8]))
        assert a.ismaterialized

    def test_virtual_transientcache(self):
        cache = {}
        a = VirtualArray(lambda: [1, 2, 3], cache=cache)
        assert not a.ismaterialized
        a[:]
        assert a.ismaterialized
        # Without a persistentkey the cache key is derived from the array's id().
        assert list(cache) == [a.TransientKey(id(a))]
        assert list(cache) == [a.key]
        assert numpy.array_equal(cache[a.key], numpy.array([1, 2, 3]))
        del a

    def test_virtual_persistentcache(self):
        cache = {}
        # The key literal had been clobbered by a "<PASSWORD>" redaction artifact;
        # it must match the value asserted against list(cache) below.
        a = VirtualArray(lambda: [1, 2, 3], cache=cache, persistentkey="find-me-again")
        assert not a.ismaterialized
        a[:]
        assert a.ismaterialized
        assert list(cache) == ["find-me-again"]
        assert list(cache) == [a.key]
        assert numpy.array_equal(cache[a.key], numpy.array([1, 2, 3]))
        del a

    def test_virtual_dontmaterialize(self):
        # With an explicit type, metadata queries (dtype/shape/len) must not
        # trigger materialization; only element access may.
        a = VirtualArray(lambda: [1, 2, 3], type=awkward0.type.fromnumpy(3, int))
        assert not a.ismaterialized
        assert a.dtype == numpy.dtype(int)
        assert a.shape == (3,)
        assert len(a) == 3
        assert a._array == None
        assert not a.ismaterialized
        assert numpy.array_equal(a[:], numpy.array([1, 2, 3]))
        assert a.ismaterialized
|
# Scratch notes for scraping the airport search results page (Nokogiri `doc`).

# Trying to get the table I want, but this produces an array of 6 tables!
doc.css('[id = "template-main-content"]').css("table").size
# I believe this is getting me the table I want
table = doc.css('[id = "content_dgAirportSearch"]')
# This produces the second row, which is the first content row (row 0 is the header)
table.css("tr")[1]
# This provides the airport ID from the first content row
table.css("tr")[1].css("td")[0].text
# Produces the airport name
table.css("tr")[1].css("td")[1].text
# Produces the bad-weather support indicator
table.css("tr")[1].css("td")[2].text
import numpy as np
from datumaro.components.extractor import (Extractor, DatasetItem, Label,
Mask, Bbox, Points, Caption)
from datumaro.components.project import Dataset
from datumaro.components.operations import mean_std, compute_ann_statistics
from unittest import TestCase
class TestOperations(TestCase):
    def test_mean_std(self):
        """mean_std() over images drawn from a known normal distribution should
        recover the per-channel mean and std (checked to 0 decimal places)."""
        expected_mean = [100, 50, 150]
        expected_std = [20, 50, 10]

        class TestExtractor(Extractor):
            def __iter__(self):
                # Several differently-sized images, all sampled from
                # N(expected_mean, expected_std) per channel.
                return iter([
                    DatasetItem(id=1, image=np.random.normal(
                        expected_mean, expected_std,
                        size=(w, h, 3))
                    )
                    for i, (w, h) in enumerate([
                        (3000, 100), (800, 600), (400, 200), (700, 300)
                    ])
                ])

        actual_mean, actual_std = mean_std(TestExtractor())

        for em, am in zip(expected_mean, actual_mean):
            self.assertAlmostEqual(em, am, places=0)
        for estd, astd in zip(expected_std, actual_std):
            self.assertAlmostEqual(estd, astd, places=0)
def test_stats(self):
dataset = Dataset.from_iterable([
DatasetItem(id=1, image=np.ones((5, 5, 3)), annotations=[
Caption('hello'),
Caption('world'),
Label(2, attributes={ 'x': 1, 'y': '2', }),
Bbox(1, 2, 2, 2, label=2, attributes={ 'score': 0.5, }),
Bbox(5, 6, 2, 2, attributes={
'x': 1, 'y': '3', 'occluded': True,
}),
Points([1, 2, 2, 0, 1, 1], label=0),
Mask(label=3, image=np.array([
[0, 0, 1, 1, 1],
[0, 0, 1, 1, 1],
[0, 0, 1, 1, 1],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
])),
]),
DatasetItem(id=2, image=np.ones((2, 4, 3)), annotations=[
Label(2, attributes={ 'x': 2, 'y': '2', }),
Bbox(1, 2, 2, 2, label=3, attributes={ 'score': 0.5, }),
Bbox(5, 6, 2, 2, attributes={
'x': 2, 'y': '3', 'occluded': False,
}),
]),
DatasetItem(id=3),
], categories=['label_%s' % i for i in range(4)])
expected = {
'images count': 3,
'annotations count': 10,
'unannotated images count': 1,
'unannotated images': ['3'],
'annotations by type': {
'label': { 'count': 2, },
'polygon': { 'count': 0, },
'polyline': { 'count': 0, },
'bbox': { 'count': 4, },
'mask': { 'count': 1, },
'points': { 'count': 1, },
'caption': { 'count': 2, },
},
'annotations': {
'labels': {
'count': 6,
'distribution': {
'label_0': [1, 1/6],
'label_1': [0, 0.0],
'label_2': [3, 3/6],
'label_3': [2, 2/6],
},
'attributes': {
'x': {
'count': 2, # unnotations with no label are skipped
'values count': 2,
'values present': ['1', '2'],
'distribution': {
'1': [1, 1/2],
'2': [1, 1/2],
},
},
'y': {
'count': 2, # unnotations with no label are skipped
'values count': 1,
'values present': ['2'],
'distribution': {
'2': [2, 2/2],
},
},
# must not include "special" attributes like "occluded"
}
},
'segments': {
'avg. area': (4 * 2 + 9 * 1) / 3,
'area distribution': [
{'min': 4.0, 'max': 4.5, 'count': 2, 'percent': 2/3},
{'min': 4.5, 'max': 5.0, 'count': 0, 'percent': 0.0},
{'min': 5.0, 'max': 5.5, 'count': 0, 'percent': 0.0},
{'min': 5.5, 'max': 6.0, 'count': 0, 'percent': 0.0},
{'min': 6.0, 'max': 6.5, 'count': 0, 'percent': 0.0},
{'min': 6.5, 'max': 7.0, 'count': 0, 'percent': 0.0},
{'min': 7.0, 'max': 7.5, 'count': 0, 'percent': 0.0},
{'min': 7.5, 'max': 8.0, 'count': 0, 'percent': 0.0},
{'min': 8.0, 'max': 8.5, 'count': 0, 'percent': 0.0},
{'min': 8.5, 'max': 9.0, 'count': 1, 'percent': 1/3},
],
'pixel distribution': {
'label_0': [0, 0.0],
'label_1': [0, 0.0],
'label_2': [4, 4/17],
'label_3': [13, 13/17],
},
}
},
}
actual = compute_ann_statistics(dataset)
self.assertEqual(expected, actual) |
import React, { Component } from 'react'
import { autobind } from 'core-decorators'
import { observer } from 'mobx-react'
import './AddMessage.scss'
@observer
export default class AddMessage extends Component {
@autobind
addMessage(e){
this.props.store.addMessage(this.newMessage.value)
this.newMessage.value = ''
this.newMessage.focus()
e.preventDefault()
return false
}
render() {
return (
<div className="add-message">
<textarea name="addMessage" placeholder="Type your message..." defaultValue="" ref={(ref) => this.newMessage = ref}></textarea>
<button onClick={this.addMessage}>Send</button>
</div>
);
}
}
|
<gh_stars>0
import React, { FunctionComponent, InputHTMLAttributes, TextareaHTMLAttributes, useState } from "react"
import { css } from "@emotion/react"
import { IconType } from "react-icons"
import { backgroundPrimaryColor, primaryColor } from "./variables"
// Base form-group styling shared by Input and Textarea: left-aligned layout,
// label typography, and Bootstrap-like `.form-control` borders/focus ring.
const style = css`
  text-align: left;
  margin-top: 0.6rem;
  margin-bottom: 0.6rem;
  label {
    display: inline-block;
    margin-bottom: 0.5rem;
  }
  .form-control {
    overflow: visible;
    display: block;
    width: 100%;
    padding: 2px 13.5px;
    line-height: 1.5;
    color: #495057;
    background-color: ${backgroundPrimaryColor};
    background-clip: padding-box;
    border: 1px solid #ced4da;
    transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out;
  }
  .form-control:focus {
    color: #495057;
    background-color: #fff;
    border-color: ${primaryColor};
    outline: 0;
    box-shadow: 0 0 0 0.2rem rgba(212, 234, 220, 0.25);
  }
  label {
    margin: 0 0 0.25rem 0;
    font-size: 0.7rem;
    color: #495057;
    font-weight: bold;
  }
`
// Extra styling applied only when an Icon is passed: positions the icon inside
// the field, tints it when the group is focused (`.active`), and pads the input
// so text does not overlap the icon.
const iconStyle = css`
  &.active svg {
    color: ${primaryColor};
  }
  svg {
    color: #aeabae;
    position: absolute;
    top: 7px;
    left: 10px;
    width: 15px;
  }
  .form-control {
    padding-left: 30px;
  }
`
export const Input: FunctionComponent<
InputHTMLAttributes<any> & {
Icon?: IconType
label?: string
inputClassName?: string
}
> = ({ Icon, ...props }) => {
if (!props.id) {
console.warn("Please add an id into this component")
}
const [state, setState] = useState("")
return (
<div className={`form-group ${state === "in" ? "active" : ""} ${props.className}`} css={[style, Icon && iconStyle]}>
{props.label && <label htmlFor={props.id}>{props.label}</label>}
<div className="relative">
{Icon && <Icon />}
<input
{...props}
className={`form-control ${props.inputClassName}`}
onFocus={() => setState("in")}
onBlur={() => setState("out")}
/>
</div>
</div>
)
}
/**
 * Multi-line text field with optional leading icon and label. Tracks focus so
 * the wrapper carries an `active` class while the field is focused.
 */
export const Textarea: FunctionComponent<
  TextareaHTMLAttributes<any> & {
    Icon?: IconType
    label?: string
  }
> = ({ Icon, ...props }) => {
  if (!props.id) {
    // Without an id the <label htmlFor> association is broken.
    console.warn("Please add an id into this component")
  }
  const [state, setState] = useState("")
  return (
    <div className={`form-group ${state === "in" ? "active" : ""}`} css={[style, Icon && iconStyle]}>
      {props.label && <label htmlFor={props.id}>{props.label}</label>}
      <div className="relative">
        {Icon && <Icon />}
        <textarea
          rows={3}
          {...props}
          // `?? ""` prevents the literal "undefined" from ending up in the
          // class attribute when no className is supplied.
          className={`form-control ${props.className ?? ""}`}
          onFocus={() => setState("in")}
          onBlur={() => setState("out")}
        />
      </div>
    </div>
  )
}
// Checkbox row styling: inline checkbox and label, vertically centered,
// with pointer cursors on both.
const checkboxStyle = css`
  position: relative;
  display: flex;
  align-items: center;
  margin-top: 0.6rem;
  margin-bottom: 0.6rem;
  input {
    box-sizing: border-box;
    padding: 0;
  }
  .form-check-input,
  .form-check-label {
    cursor: pointer;
  }
  .form-check-input {
    margin-right: 5px;
  }
`
export const Checkbox: FunctionComponent<InputHTMLAttributes<any> & { label: string }> = ({ label, ...props }) => {
return (
<div css={checkboxStyle}>
<input className="form-check-input" type="checkbox" {...props} />
<label className="form-check-label" htmlFor={props.id}>
{label}
</label>
</div>
)
}
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/control/action/ContextSingleActionProvider.java
package io.opensphere.core.control.action;
/**
 * Interface for providers of single actions for a context.
 *
 * @param <T> The context menu key type.
 */
public interface ContextSingleActionProvider<T>
{
    /**
     * Do the action which this provider performs.
     *
     * @param contextId the context for the menus.
     * @param key Key for which the action will be performed.
     * @param x The x position of the mouse event.
     * @param y The y position of the mouse event.
     */
    void doAction(String contextId, T key, int x, int y);

    /**
     * Notify this provider that it is no longer valid and will not be called
     * when the action associated with the context is executed. Implementations
     * should release any resources tied to the registration.
     */
    void invalidated();
}
|
#!/bin/bash
# Copyright 2012 Chao Weng
# 2016 Alibaba Robotics Corp. (Author: Xingyu Na)
# Apache 2.0

# This is a shell script, but it's recommended that you run the commands one by
# one by copying and pasting into the shell.
# Caution: some of the graph creation steps use quite a bit of memory, so you
# should run this on a machine that has sufficient memory.

# cmd.sh defines $train_cmd and $decode_cmd (how jobs are dispatched, e.g.
# run.pl locally or queue.pl on a grid).
. cmd.sh

# Data Preparation,
local/hkust_data_prep.sh /export/corpora/LDC/LDC2005S15/ /export/corpora/LDC/LDC2005T32/
# Lexicon Preparation,
local/hkust_prepare_dict.sh || exit 1;
# Phone Sets, questions, L compilation
utils/prepare_lang.sh data/local/dict "<UNK>" data/local/lang data/lang
# LM training
local/hkust_train_lms.sh
# G compilation, check LG composition
local/hkust_format_data.sh

# Now make MFCC plus pitch features.
# mfccdir should be some place with a largish disk where you
# want to store MFCC features.
mfccdir=mfcc
for x in train dev; do
  steps/make_mfcc_pitch_online.sh --cmd "$train_cmd" --nj 10 data/$x exp/make_mfcc/$x $mfccdir || exit 1;
  steps/compute_cmvn_stats.sh data/$x exp/make_mfcc/$x $mfccdir || exit 1;
done

# after this, the next command will remove the small number of utterances
# that couldn't be extracted for some reason (e.g. too short; no such file).
utils/fix_data_dir.sh data/train || exit 1;

# 100k-utterance subset, used later for the tri2_ali_100k alignment.
utils/subset_data_dir.sh --first data/train 100000 data/train_100k || exit 1;

# Monophone training.
steps/train_mono.sh --cmd "$train_cmd" --nj 10 \
  data/train data/lang exp/mono0a || exit 1;

# Monophone decoding
utils/mkgraph.sh data/lang_test exp/mono0a exp/mono0a/graph || exit 1
steps/decode.sh --cmd "$decode_cmd" --config conf/decode.config --nj 10 \
  exp/mono0a/graph data/dev exp/mono0a/decode

# Get alignments from monophone system.
steps/align_si.sh --cmd "$train_cmd" --nj 10 \
  data/train data/lang exp/mono0a exp/mono_ali || exit 1;

# train tri1 [first triphone pass]
steps/train_deltas.sh --cmd "$train_cmd" \
  2500 20000 data/train data/lang exp/mono_ali exp/tri1 || exit 1;
# decode tri1
utils/mkgraph.sh data/lang_test exp/tri1 exp/tri1/graph || exit 1;
steps/decode.sh --cmd "$decode_cmd" --config conf/decode.config --nj 10 \
  exp/tri1/graph data/dev exp/tri1/decode
# align tri1
steps/align_si.sh --cmd "$train_cmd" --nj 10 \
  data/train data/lang exp/tri1 exp/tri1_ali || exit 1;

# train tri2 [delta+delta-deltas]
steps/train_deltas.sh --cmd "$train_cmd" \
  2500 20000 data/train data/lang exp/tri1_ali exp/tri2 || exit 1;
# decode tri2
utils/mkgraph.sh data/lang_test exp/tri2 exp/tri2/graph
steps/decode.sh --cmd "$decode_cmd" --config conf/decode.config --nj 10 \
  exp/tri2/graph data/dev exp/tri2/decode

# train and decode tri2b [LDA+MLLT]
steps/align_si.sh --cmd "$train_cmd" --nj 10 \
  data/train data/lang exp/tri2 exp/tri2_ali || exit 1;
steps/align_si.sh --cmd "$train_cmd" --nj 10 \
  data/train_100k data/lang exp/tri2 exp/tri2_ali_100k || exit 1;

# Train tri3a, which is LDA+MLLT,
steps/train_lda_mllt.sh --cmd "$train_cmd" \
  2500 20000 data/train data/lang exp/tri2_ali exp/tri3a || exit 1;
utils/mkgraph.sh data/lang_test exp/tri3a exp/tri3a/graph || exit 1;
steps/decode.sh --cmd "$decode_cmd" --nj 10 --config conf/decode.config \
  exp/tri3a/graph data/dev exp/tri3a/decode

# From now, we start building a more serious system (with SAT), and we'll
# do the alignment with fMLLR.
steps/align_fmllr.sh --cmd "$train_cmd" --nj 10 \
  data/train data/lang exp/tri3a exp/tri3a_ali || exit 1;
steps/train_sat.sh --cmd "$train_cmd" \
  2500 20000 data/train data/lang exp/tri3a_ali exp/tri4a || exit 1;
utils/mkgraph.sh data/lang_test exp/tri4a exp/tri4a/graph
steps/decode_fmllr.sh --cmd "$decode_cmd" --nj 10 --config conf/decode.config \
  exp/tri4a/graph data/dev exp/tri4a/decode
steps/align_fmllr.sh --cmd "$train_cmd" --nj 10 \
  data/train data/lang exp/tri4a exp/tri4a_ali

# Building a larger SAT system.
steps/train_sat.sh --cmd "$train_cmd" \
  3500 100000 data/train data/lang exp/tri4a_ali exp/tri5a || exit 1;
utils/mkgraph.sh data/lang_test exp/tri5a exp/tri5a/graph || exit 1;
steps/decode_fmllr.sh --cmd "$decode_cmd" --nj 10 --config conf/decode.config \
  exp/tri5a/graph data/dev exp/tri5a/decode || exit 1;
steps/align_fmllr.sh --cmd "$train_cmd" --nj 10 \
  data/train data/lang exp/tri5a exp/tri5a_ali || exit 1;

# Optional follow-on recipes; the commented-out ones are left to the user.
# discriminative training
# local/run_discriminative.sh
# SGMM system [sgmm5a]
# local/run_sgmm.sh
# nnet1 dnn
# local/nnet/run_dnn.sh
# online nnet2
local/online/run_nnet2_ms.sh
# online nnet3
local/nnet3/run_tdnn.sh
# online chain
local/chain/run_tdnn.sh

# getting results (see RESULTS file)
for x in exp/*/decode; do [ -d $x ] && grep WER $x/cer_* | utils/best_wer.sh; done 2>/dev/null

exit 0;
|
<filename>examples/website/src/shared/utility.ts
import {useRef, useEffect, useState, useMemo, useCallback} from 'react'
import {createRenderer} from 'fela'
import felaPrefixer from 'fela-plugin-prefixer'
import felaFallbackValue from 'fela-plugin-fallback-value'
import {onlyMobileMediaQuery, desktopMediaQuery, tabletMediaQuery} from './style/helpers'
// Slug identifying the podcast ("Piepston") — presumably matched against
// article/page slugs at call sites; confirm before changing.
export const PODCAST_SLUG = 'piepston'
/**
 * React hook that lazily injects an external <script> element into the page.
 *
 * @param src           URL of the script to load.
 * @param checkIfLoaded Returns true if the script's globals are already present.
 * @param crossOrigin   When true the script is loaded with crossorigin="anonymous".
 * @returns {isLoading, isLoaded, load} — call load() to begin loading.
 *
 * Fix: the original returned before calling any hook when no DOM was present
 * (SSR), violating the Rules of Hooks. Hooks now run unconditionally; the SSR
 * guard only affects the effect body and the returned value, so client-side
 * behavior is unchanged.
 */
export function useScript(src: string, checkIfLoaded: () => boolean, crossOrigin: boolean = false) {
  const canUseDOM = typeof window === 'object'

  const scriptRef = useRef<HTMLScriptElement | null>(null)
  const [isLoading, setLoading] = useState(false)
  // checkIfLoaded may touch browser globals, so only call it when a DOM exists.
  const [isLoaded, setLoaded] = useState(() => (canUseDOM ? checkIfLoaded() : false))

  useEffect(() => {
    if (canUseDOM && isLoading && !isLoaded && !scriptRef.current) {
      const script = document.createElement('script')
      script.src = src
      script.async = true
      script.defer = true
      script.onload = () => setLoaded(true)
      script.crossOrigin = crossOrigin ? 'anonymous' : null
      document.head.appendChild(script)
      scriptRef.current = script
    }
    // NOTE(review): deps intentionally limited to isLoading to preserve the
    // original behavior; changes to src/crossOrigin after mount are ignored.
  }, [isLoading])

  const load = useCallback(() => {
    setLoading(true)
  }, [])

  const api = useMemo(
    () => ({
      isLoading,
      isLoaded,
      load
    }),
    [isLoading, isLoaded, load]
  )

  // Without a DOM (SSR) the hook is inert, mirroring the original return value.
  if (!canUseDOM) return {isLoaded: false, isLoading: false, load: () => {}}
  return api
}
// TODO: Move into Component which can auto update
/**
 * Formats how long ago `date` was, in German.
 * - More than 3 days ago: absolute date + time ("dd.mm.yy, hh:mm", de-CH locale).
 * - Otherwise relative: "vor N Tag(en)" / "vor N Stunde(n)" / "vor N Minute(n)".
 *
 * Fix: the singular minute case now reads "vor 1 Minute" (previously
 * "vor 1 Minuten"), matching the existing singular handling for Tag/Stunde.
 */
export function getHumanReadableTimePassed(date: Date): string {
  const now = new Date()
  const diff = now.getTime() - date.getTime()
  const diffInDHMS = convertMSToDHMS(diff)

  if (diffInDHMS.day > 3) {
    const FormattedDate = Intl.DateTimeFormat('de-CH', {
      year: '2-digit',
      month: '2-digit',
      day: '2-digit'
    }).format(date)

    const FormattedTime = Intl.DateTimeFormat('de-CH', {
      hour: '2-digit',
      minute: '2-digit'
    }).format(date)

    return `${FormattedDate}, ${FormattedTime}`
  }

  if (diffInDHMS.day === 1) {
    return `vor ${diffInDHMS.day} Tag`
  }

  if (diffInDHMS.day > 1) {
    return `vor ${diffInDHMS.day} Tagen`
  }

  if (diffInDHMS.hour === 1) {
    return `vor ${diffInDHMS.hour} Stunde`
  }

  if (diffInDHMS.hour > 1) {
    return `vor ${diffInDHMS.hour} Stunden`
  }

  if (diffInDHMS.minute === 1) {
    return `vor ${diffInDHMS.minute} Minute`
  }

  return `vor ${diffInDHMS.minute} Minuten`
}
/**
 * Splits a millisecond duration into whole days, hours, minutes and seconds.
 * The components are cumulative remainders, e.g. 90061000 ms (25h 1m 1s)
 * becomes {day: 1, hour: 1, minute: 1, seconds: 1}.
 */
export function convertMSToDHMS(milliseconds: number) {
  const totalSeconds = Math.floor(milliseconds / 1000)
  const totalMinutes = Math.floor(totalSeconds / 60)
  const totalHours = Math.floor(totalMinutes / 60)

  return {
    day: Math.floor(totalHours / 24),
    hour: totalHours % 24,
    minute: totalMinutes % 60,
    seconds: totalSeconds % 60
  }
}
/**
 * Creates the fela CSS renderer for the site.
 * Media queries are ordered mobile → tablet → desktop, and the prefixer /
 * fallback-value plugins add vendor prefixes and value fallbacks.
 * devMode is enabled outside production builds.
 */
export function createStyleRenderer() {
  return createRenderer({
    devMode: process.env.NODE_ENV !== 'production',
    mediaQueryOrder: [onlyMobileMediaQuery, tabletMediaQuery, desktopMediaQuery],
    plugins: [felaPrefixer(), felaFallbackValue()]
  })
}
/**
 * Converts an inline CSS string (e.g. "color: red; top: 1px") into a plain
 * style object ({color: 'red', top: '1px'}).
 *
 * Rules without a colon, or with an empty key or value, are skipped.
 * Fix: each rule is split at its FIRST colon only, so values that themselves
 * contain colons (e.g. "background: url(http://…)") are no longer truncated.
 */
export function transformCssStringToObject(styleCustom: string): object {
  return styleCustom.split(';').reduce((acc: object, rule: string) => {
    const separatorIndex = rule.indexOf(':')
    if (separatorIndex === -1) return acc

    const key = rule.slice(0, separatorIndex).trim()
    const value = rule.slice(separatorIndex + 1).trim()

    if (key && value) {
      return Object.assign(acc, {[key]: value})
    }
    return acc
  }, {})
}
|
<reponame>nibles1998/Doan_TN
const Sequelize = require('sequelize')
const Model = Sequelize.Model
// Sequelize model class for tour categories (seeded with "Discovery" and
// "HoneyMoon" in init() below). The original pass-through constructor only
// forwarded its arguments to super(), so it is removed: a class without a
// constructor gets exactly that behavior for free.
class TypeTour extends Model {
};
// Column definitions for the TypeTour model (passed to TypeTour.init below).
const attrs = {
    // UUIDv4 primary key, generated automatically on insert.
    id: {
        primaryKey: true,
        type: Sequelize.UUID,
        defaultValue: Sequelize.UUIDV4,
        validate: {
            isUUID: 4
        }
    },
    // Tour-type name (e.g. "Discovery"); must be non-empty and letters only.
    type: {
        type: Sequelize.STRING,
        allowNull: false,
        validate: {
            isAlpha: true,
            notEmpty: true
        }
    }
}
const options = {}
module.exports = {
init: async (instanceDB) => {
TypeTour.init(attrs, { ...options, sequelize: instanceDB });
await TypeTour.sync()
.then(async () => {
const typeDiscovery = await TypeTour.findAll({ where: { type: "Discovery" } });
if (typeDiscovery.length === 0) {
await TypeTour.create({ type: "Discovery" });
}
const typeHoneyMoon = await TypeTour.findAll({ where: { type: "HoneyMoon" } });
if (typeHoneyMoon.length === 0) {
await TypeTour.create({ type: "HoneyMoon" });
}
});
},
model: TypeTour,
type: "postgresql"
} |
#!/bin/bash -
# Usage: ./jib-maven-plugin/scripts/prepare_release.sh <release version> [<post-release version>]

# Abort on the first failing command.
set -o errexit

# Prints $1 in bold red (used for errors).
EchoRed() {
  echo "$(tput setaf 1; tput bold)$1$(tput sgr0)"
}
# Prints $1 in bold green (used for status messages).
EchoGreen() {
  echo "$(tput setaf 2; tput bold)$1$(tput sgr0)"
}

# Prints an error in red and exits with failure.
Die() {
  EchoRed "$1"
  exit 1
}
DieUsage() {
  Die "Usage: ./jib-maven-plugin/scripts/prepare_release.sh <release version> [<post-release version>]"
}

# Usage: CheckVersion <version>
# Validates MAJOR.MINOR.PATCH with an optional alphanumeric suffix
# (e.g. 1.2.3 or 1.2.3-rc1); dies otherwise.
CheckVersion() {
  [[ $1 =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z]+)?$ ]] || Die "Version: $1 not in ###.###.###[-XXX] format."
}

# Exactly one or two arguments are required.
[ $# -ne 1 ] && [ $# -ne 2 ] && DieUsage

EchoGreen '===== RELEASE SETUP SCRIPT ====='

VERSION=$1
CheckVersion ${VERSION}

# Optional second argument: the development version to switch to after release.
if [ -n "$2" ]; then
  POST_RELEASE_VERSION=$2
  CheckVersion ${POST_RELEASE_VERSION}
fi

# Refuse to release from a dirty working tree.
if [[ $(git status -uno --porcelain) ]]; then
  Die 'There are uncommitted changes.'
fi

# Runs checks integration tests.
./gradlew jib-maven-plugin:check jib-maven-plugin:integrationTest --info --stacktrace

# Checks out a new branch for this version release (eg. 1.5.7).
BRANCH=maven_release_v${VERSION}
git checkout -b ${BRANCH}

# Changes the version for release and creates the commits/tags.
# The leading `echo |` feeds a newline to the release plugin's prompt.
echo | ./gradlew jib-maven-plugin:release -Prelease.releaseVersion=${VERSION} ${POST_RELEASE_VERSION:+"-Prelease.newVersion=${POST_RELEASE_VERSION}"}

# Pushes the release branch and tag to Github.
git push origin ${BRANCH}
git push origin v${VERSION}-maven

# File a PR on Github for the new branch. Have someone LGTM it, which gives you permission to continue.
EchoGreen 'File a PR for the new release branch:'
echo https://github.com/GoogleContainerTools/jib/pull/new/${BRANCH}
EchoGreen "Merge the PR after the plugin is released."
EchoGreen "Run './jib-maven-plugin/scripts/update_gcs_latest.sh ${VERSION}' when the release is complete to update the latest version string on GCS."
<reponame>babushka1999/Marinus<filename>web_server/routes/admin.js
'use strict';
/**
* Copyright 2018 Adobe. All rights reserved.
* This file is licensed to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
* OF ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
const express = require('express');
const router = express.Router();
const crypto = require('crypto');
const htmlEscape = require('secure-filters').html;
const user = require('../config/models/user');
const group = require('../config/models/group');
const zone = require('../config/models/zone');
const ipZone = require('../config/models/ip_zone');
const ipv6Zone = require('../config/models/ipv6_zone');
const jobs = require('../config/models/jobs');
const marinusConfig = require('../config/models/config.js')
// Allowed values for a zone record's `status` field; PATCH handlers below
// reject any other value with a 400.
const statusValues = ['confirmed', 'unconfirmed', 'false_positive', 'expired'];
/**
 * Generates a random hexadecimal API key.
 *
 * @param {Number} len The desired key length in characters.
 * @return {String} A string of `len` random hex characters. Entropy is
 *         len/2 bytes, since each random byte encodes as two hex characters.
 */
function createAPIKey(len) {
    // ceil(len / 2) random bytes yield at least `len` hex characters;
    // the slice trims the extra character produced for odd lengths.
    const hexChars = crypto.randomBytes(Math.ceil(len / 2)).toString('hex');
    return hexChars.slice(0, len);
}
/**
* Validates the user identified in the request session is an admin.
* Ends the request and returns an error if they are not.
* @param {Object} req The Express request object
* @param {Object} res The Express response object
*/
function checkAdmin(req, res) {
if (req.session && ((req.session.groups === undefined) ||
(req.session.groups.indexOf('admin') === -1))) {
res.status(401).json({
'message': 'You do not appear to be an admin',
});
} else if (!req.session) {
res.status(401).json({
'message': 'You do not appear to have a session',
});
}
}
/**
* Validates the user identified in the request session is a data admin.
* Ends the request and returns an error if they are not.
* @param {*} req The Express request object
* @param {*} res The Express response object
*/
function checkDataAdmin(req, res) {
if (req.session &&
(req.session.groups === undefined) ||
((req.session.groups.indexOf('admin') === -1) &&
(req.session.groups.indexOf('data_admin') === -1))) {
res.status(401).json({
'message': 'You do not appear to have modify permissions',
});
} else if (!req.session) {
res.status(401).json({
'message': 'You do not appear to have a session',
});
}
}
/**
* @swagger
*
* definitions:
* UserRecord:
* type: object
* properties:
* userid:
* type: string
* example: "marinus"
* creation_date:
* type: string
* example: 2016-06-22T02:08:46.893Z
* updated:
* type: string
* example: 2016-06-22T02:08:46.893Z
* apiKey:
* type: string
* description: The apiKey for the user which is a 32 byte random string
* status:
* type: string
* example: "active"
* description: Whether the user account is currently active
*
* GroupRecord:
* type: object
* properties:
* name:
* type: string
* example: "admin"
* description: The name of the group
* creation_date:
* type: string
* example: 2016-06-22T02:08:46.893Z
* updated:
* type: string
* example: 2016-06-22T02:08:46.893Z
* admins:
* type: array
* description: The array of people allowed to modify the group
* items:
* type: string
* example: admin
* status:
* type: string
* example: "active"
* description: Whether the user account is currently active
* members:
* type: array
* description: The array of people within the group
* items:
* type: string
* example: admin
*
* JobRecord:
* type: object
* properties:
* job_name:
* type: string
* example: "common_crawl_graph"
* description: The name of the job
* updated:
* type: string
* example: 2016-06-22T02:08:46.893Z
* status:
* type: string
* example: "COMPLETED"
* description: The status of the job
*
* ConfigRecord:
* type: object
* properties:
* updated:
* type: string
* example: 2016-06-22T02:08:46.893Z
* DNS_Admins:
* description: An array of the DNS Admins.
* type: array
* items:
* type: string
* example: "<EMAIL>"
* SSL_Orgs:
* description: An array of the TLS Organizations
* type: array
* items:
* type: string
* example: "Acme, Inc."
* Whois_Orgs:
* description: An array of the Whois Organizations
* type: array
* items:
* type: string
* example: "Acme, Inc."
*/
module.exports = function(envConfig) {
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Retrieve your own user record
* description: Retrieve the user record of the current user
*
* /api/v1.0/admin/self:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: Retrieves the information of the currently logged in user. Admin privileges not required.
* tags: [Admin - Retrieve your own user record]
* produces:
* - application/json
* responses:
* 200:
* description: An individual user record for the current user.
* type: object
* schema:
* $ref: '#/definitions/UserRecord'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/self')
.get(function(req, res) {
let userPromise;
if (typeof req.session.userid !== 'undefined') {
userPromise = user.getUserIdPromise(req.session.userid, true);
} else {
userPromise = user.getUserIdPromise(req.session.passport.user.userid, true);
}
userPromise.then(
function(userInDB) {
if (!userInDB) {
res.status(404).json({
'message': 'User not found!',
});
return;
}
res.status(200).json(userInDB);
return;
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Retrieve your own group
* description: Retrieve the group data of the current user
*
* /api/v1.0/admin/self_group:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: Retrieves the group information of the currently logged in user. Admin privileges not required.
* tags: [Admin - Retrieve your own group]
* produces:
* - application/json
* responses:
* 200:
* description: An individual user record for the current user.
* type: object
* schema:
* $ref: '#/definitions/GroupRecord'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/self_group')
.get(function(req, res) {
let groupPromise;
if (typeof req.session.userid !== 'undefined') {
groupPromise = group.getGroupsByUserPromise(req.session.userid, true);
} else {
groupPromise = group.getGroupsByUserPromise(req.session.passport.user.userid, true);
}
groupPromise.then(
function(groups) {
if (!groups) {
res.status(404).json({
'message': 'Group not found!',
});
return;
}
res.status(200).json(groups);
return;
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Retrieve a given user record
* description: "[Admin-only] Retrieve the user record of the provided user."
*
* /api/v1.0/admin/users/{userid}:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Retrieve the user record of the provided active user. This does not retrieve inactive users. You must be an admin to use this API."
* tags: [Admin - Retrieve a given user record]
* produces:
* - application/json
* parameters:
* - name: userid
* type: string
* required: true
* description: The userid to find in the database.
* in: path
* responses:
* 200:
* description: An individual user record for the current user.
* type: object
* schema:
* $ref: '#/definitions/UserRecord'
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/users/:userid')
.get(function(req, res) {
checkAdmin(req, res);
if (!(req.params.hasOwnProperty('userid'))) {
res.status(400).json({'message': 'A userid must be provided!'});
return;
}
let userPromise = user.getUserIdPromise(req.params.userid, false);
userPromise.then(
function(userInDB) {
if (!userInDB) {
res.status(404).json({
'message': 'User not found!',
});
return;
}
res.status(200).json(userInDB);
return;
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Retrieve a list of current users
* description: "[Admin-only] Retrieve the list of user records."
* - name: Admin - Add a new user
* description: "[Admin-only] Adds a new user to the database."
*
* /api/v1.0/admin/users:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Retrieves the list of users. This will not return their apiKeys in the response. You must be an admin to use this API."
* tags: [Admin - Retrieve a list of current users]
* produces:
* - application/json
* parameters:
* - name: active
* type: string
* required: false
* description: Whether to retrieve only active users
* in: query
* responses:
* 200:
* description: An array of current users without their apiKeys.
* type: array
* items:
* $ref: '#/definitions/UserRecord'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
* post:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Adds a new user to Marinus. The user will automatically be set to active and have an apiKey created."
* tags: [Admin - Add a new user]
* produces:
* - application/json
* parameters:
* - name: userid
* type: string
* required: true
* description: The userid to create in the database.
* in: body
* responses:
* 201:
* description: A message indicating whether the addition succeeded.
* type: object
* properties:
* message:
* type: string
* example: "User created!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/users')
.get(function(req, res) {
checkAdmin(req, res);
let activeOnly = true;
if (req.query.hasOwnProperty('active')) {
if (req.query.active === 'false') {
activeOnly = false;
}
}
let userPromise = user.getUserListPromise(activeOnly);
userPromise.then(
function(userInDB) {
if (!userInDB) {
res.status(404).json({
'message': 'Users not found!',
});
return;
}
res.status(200).json(userInDB);
return;
});
})
.post(function(req, res) {
checkAdmin(req, res);
if (!('userid' in req.body)) {
res.status(400).json({'message': 'A userid must be provided!'});
return;
}
let userPromise = user.getUserIdPromise(req.body.userid);
userPromise.then(
function(userInDB) {
if (!userInDB) {
let newUser = new user.UserModel();
newUser.userid = req.body.userid.replace(/ /g, '');
newUser.apiKey = createAPIKey(envConfig.api_key_length);
newUser.creation_date = Date.now();
newUser.updated = Date.now();
newUser.status = 'active';
// save the user and check for errors
newUser.save(function(err) {
if (err) {
res.status(500).send(err);
return;
}
res.status(201).json({
message: 'User created!',
});
});
} else {
res.status(400).json({
message: 'User ' + htmlEscape(userInDB.userid) + ' already exists!',
});
}
}).catch(function(errorMsg) {
res.status(500).json({
'message': errorMsg.toString(),
});
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Retrieve a list of current groups
* description: "[Admin-only] Retrieve the list of group records."
* - name: Admin - Add a new group
* description: "[Admin-only] Adds a new group to the database."
*
* /api/v1.0/admin/groups:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Retrieves the list of groups. You must be an admin to use this API."
* tags: [Admin - Retrieve a list of current groups]
* produces:
* - application/json
* responses:
* 200:
* description: The list of current groups.
* type: array
* items:
* $ref: '#/definitions/GroupRecord'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
* post:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Adds a new group to Marinus."
* tags: [Admin - Add a new group]
* produces:
* - application/json
* parameters:
* - name: name
* type: string
* required: true
* description: The name of the new group to create in the database.
* in: body
* responses:
* 201:
* description: A message indicating whether the group addition succeeded.
* type: object
* properties:
* message:
* type: string
* example: "Group created!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/groups')
.get(function(req, res) {
checkAdmin(req, res);
let groupPromise = group.getAllGroups();
groupPromise.then(function(groups) {
if (!groups) {
res.status(404).json({
'message': 'Error in lookup!',
});
return;
}
res.json(groups);
return;
});
})
.post(function(req, res) {
checkAdmin(req, res);
if (!('name' in req.body)) {
res.status(400).json({'message': 'A name must be provided!'});
return;
}
let groupPromise = group.getGroupByNamePromise(req.body.name);
groupPromise.then(function(groupInDB) {
if (!groupInDB) {
let newGroup = new group.GroupModel();
if (req.session.passport && req.session.passport.user.userid) {
newGroup.creator = req.session.passport.user.userid;
} else if (req.session.userid) {
newGroup.creator = req.session.userid;
}
newGroup.groupID = 1;
newGroup.name = req.body.name.replace(/ /g, '');
newGroup.admins = [newGroup.creator];
newGroup.status = 'active';
newGroup.members = [newGroup.creator];
newGroup.creation_date = Date.now();
newGroup.updated = Date.now();
// save the domain and check for errors
newGroup.save(function(err) {
if (err) {
res.status(500).send(err);
return;
}
res.status(201).json({
message: 'Group created!',
});
});
} else {
res.status(500).json({
'message': 'Group ' + htmlEscape(groupInDB.name) + ' already exists!',
});
}
}).catch(function(errorMsg) {
res.status(500).json({
'message': errorMsg.toString(),
});
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Add a user to a group record
* description: "[Admin-only] Add the user to the provided group."
*
* /api/v1.0/admin/groups/{group}:
* patch:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Add a new member to a group record."
* tags: [Admin - Add a user to a group record]
* produces:
* - application/json
* parameters:
* - name: group
* type: string
* required: true
* description: The group to modify in the database.
* in: path
* - name: member
* type: string
* required: true
* description: The new member to add to the group.
* in: body
* responses:
* 201:
* description: A message indicating whether the group addition succeeded.
* type: object
* properties:
* message:
* type: string
* example: "Group updated!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/groups/:group')
.patch(function(req, res) {
checkAdmin(req, res);
if (!(req.params.hasOwnProperty('group'))) {
res.status(400).json({'message': 'A group must be provided!'});
return;
}
if (!('member' in req.body) ||
req.body.member.length === 0) {
res.status(400).json({'message': 'A member must be provided!'});
return;
}
let groupPromise = group.getGroupByNamePromise(req.params.group);
groupPromise.then(function(group) {
if (!group) {
res.status(404).json({
'message': 'Group not found!',
});
return;
}
if ('member' in req.body) {
group['members'].push(req.body.member.replace(/ /g, ''));
}
group.updated = Date.now();
group.save(function(err) {
if (err) {
res.status(500).send(err);
return;
}
res.status(201).json({
message: 'Group updated!',
});
});
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Add a zone to Marinus
* description: "[Admin-only] Add a new zone to Marinus"
*
* /api/v1.0/admin/zones:
* post:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Manually add a new zone for Marinus to track."
* tags: [Admin - Add a zone to Marinus]
* produces:
* - application/json
* parameters:
* - name: zone
* type: string
* required: true
* description: The new zone to add to Marinus.
* example: "example.org"
* in: body
* responses:
* 201:
* description: A message indicating whether the zone addition succeeded.
* type: object
* properties:
* message:
* type: string
* example: "Zone created!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/zones')
.post(function(req, res) {
checkAdmin(req, res);
if (!('zone' in req.body) ||
req.body.zone.length === 0) {
res.status(400).json({'message': 'A zone must be provided!'});
return;
}
let zonePromise = zone.getZoneByNamePromise(req.body.zone);
zonePromise.then(function(zoneInDB) {
if (!zoneInDB) {
let newZone = new zone.ZoneModel();
newZone.updated = Date.now();
newZone.created = Date.now();
newZone.status = 'confirmed';
newZone.source = 'manual';
newZone.zone = req.body.zone;
newZone.save(function(err) {
if (err) {
res.status(500);
res.send(err);
return;
}
res.status(201);
res.json({
message: 'Zone created!',
});
});
} else {
res.status(500).json({
'message': 'Zone ' + htmlEscape(zoneInDB.zone) + ' already exists!',
});
}
}).catch(function (errorMsg) {
res.status(500).json({
'message': errorMsg.toString(),
});
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Modify a zone in Marinus
* description: "[DataAdmin-only] Modify an existing zone in Marinus"
*
* /api/v1.0/admin/zones/{zone}:
* patch:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[DataAdmin-only] Change the notes or status of a zone in Marinus. You must be a DataAdmin to make this change."
* tags: [Admin - Modify a zone in Marinus]
* produces:
* - application/json
* parameters:
* - name: zone
* type: string
* required: true
* description: The zone to modify in Marinus
* example: "example.org"
* in: body
* - name: notes
* type: string
* required: false
* description: A short string to add to the collection of notes for the zone.
* example: "This zone belongs a new acquisition"
* in: body
* - name: status
* type: string
* required: false
* description: "The status for the zone. Must be either 'confirmed', 'unconfirmed', 'expired', or 'false_positive'."
* example: "false_positive"
* in: body
* responses:
* 201:
* description: A message indicating whether the zone modification succeeded.
* type: object
* properties:
* message:
* type: string
* example: "Zone updated!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
router.route('/admin/zones/:zone')
.patch(function(req, res) {
checkDataAdmin(req, res);
if (!(req.params.hasOwnProperty('zone'))) {
res.status(400).json({'message': 'A zone must be provided!'});
return;
}
let zonePromise = zone.getZoneByIdPromise(req.params.zone);
zonePromise.then(function(zoneInDB) {
if (zoneInDB) {
if ('notes' in req.body) {
zoneInDB['notes'].push(req.body.notes);
}
if ('status' in req.body) {
if (statusValues.indexOf(req.body.status) === -1) {
res.status(400).json({
'message': 'A bad status_value was provided.',
});
return;
}
zoneInDB['status'] = req.body.status;
}
zoneInDB.updated = Date.now();
zoneInDB.save(function(err) {
if (err) {
res.status(500).send(err);
return;
}
res.status(201).json({
message: 'Zone updated!',
});
});
} else {
res.status(500).json({
'message': 'Error updating ' + htmlEscape(zoneInDB.zone),
});
}
}).catch(function(errorMsg) {
res.status(500).json({
'message': errorMsg.toString(),
});
});
});
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Add an IPv4 zone to Marinus
* description: "[Admin-only] Add a new IPv4 zone to Marinus"
*
* /api/v1.0/admin/ip_zones:
* post:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Manually add a new IPv4 zone for Marinus to track."
* tags: [Admin - Add an IPv4 zone to Marinus]
* produces:
* - application/json
* parameters:
* - name: zone
* type: string
* required: true
* description: The new IPv4 zone to add to Marinus.
* example: "example.org"
* in: body
* responses:
* 201:
* description: A message indicating whether the IPv4 zone addition succeeded.
* type: object
* properties:
* message:
* type: string
* example: "IPv4 zone created!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
/**
 * POST /admin/ip_zones
 * [Admin-only] Manually register a new IPv4 CIDR for Marinus to track.
 * Rejects malformed CIDRs and duplicate registrations.
 */
router.route('/admin/ip_zones')
    .post(function(req, res) {
        checkAdmin(req, res);
        // The CIDR to register must be supplied in the request body.
        if (!('zone' in req.body)) {
            res.status(400).json({'message': 'An IPv4 zone must be provided!'});
            return;
        }
        // Dotted-quad address followed by a mandatory /0 - /32 prefix length.
        let ipv4 = /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|[1-2][0-9]|3[0-2]))$/;
        if (!(req.body.zone.match(ipv4))) {
            res.status(400).json({
                'message': 'An invalid IPv4 zone has been provided',
            });
            return;
        }
        ipZone.getZoneByNamePromise(req.body.zone).then(function(existingZone) {
            if (existingZone) {
                // Duplicate registrations are rejected.
                res.status(500).json({
                    'message': 'IPv4 Zone ' + htmlEscape(existingZone.zone) + ' already exists!',
                });
                return;
            }
            // Unknown zone: build and persist a manually-sourced record.
            let record = new ipZone.IpZoneModel();
            record.updated = Date.now();
            record.created = Date.now();
            record.zone = req.body.zone;
            record.status = 'confirmed';
            record.source = 'manual';
            record.notes = [];
            record.save(function(err) {
                if (err) {
                    res.status(500).send(err);
                    return;
                }
                res.status(201).json({
                    message: 'IPv4 zone created!',
                });
            });
        }).catch(function(errorMsg) {
            res.status(500).json({
                'message': errorMsg.toString(),
            });
        });
    });
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Modify an IPv4 zone in Marinus
* description: "[DataAdmin-only] Modify an existing IPv4 zone in Marinus"
*
* /api/v1.0/admin/ip_zones/{zone}:
* patch:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[DataAdmin-only] Change the notes or status of an IPv4 zone in Marinus. You must be a DataAdmin to make this change."
* tags: [Admin - Modify an IPv4 zone in Marinus]
* produces:
* - application/json
* parameters:
* - name: zone
* type: string
* required: true
* description: The IPv4 zone to modify in Marinus
* example: "12.34.56.78"
* in: body
* - name: notes
* type: string
* required: false
* description: A short string to add to the collection of notes for the IPv4 zone.
* example: "This zone represents the Oregon data center."
* in: body
* - name: status
* type: string
* required: false
* description: "The status for the IPv4 zone. Must be either 'confirmed', 'unconfirmed', 'expired', or 'false_positive'."
* example: "false_positive"
* in: body
* responses:
* 201:
* description: A message indicating whether the IPv4 zone modification succeeded.
* type: object
* properties:
* message:
* type: string
* example: "IPv4 zone updated!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
/**
 * PATCH /admin/ip_zones/:id
 * [DataAdmin-only] Append a note and/or update the status of an existing
 * IPv4 zone record identified by its database id.
 */
router.route('/admin/ip_zones/:id')
    .patch(function(req, res) {
        checkDataAdmin(req, res);
        if (!(req.params.hasOwnProperty('id'))) {
            res.status(400).json({'message': 'An IPv4 CIDR must be provided!'});
            return;
        }
        let zonePromise = ipZone.getZoneByIdPromise(req.params.id);
        zonePromise.then(function(zoneInDB) {
            if (zoneInDB) {
                // Notes are append-only; existing notes are never removed.
                if ('notes' in req.body) {
                    zoneInDB['notes'].push(req.body.notes);
                }
                if ('status' in req.body) {
                    // Only the enumerated status values are accepted.
                    if (statusValues.indexOf(req.body.status) === -1) {
                        res.status(400).json({
                            'message': 'A bad status_value was provided.',
                        });
                        return;
                    }
                    zoneInDB['status'] = req.body.status;
                }
                zoneInDB.updated = Date.now();
                zoneInDB.save(function(err) {
                    if (err) {
                        res.status(500).send(err);
                        return;
                    }
                    res.status(201).json({
                        message: 'IPv4 zone updated!',
                    });
                });
            } else {
                // Bug fix: zoneInDB is null/undefined in this branch, so the
                // original reference to zoneInDB.zone threw a TypeError.
                // Report the requested id instead.
                res.status(500).json({
                    'message': 'Error updating ' + htmlEscape(req.params.id),
                });
            }
        }).catch(function(errorMsg) {
            res.status(500).json({
                'message': errorMsg.toString(),
            });
        });
    });
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Add an IPv6 zone to Marinus
* description: "[Admin-only] Add a new IPv6 zone to Marinus"
*
* /api/v1.0/admin/ipv6_zones:
* post:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Manually add a new IPv6 zone for Marinus to track."
* tags: [Admin - Add an IPv6 zone to Marinus]
* produces:
* - application/json
* parameters:
* - name: zone
* type: string
* required: true
* description: The new IPv6 zone to add to Marinus.
 *         example: "2001:db8::/48"
* in: body
* responses:
* 201:
* description: A message indicating whether the IPv6 zone addition succeeded.
* type: object
* properties:
* message:
* type: string
* example: "IPv6 zone created!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
/**
 * POST /admin/ipv6_zones
 * [Admin-only] Manually register a new IPv6 CIDR for Marinus to track.
 */
router.route('/admin/ipv6_zones')
    .post(function(req, res) {
        checkAdmin(req, res);
        if (!('zone' in req.body)) {
            res.status(400).json({'message': 'An IPv6 zone must be provided!'});
            return;
        }
        // Bug fix: the original class was [0-9a-zA-z\:]. The A-z range also
        // matches the punctuation between 'Z' and 'a' ("[", "\", "]", "^",
        // "_", "`"). IPv6 groups are hexadecimal, so restrict the class to
        // hex digits plus ':'; the prefix length stays limited to /0 - /64.
        let ipv6 = /^[0-9a-fA-F:]+(\/([0-9]|[1-5][0-9]|6[0-4]))$/;
        if (!(req.body.zone.match(ipv6))) {
            res.status(400).json({
                'message': 'An invalid IPv6 zone has been provided',
            });
            return;
        }
        let zonePromise = ipv6Zone.getZoneByNamePromise(req.body.zone);
        zonePromise.then(function(zoneInDB) {
            if (!zoneInDB) {
                // Unknown zone: create a manually-sourced, confirmed record.
                let newZone = new ipv6Zone.Ipv6ZoneModel();
                newZone.updated = Date.now();
                newZone.created = Date.now();
                newZone.status = 'confirmed';
                newZone.source = 'manual';
                newZone.zone = req.body.zone;
                newZone.notes = [];
                newZone.save(function(err) {
                    if (err) {
                        res.status(500).send(err);
                        return;
                    }
                    res.status(201).json({
                        message: 'IPv6 zone created!',
                    });
                });
            } else {
                res.status(500).json({
                    'message': 'IPv6 Zone ' + htmlEscape(zoneInDB.zone) + ' already exists!',
                });
            }
        }).catch(function(errorMsg) {
            res.status(500).json({
                'message': errorMsg.toString(),
            });
        });
    });
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Modify an IPv6 zone in Marinus
* description: "[DataAdmin-only] Modify an existing IPv6 zone in Marinus"
*
* /api/v1.0/admin/ipv6_zones/{zone}:
* patch:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[DataAdmin-only] Change the notes or status of an IPv6 zone in Marinus. You must be a DataAdmin to make this change."
* tags: [Admin - Modify an IPv6 zone in Marinus]
* produces:
* - application/json
* parameters:
* - name: zone
* type: string
* required: true
* description: The IPv6 zone to modify in Marinus
 *         example: "2001:db8::/48"
* in: body
* - name: notes
* type: string
* required: false
* description: A short string to add to the collection of notes for the IPv6 zone.
* example: "This zone represents the Oregon data center."
* in: body
* - name: status
* type: string
* required: false
* description: "The status for the IPv6 zone. Must be either 'confirmed', 'unconfirmed', 'expired', or 'false_positive'."
* example: "false_positive"
* in: body
* responses:
* 201:
* description: A message indicating whether the IPv6 zone modification succeeded.
* type: object
* properties:
* message:
* type: string
* example: "IPv6 zone updated!"
* description: A status message indicating success or failure
* 400:
* description: Bad request parameters.
* schema:
* $ref: '#/definitions/BadInputError'
* 404:
* description: Results not found.
* schema:
* $ref: '#/definitions/ResultsNotFound'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
/**
 * PATCH /admin/ipv6_zones/:id
 * [DataAdmin-only] Append a note and/or update the status of an existing
 * IPv6 zone record identified by its database id.
 */
router.route('/admin/ipv6_zones/:id')
    .patch(function(req, res) {
        checkDataAdmin(req, res);
        if (!(req.params.hasOwnProperty('id'))) {
            res.status(400).json({'message': 'An IPv6 CIDR must be provided!'});
            return;
        }
        let zonePromise = ipv6Zone.getZoneByIdPromise(req.params.id);
        zonePromise.then(function(zoneInDB) {
            if (zoneInDB) {
                // Notes are append-only; existing notes are never removed.
                if ('notes' in req.body) {
                    zoneInDB['notes'].push(req.body.notes);
                }
                if ('status' in req.body) {
                    // Only the enumerated status values are accepted.
                    if (statusValues.indexOf(req.body.status) === -1) {
                        res.status(400).json({
                            'message': 'A bad status_value was provided.',
                        });
                        return;
                    }
                    zoneInDB['status'] = req.body.status;
                }
                zoneInDB.updated = Date.now();
                zoneInDB.save(function(err) {
                    if (err) {
                        res.status(500).send(err);
                        return;
                    }
                    res.status(201).json({
                        message: 'IPv6 zone updated!',
                    });
                });
            } else {
                // Bug fix: zoneInDB is null/undefined in this branch, so the
                // original reference to zoneInDB.zone threw a TypeError.
                // Report the requested id instead.
                res.status(500).json({
                    'message': 'Error updating ' + htmlEscape(req.params.id),
                });
            }
        }).catch(function(errorMsg) {
            res.status(500).json({
                'message': errorMsg.toString(),
            });
        });
    });
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Retrieve the list of jobs and their status
* description: "[Admin-only] Retrieve the list of Python cron jobs and their current status"
*
* /api/v1.0/admin/job_status:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Retrieve the list of Python cron jobs and their current status"
* tags: [Admin - Retrieve the list of jobs and their status]
* produces:
* - application/json
* responses:
* 200:
 *         description: The list of Python cron job records and their current status.
* type: array
* items:
* $ref: '#/definitions/JobRecord'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
/**
 * GET /admin/job_status
 * [Admin-only] Return the status records of the Python cron jobs.
 */
router.route('/admin/job_status')
    .get(function(req, res) {
        checkAdmin(req, res);
        let jobsPromise = jobs.getAllJobsPromise();
        jobsPromise.then(function(jobStatus) {
            if (!jobStatus) {
                res.status(500).json({
                    'message': 'Unable to retrieve job status!',
                });
                return;
            }
            res.status(200).json(jobStatus);
            return;
        }).catch(function(errorMsg) {
            // Bug fix: a rejected promise previously went unhandled, leaving
            // the request to hang until the client timed out. Report it,
            // matching the error handling of the other admin routes.
            res.status(500).json({
                'message': errorMsg.toString(),
            });
        });
    });
/**
* @swagger
*
* security:
* - APIKeyHeader: []
*
* tags:
* - name: Admin - Retrieve the DNS Admins from the Marinus configuration
* description: Retrieve the DNS Admin configuration information from Marinus
* - name: Admin - Retrieve the TLS Orgs from the Marinus configuration
* description: Retrieve the list of TLS Organizations from the Marinus configuration
* - name: Admin - Retrieve the Whois Orgs from the Marinus configuration
* description: Retrieve the list of Whois Organizations from the Marinus configuration
* - name: Admin - Retrieve the complete Marinus configuration. Admins only.
* description: "[Admin-only] Retrieve the complete configuration information from Marinus"
*
* /api/v1.0/admin/config?field=DNS_Admins:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
 *     description: "Retrieve the list of DNS Admins from the Marinus configuration"
* tags: [Admin - Retrieve the DNS Admins from the Marinus configuration]
* produces:
* - application/json
* parameters:
* - name: field
* type: string
* required: true
* description: Retrieve the list of DNS_Admins from the configuration.
* example: DNS_Admins
* in: query
* responses:
* 200:
* description: An array of the DNS Admins.
* type: array
* items:
* type: string
* example: "<EMAIL>"
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*
* /api/v1.0/admin/config?field=SSL_Orgs:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "Retrieve the list of TLS Organizations from the Marinus configuration"
* tags: [Admin - Retrieve the TLS Orgs from the Marinus configuration]
* produces:
* - application/json
* parameters:
* - name: field
* type: string
* required: true
* description: Retrieve the list of SSL_Orgs from the configuration.
* example: SSL_Orgs
* in: query
* responses:
* 200:
* description: The array of configured TLS Organizations in Marinus.
* type: array
* items:
* type: string
* example: "Acme, Inc."
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*
* /api/v1.0/admin/config?field=Whois_Orgs:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "Retrieve the list of Whois Organizations from the Marinus configuration"
* tags: [Admin - Retrieve the Whois Orgs from the Marinus configuration]
* produces:
* - application/json
* parameters:
* - name: field
* type: string
* required: true
* description: Retrieve the list of Whois_Orgs from the configuration.
* example: Whois_Orgs
* in: query
* responses:
* 200:
* description: The list of configured Whois Organizations in Marinus.
* type: array
* items:
* type: string
* example: "Acme, Inc."
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*
* /api/v1.0/admin/config:
* get:
* # Operation-specific security:
* security:
* - APIKeyHeader: []
* description: "[Admin-only] Retrieve the complete Marinus configuration"
* tags: [Admin - Retrieve the complete Marinus configuration. Admins only.]
* produces:
* - application/json
* responses:
* 200:
* description: The complete configuration record.
* schema:
* $ref: '#/definitions/ConfigRecord'
* 500:
* description: Server error.
* schema:
* $ref: '#/definitions/ServerError'
*/
/**
 * GET /admin/config
 * Return a single configuration field (DNS_Admins, SSL_Orgs, Whois_Orgs)
 * or, for admins only, the complete Marinus configuration record.
 */
router.route('/admin/config')
    .get(function(req, res) {
        let configField = '';
        if (req.query.hasOwnProperty("field")) {
            configField = req.query.field;
        }
        let promise;
        if (configField === "DNS_Admins") {
            promise = marinusConfig.getDNSAdminsPromise();
        } else if (configField === "SSL_Orgs") {
            promise = marinusConfig.getSSLOrgsPromise();
        } else if (configField === "Whois_Orgs") {
            // Bug fix: this branch previously called getSSLOrgsPromise()
            // (copy/paste), returning TLS orgs for a Whois_Orgs query.
            // NOTE(review): assumes marinusConfig exposes getWhoisOrgsPromise()
            // mirroring the other field accessors — confirm against the model.
            promise = marinusConfig.getWhoisOrgsPromise();
        } else {
            // In the future, there may be config properties that shouldn't be public
            checkAdmin(req, res);
            promise = marinusConfig.getFullConfigPromise();
        }
        promise.then(function(results) {
            if (!results) {
                res.status(500).json({
                    'message': 'Unable to retrieve config information!',
                });
                return;
            }
            res.status(200).json(results[0]);
            return;
        }).catch(function(errorMsg) {
            // Robustness: report query failures instead of hanging the request.
            res.status(500).json({
                'message': errorMsg.toString(),
            });
        });
    });
return (router);
};
|
import styled from 'styled-components';
// Narrow, horizontally-centred column wrapping the form content.
export const Container = styled.div`
max-width: 420px;
margin: 20px auto;
`;
// Clickable drop-zone style label for picking a thumbnail image; the nested
// file input is hidden so the whole label acts as the trigger. The selected
// image is presumably shown via an inline background-image set by the
// component using this (background-size: cover) — confirm at the call site.
export const ThumbnailPreview = styled.label`
height: 170px;
margin-bottom: 20px;
border: 1px dashed #ddd;
border-radius: 8px;
display: flex;
justify-content: center;
align-items: center;
background-size: cover;
cursor: pointer;
input {
display: none;
}
`;
|
<gh_stars>0
// Action type: replace the application's tracked route state.
export const SET_APP_ROUTE_STATE = 'SET_APP_ROUTE_STATE';
// Action type: update the global UI status flag (exact values are defined at
// the dispatch sites — confirm there).
export const SET_APP_UI_STATUS = 'SET_APP_UI_STATUS';
// Key under which the app reducer is mounted in the root reducer.
export const APP_REDUCER_NAME = 'APP_REDUCER';
// Module identifier.
export const ID = 'APP';
|
<reponame>mgcrea/node-sodium-jwt
import {signSync} from 'src/crypto';
import {Payload} from 'src/types';
// Fixed payload and key so the snapshot assertions are deterministic.
const payload: Payload = {
sub: 7,
iat: new Date('2022-01-03T16:37:48.612Z').getTime(),
};
// Base64-encoded signing key (placeholder value).
const secretKey = '<KEY>';
describe('Sign', () => {
it('should properly sign a JWT', async () => {
// A JWT is three dot-separated segments: header.payload.signature.
const jwt = signSync(payload, Buffer.from(secretKey, 'base64'));
expect(jwt).toBeDefined();
expect(typeof jwt).toEqual('string');
expect(jwt.split('.').length).toEqual(3);
expect(jwt).toMatchSnapshot();
});
it('should properly support a stringified secretKey', async () => {
// Same payload, but the key is passed as a base64 string instead of a
// Buffer; signSync is expected to decode it itself.
const jwt = signSync(payload, secretKey);
expect(jwt).toBeDefined();
expect(typeof jwt).toEqual('string');
expect(jwt.split('.').length).toEqual(3);
expect(jwt).toMatchSnapshot();
});
});
|
#!/usr/bin/env bash
# MyHPOM user zone control script to create iRODS users on-demand corresponding to MyHPOM users
# Author: Hong Yi <hongyi@renci.org>
#
# Usage: <script> <username> <password>
HS_WWW_IRODS_PROXY_USER=wwwHydroProxy
HS_WWW_IRODS_ZONE=hydroshareZone
HS_USER_IRODS_ZONE=hydroshareuserZone
# Create the iRODS user account. Positional parameters are quoted so user
# names containing spaces or glob characters are passed through intact.
echo " - iadmin mkuser $1 rodsuser"
iadmin mkuser "$1" rodsuser
# Security fix: the original echoed the plaintext password ($2) to stdout,
# leaking it into any captured log. Mask it in the trace output.
echo " - iadmin moduser $1 password ********"
iadmin moduser "$1" password "$2"
echo " - ichmod -rM own ${HS_WWW_IRODS_PROXY_USER}#${HS_WWW_IRODS_ZONE} /${HS_USER_IRODS_ZONE}/home"
ichmod -rM own "${HS_WWW_IRODS_PROXY_USER}#${HS_WWW_IRODS_ZONE}" "/${HS_USER_IRODS_ZONE}/home"
echo " - ichmod -rM inherit /${HS_USER_IRODS_ZONE}/home/$1"
ichmod -rM inherit "/${HS_USER_IRODS_ZONE}/home/$1"
|
<reponame>hatanagayasu/itooii-api<gh_stars>0
package controllers.admin;
import controllers.Result;
import controllers.constants.Error;
import models.Other;
import models.User;
import models.admin.Employee;
import org.bson.types.ObjectId;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * Admin API controller for back-office employee accounts: creation,
 * authentication (access-token based) and profile retrieval.
 */
public class EmployeesController extends AppController {
    /**
     * Creates a new employee with the given name and password.
     *
     * @param params JSON body containing "name" and "password"
     * @return the created employee, or USER_ALREADY_EXISTS if the name is taken
     */
    public static Result add(JsonNode params) {
        String name = params.get("name").textValue();
        String password = params.get("password").textValue();
        if (Employee.getByName(name) != null)
            return Error(Error.USER_ALREADY_EXISTS);
        Employee employee = new Employee(name, password);
        employee.save();
        // Consistency/security fix: never serialize the stored password back
        // to the client. me() already blanks it; add() previously leaked it.
        employee.setPassword(null);
        return Ok(employee);
    }

    /**
     * Returns the employee bound to the access token in {@code params}.
     *
     * @return the employee record (password blanked), or INVALID_ACCESS_TOKEN
     */
    public static Result me(JsonNode params) {
        Employee employee = getEmployee(params);
        if (employee == null)
            return Error(Error.INVALID_ACCESS_TOKEN);
        employee.setPassword(null);
        return Ok(employee);
    }

    /**
     * Looks up an employee by raw access token.
     *
     * @return the employee record (password blanked), or USER_NOT_FOUND
     */
    public static Result get(JsonNode params) {
        String accesstoken = params.get("access_token").textValue();
        Employee employee = Employee.getByAccessToken(accesstoken);
        if (employee == null)
            return Error(Error.USER_NOT_FOUND);
        // Consistency/security fix: blank the password before returning,
        // matching me(); the record was previously returned with the
        // password field intact.
        employee.setPassword(null);
        return Ok(employee);
    }

    /**
     * Authenticates by name/password and issues a fresh access token.
     *
     * @return {"access_token": ...} on success, otherwise an error result
     */
    public static Result login(JsonNode params) {
        String name = params.get("name").textValue();
        String password = params.get("password").textValue();
        Employee employee = Employee.getByName(name);
        if (employee == null)
            return Error(Error.INCORRECT_USER);
        // NOTE(review): String.equals is not constant-time; consider
        // MessageDigest.isEqual for the hash comparison. MD5 is also weak
        // for password storage — confirm whether an upgrade is feasible.
        if (!employee.getPassword().equals(Employee.md5(password)))
            return Error(Error.INCORRECT_PASSWORD);
        ObjectNode result = mapper.createObjectNode();
        result.put("access_token", employee.newAccessToken());
        return Ok(result);
    }

    /**
     * Invalidates the given access token.
     */
    public static Result logout(JsonNode params) {
        String token = params.get("access_token").textValue();
        Employee.deleteAccessToken(token);
        return Ok();
    }
}
|
<reponame>nishikeshKardak/pyparsems
from setuptools import setup
# Packaging manifest for pyparsems, a helper that converts a millisecond
# count into a dict of time components.
setup(
    name='pyparsems',
    version='1.0.0',
    description='Returns a dict object for a long millisecond value.',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/tapasweni-pathak/pyparsems',
    packages=['pyparsems'],
)
|
class Board:
    """A 3x3 tic-tac-toe board; '_' marks an empty cell."""

    def __init__(self):
        self.board = [['_', '_', '_'], ['_', '_', '_'], ['_', '_', '_']]

    def make_move(self, position, player_token):
        """Place player_token at position, an (x, y) pair of 0-based indices."""
        x, y = position
        self.board[x][y] = player_token

    def check_winner(self):
        """Return the winning token, 'D' for a draw, or None if ongoing.

        Bug fix: the original treated a line of three blanks ('_') as a win,
        so a fresh board immediately reported '_' as the winner. Lines of
        blanks are now excluded.
        """
        # check horizontally
        for row in self.board:
            if len(set(row)) == 1 and row[0] != '_':
                return row[0]
        # check vertically
        for i in range(3):
            col = [row[i] for row in self.board]
            if len(set(col)) == 1 and col[0] != '_':
                return col[0]
        # check diagonally
        if self.board[0][0] == self.board[1][1] == self.board[2][2] != '_':
            return self.board[0][0]
        if self.board[2][0] == self.board[1][1] == self.board[0][2] != '_':
            return self.board[2][0]
        # draw
        if '_' not in [cell for row in self.board for cell in row]:
            return 'D'
        # game is still ongoing
        return None
class Game:
    """Coordinates alternating turns between two players on a shared board.

    X always belongs to player_x and moves first; the turn swaps after
    every successful move.
    """

    def __init__(self, player_x, player_o):
        self.board = Board()
        self.player_x = player_x
        self.player_o = player_o
        self.turn = player_x

    def play_move(self, position):
        """Apply the current player's move at position and pass the turn."""
        mover_is_x = self.turn == self.player_x
        self.board.make_move(position, 'X' if mover_is_x else 'O')
        self.turn = self.player_o if mover_is_x else self.player_x

    def check_winner(self):
        """Delegate to the board: winning token, 'D', or None."""
        return self.board.check_winner()
class Player:
    """A named human player that reads its moves from standard input."""

    def __init__(self, name):
        self.name = name

    def make_move(self):
        """Prompt for a move and return it as an (x, y) integer pair."""
        parts = input('Enter your move in the format (x, y): ').split(',')
        return (int(parts[0]), int(parts[1]))
class Manager:
    """Drives a game loop until the board reports a winner or a draw."""

    def __init__(self, player_x, player_o):
        self.game = Game(player_x, player_o)

    def play_game(self):
        """Alternate player moves until check_winner() reports a result."""
        while True:
            winner = self.game.check_winner()
            if winner:
                print('Winner is ' + winner)
                return
            self.game.play_move(self.game.turn.make_move())
# Bug fix: the original guard was `if name == 'main':`, which raises
# NameError (`name` is undefined) and never starts the game. Use the
# standard module guard so the game runs only when executed directly.
if __name__ == '__main__':
    player_x = Player('Player X')
    player_o = Player('Player O')
    manager = Manager(player_x, player_o)
    manager.play_game()
#!/bin/tcsh
# Xcode build phase: copy the public headers and the built dynamic library
# into the SDK directory layout.
cd ${SRCROOT}
# Every header/inline file one directory above the project root is public.
set HEADERS = `ls ../*.h ../*.inl`
foreach header (${HEADERS})
cp -fp "${SRCROOT}"/$header ../../SDK/Include
end
# Install the freshly built dylib into the release SDK library folder.
cp -fp "${BUILT_PRODUCTS_DIR}"/libWm5AglApplication.dylib ../../SDK/Library/ReleaseDLL/libWm5AglApplication.dylib
|
<filename>test/webdriver_test.js
var assert = require('assert');
// Integration tests: verify the grunt task forwards its configuration into
// the webdriverio browser instance, then exercise a trivial page load.
describe('grunt-webdriverjs test', function () {
it('should have right options', function() {
// These values mirror the task config in the Gruntfile — confirm there.
assert.strictEqual(browser.options.waitforTimeout, 12345);
assert.strictEqual(browser.options.coloredLogs, true);
assert.strictEqual(browser.options.updateJob, true);
assert.strictEqual(browser.options.logLevel, 'command');
assert.strictEqual(browser.options.cucumberOpts.require[0], 'nothing');
});
it('checks if title contains the search query', function() {
// NOTE(review): getTitle is used callback-style; whether an assertion
// throwing inside the callback is reported as a failure depends on the
// webdriverjs chain's error propagation — confirm failures surface.
return browser
.url('/')
.getTitle(function(err,title) {
assert.strictEqual(title, 'WebdriverJS Testpage');
});
});
});
<reponame>jordanleeee/core-ng-project<gh_stars>100-1000
package core.framework.internal.http;
import core.framework.http.HTTPMethod;
import core.framework.http.HTTPRequest;
import okhttp3.Interceptor;
import okhttp3.Request;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.io.IOException;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
import static org.mockito.Mockito.when;
/**
* @author neo
*/
@ExtendWith(MockitoExtension.class)
class TimeoutInterceptorTest {
    // Mocked OkHttp chain; stubbed per-test with strict Mockito, so only the
    // interactions each test expects are stubbed.
    @Mock
    Interceptor.Chain chain;
    private TimeoutInterceptor interceptor;
    @BeforeEach
    void createTimeoutInterceptor() {
        interceptor = new TimeoutInterceptor();
    }
    // An HTTPRequest with only connectTimeout set should adjust just the
    // chain's connect timeout (30s -> 30000 ms).
    @Test
    void interceptWithConnectTimeout() throws IOException {
        var httpRequest = new HTTPRequest(HTTPMethod.POST, "https://localhost");
        httpRequest.connectTimeout = Duration.ofSeconds(30);
        // The HTTPRequest is attached as a request tag, which is how the
        // interceptor retrieves the timeout settings.
        var request = new Request.Builder().url(httpRequest.uri).tag(HTTPRequest.class, httpRequest).build();
        when(chain.request()).thenReturn(request);
        when(chain.withConnectTimeout(30000, TimeUnit.MILLISECONDS)).thenReturn(chain);
        interceptor.intercept(chain);
    }
    // A general timeout should adjust both the read and write timeouts.
    @Test
    void interceptWithTimeout() throws IOException {
        var httpRequest = new HTTPRequest(HTTPMethod.POST, "https://localhost");
        httpRequest.timeout = Duration.ofSeconds(30);
        var request = new Request.Builder().url(httpRequest.uri).tag(HTTPRequest.class, httpRequest).build();
        when(chain.request()).thenReturn(request);
        when(chain.withReadTimeout(30000, TimeUnit.MILLISECONDS)).thenReturn(chain);
        when(chain.withWriteTimeout(30000, TimeUnit.MILLISECONDS)).thenReturn(chain);
        interceptor.intercept(chain);
    }
    // A request without an HTTPRequest tag must pass through untouched: no
    // timeout stubs are defined, so any adjustment would fail the strict mocks.
    @Test
    void interceptWithoutTimeout() throws IOException {
        var request = new Request.Builder().url("http://localhost").build();
        when(chain.request()).thenReturn(request);
        interceptor.intercept(chain);
    }
}
|
package org.apache.maven.model.inheritance;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.function.Consumer;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerConfigurationException;
import org.xml.sax.SAXException;
import org.xml.sax.ext.LexicalHandler;
import org.apache.maven.model.Model;
import org.apache.maven.model.building.AbstractModelSourceTransformer;
import org.apache.maven.model.building.SimpleProblemCollector;
import org.apache.maven.model.building.TransformerContext;
import org.apache.maven.model.io.DefaultModelReader;
import org.apache.maven.model.io.DefaultModelWriter;
import org.apache.maven.model.io.ModelWriter;
import org.apache.maven.model.transform.sax.AbstractSAXFilter;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.xmlunit.matchers.CompareMatcher;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* @author <NAME>
*/
public class DefaultInheritanceAssemblerTest
{
    private DefaultModelReader reader;
    private ModelWriter writer;
    private InheritanceAssembler assembler;
    @BeforeEach
    public void setUp()
        throws Exception
    {
        // The reader uses a no-op source transformer: getSAXFilter returns
        // null, so test POMs are read from disk without any SAX filtering.
        reader = new DefaultModelReader( new AbstractModelSourceTransformer()
        {
            @Override
            protected AbstractSAXFilter getSAXFilter( Path pomFile, TransformerContext context,
                                                      Consumer<LexicalHandler> lexicalHandlerConsumer )
                throws TransformerConfigurationException, SAXException, ParserConfigurationException
            {
                return null;
            }
        } );
        writer = new DefaultModelWriter();
        assembler = new DefaultInheritanceAssembler();
    }
    // Resolve a fixture POM under src/test/resources/poms/inheritance/.
    private File getPom( String name )
    {
        return new File( "src/test/resources/poms/inheritance/" + name + ".xml" );
    }
    // Read a fixture POM into a Model (null options).
    private Model getModel( String name )
        throws IOException
    {
        return reader.read( getPom( name ), null );
    }
    @Test
    public void testPluginConfiguration()
        throws Exception
    {
        testInheritance( "plugin-configuration" );
    }
    /**
     * Check most classical urls inheritance: directory structure where parent POM in parent directory
     * and child directory == artifactId
     * @throws IOException Model read problem
     */
    @Test
    public void testUrls()
        throws Exception
    {
        testInheritance( "urls" );
    }
    /**
     * Flat directory structure: parent & child POMs in sibling directories, child directory == artifactId.
     * @throws IOException Model read problem
     */
    @Test
    public void testFlatUrls()
        throws IOException
    {
        testInheritance( "flat-urls" );
    }
    /**
     * MNG-5951 MNG-6059 child.x.y.inherit.append.path="false" test
     * @throws Exception
     */
    @Test
    public void testNoAppendUrls()
        throws Exception
    {
        testInheritance( "no-append-urls" );
    }
    /**
     * MNG-5951 special case test: inherit with partial override
     * @throws Exception
     */
    @Test
    public void testNoAppendUrls2()
        throws Exception
    {
        testInheritance( "no-append-urls2" );
    }
    /**
     * MNG-5951 special case test: child.x.y.inherit.append.path="true" in child should not reset content
     * @throws Exception
     */
    @Test
    public void testNoAppendUrls3()
        throws Exception
    {
        testInheritance( "no-append-urls3" );
    }
    /**
     * Tricky case: flat directory structure, but child directory != artifactId.
     * Model interpolation does not give same result when calculated from build or from repo...
     * This is why MNG-5000 fix in code is marked as bad practice (uses file names)
     * @throws IOException Model read problem
     */
    @Test
    public void testFlatTrickyUrls()
        throws IOException
    {
        // parent references child with artifactId (which is not directory name)
        // then relative path calculation will fail during build from disk but success when calculated from repo
        try
        {
            // build from disk expected to fail
            testInheritance( "tricky-flat-artifactId-urls", false );
            //fail( "should have failed since module reference == artifactId != directory name" );
        }
        catch ( AssertionError afe )
        {
            // expected failure: wrong relative path calculation
            assertTrue( afe.getMessage().contains(
                "Expected text value 'http://www.apache.org/path/to/parent/child-artifact-id/' but was " +
                "'http://www.apache.org/path/to/parent/../child-artifact-id/'" ),
                afe.getMessage() );
        }
        // but ok from repo: local disk is ignored
        testInheritance( "tricky-flat-artifactId-urls", true );
        // parent references child with directory name (which is not artifact id)
        // then relative path calculation will success during build from disk but fail when calculated from repo
        testInheritance( "tricky-flat-directory-urls", false );
        AssertionError afe = assertThrows(
            AssertionError.class,
            () -> testInheritance( "tricky-flat-directory-urls", true ),
            "should have failed since module reference == directory name != artifactId" );
        // expected failure
        assertTrue( afe.getMessage().contains(
            "Expected text value 'http://www.apache.org/path/to/parent/../child-artifact-id/' but was " +
            "'http://www.apache.org/path/to/parent/child-artifact-id/'" ),
            afe.getMessage() );
    }
    @Test
    public void testWithEmptyUrl()
        throws IOException
    {
        testInheritance( "empty-urls", false );
    }
    // Run the scenario both as a disk build (pomFile set) and as a repo
    // build (pomFile null), since relative-path handling differs.
    public void testInheritance( String baseName )
        throws IOException
    {
        testInheritance( baseName, false );
        testInheritance( baseName, true );
    }
    // Assemble <baseName>-child onto <baseName>-parent and compare the result
    // to <baseName>-expected, ignoring comments and whitespace.
    public void testInheritance( String baseName, boolean fromRepo )
        throws IOException
    {
        Model parent = getModel( baseName + "-parent" );
        Model child = getModel( baseName + "-child" );
        if ( fromRepo )
        {
            // when model is read from repo, a stream is used, then pomFile == null
            // (has consequences in inheritance algorithm since getProjectDirectory() returns null)
            parent.setPomFile( null );
            child.setPomFile( null );
        }
        SimpleProblemCollector problems = new SimpleProblemCollector();
        assembler.assembleModelInheritance( child, parent, null, problems );
        // write baseName + "-actual"
        File actual = new File( "target/test-classes/poms/inheritance/" + baseName
            + ( fromRepo ? "-build" : "-repo" ) + "-actual.xml" );
        writer.write( actual, null, child );
        // check with getPom( baseName + "-expected" )
        File expected = getPom( baseName + "-expected" );
        assertThat( actual, CompareMatcher.isIdenticalTo( expected ).ignoreComments().ignoreWhitespace() );
    }
    @Test
    public void testModulePathNotArtifactId()
        throws IOException
    {
        Model parent = getModel( "module-path-not-artifactId-parent" );
        Model child = getModel( "module-path-not-artifactId-child" );
        SimpleProblemCollector problems = new SimpleProblemCollector();
        assembler.assembleModelInheritance( child, parent, null, problems );
        File actual = new File( "target/test-classes/poms/inheritance/module-path-not-artifactId-actual.xml" );
        writer.write( actual, null, child );
        // check with getPom( "module-path-not-artifactId-effective" )
        File expected = getPom( "module-path-not-artifactId-expected" );
        assertThat( actual, CompareMatcher.isIdenticalTo(expected).ignoreComments().ignoreWhitespace() );
    }
}
|
#!/bin/bash
# Print the first 10 Fibonacci numbers as a comma-separated list:
#   0,1,1,2,3,5,8,13,21,34,
#
# Bug fixes vs. the original:
#  * the first number was printed with `echo 0,` (no -n), adding a stray
#    newline before the rest of the list;
#  * the second Fibonacci number (1) was skipped entirely because the loop
#    advanced the (f1, f2) pair before ever printing f2.
# Initial values
f1=0
f2=1
# Print the first number
echo -n "0,"
# Generate the next 9 numbers
for (( i=0; i<9; i++ ))
do
    echo -n "$f2,"
    f3=$(( f1 + f2 ))
    f1=$f2
    f2=$f3
done
echo
# Knowledge-distillation runs on the 20K-image CIFAR10 subset (HDF5).
REAL_DATA="./CIFAR/CIFAR_20K/cGAN-based_KD/data/CIFAR10_trainset_20000_seed_2020.h5"
# No synthetic data is mixed in for these runs; NFAKE is effectively unbounded.
FAKE_DATA="None"
NFAKE=1e30
### ResNet18 --> vgg11 ;
python3 student.py --real_data $REAL_DATA --fake_data $FAKE_DATA --nfake $NFAKE \
--t-path ./experiments/teacher_ResNet18_seed0/ --s-arch vgg11 --lr 0.05 --weight-decay 5e-4 --gpu-id 0 \
2>&1 | tee output_t_ResNet18_s_vgg11_nfake_${NFAKE}.txt
### ResNet18 --> ShuffleV2 ;
python3 student.py --real_data $REAL_DATA --fake_data $FAKE_DATA --nfake $NFAKE \
--t-path ./experiments/teacher_ResNet18_seed0/ --s-arch ShuffleV2 --lr 0.01 --weight-decay 5e-4 --gpu-id 0 \
2>&1 | tee output_t_ResNet18_s_ShuffleV2_nfake_${NFAKE}.txt
<gh_stars>0
package com.yin.springboot.mybatis.server.service;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import com.yin.springboot.mybatis.domain.UmsMemberTask;
import java.util.List;
import com.yin.springboot.mybatis.mapper.UmsMemberTaskMapper;
import com.yin.springboot.mybatis.server.UmsMemberTaskService;
/**
 * Service layer for UmsMemberTask records. Every method is a thin
 * delegation to the MyBatis mapper; no business logic is applied here.
 */
@Service
public class UmsMemberTaskServiceImpl implements UmsMemberTaskService{
    @Resource
    private UmsMemberTaskMapper umsMemberTaskMapper;
    @Override
    public int deleteByPrimaryKey(Long id) {
        return umsMemberTaskMapper.deleteByPrimaryKey(id);
    }
    @Override
    public int insert(UmsMemberTask record) {
        return umsMemberTaskMapper.insert(record);
    }
    @Override
    public int insertOrUpdate(UmsMemberTask record) {
        return umsMemberTaskMapper.insertOrUpdate(record);
    }
    @Override
    public int insertOrUpdateSelective(UmsMemberTask record) {
        return umsMemberTaskMapper.insertOrUpdateSelective(record);
    }
    @Override
    public int insertSelective(UmsMemberTask record) {
        return umsMemberTaskMapper.insertSelective(record);
    }
    @Override
    public UmsMemberTask selectByPrimaryKey(Long id) {
        return umsMemberTaskMapper.selectByPrimaryKey(id);
    }
    @Override
    public int updateByPrimaryKeySelective(UmsMemberTask record) {
        return umsMemberTaskMapper.updateByPrimaryKeySelective(record);
    }
    @Override
    public int updateByPrimaryKey(UmsMemberTask record) {
        return umsMemberTaskMapper.updateByPrimaryKey(record);
    }
    @Override
    public int updateBatch(List<UmsMemberTask> list) {
        return umsMemberTaskMapper.updateBatch(list);
    }
    @Override
    public int batchInsert(List<UmsMemberTask> list) {
        return umsMemberTaskMapper.batchInsert(list);
    }
}
|
#!/bin/sh
# Regenerate the Caddy vhost configuration for every site served by this
# container's engine, using templates fetched from the Daspanel API, then
# enable/disable each site's symlink and signal Caddy to reload.
content=$(wget -O- --header=Content-Type:application/json --header="Authorization: $DASPANEL_SYS_UUID" "$DASPANEL_SYS_APISERVER/sites/httpconf/$DASPANEL_SYS_HOSTNAME")
echo "[DASPANEL-$DASPANEL_CONTAINER_TYPE] INFO Processing site templates for engine: $ENGINE"
# Remove all previously generated configs; everything is rebuilt below.
# -f keeps the script quiet when the directories are already empty.
rm -f /etc/caddy/sites-enabled/*
rm -f /etc/caddy/sites-available/*
# Generate new configs
echo "$content" | /opt/daspanel/bin/jq -rc '.[]' | while IFS='' read -r site;do
    siteuuid=$(echo "$site" | /opt/daspanel/bin/jq -r ._cuid)
    siteenabled=$(echo "$site" | /opt/daspanel/bin/jq -r .enabled)
    configs=$(echo "$site" | /opt/daspanel/bin/jq -r .configs )
    echo "$configs" | /opt/daspanel/bin/jq -rc '.[]' | while IFS='' read -r sitecfg;do
        siteengine=$(echo "$sitecfg" | /opt/daspanel/bin/jq -r .engine)
        # POSIX test(1) string equality is '='; '==' is a bashism and this
        # script runs under /bin/sh.
        if [ "$siteengine" = "$ENGINE" ]; then
            sitetype=$(echo "$sitecfg" | /opt/daspanel/bin/jq -r .sitetype)
            # Prefer a tenant-specific template; fall back to the stock one.
            template=""
            template1="/opt/daspanel/data/$DASPANEL_SYS_UUID/conf-templates/engine-$ENGINE/caddy/$sitetype-$siteengine.template"
            template2="/opt/daspanel/conf-templates/engine-$ENGINE/caddy/$sitetype-$siteengine.template"
            if [ -f "$template1" ]; then
                template=$template1
            else
                if [ -f "$template2" ]; then
                    template=$template2
                else
                    template=""
                    echo "[DASPANEL-$DASPANEL_CONTAINER_TYPE] FAIL Site $siteuuid missing templates: $template1 OR $template2"
                fi
            fi
            if [ ! -z "$template" ]; then
                # SITEINFO/SITECFG are consumed by the gomplate template.
                export SITEINFO=$site
                export SITECFG=$sitecfg
                echo "[DASPANEL-$DASPANEL_CONTAINER_TYPE] INFO Processing site $siteuuid template: $template"
                /opt/daspanel/bin/gomplate -d cfg=$DASPANEL_SYS_APISERVER/tenants/$DASPANEL_SYS_UUID \
                    -H "cfg=Authorization: $DASPANEL_SYS_APIKEY" \
                    < "$template" \
                    >> "/etc/caddy/sites-available/$siteuuid.conf"
            fi
        else
            # NOTE(review): "$0" prints this script's name, not the site —
            # probably meant $siteuuid; kept to preserve the existing log line.
            echo "[DASPANEL-$DASPANEL_CONTAINER_TYPE] Ignoring sites of engine $siteengine: $0"
        fi
    done
    # Enable/disable the site by (re)creating or removing its symlink.
    if [ "$siteenabled" = true ] ; then
        if [ -f "/etc/caddy/sites-enabled/$siteuuid.conf" ]; then
            rm /etc/caddy/sites-enabled/$siteuuid.conf
        fi
        if [ -f "/etc/caddy/sites-available/$siteuuid.conf" ] ; then
            ln -sf /etc/caddy/sites-available/$siteuuid.conf /etc/caddy/sites-enabled/$siteuuid.conf
        fi
    else
        if [ -f "/etc/caddy/sites-enabled/$siteuuid.conf" ]; then
            rm /etc/caddy/sites-enabled/$siteuuid.conf
        fi
    fi
done
# Reload Caddy via s6 (signal -1) if it is supervised here.
if [ -d "/var/run/s6/services/caddy" ]; then
    /bin/s6-svc -1 /var/run/s6/services/caddy
fi
|
<gh_stars>10-100
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2020 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
from warnings import warn

available = []
""" List of parser submodules available. Strings should match module names. """

try:
    # Prefer the fast ANTLR-based parser when its native dependencies built.
    from fasm.parser.antlr import \
        parse_fasm_filename, parse_fasm_string, implementation
    available.append('antlr')
except ImportError as e:
    warn(
        """Unable to import fast Antlr4 parser implementation.
  ImportError: {}
Falling back to the much slower pure Python textX based parser
implementation.
Getting the faster antlr parser can normally be done by installing the
required dependencies and then reinstalling the fasm package with:
  pip uninstall fasm
  pip install -v fasm
""".format(e), RuntimeWarning)
    # Fix: the uninstall hint previously omitted the package name
    # ("pip uninstall" alone is an error).
    from fasm.parser.textx import \
        parse_fasm_filename, parse_fasm_string, implementation
    # The textx parser is available as a fallback.
    available.append('textx')
|
package edu.wpi.first.gradlerio.wpi.cpp;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Iterator;
import org.gradle.api.Action;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.plugins.ExtensionContainer;
import org.gradle.language.base.internal.ProjectLayout;
import org.gradle.language.nativeplatform.tasks.AbstractNativeSourceCompileTask;
import org.gradle.model.ModelMap;
import org.gradle.model.Mutate;
import org.gradle.model.RuleSource;
import org.gradle.model.Validate;
import org.gradle.nativeplatform.BuildTypeContainer;
import org.gradle.nativeplatform.NativeBinarySpec;
import org.gradle.nativeplatform.TargetedNativeComponent;
import org.gradle.nativeplatform.test.googletest.GoogleTestTestSuiteBinarySpec;
import org.gradle.platform.base.BinaryContainer;
import org.gradle.platform.base.BinarySpec;
import org.gradle.platform.base.ComponentSpec;
import org.gradle.platform.base.ComponentSpecContainer;
import org.gradle.platform.base.internal.BinarySpecInternal;
import edu.wpi.first.nativeutils.NativeUtilsExtension;
import edu.wpi.first.toolchain.NativePlatforms;
/**
 * Gradle {@code RuleSource} model rules for WPILib native (C++) builds:
 * registers the debug/release build types, applies platform arguments to
 * native binaries, disables GoogleTest suites on non-desktop platforms,
 * and prints captured compiler warnings after each compile task.
 */
public class WPINativeCompileRules extends RuleSource {
    @Mutate
    public void addBuildTypes(BuildTypeContainer bts) {
        // maybeCreate is a no-op when the build type already exists.
        bts.maybeCreate("debug");
        bts.maybeCreate("release");
    }
    @Mutate
    public void addBinaryFlags(BinaryContainer binaries, ExtensionContainer extensions) {
        // Apply per-platform compiler/linker arguments from NativeUtils to
        // every native binary.
        NativeUtilsExtension ntExt = extensions.getByType(NativeUtilsExtension.class);
        binaries.withType(NativeBinarySpec.class, bin -> {
            ntExt.usePlatformArguments(bin);
        });
    }
    @Mutate
    public void addBinaryFlags(BinaryContainer binaries) {
        binaries.withType(GoogleTestTestSuiteBinarySpec.class, bin -> {
            // Test suites only run on the desktop platform; mark the rest
            // unbuildable so they are skipped.
            if (!bin.getTargetPlatform().getName().equals(NativePlatforms.desktop)) {
                ((BinarySpecInternal)bin).setBuildable(false);
            }
            bin.getCppCompiler().define("RUNNING_FRC_TESTS");
            bin.getcCompiler().define("RUNNING_FRC_TESTS");
        });
    }
    @Validate
    void setupCompilerWarningPrints(ModelMap<Task> tasks, ProjectLayout layout, ComponentSpecContainer components) {
        if (components == null)
            return;
        Project project = (Project) layout.getProjectIdentifier();
        // After every native source compile task, replay any warnings from
        // the compiler's captured output file to the console.
        for (ComponentSpec c : components) {
            if (c instanceof TargetedNativeComponent) {
                for (BinarySpec bin : ((TargetedNativeComponent) c).getBinaries()) {
                    bin.getTasks().withType(AbstractNativeSourceCompileTask.class, t -> {
                        t.doLast(new Action<Task>() {
                            @Override
                            public void execute(Task arg0) {
                                printWarningsForBinTask(t.getName().toString(), project);
                            }
                        });
                    });
                }
            }
        }
    }
    // From
    // https://github.com/wpilibsuite/native-utils/blob/a8ea595670716c7b898878a37e36c2b52e8e3f42/src/main/groovy/edu/wpi/first/nativeutils/rules/BuildConfigRules.groovy#L450
    /**
     * Parses build/tmp/&lt;taskName&gt;/output.txt and prints non-empty lines
     * (warnings) grouped under the source file that produced them.
     * Silently returns when the output file does not exist.
     */
    private static void printWarningsForBinTask(String taskName, Project project) {
        File file = new File(project.getBuildDir(), "tmp/" + taskName + "/output.txt");
        if (!file.exists())
            return;
        String currentFile = "";
        boolean hasFirstLine = false;
        boolean hasPrintedFileName = false;
        // Lazily stream the file line by line; IOException is rethrown
        // unchecked because this runs inside a Gradle task action.
        Iterable<String> fileIterator = new Iterable<String>() {
            @Override
            public Iterator<String> iterator() {
                try {
                    return Files.lines(file.toPath()).iterator();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
        for (String line : fileIterator) {
            if (!hasFirstLine) {
                // Skip the header line of the log.
                hasFirstLine = true;
            } else if (line.startsWith("compiling ")) {
                // "compiling <file> ... successful." starts a new file section.
                // NOTE(review): assumes "successful." is present on the line;
                // indexOf would return -1 and substring would throw otherwise.
                currentFile = line.substring(10, line.indexOf("successful."));
                hasPrintedFileName = false;
            } else if (line.contains("Finished") && line.contains("see full log")) {
                // No op
            } else if (line.trim().equals(currentFile.trim())) {
                // No op
            } else if (!line.isEmpty()) {
                if (!hasPrintedFileName) {
                    hasPrintedFileName = true;
                    System.out.println("Warnings in file " + currentFile + "....");
                }
                System.out.println(line);
            }
        }
    }
}
|
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"fmt"
"strings"
"sync"
"time"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/kubernetes/pkg/api/resource"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/client/cache"
"k8s.io/kubernetes/pkg/util/system"
"k8s.io/kubernetes/test/e2e/framework"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// E2E coverage for scheduler accounting of opaque integer resources (OIR):
// pods that ignore the resource, fit within it, exceed it, and split
// requests across multiple containers. The target node is primed with
// exactly 5 units of the resource before every spec.
var _ = framework.KubeDescribe("Opaque resources [Feature:OpaqueResources]", func() {
	f := framework.NewDefaultFramework("opaque-resource")
	opaqueResName := v1.OpaqueIntResourceName("foo")
	var node *v1.Node
	BeforeEach(func() {
		if node == nil {
			// Priming invocation; select the first non-master node.
			nodes, err := f.ClientSet.Core().Nodes().List(metav1.ListOptions{})
			Expect(err).NotTo(HaveOccurred())
			for _, n := range nodes.Items {
				if !system.IsMasterNode(n.Name) {
					// NOTE(review): &n aliases the loop variable; correct here
					// only because of the immediate break.
					node = &n
					break
				}
			}
			if node == nil {
				Fail("unable to select a non-master node")
			}
		}
		// Reset to a known state: remove any stale advertisement, then
		// re-add 5 units of the opaque resource on the chosen node.
		removeOpaqueResource(f, node.Name, opaqueResName)
		addOpaqueResource(f, node.Name, opaqueResName)
	})
	It("should not break pods that do not consume opaque integer resources.", func() {
		By("Creating a vanilla pod")
		requests := v1.ResourceList{v1.ResourceCPU: resource.MustParse("0.1")}
		limits := v1.ResourceList{v1.ResourceCPU: resource.MustParse("0.2")}
		pod := newTestPod(f, "without-oir", requests, limits)
		By("Observing an event that indicates the pod was scheduled")
		action := func() error {
			_, err := f.ClientSet.Core().Pods(f.Namespace.Name).Create(pod)
			return err
		}
		predicate := func(e *v1.Event) bool {
			return e.Type == v1.EventTypeNormal &&
				e.Reason == "Scheduled" &&
				// Here we don't check for the bound node name since it can land on
				// any one (this pod doesn't require any of the opaque resource.)
				strings.Contains(e.Message, fmt.Sprintf("Successfully assigned %v", pod.Name))
		}
		success, err := observeEventAfterAction(f, predicate, action)
		Expect(err).NotTo(HaveOccurred())
		Expect(success).To(Equal(true))
	})
	It("should schedule pods that do consume opaque integer resources.", func() {
		By("Creating a pod that requires less of the opaque resource than is allocatable on a node.")
		requests := v1.ResourceList{
			v1.ResourceCPU: resource.MustParse("0.1"),
			opaqueResName:  resource.MustParse("1"),
		}
		limits := v1.ResourceList{
			v1.ResourceCPU: resource.MustParse("0.2"),
			opaqueResName:  resource.MustParse("2"),
		}
		pod := newTestPod(f, "min-oir", requests, limits)
		By("Observing an event that indicates the pod was scheduled")
		action := func() error {
			_, err := f.ClientSet.Core().Pods(f.Namespace.Name).Create(pod)
			return err
		}
		predicate := func(e *v1.Event) bool {
			// Must land on the primed node — it is the only one advertising
			// the opaque resource.
			return e.Type == v1.EventTypeNormal &&
				e.Reason == "Scheduled" &&
				strings.Contains(e.Message, fmt.Sprintf("Successfully assigned %v to %v", pod.Name, node.Name))
		}
		success, err := observeEventAfterAction(f, predicate, action)
		Expect(err).NotTo(HaveOccurred())
		Expect(success).To(Equal(true))
	})
	It("should not schedule pods that exceed the available amount of opaque integer resource.", func() {
		By("Creating a pod that requires more of the opaque resource than is allocatable on any node")
		// 6 requested > 5 advertised, so scheduling must fail.
		requests := v1.ResourceList{opaqueResName: resource.MustParse("6")}
		limits := v1.ResourceList{}
		By("Observing an event that indicates the pod was not scheduled")
		action := func() error {
			_, err := f.ClientSet.Core().Pods(f.Namespace.Name).Create(newTestPod(f, "over-max-oir", requests, limits))
			return err
		}
		predicate := func(e *v1.Event) bool {
			return e.Type == "Warning" &&
				e.Reason == "FailedScheduling" &&
				strings.Contains(e.Message, "failed to fit in any node")
		}
		success, err := observeEventAfterAction(f, predicate, action)
		Expect(err).NotTo(HaveOccurred())
		Expect(success).To(Equal(true))
	})
	It("should account opaque integer resources in pods with multiple containers.", func() {
		By("Creating a pod with two containers that together require less of the opaque resource than is allocatable on a node")
		requests := v1.ResourceList{opaqueResName: resource.MustParse("1")}
		limits := v1.ResourceList{}
		image := framework.GetPauseImageName(f.ClientSet)
		// This pod consumes 2 "foo" resources.
		pod := &v1.Pod{
			ObjectMeta: metav1.ObjectMeta{
				Name: "mult-container-oir",
			},
			Spec: v1.PodSpec{
				Containers: []v1.Container{
					{
						Name:  "pause",
						Image: image,
						Resources: v1.ResourceRequirements{
							Requests: requests,
							Limits:   limits,
						},
					},
					{
						Name:  "pause-sidecar",
						Image: image,
						Resources: v1.ResourceRequirements{
							Requests: requests,
							Limits:   limits,
						},
					},
				},
			},
		}
		By("Observing an event that indicates the pod was scheduled")
		action := func() error {
			_, err := f.ClientSet.Core().Pods(f.Namespace.Name).Create(pod)
			return err
		}
		predicate := func(e *v1.Event) bool {
			return e.Type == v1.EventTypeNormal &&
				e.Reason == "Scheduled" &&
				strings.Contains(e.Message, fmt.Sprintf("Successfully assigned %v to %v", pod.Name, node.Name))
		}
		success, err := observeEventAfterAction(f, predicate, action)
		Expect(err).NotTo(HaveOccurred())
		Expect(success).To(Equal(true))
		By("Creating a pod with two containers that together require more of the opaque resource than is allocatable on any node")
		requests = v1.ResourceList{opaqueResName: resource.MustParse("3")}
		limits = v1.ResourceList{}
		// This pod consumes 6 "foo" resources.
		pod = &v1.Pod{
			ObjectMeta: metav1.ObjectMeta{
				Name: "mult-container-over-max-oir",
			},
			Spec: v1.PodSpec{
				Containers: []v1.Container{
					{
						Name:  "pause",
						Image: image,
						Resources: v1.ResourceRequirements{
							Requests: requests,
							Limits:   limits,
						},
					},
					{
						Name:  "pause-sidecar",
						Image: image,
						Resources: v1.ResourceRequirements{
							Requests: requests,
							Limits:   limits,
						},
					},
				},
			},
		}
		By("Observing an event that indicates the pod was not scheduled")
		action = func() error {
			_, err = f.ClientSet.Core().Pods(f.Namespace.Name).Create(pod)
			return err
		}
		predicate = func(e *v1.Event) bool {
			return e.Type == "Warning" &&
				e.Reason == "FailedScheduling" &&
				strings.Contains(e.Message, "failed to fit in any node")
		}
		success, err = observeEventAfterAction(f, predicate, action)
		Expect(err).NotTo(HaveOccurred())
		Expect(success).To(Equal(true))
	})
})
// addOpaqueResource advertises 5 units of the opaque resource on the node
// via a JSON patch to /status/capacity, then blocks until a node update
// shows both capacity and allocatable at 5 (MilliValue 5000).
func addOpaqueResource(f *framework.Framework, nodeName string, opaqueResName v1.ResourceName) {
	action := func() error {
		patch := []byte(fmt.Sprintf(`[{"op": "add", "path": "/status/capacity/%s", "value": "5"}]`, escapeForJSONPatch(opaqueResName)))
		return f.ClientSet.Core().RESTClient().Patch(types.JSONPatchType).Resource("nodes").Name(nodeName).SubResource("status").Body(patch).Do().Error()
	}
	predicate := func(n *v1.Node) bool {
		capacity, foundCap := n.Status.Capacity[opaqueResName]
		allocatable, foundAlloc := n.Status.Allocatable[opaqueResName]
		return foundCap && capacity.MilliValue() == int64(5000) &&
			foundAlloc && allocatable.MilliValue() == int64(5000)
	}
	success, err := observeNodeUpdateAfterAction(f, nodeName, predicate, action)
	Expect(err).NotTo(HaveOccurred())
	Expect(success).To(Equal(true))
}
// removeOpaqueResource deletes the opaque resource from the node's
// /status/capacity (errors are deliberately ignored — the resource may not
// be advertised yet), then waits until neither capacity nor allocatable
// report it.
func removeOpaqueResource(f *framework.Framework, nodeName string, opaqueResName v1.ResourceName) {
	action := func() error {
		patch := []byte(fmt.Sprintf(`[{"op": "remove", "path": "/status/capacity/%s"}]`, escapeForJSONPatch(opaqueResName)))
		f.ClientSet.Core().RESTClient().Patch(types.JSONPatchType).Resource("nodes").Name(nodeName).SubResource("status").Body(patch).Do()
		return nil // Ignore error -- the opaque resource may not exist.
	}
	predicate := func(n *v1.Node) bool {
		_, foundCap := n.Status.Capacity[opaqueResName]
		_, foundAlloc := n.Status.Allocatable[opaqueResName]
		return !foundCap && !foundAlloc
	}
	success, err := observeNodeUpdateAfterAction(f, nodeName, predicate, action)
	Expect(err).NotTo(HaveOccurred())
	Expect(success).To(Equal(true))
}
// escapeForJSONPatch makes a resource name usable as a JSON Pointer token:
// RFC 6901 §3 requires "/" inside a token to be written as "~1".
func escapeForJSONPatch(resName v1.ResourceName) string {
	const escapedSlash = "~1" // JSON Pointer escape for "/"
	return strings.Replace(string(resName), "/", escapedSlash, -1)
}
// Returns true if a node update matching the predicate was emitted from the
// system after performing the supplied action.
func observeNodeUpdateAfterAction(f *framework.Framework, nodeName string, nodePredicate func(*v1.Node) bool, action func() error) (bool, error) {
	// NOTE(review): observedMatchingNode is written by the informer
	// goroutine and read by the wait.Poll closure without synchronization —
	// consider an atomic or channel if this flakes under the race detector.
	observedMatchingNode := false
	nodeSelector := fields.OneTermEqualSelector("metadata.name", nodeName)
	// Closed exactly once by the watch func so the action only runs after
	// the informer is really watching (no missed updates).
	informerStartedChan := make(chan struct{})
	var informerStartedGuard sync.Once
	_, controller := cache.NewInformer(
		&cache.ListWatch{
			ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
				options.FieldSelector = nodeSelector.String()
				ls, err := f.ClientSet.Core().Nodes().List(options)
				return ls, err
			},
			WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
				options.FieldSelector = nodeSelector.String()
				w, err := f.ClientSet.Core().Nodes().Watch(options)
				// Signal parent goroutine that watching has begun.
				informerStartedGuard.Do(func() { close(informerStartedChan) })
				return w, err
			},
		},
		&v1.Node{},
		0,
		cache.ResourceEventHandlerFuncs{
			UpdateFunc: func(oldObj, newObj interface{}) {
				n, ok := newObj.(*v1.Node)
				Expect(ok).To(Equal(true))
				if nodePredicate(n) {
					observedMatchingNode = true
				}
			},
		},
	)
	// Start the informer and block this goroutine waiting for the started signal.
	informerStopChan := make(chan struct{})
	defer func() { close(informerStopChan) }()
	go controller.Run(informerStopChan)
	<-informerStartedChan
	// Invoke the action function.
	err := action()
	if err != nil {
		return false, err
	}
	// Poll whether the informer has found a matching node update with a timeout.
	// Wait up 2 minutes polling every second.
	timeout := 2 * time.Minute
	interval := 1 * time.Second
	err = wait.Poll(interval, timeout, func() (bool, error) {
		return observedMatchingNode, nil
	})
	return err == nil, err
}
// Returns true if an event matching the predicate was emitted from the system
// after performing the supplied action.
func observeEventAfterAction(f *framework.Framework, eventPredicate func(*v1.Event) bool, action func() error) (bool, error) {
	// NOTE(review): unlike observeNodeUpdateAfterAction there is no
	// "informer started" handshake here, so an event fired immediately
	// after action() could theoretically be missed — confirm intentional.
	observedMatchingEvent := false
	// Create an informer to list/watch events from the test framework namespace.
	_, controller := cache.NewInformer(
		&cache.ListWatch{
			ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
				ls, err := f.ClientSet.Core().Events(f.Namespace.Name).List(options)
				return ls, err
			},
			WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
				w, err := f.ClientSet.Core().Events(f.Namespace.Name).Watch(options)
				return w, err
			},
		},
		&v1.Event{},
		0,
		cache.ResourceEventHandlerFuncs{
			AddFunc: func(obj interface{}) {
				e, ok := obj.(*v1.Event)
				By(fmt.Sprintf("Considering event: \nType = [%s], Reason = [%s], Message = [%s]", e.Type, e.Reason, e.Message))
				Expect(ok).To(Equal(true))
				if ok && eventPredicate(e) {
					observedMatchingEvent = true
				}
			},
		},
	)
	informerStopChan := make(chan struct{})
	defer func() { close(informerStopChan) }()
	go controller.Run(informerStopChan)
	// Invoke the action function.
	err := action()
	if err != nil {
		return false, err
	}
	// Poll whether the informer has found a matching event with a timeout.
	// Wait up 2 minutes polling every second.
	timeout := 2 * time.Minute
	interval := 1 * time.Second
	err = wait.Poll(interval, timeout, func() (bool, error) {
		return observedMatchingEvent, nil
	})
	return err == nil, err
}
|
package gitlab
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
)
const (
	// Gitlab v4 REST endpoints; gitlabEnvironmentURL takes the project ID.
	gitlabProjectURL     = "https://gitlab.com/api/v4/projects"
	gitlabEnvironmentURL = "https://gitlab.com/api/v4/projects/%v/variables"
)

// GitlabHTTP struct to leverage Gitlab
type GitlabHTTP struct {
	// client http.Client
	prompt Prompt // used to ask the user for the Gitlab group/namespace id
}

//TODO: Factor out ID to be used by multiple git repos
// GitRepository struct containing information about git repository
type GitRepository struct {
	Name string `json:"name"`
	URL  string `json:"http_url_to_repo"`
	// NOTE(review): ",Number" is not a standard encoding/json tag option
	// (only ",string"/",omitempty" are); json.Number already accepts the
	// numeric id — confirm the option can be dropped.
	ID json.Number `json:"id,Number"`
}
// AddEnvironmentVariables adds the Heroku deploy token as a protected
// HEROKU_API_KEY CI/CD variable on the given Gitlab project.
// Returns nil on a 2xx response, "Unauthorized" on 401, and "Bad Request"
// for any other status.
func (rest GitlabHTTP) AddEnvironmentVariables(deployToken string, projectID string, gitToken string) error {
	// Build the body with json.Marshal instead of string concatenation so a
	// token containing quotes or backslashes cannot produce invalid JSON.
	environmentRequest, err := json.Marshal(map[string]interface{}{
		"key":       "HEROKU_API_KEY",
		"value":     deployToken,
		"protected": true,
	})
	if err != nil {
		return err
	}
	url := fmt.Sprintf(gitlabEnvironmentURL, projectID)
	req, err := createPostRequest(gitToken, url, environmentRequest)
	if err != nil {
		println("Error adding environment variables")
		return err
	}
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if isSuccessStatusCode(resp.StatusCode) {
		return nil
	}
	if isUnauthorized(resp.StatusCode) {
		fmt.Println("Received unauthorized from Gitlab")
		return errors.New("Unauthorized")
	}
	println("Failed to add environment variables")
	println(resp.StatusCode)
	return errors.New("Bad Request")
}
// PostGitRepository creates a private Gitlab repository named
// repositoryName under the group/namespace the user is prompted for.
// Returns the parsed repository on success; "Unauthorized" on 401; a
// generic error (with the response body echoed) otherwise.
func (rest GitlabHTTP) PostGitRepository(repositoryName string, gitToken string) (GitRepository, error) {
	group, err := rest.prompt.forGroupId()
	if err != nil {
		println("Error retrieving Gitlab Group name")
		return GitRepository{}, err
	}
	var projectRequest = createProjectRequest(repositoryName, group)
	req, err := createPostRequest(gitToken, gitlabProjectURL, projectRequest)
	if err != nil {
		// Previously this error was dropped, so a failed request build
		// reached client.Do with a nil *http.Request and panicked.
		println("Error building Gitlab request; aborting...")
		return GitRepository{}, err
	}
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		println("Error creating Gitlab Repository; aborting...")
		return GitRepository{}, err
	}
	defer resp.Body.Close()
	if isSuccessStatusCode(resp.StatusCode) {
		response := GitRepository{}
		// Surface malformed/unexpected response bodies instead of silently
		// returning a zero-valued GitRepository.
		if err := parseGitlabResponse(resp, &response); err != nil {
			return GitRepository{}, err
		}
		return response, nil
	}
	if isUnauthorized(resp.StatusCode) {
		fmt.Println("Received unauthorized from Gitlab")
		return GitRepository{}, errors.New("Unauthorized")
	}
	println("Failed to create gitlab repository")
	println(resp.StatusCode)
	bodyBytes, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err)
	}
	bodyString := string(bodyBytes)
	fmt.Println(bodyString)
	return GitRepository{}, errors.New("Error creating gitlab repo")
}
// createProjectRequest renders the JSON body for a private-project
// creation call, placing the repo under the given namespace id.
func createProjectRequest(respositoryName string, group string) []byte {
	body := fmt.Sprintf(`{"path":"%s","visibility":"private", "namespace_id": %s}`, respositoryName, group)
	return []byte(body)
}
// createPostRequest builds an authenticated JSON POST for the Gitlab API.
func createPostRequest(gitToken string, url string, request []byte) (*http.Request, error) {
	req, err := http.NewRequest("POST", url, bytes.NewBuffer(request))
	if err != nil {
		// Previously headers were set before checking err; a bad URL made
		// req nil and the Header.Set calls panicked.
		return nil, err
	}
	req.Header.Set("PRIVATE-TOKEN", gitToken)
	req.Header.Set("Content-Type", "application/json")
	return req, nil
}
// isSuccessStatusCode reports whether the Gitlab API call succeeded
// (200 OK or 201 Created are the only codes it returns on success).
func isSuccessStatusCode(statusCode int) bool {
	switch statusCode {
	case http.StatusOK, http.StatusCreated:
		return true
	default:
		return false
	}
}
// isUnauthorized reports whether the response was 401 Unauthorized.
func isUnauthorized(statusCode int) bool {
	return statusCode == http.StatusUnauthorized
}
func parseGitlabResponse(response *http.Response, target interface{}) error {
bodyBytes, err := ioutil.ReadAll(response.Body)
if err != nil {
println(err)
return err
}
return json.Unmarshal(bodyBytes, target)
}
// NewGitlabHTTP constructs a GitlabHTTP client that uses the given Prompt
// to ask the user for the target group/namespace id.
func NewGitlabHTTP(prompt Prompt) GitlabHTTP {
	return GitlabHTTP{prompt}
}
|
<gh_stars>1-10
module GData
  # Lazily-fetched, Enumerable list of a user's Google Spreadsheets,
  # parsed from the GData Atom feed.
  class Spreadsheets
    include Enumerable

    SPREADSHEETS_LIST_URL = 'https://spreadsheets.google.com/feeds/spreadsheets/private/full'.freeze

    # A GData::Client::Spreadsheets service, ready for use
    attr_accessor :service

    # The URL we list spreadsheets from, defaults to SPREADSHEETS_LIST_URL
    attr_accessor :url

    def initialize options={}
      options.each do |key, value|
        self.send("#{key}=", value)
      end
      @url ||= SPREADSHEETS_LIST_URL
    end

    # NOTE(review): redundant — the three methods are defined explicitly
    # below and override the delegation; kept for compatibility.
    delegate :length, :[], :each, :to => :spreadsheets

    def length
      spreadsheets.length
    end

    def [] key
      spreadsheets[key]
    end

    def each(&b)
      spreadsheets.each(&b)
    end

    # Drop the cached feed and parsed list so the next access refetches.
    def reload!
      @feed = @spreadsheets = nil
      self
    end

    protected

    def feed
      # Fix: fetch from the configurable #url. Previously this hard-coded
      # SPREADSHEETS_LIST_URL, silently ignoring a :url passed to #initialize.
      @feed ||= Nokogiri::XML.parse(service.get(url).body)
    end

    def spreadsheets
      @spreadsheets ||= feed.xpath('/xmlns:feed/xmlns:entry').collect do |entry|
        GData::Spreadsheets::Spreadsheet.new :service => service,
          :url => entry.at_xpath('xmlns:content')[:src],
          :author_name => entry.at_xpath('xmlns:author/xmlns:name').text,
          :author_email => entry.at_xpath('xmlns:author/xmlns:email').text
      end
    end
  end
end
package cmd
import (
"fmt"
"io"
"os"
"github.com/dmolesUC3/cos/internal/keys"
"github.com/dmolesUC3/cos/internal/logging"
)
// keysFlags holds the command-line options for the keys subcommand,
// embedding the shared cos connection flags (region/endpoint/log level).
type keysFlags struct {
	CosFlags
	// TODO: more output formats other than --raw and quoted-Go-literal, e.g. --ascii
	Raw bool // emit keys raw instead of quoted
	OkFile string // destination file for keys that pass (nil writer if empty)
	BadFile string // destination file for failing keys (stdout if empty)
	ListName string // named built-in key list (used when KeyFile is empty)
	KeyFile string // file of keys; takes precedence over ListName
	Sample int // when > 0, test only a sample of this many keys
}
// Pretty renders the flag values as an indented, human-readable list
// (leading tabs in the template are converted to spaces by
// logging.Untabify).
// NOTE(review): the "listFile" label reports the KeyFile field — confirm
// the corresponding flag is actually named list-file.
func (f *keysFlags) Pretty() string {
	format := `
		raw:       %v
		okFile:    %v
		badFile:   %v
		listName:  %v
		listFile:  %v
		sample:    %d
		region:    %#v
		endpoint:  %#v
		log level: %v
	`
	format = logging.Untabify(format, "  ")
	// TODO: clean up order of flags in other commands
	return fmt.Sprintf(format,
		f.Raw,
		f.OkFile,
		f.BadFile,
		f.ListName,
		f.KeyFile,
		f.Sample,
		f.Region,
		f.Endpoint,
		f.LogLevel(),
	)
}
// KeyList resolves the key list to test: an explicit key file wins over a
// named built-in list; when --sample is positive the list is wrapped in a
// sampling view of that size.
func (f *keysFlags) KeyList() (keys.KeyList, error) {
	var keyList keys.KeyList
	var err error
	if f.KeyFile != "" {
		keyList, err = keys.KeyListForFile(f.KeyFile)
	} else {
		keyList, err = keys.KeyListForName(f.ListName)
	}
	if err != nil || f.Sample <= 0 {
		return keyList, err
	}
	return keys.SamplingKeyList(keyList, f.Sample)
}
// Outputs opens the destinations for passing ("ok") and failing ("bad")
// keys. When OkFile is empty, okOut is returned nil — callers must check.
// When BadFile is empty, bad keys go to stdout. If opening BadFile fails
// after OkFile was opened, the ok file is closed before returning.
func (f *keysFlags) Outputs() (okOut io.Writer, badOut io.Writer, err error) {
	if f.OkFile != "" {
		okOut, err = os.Create(f.OkFile)
		if err != nil {
			return nil, nil, err
		}
	}
	if f.BadFile == "" {
		badOut = os.Stdout
	} else {
		badOut, err = os.Create(f.BadFile)
		if err != nil {
			// if we opened the okFile we now need to close it
			if okOutC, ok := okOut.(io.WriteCloser); ok {
				//noinspection GoUnhandledErrorResult
				defer okOutC.Close()
			}
			return nil, nil, err
		}
	}
	return
}
|
#! /bin/sh
# Deploy ./target/shields to this repository's gh-pages branch from CI.
# Requires GITHUB_ACTOR, GITHUB_TOKEN and GITHUB_REPOSITORY in the env.
die() {
    # \033[31;1m = bold red. The original "\e[31:1m" used a colon, which is
    # not a valid SGR separator, and \e is not defined by POSIX printf.
    printf "\033[31;1mError: %s\033[0m\n" "$1" >&2
    exit 1
}
if [ -z "$GITHUB_ACTOR" ]
then
    die "the GITHUB_ACTOR environment variable is not set"
fi
if [ -z "$GITHUB_TOKEN" ]
then
    die "the GITHUB_TOKEN environment variable is not set"
fi
if [ -z "$GITHUB_REPOSITORY" ]
then
    die "the GITHUB_REPOSITORY environment variable is not set"
fi
(
    cd "$(git rev-parse --show-toplevel)/target/shields" || die "cannot find project root!"
    repo="https://${GITHUB_ACTOR}:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git"
    tmp_dir=$(mktemp -d -t cursive-multiplex-deploy-XXXXXXXX)
    git config --global user.email "runner@ci"
    git config --global user.name "Github CI Runner"
    try=0
    # Retry the clone/commit/push cycle (up to 5 extra attempts) in case
    # gh-pages moves under us while we work.
    while :; do
        # When the gh-pages branch does not exist yet, start a fresh repo on it.
        if ! git clone --branch gh-pages "$repo" "$tmp_dir"
        then
            (
                cd "$tmp_dir" || die "failed to enter temporary directory"
                git init
                git remote add origin "$repo"
                git checkout -b gh-pages
            )
        fi
        cp -ar ./* "$tmp_dir"
        (
            cd "$tmp_dir" || die "failed to enter temporary directory"
            git add -A
            git commit -m "Github CI badge deployment"
            git push origin gh-pages:gh-pages
        )
        result=$?
        if [ "$result" -eq 0 ] || [ "$try" -ge 5 ]
        then
            break
        fi
        try=$((try + 1))
    done
    rm -rf "$tmp_dir"
)
|
<gh_stars>100-1000
//
// Copyright (c) 2009, <NAME> and <NAME>
// Licensed under the Academic Free License version 3.0
//
// History:
//   3 Dec 09  <NAME>  Creation
//
// Static initialization of the fan.sys runtime constants (default values
// and numeric limits for the Fantom-to-JavaScript runtime).
//
// Pod
//
//fan.sys.Pod.$sysPod = fan.sys.Pod.find("sys");
//
// Bool
//
fan.sys.Bool.m_defVal = false;
//
// Int
//
// 2^53: the largest magnitude at which JavaScript numbers stay exact
// integers — presumably chosen as Fantom's Int range in JS; TODO confirm
// against the Fantom runtime spec.
fan.sys.Int.m_maxVal = Math.pow(2, 53)
fan.sys.Int.m_minVal = -Math.pow(2, 53)
fan.sys.Int.m_defVal = 0;
fan.sys.Int.Chunk  = 4096;
// Float
fan.sys.Float.m_posInf = fan.sys.Float.make(Number.POSITIVE_INFINITY);
fan.sys.Float.m_negInf = fan.sys.Float.make(Number.NEGATIVE_INFINITY);
fan.sys.Float.m_nan    = fan.sys.Float.make(Number.NaN);
fan.sys.Float.m_e      = fan.sys.Float.make(Math.E);
fan.sys.Float.m_pi     = fan.sys.Float.make(Math.PI);
fan.sys.Float.m_defVal = fan.sys.Float.make(0);
//
// Num
//
/*
fan.sys.NumPattern.cache("00");    fan.sys.NumPattern.cache("000");       fan.sys.NumPattern.cache("0000");
fan.sys.NumPattern.cache("0.0");   fan.sys.NumPattern.cache("0.00");      fan.sys.NumPattern.cache("0.000");
fan.sys.NumPattern.cache("0.#");   fan.sys.NumPattern.cache("#,###.0");   fan.sys.NumPattern.cache("#,###.#");
fan.sys.NumPattern.cache("0.##");  fan.sys.NumPattern.cache("#,###.00");  fan.sys.NumPattern.cache("#,###.##");
fan.sys.NumPattern.cache("0.###"); fan.sys.NumPattern.cache("#,###.000"); fan.sys.NumPattern.cache("#,###.###");
fan.sys.NumPattern.cache("0.0#");  fan.sys.NumPattern.cache("#,###.0#");  fan.sys.NumPattern.cache("#,###.0#");
fan.sys.NumPattern.cache("0.0##"); fan.sys.NumPattern.cache("#,###.0##"); fan.sys.NumPattern.cache("#,###.0##");
*/
//
// Str
//
fan.sys.Str.m_defVal = "";
|
<reponame>madhudskumar/ng2-weather-app
"use strict";
// NOTE(review): this file is TypeScript compiler output (see the
// sourceMappingURL footer) — edit the .ts source, not this file.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var core_1 = require('@angular/core');
var http_1 = require('@angular/http');
var Observable_1 = require('rxjs/Observable');
require('rxjs/add/operator/map');
// Angular service wrapping the OpenWeatherMap current-weather endpoint.
// NOTE(review): the API key is embedded in the bundle and the endpoint is
// plain http — confirm this is acceptable for this app.
var WeatherService = (function () {
    function WeatherService(_http) {
        this._http = _http;
        //localFunc
        this.getWeatherUrl = function (city) {
            return this.weatherApiUrl + city + this.appId;
        };
        this.weatherApiUrl = 'http://api.openweathermap.org/data/2.5/weather?q=';
        this.appId = '&appid=5eca1bb9b4183218679a40214432960d';
    }
    // True if any element of cityWeather already carries this id.
    WeatherService.prototype.exists = function (cityWeather, id) {
        for (var _i = 0, cityWeather_1 = cityWeather; _i < cityWeather_1.length; _i++) {
            var weather_1 = cityWeather_1[_i];
            if (weather_1.id === id) {
                return true;
            }
        }
        return false;
    };
    ;
    // Emits the first `weather` entry of the API response (tagged with the
    // requested city). The observable never calls complete() or error(),
    // and HTTP failures are not handled here.
    WeatherService.prototype.getWeather = function (city) {
        var _this = this;
        return new Observable_1.Observable(function (observable) {
            _this._http.get(_this.getWeatherUrl(city))
                .map(function (res) { return res.json(); })
                .subscribe(function (res) {
                var weather = res.weather[0];
                weather.city = city;
                observable.next(weather);
            });
        });
    };
    ;
    WeatherService = __decorate([
        core_1.Injectable(), 
        __metadata('design:paramtypes', [http_1.Http])
    ], WeatherService);
    return WeatherService;
}());
exports.WeatherService = WeatherService;
//# sourceMappingURL=weather.service.js.map |
// Tag every <li> on the page with the "red" class.
const songItems = document.querySelectorAll('li')
console.log(songItems)
songItems.forEach((item) => {
  item.classList.add('red')
})
#!/bin/bash
# Deploy generated HTML docs to the gh-pages branch from Travis CI.
set -e # Exit with nonzero exit code if anything fails
SOURCE_BRANCH="master"
TARGET_BRANCH="gh-pages"
PAGES_DIR="_builds/docs/html"
# GITHUB_TOKEN - set as private variable
echo "Starting deployment"
echo "Target: ${TARGET_BRANCH} branch"
CURRENT_COMMIT=`git rev-parse HEAD`
ORIGIN_URL=`git config --get remote.origin.url`
ORIGIN_URL_WITH_CREDENTIALS=${ORIGIN_URL/\/\/github.com/\/\/$GITHUB_TOKEN@github.com}
# Clone the existing gh-pages for this repo into gh-pages-deploy/
# (with set -e the script aborts here if the branch does not exist yet)
echo "Checking out ${TARGET_BRANCH} branch"
git clone -b ${TARGET_BRANCH} --single-branch --depth=1 ${ORIGIN_URL} gh-pages-deploy
echo "Removing old static content"
# Fix: 'gh-pages-deploy/**/*' without 'shopt -s globstar' only matched one
# directory level deep, so stale top-level files survived deploys. A plain
# '*' removes every non-hidden top-level entry while keeping .git intact.
rm -rf gh-pages-deploy/*
echo "Copying pages content to root"
cp -Rpv ${PAGES_DIR}/* gh-pages-deploy/
echo "Pushing new content to ${ORIGIN_URL}:${TARGET_BRANCH}"
cd gh-pages-deploy
git config user.name "Travis CI" || exit 1
git config user.email "${COMMIT_AUTHOR_EMAIL}" || exit 1
git add -A .
git commit -m "Deploy to GitHub Pages: ${CURRENT_COMMIT}"
# Quiet push with credentials; output suppressed so the token never leaks
# into the CI log.
git push --quiet "${ORIGIN_URL_WITH_CREDENTIALS}" ${TARGET_BRANCH} > /dev/null 2>&1
cd ..
echo "Deployed successfully."
exit 0
|
set -e
echo "Enter message: "
# -r keeps backslashes in the commit message literal (SC2162).
read -r MESSAGE
echo "Deploying $MESSAGE ..."
# commit the build output as a fresh repo and force-push it to gh-pages
cd build
git init
git add -A
git commit -m "$MESSAGE"
git push -f https://github.com/spaxjs/spax.git master:gh-pages
# back to root
cd ..
|
package mg.weather.mapper;
import mg.utils.mapper.DateMapper;
import mg.weather.dto.openweather.DailyWeatherForecastInfoDto;
import mg.weather.entity.DailyWeatherForecastInfo;
import org.mapstruct.Mapper;
/**
 * MapStruct mapper between {@link DailyWeatherForecastInfo} entities and
 * {@link DailyWeatherForecastInfoDto} transfer objects.
 *
 * Nested fields are delegated to the listed helper mappers (dates,
 * temperatures, "feels like" data and weather conditions). The generated
 * implementation is registered as a Spring bean via componentModel = "spring".
 */
@Mapper(
    uses = {
        DateMapper.class,
        TemperatureInfoMapper.class,
        TemperatureFeelsLikeMapper.class,
        WeatherMapper.class
    },
    componentModel = "spring"
)
public interface DailyWeatherForecastMapper {

    /** Entity -> DTO conversion. */
    DailyWeatherForecastInfoDto mapToDTO(DailyWeatherForecastInfo entity);

    /** DTO -> entity conversion. */
    DailyWeatherForecastInfo mapToEntity(DailyWeatherForecastInfoDto dto);
}
|
// Generic filter: keep only the entries whose `parameter` field (an index or
// property name) strictly equals `value`.
const filterResults = (arr, parameter, value) =>
  arr.filter((entry) => entry[parameter] === value);

// Demo: select the rows whose second column (index 1) equals 28.
const result = filterResults([
  ["John", 28, true],
  ["Jane", 21, false],
  ["Tom", 35, true],
], 1, 28);

console.log(result); // [["John", 28, true]];
-- Monthly count of distinct registered users, ordered chronologically.
-- Fix: `table` is a reserved word in MySQL and must be backtick-quoted;
-- COUNT(DISTINCT expr) is the standard form (DISTINCT is not a function).
SELECT COUNT(DISTINCT user_id) AS count,
       YEAR(registration_date) AS year,
       MONTH(registration_date) AS month
FROM `table`
GROUP BY year, month
ORDER BY year, month;
<reponame>Infinite-graph/vulcan-next
export * from "./server/getAppEnhancer";
|
<filename>src/components/Core/Button/index.ts
export { default as Button } from './Button';
export { default as ButtonRemove } from './ButtonRemove';
|
<reponame>zaneray/scrolltoid
/*
$.scrollToID();
specify a #anchor-link in a link href, make sure
there is a corresponding id in the page.
that's about it.
accepts 'speed' and 'offset' arguments.
*/
(function ($) {
    // $.fn.scrollToID(): animate the page scroll to the element whose id
    // matches the clicked link's '#anchor' href.
    // Accepts { speed, offset } overrides (defaults: 500ms, 20px).
    $.fn.scrollToID = function (args) {
        var options = {
            speed: 500,
            offset: 20
        };
        $.extend( options, args );
        this.off('click.scrolltoid').on('click.scrolltoid', function (e) {
            e.preventDefault();
            var url = $(this).attr('href'),
                urlArray = url.split('#'),
                id = urlArray[1],
                $target = $('#' + id);
            // Fix: .offset() on an empty selection returns undefined, so a
            // missing anchor target used to throw a TypeError. Bail out instead.
            if (!$target.length) {
                return;
            }
            var topOffset = $target.offset().top - options.offset;
            $('body, html').animate({
                scrollTop: topOffset
            }, options.speed);
        });
        return this;   // keep the plugin chainable
    };
}( jQuery ));
|
package cyclops.container.immutable.impl;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import cyclops.container.control.Either;
import cyclops.container.control.Option;
import cyclops.container.immutable.impl.base.BAMT;
import cyclops.container.basetests.BaseImmutableListTest;
import cyclops.container.immutable.ImmutableList;
import cyclops.container.immutable.tuple.Tuple;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.reactive.ReactiveSeq;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Unit tests for {@link Vector}, the persistent bit-mapped-trie list.
 * Extends BaseImmutableListTest so the shared ImmutableList contract tests
 * also run against Vector via the factory overrides below. Several tests
 * pin exact internal values (e.g. trie-depth sizes) — do not change literals.
 */
public class VectorTest extends BaseImmutableListTest {

    @Override
    protected <T> Vector<T> fromStream(Stream<T> s) {
        return Vector.fromStream(s);
    }

    @Test
    public void dropSize() {
        System.out.println(of(1, 2, 3).drop(2));
        System.out.println(of(1, 2, 3).drop(2).size());
        assertThat(of(1, 2, 3).drop(2).size(), equalTo(1));
    }

    @Test
    public void npeTest() {
        // Repeatedly rebuilds a Guava ImmutableList with one element replaced;
        // regression exercise for a historical NPE.
        com.google.common.collect.ImmutableList<String> guava =
            com.google.common.collect.ImmutableList.copyOf(ReactiveSeq.range(0, 1000).map(i -> "" + i));
        for (int i = 0; i < 1000; i++) {
            System.out.println("Guava size " + guava.size());
            guava = com.google.common.collect.ImmutableList.<String>builder()
                .addAll(guava.subList(0, i))
                .add("" + i)
                .addAll(guava.subList(i + 1, guava.size()))
                .build();
        }
        /**
         //System.out.println(Seq.range(0,3).concatMap(i -> Vector.range(i*10,i*10+15).map(n->"i " + i + " n : " + n)));
         System.out.println(Vector.range(0,3).concatMap(i -> Vector.range(i*10,i*10+15).map(n->"i " + i + " n : " + n)).size());
         System.out.println(Vector.range(0,3).concatMap(i -> Vector.range(i*10,i*10+15).map(n->"i " + i + " n : " + n)));
         **/
        /** Vector.range(0,10).map(i -> i * 2)
         .concatMap(i->Vector.range(0,10))
         .map(i -> i * 2)
         .filter(i -> i < 5000)
         .map(i -> "hello " + i)
         .map(i -> i.length())
         .zip(Vector.range(0,1000000))
         .map(i->i._1())
         .foldLeft((a, b) -> a + b);**/
    }

    @Test
    public void zipWithIndexOperation() {
        Vector.of(1, 2, 3).zipWithIndex();
    }

    @Test
    public void map3() {
        Vector.range(0, 10000).map(i -> i * 2);
    }

    // splitAt should return the concrete Vector type on both tuple sides and
    // compare equal to equivalent tuples of Vectors.
    @Test
    public void types() {
        System.out.println(of(1, 2, 3).splitAt(2)._1().getClass());
        System.out.println(of(1, 2, 3).splitAt(2)._1());
        System.out.println(of(1, 2, 3).splitAt(2)._2().getClass());
        System.out.println(of(1, 2, 3).splitAt(2)._2());
        System.out.println(Tuple.tuple(of(1, 2), of(3))._1().getClass());
        System.out.println(Tuple.tuple(of(1, 2), of(3))._1());
        System.out.println(Tuple.tuple(of(1, 2), of(3))._2().getClass());
        assertThat(of(1, 2, 3).splitAt(2)._1(), equalTo(of(1, 2)));
        ImmutableList<Integer> three = of(1, 2, 3).splitAt(2)._2();
        three.equals(of(3));
        assertThat(of(1, 2, 3).splitAt(2)._2(), equalTo(of(3)));
        assertThat(of(1, 2, 3).splitAt(2), equalTo(Tuple.tuple(of(1, 2), of(3))));
    }

    // equals/hashCode must agree across ImmutableList implementations,
    // mirroring the java.util collection contract demonstrated first.
    @Test
    public void equalsAndHash() {
        LinkedList<Integer> l = new LinkedList<>();
        ArrayList<Integer> al = new ArrayList<>();
        l.add(1);
        al.add(1);
        assertThat(l, equalTo(al));
        assertThat(l.hashCode(), equalTo(al.hashCode()));
        assertThat(Vector.of(1, 2), equalTo(Seq.of(1, 2)));
        assertThat(Vector.of(1, 2).hashCode(), equalTo(Seq.of(1, 2).hashCode()));
        assertThat(Vector.of(1, 2), equalTo(LazySeq.of(1, 2)));
        assertThat(Vector.of(1, 2).hashCode(), equalTo(LazySeq.of(1, 2).hashCode()));
        assertThat(Vector.of(1, 2), equalTo(IntMap.of(1, 2)));
        assertThat(Vector.of(1, 2).hashCode(), equalTo(IntMap.of(1, 2).hashCode()));
    }

    @Override
    public <T> Vector<T> empty() {
        return Vector.empty();
    }

    @Override
    public <T> Vector<T> of(T... values) {
        return Vector.of(values);
    }

    @Override
    public Vector<Integer> range(int start, int end) {
        return Vector.range(start, end);
    }

    @Override
    public Vector<Long> rangeLong(long start, long end) {
        return Vector.rangeLong(start, end);
    }

    @Override
    public <T> ImmutableList<T> iterate(int times, T seed, UnaryOperator<T> fn) {
        // Note the argument-order difference: Vector.iterate(seed, fn, times).
        return Vector.iterate(seed, fn, times);
    }

    @Override
    public <T> Vector<T> generate(int times, Supplier<T> fn) {
        return Vector.generate(fn, times);
    }

    @Override
    public <U, T> Vector<T> unfold(U seed, Function<? super U, Option<Tuple2<T, U>>> unfolder) {
        return Vector.unfold(seed, unfolder);
    }

    // mapNdepth tests exercise map/foldLeft at increasing trie depths
    // (sizes chosen to cross the 32-way branching boundaries).
    @Test
    public void map2depth() {
        //Vector.range(0, 10000)
        Vector.range(0, 32).map(i -> i * 2).foldLeft((a, b) -> a + b);
    }

    @Test
    public void map3depth() {
        Vector.range(0, 2000).map(i -> i * 2).foldLeft((a, b) -> a + b);
    }

    @Test
    public void map4depth() {
        Vector.range(0, 10000).map(i -> i * 2).foldLeft((a, b) -> a + b);
    }

    @Test
    public void map5depth() {
        Vector.range(0, 5000000).map(i -> i * 2).foldLeft((a, b) -> a + b);
    }

    @Test
    public void npe4D() {
        Vector.range(0, 10000)
              // Vector.range(0, 32)
              // .map(i -> i * 2)
              .concatMap(i -> Vector.range(0, 10))
              .map(i -> i * 2)
              // .filter(i -> i < 5000)
              // .map(i -> "hello " + i)
              // .map(i -> i.length())
              .foldLeft((a, b) -> a + b);
    }

    @Test
    public void npe3D() {
        Vector.range(0, 1000)
              // Vector.range(0, 32)
              // .map(i -> i * 2)
              .concatMap(i -> Vector.range(0, 10))
              .map(i -> i * 2)
              // .filter(i -> i < 5000)
              // .map(i -> "hello " + i)
              // .map(i -> i.length())
              .foldLeft((a, b) -> a + b);
    }

    @Test
    public void map3D() {
        assertThat(Vector.range(0, 10000).map(i -> i * 2),
                   equalTo(Seq.range(0, 10000).map(i -> i * 2)));
    }

    @Test
    public void map2D() {
        for (int i = 32; i < 1024; i++) {
            System.out.println("I is " + i);
            assertThat(Vector.range(0, i).map(n -> n * 2),
                       equalTo(Seq.range(0, i).map(n -> n * 2)));
        }
    }

    @Test
    public void map1D() {
        for (int i = 0; i < 32; i++) {
            assertThat(Vector.range(0, i).map(n -> n * 2),
                       equalTo(Seq.range(0, i).map(n -> n * 2)));
        }
    }

    @Test
    public void testVector() {
        Vector<Integer> ints = Vector.<Integer>empty().plus(1);
        assertThat(ints.get(0), equalTo(Option.some(1)));
    }

    @Test
    public void testVector100() {
        // 1025 crosses the two-level (32*32) node boundary.
        Vector<Integer> ints = Vector.<Integer>empty();
        for (int i = 0; i < 1025; i++) {
            ints = ints.plus(i);
        }
        assertThat(ints.get(0), equalTo(Option.some(0)));
        assertThat(ints.get(900), equalTo(Option.some(900)));
    }

    @Test
    public void last() {
        Object[] array = {"hello", "world"};
        assertThat(BAMT.ArrayUtils.last(array), equalTo("world"));
    }

    // testNPow / testNPowSet: build vectors of 2^k elements, then verify
    // every index via get(), stream() and updateAt(). Larger powers are
    // @Ignore'd — they are slow soak tests, not CI tests.
    @Test
    public void test3Pow() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int p = Double.valueOf(Math.pow(2, 15)).intValue();
        for (int i = 0; i < p; i++) {
            System.out.println(i);
            ints = ints.plus(i);
        }
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i)));
        }
        final Vector<Integer> finalRef = ints;
        ints.stream().forEach(next -> {
            assertThat(finalRef.get(next), equalTo(Option.some(next)));
        });
    }

    @Test
    public void test3PowSet() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int p = Double.valueOf(Math.pow(2, 15)).intValue();
        for (int i = 0; i < p; i++) {
            ints = ints.plus(i);
        }
        for (int i = 0; i < p; i++) {
            System.out.println(i);
            ints = ints.updateAt(i, i * 2);
        }
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i * 2)));
        }
    }

    @Test
    @Ignore
    public void test4Pow() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int p = Double.valueOf(Math.pow(2, 20)).intValue();
        for (int i = 0; i < p; i++) {
            ints = ints.plus(i);
        }
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i)));
        }
        final Vector<Integer> finalRef = ints;
        ints.stream().forEach(next -> {
            assertThat(finalRef.get(next), equalTo(Option.some(next)));
        });
    }

    @Test
    @Ignore
    public void test4PowSet() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int p = Double.valueOf(Math.pow(2, 20)).intValue();
        for (int i = 0; i < p; i++) {
            ints = ints.plus(i);
        }
        for (int i = 0; i < p; i++) {
            ints = ints.updateAt(i, i * 2);
        }
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i * 2)));
        }
        Vector<Integer> doubled = ints.map(n -> n * 2);
        Iterator<Integer> it = doubled.iterator();
        for (int i = 0; i < p; i++) {
            Integer next = it.next();
            // Each element was updated to i*2, then mapped *2 again.
            assertThat(next, equalTo(i * 4));
        }
    }

    @Test
    @Ignore
    public void test5Pow() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int p = Double.valueOf(Math.pow(2, 25)).intValue();
        for (int i = 0; i < p; i++) {
            ints = ints.plus(i);
        }
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i)));
        }
        final Vector<Integer> finalRef = ints;
        ints.stream().forEach(next -> {
            assertThat(finalRef.get(next), equalTo(Option.some(next)));
        });
        Vector<Integer> doubled = ints.map(n -> n * 2);
        Iterator<Integer> it = doubled.iterator();
        for (int i = 0; i < p; i++) {
            Integer next = it.next();
            assertThat(next, equalTo(i * 2));
        }
    }

    @Test
    @Ignore
    public void test5PowSet() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int p = Double.valueOf(Math.pow(2, 25)).intValue();
        for (int i = 0; i < p; i++) {
            ints = ints.plus(i);
        }
        for (int i = 0; i < p; i++) {
            ints = ints.updateAt(i, i * 2);
        }
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i * 2)));
        }
    }

    @Test
    @Ignore
    public void test6Pow() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int progress = Double.valueOf(Math.pow(2, 24)).intValue();
        int p = Double.valueOf(Math.pow(2, 26)).intValue();
        System.out.println("Plus");
        for (int i = 0; i < p; i++) {
            ints = ints.plus(i);
            if (i % progress == 0) {
                System.out.println("Progress marker " + i);
            }
        }
        System.out.println("get");
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i)));
        }
        System.out.println("stream");
        final Vector<Integer> finalRef = ints;
        ints.stream().forEach(next -> {
            assertThat(finalRef.get(next), equalTo(Option.some(next)));
        });
        System.out.println("map");
        Vector<Integer> doubled = ints.map(n -> n * 2);
        Iterator<Integer> it = doubled.iterator();
        for (int i = 0; i < p; i++) {
            Integer next = it.next();
            assertThat(next, equalTo(i * 2));
        }
    }

    @Test
    @Ignore
    public void test6PowSet() {
        Vector<Integer> ints = Vector.<Integer>empty();
        int p = Double.valueOf(Math.pow(2, 30)).intValue() / 2;
        for (int i = 0; i < p; i++) {
            ints = ints.plus(i);
            // O(n^2) re-verification after every append — soak test only.
            for (int x = 0; x < i; x++) {
                assertThat(ints.get(x), equalTo(Option.some(x)));
            }
        }
        for (int i = 0; i < p; i++) {
            ints = ints.updateAt(i, i * 2);
        }
        for (int i = 0; i < p; i++) {
            assertThat(ints.get(i), equalTo(Option.some(i * 2)));
        }
    }

    // Diagnostic printouts of the 5-bit index arithmetic used by the trie.
    @Test
    public void shift() {
        System.out.println("Shift " + (1 >>> 5) + " and " + ((1 >>> 5) & 31));
        System.out.println("Shift " + (32 >>> 5) + " and " + ((32 >>> 5) & 31));
        System.out.println("Shift " + (64 >>> 5) + " and " + ((64 >>> 5) & 31));
        System.out.println("Shift " + (96 >>> 5) + " and " + ((96 >>> 5) & 31));
        System.out.println("Shift " + (128 >>> 5) + " and " + ((128 >>> 5) & 31));
    }

    @Test
    public void shift2() {
        for (int i = 0; i < 10000; i++) {
            System.out.println("I is " + i + "Shift " + (i >>> 5) + " and " + ((i >>> 5) & 31));
        }
    }

    // set() returns Either.left for out-of-bounds indices rather than throwing.
    @Test
    public void setEither() {
        Vector<Integer> ints = Vector.of(1, 2, 3);
        assertTrue(ints.set(-1, 10).isLeft());
        assertTrue(ints.set(4, 10).isLeft());
        assertThat(ints.set(2, 10), equalTo(Either.right(Vector.of(1, 2, 10))));
    }

    // delete() follows the same Either convention as set().
    @Test
    public void deleteEither() {
        Vector<Integer> ints = Vector.of(1, 2, 3);
        assertTrue(ints.delete(-1).isLeft());
        assertTrue(ints.delete(4).isLeft());
        assertThat(ints.delete(2), equalTo(Either.right(Vector.of(1, 2))));
    }
}
|
#!/usr/bin/env sh
# Here is the script to deploy the cert to G-Core CDN service (https://gcorelabs.com/ru/) using the G-Core Labs API (https://docs.gcorelabs.com/cdn/).
# Returns 0 when success.
#
# Written by temoffey <temofffey@gmail.com>
# Public domain, 2019
#export DEPLOY_GCORE_CDN_USERNAME=myusername
#export DEPLOY_GCORE_CDN_PASSWORD=mypassword
######## Public functions #####################
#domain keyfile certfile cafile fullchain
# Deploy an issued certificate to a G-Core CDN resource.
# Args: domain keyfile certfile cafile fullchain
# Flow: authenticate -> locate resource by cname -> upload new sslData ->
#       point the resource at it -> delete the previous sslData. Returns 0 on success.
gcore_cdn_deploy() {
  _cdomain="$1"
  _ckey="$2"
  _ccert="$3"
  _cca="$4"
  _cfullchain="$5"

  _debug _cdomain "$_cdomain"
  _debug _ckey "$_ckey"
  _debug _ccert "$_ccert"
  _debug _cca "$_cca"
  _debug _cfullchain "$_cfullchain"

  # Flatten the PEM files to single-line strings with literal '\n'
  # separators, as required inside the JSON payload.
  _fullchain=$(tr '\r\n' '*#' <"$_cfullchain" | sed 's/*#/#/g;s/##/#/g;s/#/\\n/g')
  _key=$(tr '\r\n' '*#' <"$_ckey" | sed 's/*#/#/g;s/#/\\n/g')
  _debug _fullchain "$_fullchain"
  _debug _key "$_key"

  # Resolve credentials from the environment on first run, then persist
  # them in the domain conf so renewals work unattended.
  if [ -z "$DEPLOY_GCORE_CDN_USERNAME" ]; then
    if [ -z "$Le_Deploy_gcore_cdn_username" ]; then
      _err "Please define the target username: export DEPLOY_GCORE_CDN_USERNAME=username"
      return 1
    fi
  else
    Le_Deploy_gcore_cdn_username="$DEPLOY_GCORE_CDN_USERNAME"
    _savedomainconf Le_Deploy_gcore_cdn_username "$Le_Deploy_gcore_cdn_username"
  fi
  if [ -z "$DEPLOY_GCORE_CDN_PASSWORD" ]; then
    if [ -z "$Le_Deploy_gcore_cdn_password" ]; then
      _err "Please define the target password: export DEPLOY_GCORE_CDN_PASSWORD=password"
      return 1
    fi
  else
    Le_Deploy_gcore_cdn_password="$DEPLOY_GCORE_CDN_PASSWORD"
    _savedomainconf Le_Deploy_gcore_cdn_password "$Le_Deploy_gcore_cdn_password"
  fi

  _info "Get authorization token"
  _request="{\"username\":\"$Le_Deploy_gcore_cdn_username\",\"password\":\"$Le_Deploy_gcore_cdn_password\"}"
  _debug _request "$_request"
  export _H1="Content-Type:application/json"
  _response=$(_post "$_request" "https://api.gcdn.co/auth/signin")
  _debug _response "$_response"
  _regex=".*\"token\":\"\([-._0-9A-Za-z]*\)\".*$"
  _debug _regex "$_regex"
  _token=$(echo "$_response" | sed -n "s/$_regex/\1/p")
  _debug _token "$_token"
  if [ -z "$_token" ]; then
    _err "Error G-Core Labs API authorization"
    return 1
  fi

  _info "Find CDN resource with cname $_cdomain"
  export _H2="Authorization:Token $_token"
  _response=$(_get "https://api.gcdn.co/resources")
  _debug _response "$_response"
  # Fix: removed a dead Perl-style regex assignment that was immediately
  # overwritten by the BRE pattern below.
  _regex="^.*\"cname\":\"$_cdomain\".*$"
  _debug _regex "$_regex"
  # Split the JSON array into one object per line, then pick our resource.
  _resource=$(echo "$_response" | sed 's/},{/},\n{/g' | _egrep_o "$_regex")
  _debug _resource "$_resource"
  _regex=".*\"id\":\([0-9]*\),.*$"
  _debug _regex "$_regex"
  _resourceId=$(echo "$_resource" | sed -n "s/$_regex/\1/p")
  _debug _resourceId "$_resourceId"
  _regex=".*\"sslData\":\([0-9]*\)}.*$"
  _debug _regex "$_regex"
  _sslDataOld=$(echo "$_resource" | sed -n "s/$_regex/\1/p")
  _debug _sslDataOld "$_sslDataOld"
  _regex=".*\"originGroup\":\([0-9]*\),.*$"
  _debug _regex "$_regex"
  _originGroup=$(echo "$_resource" | sed -n "s/$_regex/\1/p")
  _debug _originGroup "$_originGroup"
  if [ -z "$_resourceId" ] || [ -z "$_originGroup" ]; then
    _err "Not found CDN resource with cname $_cdomain"
    return 1
  fi

  _info "Add new SSL certificate"
  # Timestamp in the name keeps successive uploads distinguishable.
  _date=$(date "+%d.%m.%Y %H:%M:%S")
  _request="{\"name\":\"$_cdomain ($_date)\",\"sslCertificate\":\"$_fullchain\",\"sslPrivateKey\":\"$_key\"}"
  _debug _request "$_request"
  _response=$(_post "$_request" "https://api.gcdn.co/sslData")
  _debug _response "$_response"
  _regex=".*\"id\":\([0-9]*\),.*$"
  _debug _regex "$_regex"
  _sslDataAdd=$(echo "$_response" | sed -n "s/$_regex/\1/p")
  _debug _sslDataAdd "$_sslDataAdd"
  if [ -z "$_sslDataAdd" ]; then
    _err "Error new SSL certificate add"
    return 1
  fi

  _info "Update CDN resource"
  _request="{\"originGroup\":$_originGroup,\"sslData\":$_sslDataAdd}"
  _debug _request "$_request"
  _response=$(_post "$_request" "https://api.gcdn.co/resources/$_resourceId" '' "PUT")
  _debug _response "$_response"
  _regex=".*\"sslData\":\([0-9]*\).*$"
  _debug _regex "$_regex"
  _sslDataNew=$(echo "$_response" | sed -n "s/$_regex/\1/p")
  _debug _sslDataNew "$_sslDataNew"
  if [ "$_sslDataNew" != "$_sslDataAdd" ]; then
    _err "Error CDN resource update"
    return 1
  fi

  # Clean up the certificate that was previously attached, if any.
  if [ -z "$_sslDataOld" ] || [ "$_sslDataOld" = "null" ]; then
    _info "Not found old SSL certificate"
  else
    _info "Delete old SSL certificate"
    _response=$(_post '' "https://api.gcdn.co/sslData/$_sslDataOld" '' "DELETE")
    _debug _response "$_response"
  fi

  _info "Certificate successfully deployed"
  return 0
}
|
import React from 'react';
import Alert from "@material-ui/lab/Alert";
import AlertTitle from '@material-ui/lab/AlertTitle';
// Props for the Notification banner. `status` selects the alert flavour;
// anything other than "error" or "success" renders no alert.
type NotificationProps = {
  status: string,
  // Called when the user dismisses the alert (fix: was `any`).
  onCloseNotif: (event: React.SyntheticEvent) => void,
  message: string,
  title: string,
}

/**
 * Dismissible Material-UI alert for error/success notifications.
 * The two original branches were identical except for the severity value,
 * so they are collapsed into one Alert with a narrowed severity.
 */
export default function Notification(props: NotificationProps) {
  // Equality checks narrow the free-form status string to the literal
  // severities Alert accepts; unknown statuses yield null (renders nothing).
  const severity =
    props.status === "error" ? "error" as const :
    props.status === "success" ? "success" as const :
    null;

  return (
    <div className="alertContainer">
      {severity && (
        <Alert severity={severity} onClose={props.onCloseNotif}>
          <AlertTitle>
            {props.title}
          </AlertTitle>
          {props.message}
        </Alert>
      )}
    </div>
  )
}
<reponame>leongaban/redux-saga-exchange<gh_stars>1-10
export * from './namespace';
export * from './multiConnect';
export * from './communication';
export { default as composeReducers } from './composeReducers';
export { default as makeIdentityReducer } from './makeIdentityReducer';
export { default as makeResetStateReducer } from './makeResetStateReducer';
export { default as validateFields } from './validateFields';
export { default as isSuccessedByState } from './isSuccessedByState';
export { default as isProcessedByState } from './isProcessedByState';
export { default as isFailedByState } from './isFailedByState';
export { default as makeReduxFormEntry } from './makeReduxFormEntry';
export { createErrorReporterMiddleware } from './createErrorReporterMiddleware';
|
package sgmongo
import (
"github.com/coderguang/GameEngine_go/sgcfg"
)
// MongoCfg holds MongoDB connection settings loaded from a JSON config file;
// the struct tags name the expected JSON keys.
type MongoCfg struct {
	Host   string `json:"host"`
	Port   string `json:"port"`
	User   string `json:"user"`
	Pwd    string `json:"pwd"`
	DbName string `json:"dbName"`
}
// ReadCfg parses the JSON config file at filename into a MongoCfg.
// The partially-populated config is returned alongside any decode error.
func ReadCfg(filename string) (*MongoCfg, error) {
	var cfg MongoCfg
	err := sgcfg.ReadCfg(filename, &cfg)
	return &cfg, err
}
|
#!/bin/bash
# Health probe for the local API: exits 0 once /api/v1/ping answers.
# $1 = startup timeout in seconds, $2 = port to probe.
TEST_CMD="curl --max-time 5 --fail http://localhost:$2/api/v1/ping"
re='^[0-9]+$'
# Both arguments must be plain non-negative integers.
! [[ $1 =~ $re ]] && echo "Error: integer startup timeout seconds not supplied!" >&2 && exit 1
! [[ $2 =~ $re ]] && echo "Error: port not supplied" >&2 && exit 1
# Hit URL
$TEST_CMD -s && exit 0
# Hit failed. Starting up?
# Only wait for the startup window if the app has never finished booting
# (tmp/init_complete is the boot-finished marker file).
if [ ! -f "tmp/init_complete" ]; then
# Startup delay
# Poll until the service answers or $1 seconds elapse ($SECONDS is bash's
# elapsed-time counter); sleep 5s per step, shrinking to 1s near the deadline.
while (( ${SECONDS} < $1 )) && ! $TEST_CMD -s > /dev/null 2>&1; do
sleep $(( ${SECONDS} + 5 < $1 ? 5 : 1 ))
done
fi
# Pass result of test
$TEST_CMD
|
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by <NAME> <<EMAIL>>
#
# Entry-type module for Intel Firmware Support Package binary blob
#
from binman.etype.blob_ext import Entry_blob_ext
class Entry_intel_fsp(Entry_blob_ext):
    """Entry containing an Intel Firmware Support Package (FSP) file

    Properties / Entry arguments:
        - filename: Filename of file to read into entry

    This file contains binary blobs which are used on some devices to make the
    platform work. U-Boot executes this code since it is not possible to set up
    the hardware using U-Boot open-source code. Documentation is typically not
    available in sufficient detail to allow this.

    An example filename is 'FSP/QUEENSBAY_FSP_GOLD_001_20-DECEMBER-2013.fd'

    See README.x86 for information about x86 binary blobs.
    """
    def __init__(self, section, etype, node):
        # Pure pass-through: all blob-reading behaviour lives in Entry_blob_ext.
        super().__init__(section, etype, node)
|
#!/usr/bin/env bats
load $BATS_TEST_DIRNAME/helper/common.bash
# Bats tests for dolt's deterministic schema-tag generation: tags must survive
# renames, be case-insensitive, and merge cleanly when both branches derive
# the same tag. The numeric tag literals below are pinned expectations.
setup() {
setup_common
}
teardown() {
teardown_common
}
@test "Renaming a column should preserve the tag number" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
c1 BIGINT,
PRIMARY KEY (pk));
SQL
run dolt schema tags -r=csv
[ $status -eq 0 ]
[[ "$output" =~ "test,c1,8201" ]] || false
dolt sql -q "alter table test rename column c1 to c0"
run dolt schema tags -r=csv
[ $status -eq 0 ]
# same tag, new column name
[[ "$output" =~ "test,c0,8201" ]] || false
}
@test "Renaming a table should preserve the tag number" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
c1 BIGINT,
PRIMARY KEY (pk));
SQL
run dolt schema tags -r=csv
[ $status -eq 0 ]
[[ "$output" =~ "test,pk,3228" ]] || false
[[ "$output" =~ "test,c1,8201" ]] || false
dolt sql -q "alter table test rename to new_name"
run dolt schema tags -r=csv
[ $status -eq 0 ]
[[ "$output" =~ "new_name,pk,3228" ]] || false
[[ "$output" =~ "new_name,c1,8201" ]] || false
}
@test "Schema tags should be case insensitive to tables" {
dolt sql <<SQL
CREATE TABLE TeSt (
pk BIGINT NOT NULL,
c1 BIGINT,
PRIMARY KEY (pk));
SQL
# query with lowercase name; output preserves the original casing
run dolt schema tags test -r=csv
[ $status -eq 0 ]
[[ "$output" =~ "TeSt,pk,3228" ]] || false
[[ "$output" =~ "TeSt,c1,8201" ]] || false
}
@test "Merging two branches that added same tag, name, type, and constraints" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
c1 BIGINT,
PRIMARY KEY (pk));
SQL
dolt add test
dolt commit -m "Committed test table"
dolt branch branch1
dolt branch branch2
dolt checkout branch1
dolt sql -q "alter table test add column c2 bigint"
dolt add test
dolt commit -m "Added column c2 bigint"
dolt checkout branch2
dolt sql -q "alter table test add column c2 bigint"
dolt add test
dolt commit -m "Added column c2 bigint"
dolt checkout master
dolt merge branch1
dolt merge branch2
}
@test "Merging branches that use the same tag referring to different schema fails" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:1234',
c1 BIGINT COMMENT 'tag:5678',
PRIMARY KEY (pk));
SQL
dolt add test
dolt commit -m "Committed test table"
dolt branch branch1
dolt branch branch2
dolt checkout branch1
dolt sql -q "alter table test add column c2 bigint"
dolt add test
dolt commit -m "Added column c2 bigint"
dolt checkout branch2
# same column name, conflicting type -> merge must fail
dolt sql -q "alter table test add column c2 longtext"
dolt add test
dolt commit -m "Added column c2 longtext"
dolt checkout master
dolt merge branch1
run dolt merge branch2
[ $status -ne 0 ]
}
@test "Merging branches that use the same tag referring to different column names fails" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:1234',
c1 BIGINT COMMENT 'tag:5678',
PRIMARY KEY (pk));
SQL
dolt add test
dolt commit -m "Committed test table"
dolt branch branch1
dolt branch branch2
dolt checkout branch1
dolt sql -q "alter table test add column c2 bigint"
dolt add test
dolt commit -m "Added column c2 bigint"
dolt checkout branch2
dolt sql -q "alter table test add column c2 bigint"
# rename keeps the tag but changes the name -> conflict with branch1
dolt sql -q "alter table test rename column c2 to c0"
dolt add test
dolt commit -m "Added column c0 bigint"
dolt checkout master
dolt merge branch1
run dolt merge branch2
[ $status -eq 1 ]
}
@test "Merging branches that both created the same column succeeds" {
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL COMMENT 'tag:0',
c1 BIGINT COMMENT 'tag:1',
PRIMARY KEY (pk));
SQL
dolt add test
dolt commit -m "Committed test table"
dolt branch branch1
dolt branch branch2
dolt checkout branch1
dolt sql -q "alter table test add column c2 bigint"
dolt sql -q "alter table test add column c3 double"
dolt add test
dolt commit -m "Added columns c2 bigint and c3 double to branch1"
dolt checkout branch2
dolt sql -q "alter table test add column c2 bigint"
# column c3 will have the same tag on both branches due to deterministic tag generation
dolt sql -q "alter table test add column c3 double"
dolt add test
dolt commit -m "Added columns c2 bigint and c3 double to branch2"
dolt checkout master
dolt merge branch1
run dolt merge branch2
[ $status -eq 0 ]
run dolt schema show
[[ "${lines[2]}" =~ "\`pk\` bigint NOT NULL" ]] || false
[[ "${lines[3]}" =~ "\`c1\` bigint" ]] || false
[[ "${lines[4]}" =~ "\`c2\` bigint" ]] || false
[[ "${lines[5]}" =~ "\`c3\` double" ]] || false
}
@test "Merging branches that both created the same table succeeds" {
dolt branch branch1
dolt branch branch2
dolt checkout branch1
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
c1 BIGINT,
PRIMARY KEY (pk));
SQL
dolt add test
dolt commit -m "Committed test table"
dolt checkout branch2
dolt sql <<SQL
CREATE TABLE test (
pk BIGINT NOT NULL,
c1 BIGINT,
PRIMARY KEY (pk));
SQL
dolt add test
# pk and c1 will have the same tags on both branches due to deterministic tag generation
dolt commit -m "Committed test table"
dolt checkout master
dolt merge branch1
run dolt merge branch2
[ $status -eq 0 ]
run dolt schema show
[[ "${lines[2]}" =~ "\`pk\` bigint NOT NULL" ]] || false
[[ "${lines[3]}" =~ "\`c1\` bigint" ]] || false
}
@test "Deterministic tag generation produces consistent results" {
dolt branch other
dolt sql <<SQL
CREATE TABLE test1 (
pk1 BIGINT NOT NULL,
c1 BIGINT,
c2 DOUBLE,
c3 LONGTEXT,
PRIMARY KEY (pk1));
SQL
dolt add test1
dolt commit -m "Committed test table"
# If anything changes to deterministic tag generation, this will break
run dolt schema tags -r=csv
[ $status -eq 0 ]
[[ "$output" =~ "test1,pk1,10458" ]] || false
[[ "$output" =~ "test1,c1,5951" ]] || false
[[ "$output" =~ "test1,c2,10358" ]] || false
[[ "$output" =~ "test1,c3,11314" ]] || false
}
@test "dolt table import -c uses deterministic tag generation" {
cat <<DELIM > data.csv
pk,c1,c2,c3,c4,c5
0,1,2,3,4,5
a,b,c,d,e,f
DELIM
run dolt table import -c -pk=pk ints_table data.csv
[ $status -eq 0 ]
run dolt schema tags -r=csv
[ $status -eq 0 ]
[[ "$output" =~ "ints_table,pk,6302" ]] || false
[[ "$output" =~ "ints_table,c1,12880" ]] || false
[[ "$output" =~ "ints_table,c2,15463" ]] || false
[[ "$output" =~ "ints_table,c3,14526" ]] || false
[[ "$output" =~ "ints_table,c4,5634" ]] || false
[[ "$output" =~ "ints_table,c5,12796" ]] || false
}
|
# Runs Google's Android app-crawler jar against the target package using the
# local Android SDK.
CRAWL_LAUNCHER_PATH=/Users/phoqe/Downloads/app-crawler/crawl_launcher.jar
APP_PACKAGE_NAME=com.phoqe.fackla
ANDROID_SDK_PATH=/Users/phoqe/Library/Android/sdk

# Fix: quote the expansions so paths containing spaces do not word-split.
java -jar "$CRAWL_LAUNCHER_PATH" --app-package-name "$APP_PACKAGE_NAME" --android-sdk "$ANDROID_SDK_PATH"
|
# scp's a file to the board, relative to the board dir in the repo
# Input = the name of the file to transfer
ROOT_PROJECT_DIR="$(readlink -fm "$0"/..)"
# Prints usage/help text for this script
print_flags () {
echo "========================================================================================================================="
echo "Usage: setup.sh"
echo "========================================================================================================================="
echo "Utility to setup the repo"
echo "========================================================================================================================="
echo "How to use:"
echo " To Start: ./setup.sh [flags]"
echo "========================================================================================================================="
echo "Available Flags (mutually exclusive):"
echo " -e | --extern: Use this for files relative to extern"
echo " -i | --install: Use this for files relative to install"
echo " -s | --src: Use this for files relative to src"
echo " -f | --filename: Name of file to transfer"
echo " -h | --help: This message. Note that the options are in cascading precedence if multiple are picked"
echo "========================================================================================================================="
}
# parse command line args: pick exactly one source directory (-e/-i/-s) and a
# filename (-f), then scp that file to the matching path on the board.
relative_dir=""
file_name=""
while [[ "$#" -gt 0 ]]; do
    case $1 in
        -e | --extern )
            relative_dir="extern"
            ;;
        -i | --install )
            relative_dir="install"
            ;;
        -s | --src )
            relative_dir="src"
            ;;
        -f | --filename )
            file_name="$2"
            shift
            ;;
        -h | --help )
            print_flags
            exit 0
            ;;
        * )
            echo "... Unrecognized Command: $1"
            print_flags
            exit 1
    esac
    shift
done

if [ "${file_name}" = "" ]; then
    # Fix: "give" -> "given" typo; exit with a nonzero status on usage errors.
    echo "No filename given to -f flag. Please try again."
    exit 1
fi
if [ "${relative_dir}" = "" ]; then
    echo "No relative directory selected. Please try again. Use -h to print valid flags."
    exit 1
fi

# Fix: quote the paths so filenames with spaces survive word splitting.
scp "$ROOT_PROJECT_DIR/board/$relative_dir/$file_name" "pi:~/sandboxes/door_detector/board/$relative_dir/$file_name"
|
<reponame>alejsanc/nexttypes
/*
* Copyright 2015-2021 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nexttypes.system;
import java.io.IOException;
import org.apache.poi.util.HexDump;
import org.bouncycastle.util.Arrays;
import com.nexttypes.exceptions.NXException;
/**
 * Console debugging helpers: prints section banners, raw text and hex dumps
 * to stdout for HTTP request/response tracing.
 */
public class Debug {
    public static final String HTTP_REQUEST = "HTTP Request";
    public static final String HTTP_RESPONSE = "HTTP Response";
    public static final String BODY = "Body";
    public static final String PARAMETERS = "Parameters";
    public static final String HEADERS = "Headers";
    public static final String EXCEPTION = "Exception";

    /** Prints a top-level section banner; silently ignores null. */
    public static void title(String title) {
        if (title != null) {
            // Fix: space added before the trailing dashes for symmetry with subtitle().
            System.out.println("\n--------------- " + title + " ---------------");
        }
    }

    /** Prints a second-level section banner; silently ignores null. */
    public static void subtitle(String subtitle) {
        if (subtitle != null) {
            System.out.println("\n---------- " + subtitle + " ----------");
        }
    }

    /** Prints raw text; silently ignores null. */
    public static void text(String text) {
        if (text != null) {
            System.out.println(text);
        }
    }

    /**
     * Hex-dumps up to {@code limit} bytes of {@code data} to stdout,
     * marking truncation with a "---- More ----" trailer.
     */
    public static void binary(byte[] data, int limit) {
        if (data != null) {
            boolean truncated = false;
            if (data.length > limit) {
                data = Arrays.copyOf(data, limit);
                truncated = true;
            }
            try {
                HexDump.dump(data, 0, System.out, 0);
            } catch (ArrayIndexOutOfBoundsException | IllegalArgumentException | IOException e) {
                throw new NXException(e);
            }
            if (truncated) {
                System.out.println("---- More ----");
            }
        }
    }

    public static void exception(Exception e) {
        exception(e.getMessage(), e);
    }

    /** Prints an exception banner, the given message and the stack trace. */
    public static void exception(String message, Exception e) {
        title(EXCEPTION);
        text(message);
        e.printStackTrace();
    }

    public static void httpRequest() {
        title(HTTP_REQUEST);
    }

    public static void httpResponse() {
        title(HTTP_RESPONSE);
    }

    public static void body() {
        subtitle(BODY);
    }

    public static void parameters() {
        subtitle(PARAMETERS);
    }

    public static void headers() {
        subtitle(HEADERS);
    }
}
|
<reponame>rollbar/rollbar-cli<filename>test/sourcemaps/signed-url-uploader.test.js
/* globals describe */
/* globals it */
/* globals beforeEach */
const expect = require('chai').expect;
const sinon = require('sinon');
const axios = require('axios')
const SignedUrlUploader = require('../../src/sourcemaps/signed-url-uploader');
const Scanner = require('../../src/sourcemaps/scanner');
const Output = require('../../src/common/output');
describe('Uploader()', function() {
  it('should initialize successfully', function() {
    // A fresh uploader starts with no zipped output and no mapped files.
    const uploader = new SignedUrlUploader();
    expect(uploader.zippedMapFile).to.equal('');
    expect(uploader.files).to.be.an('array').that.is.empty;
  });
});
describe('.zipFiles()', function() {
  beforeEach(function() {
    global.output = new Output({quiet: true});
  });

  it('should zip files successfully', async function() {
    // Scan a real fixture build so the uploader receives genuine mapped files.
    const scanner = new Scanner({
      targetPath: './test/fixtures/builds/react16/build',
      sources: true
    });
    await scanner.scan();

    const uploader = new SignedUrlUploader();
    uploader.mapFiles(scanner.mappedFiles());
    uploader.zipFiles();

    expect(uploader.zipBuffer.length).to.equal(22);
  });
});
describe('.upload()', function() {
  beforeEach(function() {
    global.output = new Output({quiet: true});
  });

  afterEach(function() {
    // Restore axios.put so the stub does not leak into other test files.
    sinon.restore();
  });

  it('should upload signed url successfully', async function() {
    const scannerOptions = {
      targetPath: './test/fixtures/builds/react16/build',
      sources: true
    };
    const scanner = new Scanner(scannerOptions);
    await scanner.scan();
    const files = scanner.mappedFiles();
    const signedUrlUploader = new SignedUrlUploader();
    // Stub the network call; only the call count is asserted.
    const stub = sinon.stub(axios, 'put');
    stub.resolves({
      status: 200,
      statusText: 'Success',
    });
    await signedUrlUploader.upload(false, files);
    expect(stub.callCount).to.equal(1);
  });
});
|
# utils/profile_session_run_hooks.py
import os
import logging
from scipy import misc
import tensorflow as tf
from tensorflow.python.client import timeline
from tensorflow.python.training import training_util
from tensorflow.python.training import session_run_hook
class ProfileAtStepHook(session_run_hook.SessionRunHook):
    """Records a full runtime trace of the step following `at_step`.

    Writes the trace both as TensorBoard run metadata and as a standalone
    Chrome trace JSON file. Additionally dumps intermediate preprocessing
    images and decoded predictions every 200 steps for debugging.
    """

    def __init__(self, at_step=None, checkpoint_dir=None, trace_level=tf.RunOptions.FULL_TRACE):
        # Step at which profiling is armed; the *next* session.run is traced.
        self._at_step = at_step
        self._do_profile = False
        # Writer used to attach the collected run metadata to TensorBoard.
        self._writer = tf.summary.FileWriter(checkpoint_dir)
        self._trace_level = trace_level

    def begin(self):
        self._global_step_tensor = tf.train.get_global_step()
        if self._global_step_tensor is None:
            raise RuntimeError("Global step should be created to use ProfileAtStepHook.")

    def before_run(self, run_context):  # pylint: disable=unused-argument
        # Request a full trace only for the run immediately after `at_step`.
        if self._do_profile:
            options = tf.RunOptions(trace_level=self._trace_level)
        else:
            options = None
        graph = tf.get_default_graph()
        # Fetch the rotated-image op and the other preprocessing stages.
        # NOTE(review): these tensor names are hard-wired to one specific
        # input pipeline graph; they raise KeyError if the graph changes.
        rotated_image_tensor = graph.get_tensor_by_name(name="Input/RandomRotate/PyFunc:0")
        image_to_float_tensor = graph.get_tensor_by_name(name="Input/RandomRotate/ToFloat:0")
        resized_image_tensor = graph.get_tensor_by_name(name="Input/ResizeRandomMethod/Merge:0")
        preprocessed_image_tensor = graph.get_tensor_by_name(name='Input/RandomAdjustBrightness/clip_by_value:0')
        # Fetch the prediction tensors (forward and backward decoders).
        recognitions_1 = graph.get_tensor_by_name(name='clone_0/Postprocess/Forward/Postprocess/hash_table_1_Lookup:0')
        recognitions_2 = graph.get_tensor_by_name(name='clone_0/Postprocess/Backward/Postprocess/hash_table_3_Lookup:0')
        # Groundtruth tensor (currently unused).
        # groundtruth_tensor = graph.get_tensor_by_name(name='Input/Reshape_7:0')
        return tf.train.SessionRunArgs([self._global_step_tensor, rotated_image_tensor, image_to_float_tensor, resized_image_tensor, preprocessed_image_tensor, \
            [recognitions_1, recognitions_2]], options=options)

    def after_run(self, run_context, run_values):
        global_step = run_values.results[0] - 1
        if self._do_profile:
            self._do_profile = False
            # Attach the collected metadata to TensorBoard and also save a
            # standalone Chrome trace (viewable in chrome://tracing).
            self._writer.add_run_metadata(run_values.run_metadata,
                                          'trace_{}'.format(global_step), global_step)
            timeline_object = timeline.Timeline(run_values.run_metadata.step_stats)
            chrome_trace = timeline_object.generate_chrome_trace_format()
            chrome_trace_save_path = 'timeline_{}.json'.format(global_step)
            with open(chrome_trace_save_path, 'w') as f:
                f.write(chrome_trace)
            logging.info('Profile trace saved to {}'.format(chrome_trace_save_path))
        if global_step == self._at_step:
            self._do_profile = True
        # Save preprocessed images for visual inspection.
        # NOTE(review): imsave is fed the fetched value directly — presumably
        # a single image rather than a batch; confirm against the pipeline.
        rotated_image = run_values.results[1]
        # image_to_float = run_values.results[2]
        # resized_image = run_values.results[3]
        preprocessed_image = run_values.results[4]
        if global_step % 200 == 0:
            misc.imsave('./Chinese_aster/experiments/rotated_image/rotated_image_{}.jpg'.format(global_step), rotated_image)
            # misc.imsave('./Chinese_aster/experiments/image_to_float/image_to_float_{}.jpg'.format(global_step), image_to_float)
            # misc.imsave('./Chinese_aster/experiments/resized_image/resized_image_{}.jpg'.format(global_step), resized_image)
            misc.imsave('./Chinese_aster/experiments/preprocessed_image/preprocessed_image_{}.jpg'.format(global_step), preprocessed_image)
        # Print decoded (forward) predictions so training quality is visible
        # in the logs.
        predictions = run_values.results[5][0]
        if global_step % 200 == 0:
            for prediction in predictions:
                predict_string = ""
                for char in prediction:
                    char = char.decode('utf-8')
                    predict_string += char
                print(predict_string)
|
// page routes (from access6080/books_ssr)
import { Router } from 'express';
import booksDb from '../db/books.js';

const pageRoutes = Router();

// Each route renders its matching EJS template; only /library needs data.
pageRoutes.get('/', (req, res) => {
    res.render('index.ejs');
});
pageRoutes.get('/library', (req, res) => {
    res.render('library.ejs', { books: booksDb });
});
pageRoutes.get('/create', (req, res) => {
    res.render('create.ejs');
});
pageRoutes.get('/update', (req, res) => {
    res.render('update.ejs');
});

export default pageRoutes;
/// Applies a follower's `ServerResponse` to the local bookkeeping and
/// returns the index of the next log entry to send to that server.
///
/// NOTE(review): a failed `AppendEntriesResponse` still returns
/// `next_log_index + 1`; Raft implementations normally *decrement* the
/// follower's next index on failure — confirm this is intentional.
fn process_server_response(response: ServerResponse, current_log_index: usize, current_state: &mut ServerState) -> usize {
    match response {
        ServerResponse::AppendEntriesResponse { success, next_log_index } => {
            if success {
                // Record how far the follower's log now extends.
                current_state.log_index = next_log_index;
                // Perform any other necessary updates to the server state
            }
            // Return the next log index to be sent to the server
            next_log_index + 1
        }
        ServerResponse::InstallSnapshot => {
            // Snapshot was installed: fold it into the local state.
            current_state.apply_snapshot();
            // Return the next log index to be sent to the server
            current_log_index + 1
        }
    }
}
#pragma once
#include "common.h"
namespace scheduler {
// Weekly timetable for a single student group: lessons are addressed by
// (week parity, day, lesson number) and free-text messages can be attached
// to individual days.
class GroupSchedule {
public:
    GroupSchedule();
    // Construct for a named group; the name format is checked against
    // _group_name_template (see is_group_name_valid).
    explicit GroupSchedule(string group_name);
    virtual ~GroupSchedule() = default;
    // True when group_name matches the static _group_name_template regex.
    static bool is_group_name_valid(const string &group_name);
    const string &get_group_name() const;
    void set_group_name(const string &group_name);
    const string &get_group_faculty() const;
    void set_group_faculty(const string &group_faculty);
    const string &get_group_magic_number() const;
    void set_group_magic_number(const string &group_magic_number);
    // Lesson access; indices are validated by _check_parity_day_and_lesson().
    const Lesson &get_lesson(int parity, int day, int lesson_number) const;
    void set_lesson(int parity, int day, int lesson_number, const Lesson &lesson);
    // Attach a free-form note to the given day.
    void add_addition_message(const string &message, int day_number);
    string to_string() const;
    friend ostream &operator<<(ostream &os, const GroupSchedule &gs);
    // NOTE(review): a const member returning a mutable reference lets callers
    // modify a logically-const schedule — confirm this is deliberate.
    Lesson &get_lesson_non_const_ref(int parity, int day, int lesson_number) const;
private:
    const static regex _group_name_template;
    string _group_name;
    string _group_faculty;
    string _group_magic_number; // I don't know what this number means
    parity_week_template _group_schedule;
    void _create_group_schedule_template();
    // Validates the (parity, day, lesson) triple; behavior on invalid input
    // is defined in the implementation file (not visible here).
    void _check_parity_day_and_lesson(int parity, int day, int lesson) const;
};
}
|
#!/bin/bash
# Seed the "hypercube" Postgres database with test data: one flavor, one
# image, and two /24 subnets whose address pools (.100-.150) are
# pre-populated. Generates the SQL into a temp file and applies it via psql.
#
# Usage: $0 <postgres-password> <db-host>
passwd=$1
db_host=$2
datetime=$(date +'%Y-%m-%d %H:%M:%S')
sqlfile=/tmp/testdata-$(date +%H%M%S).sql

# Static rows: flavor, image, and the public subnet.
cat >"$sqlfile" <<EOF
INSERT INTO "flavors" ("id","created_at","updated_at","deleted_at","name","cpu","memory","disk") VALUES (1, '$datetime','$datetime',NULL,'m1.tiny',1,256,8) RETURNING "flavors"."id";
ALTER sequence "flavors_id_seq" restart with 2;
INSERT INTO "images" ("id","created_at","updated_at","deleted_at","name","os_code","format","architecture","status","href","checksum") VALUES (1,'$datetime','$datetime',NULL,'centos','centos','qcow2','x86-64','available','','') RETURNING "images"."id";
ALTER sequence "images_id_seq" restart with 2;
INSERT INTO "subnets" ("id","created_at","updated_at","deleted_at","name","network","netmask","gateway","start","end","vlan","type","router") VALUES (1,'$datetime','$datetime',NULL,'public','192.168.1.0','255.255.255.0','192.168.1.1/24','192.168.1.100','192.168.1.150',5000,'public',0) RETURNING "subnets"."id";
EOF

# Address pool for the public subnet: 192.168.1.100 - 192.168.1.150.
for i in $(seq 100 150); do
cat >>"$sqlfile" <<EOF
INSERT INTO "addresses" ("created_at","updated_at","deleted_at","address","netmask","type","subnet_id","interface") VALUES ('$datetime','$datetime',NULL,'192.168.1.$i/24','255.255.255.0','ipv4',1,0);
EOF
done

# Private subnet definition.
cat >>"$sqlfile" <<EOF
INSERT INTO "subnets" ("id","created_at","updated_at","deleted_at","name","network","netmask","gateway","start","end","vlan","type","router") VALUES (2,'$datetime','$datetime',NULL,'private','172.16.20.0','255.255.255.0','172.16.20.1/24','172.16.20.100','172.16.20.150',5010,'private',0) RETURNING "subnets"."id";
EOF

# Address pool for the private subnet: 172.16.20.100 - 172.16.20.150.
for i in $(seq 100 150); do
cat >>"$sqlfile" <<EOF
INSERT INTO "addresses" ("created_at","updated_at","deleted_at","address","netmask","type","subnet_id","interface") VALUES ('$datetime','$datetime',NULL,'172.16.20.$i/24','255.255.255.0','ipv4',2,0);
EOF
done

cat >>"$sqlfile" <<EOF
ALTER sequence "subnets_id_seq" restart with 3;
EOF

# Give the database a moment to come up before connecting.
sleep 6
export PGUSER=postgres
export PGPASSWORD=$passwd
export PGHOST=$db_host
export PGDATABASE=hypercube
psql -c "select count(*) from flavors"
# Abort on the first SQL error so partial seeding is noticed.
psql -v ON_ERROR_STOP=1 -f "$sqlfile"
rm -f "$sqlfile"
|
// artifacts page (from suyash-naithani/apicurio-registry)
/**
* @license
* Copyright 2020 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from "react";
import "./artifacts.css";
import {Button, Modal, PageSection, PageSectionVariants} from '@patternfly/react-core';
import {ArtifactList} from "./components/artifactList";
import {PageComponent, PageProps, PageState} from "../basePage";
import {ArtifactsPageToolbar} from "./components/toolbar";
import {ArtifactsPageEmptyState} from "./components/empty";
import {UploadArtifactForm} from "./components/uploadForm";
import {InvalidContentModal} from "../../components/modals";
import {If} from "../../components/common/if";
import {ArtifactsSearchResults, CreateArtifactData, GetArtifactsCriteria, Paging, Services} from "../../../services";
import {SearchedArtifact} from "../../../models";
import {PleaseWaitModal} from "../../components/modals/pleaseWaitModal";
import {RootPageHeader} from "../../components";
/**
 * Properties for the artifacts page (nothing beyond the shared page props).
 */
// tslint:disable-next-line:no-empty-interface
export interface ArtifactsPageProps extends PageProps {
}
/**
 * State for the artifacts page.
 */
export interface ArtifactsPageState extends PageState {
    // Search/sort criteria used when querying for artifacts.
    criteria: GetArtifactsCriteria;
    // Visibility flags for the page's modals.
    isUploadModalOpen: boolean;
    // True once the upload form's inputs validate.
    isUploadFormValid: boolean;
    isInvalidContentModalOpen: boolean;
    isPleaseWaitModalOpen: boolean;
    // Current pagination window.
    paging: Paging;
    // Last search results; null until the first query completes.
    results: ArtifactsSearchResults | null;
    // Data collected from the upload form; null when none has been entered.
    uploadFormData: CreateArtifactData | null;
    // Server-side validation error shown in the invalid-content modal.
    invalidContentError: any | null;
}
/**
* The artifacts page.
*/
export class ArtifactsPage extends PageComponent<ArtifactsPageProps, ArtifactsPageState> {
constructor(props: Readonly<ArtifactsPageProps>) {
    // NOTE(review): this constructor only forwards to super and could be
    // removed without changing behavior.
    super(props);
}
public renderPage(): React.ReactElement {
return (
<React.Fragment>
<PageSection className="ps_artifacts-header" variant={PageSectionVariants.light} padding={{ default: "noPadding" }}>
<RootPageHeader tabKey={0} />
</PageSection>
<If condition={this.showToolbar}>
<PageSection variant={PageSectionVariants.light} padding={{default : "noPadding"}}>
<ArtifactsPageToolbar artifacts={this.results()}
paging={this.state.paging}
onPerPageSelect={this.onPerPageSelect}
onSetPage={this.onSetPage}
onUploadArtifact={this.onUploadArtifact}
onChange={this.onFilterChange}/>
</PageSection>
</If>
<PageSection variant={PageSectionVariants.default} isFilled={true}>
{
this.artifactsCount() === 0 ?
<ArtifactsPageEmptyState onUploadArtifact={this.onUploadArtifact} isFiltered={this.isFiltered()}/>
:
<React.Fragment>
<ArtifactList artifacts={this.artifacts()} onGroupClick={this.onGroupClick} />
</React.Fragment>
}
</PageSection>
<Modal
title="Upload Artifact"
variant="large"
isOpen={this.state.isUploadModalOpen}
onClose={this.onUploadModalClose}
className="upload-artifact-modal pf-m-redhat-font"
actions={[
<Button key="upload" variant="primary" data-testid="modal-btn-upload" onClick={this.doUploadArtifact} isDisabled={!this.state.isUploadFormValid}>Upload</Button>,
<Button key="cancel" variant="link" data-testid="modal-btn-cancel" onClick={this.onUploadModalClose}>Cancel</Button>
]}
>
<UploadArtifactForm onChange={this.onUploadFormChange} onValid={this.onUploadFormValid} />
</Modal>
<InvalidContentModal error={this.state.invalidContentError}
isOpen={this.state.isInvalidContentModalOpen}
onClose={this.closeInvalidContentModal} />
<PleaseWaitModal message="Creating artifact, please wait..."
isOpen={this.state.isPleaseWaitModalOpen} />
</React.Fragment>
);
}
protected initializePageState(): ArtifactsPageState {
return {
criteria: {
sortAscending: true,
type: "everything",
value: "",
},
invalidContentError: null,
isInvalidContentModalOpen: false,
isLoading: true,
isPleaseWaitModalOpen: false,
isUploadFormValid: false,
isUploadModalOpen: false,
paging: {
page: 1,
pageSize: 10
},
results: null,
uploadFormData: null
};
}
// @ts-ignore
protected createLoaders(): Promise {
return this.search();
}
private onUploadArtifact = (): void => {
this.setSingleState("isUploadModalOpen", true);
};
private onUploadModalClose = (): void => {
this.setSingleState("isUploadModalOpen", false);
};
private onArtifactsLoaded(results: ArtifactsSearchResults): void {
this.setMultiState({
isLoading: false,
results
});
}
private doUploadArtifact = (): void => {
this.onUploadModalClose();
this.pleaseWait(true);
if (this.state.uploadFormData !== null) {
// If no groupId is provided, set it to the "default" group
if (!this.state.uploadFormData.groupId) {
this.state.uploadFormData.groupId = "default";
}
Services.getGroupsService().createArtifact(this.state.uploadFormData).then(metaData => {
const groupId: string = metaData.groupId ? metaData.groupId : "default";
const artifactLocation: string = this.linkTo(`/artifacts/${ encodeURIComponent(groupId) }/${ encodeURIComponent(metaData.id) }`);
Services.getLoggerService().info("[ArtifactsPage] Artifact successfully uploaded. Redirecting to details: ", artifactLocation);
this.navigateTo(artifactLocation)();
}).catch( error => {
this.pleaseWait(false);
if (error && error.error_code === 400) {
this.handleInvalidContentError(error);
} else {
this.handleServerError(error, "Error uploading artifact.");
}
});
}
};
private results(): ArtifactsSearchResults {
return this.state.results ? this.state.results : {
artifacts: [],
count: 0,
page: 1,
pageSize: 10
};
}
private artifacts(): SearchedArtifact[] {
return this.state.results ? this.state.results.artifacts : [];
}
private artifactsCount(): number {
return this.state.results ? this.state.results.artifacts.length : 0;
}
private onFilterChange = (criteria: GetArtifactsCriteria): void => {
this.setMultiState({
criteria,
isLoading: true
}, () => {
this.search();
});
};
private isFiltered(): boolean {
return !!this.state.criteria.value;
}
// @ts-ignore
private search(): Promise {
return Services.getGroupsService().getArtifacts(this.state.criteria, this.state.paging).then(results => {
this.onArtifactsLoaded(results);
}).catch(error => {
this.handleServerError(error, "Error searching for artifacts.");
});
}
private onSetPage = (event: any, newPage: number, perPage?: number): void => {
const paging: Paging = {
page: newPage,
pageSize: perPage ? perPage : this.state.paging.pageSize
};
this.setMultiState({
isLoading: true,
paging
}, () => {
this.search();
});
};
private onPerPageSelect = (event: any, newPerPage: number): void => {
const paging: Paging = {
page: this.state.paging.page,
pageSize: newPerPage
};
this.setMultiState({
isLoading: true,
paging
}, () => {
this.search();
});
};
private onUploadFormValid = (isValid: boolean): void => {
this.setSingleState("isUploadFormValid", isValid);
};
private onUploadFormChange = (data: CreateArtifactData): void => {
this.setSingleState("uploadFormData", data);
};
private closeInvalidContentModal = (): void => {
this.setSingleState("isInvalidContentModalOpen", false);
};
private pleaseWait = (isOpen: boolean): void => {
this.setSingleState("isPleaseWaitModalOpen", isOpen);
};
private handleInvalidContentError(error: any): void {
Services.getLoggerService().info("INVALID CONTENT ERROR", error);
this.setMultiState({
invalidContentError: error,
isInvalidContentModalOpen: true
});
}
private onGroupClick = (groupId: string): void => {
// TODO filter by the group
};
private showToolbar = (): boolean => {
const hasCriteria: boolean = this.state.criteria && this.state.criteria.value != null && this.state.criteria.value != "";
return hasCriteria || this.results().count > 0;
}
}
|
# Derive DOCKER_HOST_IP when it is not already set: fall back to the local
# hostname, or strip the scheme and port from DOCKER_HOST (e.g. tcp://1.2.3.4:2376).
if [ -z "$DOCKER_HOST_IP" ] ; then
    if [ -z "$DOCKER_HOST" ] ; then
        export DOCKER_HOST_IP=$(hostname)
    else
        echo using ${DOCKER_HOST?}
        host_without_port=${DOCKER_HOST%\:*}
        export DOCKER_HOST_IP=${host_without_port#tcp\:\/\/}
    fi
fi
echo DOCKER_HOST_IP is $DOCKER_HOST_IP

# Spring datasource and Eventuate-local settings, all pointing at the Docker host.
export SPRING_DATASOURCE_URL=jdbc:mysql://${DOCKER_HOST_IP}/eventuate
export SPRING_DATASOURCE_USERNAME=mysqluser
export SPRING_DATASOURCE_PASSWORD=mysqlpw
export SPRING_DATASOURCE_DRIVER_CLASS_NAME=com.mysql.jdbc.Driver
export EVENTUATELOCAL_KAFKA_BOOTSTRAP_SERVERS=$DOCKER_HOST_IP:9092
export EVENTUATELOCAL_CDC_DB_USER_NAME=root
export EVENTUATELOCAL_CDC_DB_PASSWORD=rootpassword
export EVENTUATELOCAL_ZOOKEEPER_CONNECTION_STRING=$DOCKER_HOST_IP:2181
package ua.yet.adv.java.concurrency;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
/**
 * Small showcase for using the semaphore. Only 2 workers are have permits to
 * work. Later (after 200 milliseconds) the release() is called from outside
 * thread thus adding one more permit. The fourth worker will start after first
 * one will release the permit.
 *
 * @author yuriy
 */
public class OperationsSemaphore {

    /** Shared semaphore: at most two workers may hold a permit at once. */
    static private Semaphore sem = new Semaphore(2);

    /**
     * Submits four workers plus a task that releases an extra permit after
     * 200 ms, then shuts the pool down (already-submitted tasks still run).
     *
     * @param args unused
     */
    public static void main(String[] args) {
        ExecutorService service = Executors.newCachedThreadPool();

        service.execute(new Worker(1));
        service.execute(new Worker(2));
        service.execute(new Worker(3));
        service.execute(new Worker(4));

        service.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    Thread.sleep(200);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag instead of silently swallowing it.
                    Thread.currentThread().interrupt();
                }
                System.out.println("releasing...");
                sem.release();
            }
        });

        service.shutdown();
    }

    /**
     * Worker that acquires a permit, "works" for one second, then releases it.
     */
    static class Worker extends Thread {

        private final int id;

        public Worker(int id) {
            super();
            this.id = id;
        }

        @Override
        public void run() {
            System.out.println(id + " - trying to aquire...");
            try {
                sem.acquire();
                System.out.println(id + " - aquired");

                sleep(1000);

                sem.release();
                System.out.println(id + " - released");
            } catch (InterruptedException e) {
                // Re-assert the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
                System.err.println(id + " - interrupted");
            }
        }
    }
}
|
package database;
import database.jsonhelpers.JSONDataFolders;
import database.jsonhelpers.JSONDataManager;
import database.jsonhelpers.JSONToObjectConverter;
import engine.entities.Entity;
import org.json.JSONObject;
import util.PropertiesReader;
import util.exceptions.ObjectBlueprintNotFoundException;
import java.io.File;
import java.util.Map;
/**
 * A class for creating entities from a given blueprint, so that entities can
 * create entities for things like attacks
 *
 * @author <NAME>
 */
public class ObjectFactory {

    /* Instance Variables */
    private JSONObject blueprintJSON;                // raw blueprint loaded from the database
    private JSONToObjectConverter<Entity> converter; // converts the blueprint JSON into Entity objects

    /**
     * Creates a new factory capable of creating objects from the blueprint
     * stored in the database
     * @param objectName is a {@code String} corresponding to the name of the
     *                   type of Entity that needs to be created
     * @param folder the JSON data folder to load the blueprint from
     */
    public ObjectFactory(String objectName, JSONDataFolders folder) {
        setObjectBlueprint(objectName, folder);
    }

    /**
     * Sets the ObjectFactory to use a new JSON file's blueprint
     * @param objectName is {@code String} representing the name of the file corresponding
     *                   to the blueprint you want to use
     * @param folder the JSON data folder to load the blueprint from
     * @throws ObjectBlueprintNotFoundException if the blueprint for the objectName is not
     *         found within the database
     */
    public void setObjectBlueprint(String objectName, JSONDataFolders folder) {
        JSONDataManager manager = new JSONDataManager(folder);
        blueprintJSON = manager.readJSONFile(objectName);
        // Diamond operator instead of the original raw type, so the converter
        // is type-checked as JSONToObjectConverter<Entity>.
        converter = new JSONToObjectConverter<>(Entity.class);
    }

    /**
     * @return A new {@code Entity} corresponding to the blueprint for this object
     */
    public Entity newObject() {
        return converter.createObjectFromJSON(Entity.class, blueprintJSON);
    }

    /**
     * Creates a new object from the blueprint, but overrides the JSON blueprint
     * that is currently held
     * @param overriddenParams is a {@code Map<String, Object>} that holds the properties
     *                         to override within the JSON object
     * @return A new {@code Entity} corresponding to the blueprint for this object but with
     *         the overriden params set
     */
    public Entity newObject(Map<String, Object> overriddenParams) {
        JSONObject modifiedBlueprint = new JSONObject();
        for(String key : blueprintJSON.keySet()) {
            if(overriddenParams.containsKey(key)){
                modifiedBlueprint.put(key, overriddenParams.get(key));
            } else {
                modifiedBlueprint.put(key, blueprintJSON.get(key));
            }
        }
        return converter.createObjectFromJSON(Entity.class, modifiedBlueprint);
    }

    /**
     * @return A {@code String[]} of all the valid Entity Blueprint Names
     */
    public static String[] getEntityTypes(JSONDataFolders folder) {
        // File.list() returns null when the directory is missing or unreadable;
        // normalize that to an empty array so callers never hit an NPE.
        if(folder == JSONDataFolders.DEFAULT_USER_ENTITY) {
            String[] names = new File(PropertiesReader.path("default_blueprints")).list();
            return names != null ? names : new String[0];
        }
        if(folder == JSONDataFolders.ENTITY_BLUEPRINT) {
            String[] names = new File(PropertiesReader.path("blueprints")).list();
            return names != null ? names : new String[0];
        }
        return new String[0];
    }
}
|
# Open an interactive Python shell inside the "app" docker-compose service.
sudo docker-compose run app /bin/bash -c "python"
|
#!/bin/bash
# Launch SQL Developer inside the dev-machine container, detached, running as
# the host developer's uid/gid so created files have the right owner.
. $(cd `dirname $0` && pwd)/../config.sh

docker exec -d \
    -u $DEVELOPER_UID:$DEVELOPER_GID \
    $DEVMACHINE_NAME \
    /bin/bash -c 'printenv JAVA_HOME | /opt/sqldeveloper/sqldeveloper.sh'
    # NOTE(review): piping `printenv JAVA_HOME` into sqldeveloper.sh looks odd --
    # confirm the launcher really reads the JDK path from stdin.
// Copyright 2017 The TIE Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Question data for incrementDecCodedNumber.
*/
/* eslint no-magic-numbers: ["error",
{ "ignore": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] }] */
// TIE question definition: starter code shown to the learner, deliberately
// buggy auxiliary implementations used to match common mistakes and trigger
// the feedback messages below, plus correctness and performance test suites.
// NOTE(review): the embedded Python appears to have lost its indentation --
// confirm against the original question data before shipping.
globalData.questions['incrementDecimalCodedNumber'] = {  // eslint-disable-line dot-notation
  title: 'Increment a Decimal-Coded Number',
  starterCode: {
    python:
`def increment(digits):
return []
`
  },
  auxiliaryCode: {
    python:
`class AuxiliaryCode(object):
@classmethod
def createDigits(cls, atom, input_size):
result = []
for i in xrange(input_size):
result.append(int(atom))
return result
@classmethod
def incrementLastDigitOnly(cls, digits):
digits_array = [x for x in digits]
digits_array[-1] += 1
return digits_array
@classmethod
def incrementWithoutChangingSize(cls, digits):
digits_array = [x for x in digits]
digits_array[-1] = (digits_array[-1] + 1) % 10
carry = (digits_array[-1] == 0)
for i in xrange(len(digits_array) - 2, -1, -1):
if carry == False:
break
digits_array[i] = (digits_array[i] + 1) % 10
carry = (digits_array[-1] == 0)
return digits_array
`
  },
  tasks: [{
    id: 'incrementNumber',
    instructions: [{
      content: [
        'In this problem, you\'ll write a function to increment a ',
        'decimal-coded number, which is a list of digits (0 - 9) which ',
        'represent a decimal number (such as [1, 2] for 12 or [3, 2, 4] for ',
        '324). Your task is to increment a given decimal-coded number and ',
        'return the result (a list of digits).'
      ].join(''),
      type: 'text'
    }, {
      content: 'Input: [1, 2, 3, 4]\nOutput: [1, 2, 3, 5]',
      type: 'code'
    }, {
      content: 'Input: [2, 0, 9]\nOutput: [2, 1, 0]',
      type: 'code'
    }],
    prerequisiteSkills: ['Arrays'],
    acquiredSkills: ['Array Manipulation'],
    inputFunctionName: null,
    outputFunctionName: null,
    mainFunctionName: 'increment',
    languageSpecificTips: {
      python: []
    },
    // Correctness suites: each suite groups test cases for one scenario.
    testSuites: [{
      id: 'GENERAL_CASE',
      humanReadableName: 'the general case',
      testCases: [{
        input: [1, 2, 3, 4],
        allowedOutputs: [[1, 2, 3, 5]]
      }]
    }, {
      id: 'LAST_DIGIT_NINE',
      humanReadableName: 'last digit 9',
      testCases: [{
        input: [2, 0, 9],
        allowedOutputs: [[2, 1, 0]]
      }, {
        input: [2, 7, 8, 9],
        allowedOutputs: [[2, 7, 9, 0]]
      }]
    }, {
      id: 'SINGLE_DIGITS',
      humanReadableName: 'single digits',
      testCases: [{
        input: [0],
        allowedOutputs: [[1]]
      }]
    }, {
      id: 'MULTIPLE_NINES_AT_END',
      humanReadableName: 'multiple 9s at the end',
      testCases: [{
        input: [1, 9, 9],
        allowedOutputs: [[2, 0, 0]]
      }]
    }, {
      id: 'ALL_NINES',
      humanReadableName: 'all 9s',
      testCases: [{
        input: [9],
        allowedOutputs: [[1, 0]]
      }, {
        input: [9, 9],
        allowedOutputs: [[1, 0, 0]]
      }, {
        input: [9, 9, 9],
        allowedOutputs: [[1, 0, 0, 0]]
      }, {
        input: [9, 9, 9, 9],
        allowedOutputs: [[1, 0, 0, 0, 0]]
      }]
    }],
    // Feedback shown when the learner's output matches a buggy auxiliary
    // implementation; messages escalate from hint to explanation.
    buggyOutputTests: [{
      buggyFunctionName: 'AuxiliaryCode.incrementLastDigitOnly',
      ignoredTestSuiteIds: [],
      messages: [
        "Try running your code on [1, 9] in your head. What's the result?.",
        [
          "It doesn't look like you're carrying when you increment a number ",
          "like 19 or 29."
        ].join(''),
        [
          "You should also increment the next digit by 1 when incrementing a 9."
        ].join('')
      ]
    }, {
      buggyFunctionName: 'AuxiliaryCode.incrementWithoutChangingSize',
      ignoredTestSuiteIds: [],
      messages: [
        "What happens when you run your code on [9]?",
        "Are you adding on new digits when necessary?",
        [
          "If you increment [9] or [9, 9], you will need to add an extra ",
          "digit to the array and return [1, 0] or [1, 0, 0], respectively."
        ].join('')
      ]
    }],
    suiteLevelTests: [],
    // The solution is expected to run in linear time on n copies of '9'.
    performanceTests: [{
      inputDataAtom: '9',
      transformationFunctionName: 'AuxiliaryCode.createDigits',
      expectedPerformance: 'linear',
      evaluationFunctionName: 'increment'
    }]
  }]
};
|
#!/bin/bash
# packages = authselect
# platform = multi_platform_fedora,Red Hat Enterprise Linux 8,Red Hat Enterprise Linux 9,Oracle Linux 8
# variables = var_accounts_passwords_pam_faillock_fail_interval=900

# Enable the pam_faillock feature through authselect.
authselect select sssd --force
authselect enable-feature with-faillock

# Rewrite faillock.conf from scratch: truncate it, then set the failure
# interval (seconds) and silent mode.
> /etc/security/faillock.conf
echo "fail_interval = 900" >> /etc/security/faillock.conf
echo "silent" >> /etc/security/faillock.conf
|
// Calculator model driving a two-line display (previous expression / current
// entry).  Expression tokens -- numbers and HTML operator snippets such as
// "<small>+</small>" -- are accumulated in this.words and evaluated
// left-to-right per operator group by update("update").
class Calculator {
    constructor (previousOperandText,currentOperandText){
        this.previousOperandText = previousOperandText; // DOM element for the upper (expression) line
        this.currentOperandText = currentOperandText;   // DOM element for the lower (entry) line
        this.clear();
        this.curry = "";          // parsed numeric value of the current operand (set in update())
        this.words = [];          // accumulated expression tokens
        this.finalParagraph = ""; // expression string rendered next to the result
    }
    // Reset all calculator state and blank both display lines.
    clear() {
        this.currentOperand = "";
        this.previousOperand = "";
        this.operation = undefined;
        this.words = [];
        this.curry = "";
        this.result = "";
        this.previousOperandText.innerText = "";
        this.currentOperandText.innerText = "";
    }
    // Drop the last character of the current entry.
    delete() {
        this.currentOperand = this.currentOperand.toString().slice(0,-1);
    }
    // Append a digit (or a single decimal point) to the current entry.
    appendNum(number) {
        if (number === "." && this.currentOperand.toString().includes(".")) {
            // a second decimal point is ignored
        }
        else {
            this.currentOperand = this.currentOperand.toString() + number.toString();
        };
    }
    // Round to the fewest decimal places (1-4) that still represent the value
    // exactly.  Note: toFixed returns a string.
    roundIt(round){
        let roundNow = "";
        if (round == Number(round.toFixed(1))){
            roundNow = round.toFixed(1);
        }
        else if (round == Number(round.toFixed(2))){
            roundNow = round.toFixed(2);
        }
        else if (round == Number(round.toFixed(3))){
            roundNow = round.toFixed(3);
        }
        else {
            roundNow = round.toFixed(4);
        }
        return roundNow;
    }
    // Unary operations applied immediately to the current operand.
    // 0.0174533 approximates pi/180 (degrees -> radians).
    instanceOperation(operate) {
        if (operate == "sin" && this.currentOperand) {
            this.currentOperand = this.currentOperand * 0.0174533;
            this.currentOperand = this.roundIt(Math.sin(this.currentOperand));
        };
        if (operate == "cos" && this.currentOperand) {
            this.currentOperand = this.currentOperand * 0.0174533;
            this.currentOperand = this.roundIt(Math.cos(this.currentOperand));
        };
        if (operate == "tan" && this.currentOperand) {
            let testTan = Number(this.currentOperand);
            if (testTan >= 0) {
                if (testTan > 360){
                    // Reduce the angle toward [0, 360) before evaluating.
                    let testDivide = Math.floor((testTan / 360));
                    console.log(testDivide);
                    testTan = this.currentOperand - (testDivide * 360);
                    console.log(this.currentOperand);
                }
                if (testTan !== 90 && testTan !== 270) { // tan undefined at 90/270 degrees
                    this.currentOperand = testTan * 0.0174533;
                    this.currentOperand = this.roundIt(Math.tan(this.currentOperand));
                }
            }
            if (testTan < 0) {
                if (testTan < -360){
                    let testDivide = Math.abs(Math.ceil((testTan / 360)));
                    console.log(testDivide);
                    testTan = this.currentOperand + (testDivide * 360);
                    console.log(testTan);
                }
                if (testTan !== -90 && testTan !== -270) {
                    this.currentOperand = testTan * 0.0174533;
                    this.currentOperand = this.roundIt(Math.tan(this.currentOperand));
                }
            }
        };
        if (operate == "cot" && this.currentOperand) {
            let testCot = Number(this.currentOperand);
            if (testCot > 0) {
                if (testCot > 360){
                    let testDivide = Math.floor((testCot / 360));
                    console.log(testDivide);
                    testCot = this.currentOperand - (testDivide * 360);
                    console.log(this.currentOperand);
                }
                if (testCot !== 180) { // cot undefined at 180 degrees
                    this.currentOperand = testCot * 0.0174533;
                    this.currentOperand = this.roundIt(1/Math.tan(this.currentOperand));
                }
            }
            if (testCot < 0) {
                if (testCot < -360){
                    let testDivide = Math.abs(Math.ceil((testCot / 360)));
                    console.log(testDivide);
                    testCot = this.currentOperand + (testDivide * 360);
                    console.log(testCot);
                }
                if (testCot !== -180) {
                    this.currentOperand = testCot * 0.0174533;
                    this.currentOperand = this.roundIt(1/Math.tan(this.currentOperand));
                }
            }
        };
        if (operate == "|x|" && this.currentOperand) {
            this.currentOperand = Math.abs(this.currentOperand);
        };
        if (operate == "%" && this.currentOperand) {
            this.currentOperand = this.currentOperand / 100;
        };
        if (operate == "RAND" && !this.currentOperand) {
            // Only fills an empty entry with a random 4-decimal value.
            this.currentOperand = Math.random().toFixed(4);
        };
        if (operate == "log" && this.currentOperand) {
            // Base-10 logarithm via the change-of-base formula.
            this.currentOperand = Math.log(this.currentOperand) / Math.log(10);
            if (this.currentOperand % 1) {
                this.currentOperand = this.roundIt(this.currentOperand);
            }
        };
        if (operate == "π" && !this.currentOperand) {
            this.currentOperand = 3.1416;
        };
        if (operate == "e" && !this.currentOperand) {
            this.currentOperand = 2.7183;
        };
        if (operate == "n!" && this.currentOperand) {
            if (this.currentOperand >= 0) {
                let factorialCalc = 1;
                for (let i = this.currentOperand; i > 0; i--){
                    factorialCalc *= i;
                }
                if (factorialCalc % 1) {
                    factorialCalc = this.roundIt(factorialCalc);
                }
                this.currentOperand = factorialCalc;
            }
            else {
                // Factorial of a negative number is undefined.
                this.currentOperand = NaN;
            }
        };
        if (operate == "√" && this.currentOperand) {
            this.currentOperand = Math.sqrt(this.currentOperand);
            if (this.currentOperand % 1) {
                this.currentOperand = this.roundIt(this.currentOperand);
            }
        };
        if (operate == "+/-" && this.currentOperand) {
            this.currentOperand = -1 * this.currentOperand;
        };
        if (operate == "x ²" && this.currentOperand) {
            this.currentOperand = Math.pow(this.currentOperand, 2);
            if (this.currentOperand % 1) {
                this.currentOperand = this.roundIt(this.currentOperand);
            }
        };
    }
    // Binary-operator key pressed: store the operator as an HTML token and
    // shift the current entry into previousOperand.
    chooseOperation(operation) {
        if (this.currentOperand === '') return;
        if (isNaN(this.currentOperand)) {
            // A previous step produced NaN -- restart the expression.
            this.words = [];
            this.currentOperand = 0;
        };
        if (this.previousOperand !== '') {
            this.compute();
        }
        if (operation == "x ⁿ") {
            this.operation = "<h6>^</h6>";
            this.previousOperand = this.currentOperand;
            this.currentOperand = '';
        }
        else if (operation == "Round to x") {
            this.operation = "<h6>RT</h6>";
            this.previousOperand = this.currentOperand;
            this.currentOperand = '';
        }
        else if (operation == "*") {
            this.operation = "<small>×</small>";
            this.previousOperand = this.currentOperand;
            this.currentOperand = '';
        }
        else if (operation == "/") {
            this.operation = "<small>÷</small>";
            this.previousOperand = this.currentOperand;
            this.currentOperand = '';
        }
        else {
            this.operation = `<small>${operation}</small>`;
            this.previousOperand = this.currentOperand;
            this.currentOperand = '';
        }
    }
    // Push the latest operand/operator onto this.words, dropping a trailing
    // operator token that would otherwise be duplicated.
    compute() {
        if (this.currentOperand == ".") {
            this.clear();
        }
        else {
            if (this.words[this.words.length-2] == "<small>+</small>" || this.words[this.words.length-2] == "<small>-</small>" || this.words[this.words.length-2] == "<small>×</small>" || this.words[this.words.length-2] == "<small>÷</small>" || this.words[this.words.length-2] == "<h6>^</h6>" || this.words[this.words.length-2] == "<h6>RT</h6>") {
                this.words.pop();
            }
            if (isNaN(this.prevy) || this.prevy == undefined){}
            else if (this.words[this.words.length-2] == "<small>=</small>" && this.words[this.words.length-2] !== this.prevy){
                // An '=' just happened: restart the token list from the result.
                this.words = [];
                this.words.push(`${this.prevy}`);
            }
            else if (this.words[this.words.length-2] !== "<small>=</small>") {
                this.words.push(`${this.prevy}`);
            }
            if (this.operation == undefined){}
            else {
                this.words.push(`${this.operation}`);
            }
            if (isNaN(this.curry) || this.curry == undefined){}
            else {
                this.words.push(`${this.curry}`);
            }
        }
    }
    // Format a number for display: thousands separators on the integer part,
    // decimal digits appended verbatim.
    getDisplay(...number) {
        const sNumber = number.toString();
        const iDigits = parseFloat(sNumber.split('.')[0]);
        const dDigits = sNumber.split('.')[1];
        let integerDisplay;
        if (isNaN(iDigits)) {
            integerDisplay = '';
        }
        else {
            integerDisplay = iDigits.toLocaleString('en', {maximumFractionDigits: 0});
        }
        if (dDigits != null) {
            return `${integerDisplay}.${dDigits}`;
        }
        else {
            return integerDisplay;
        }
    }
    // Refresh the display.  update("update") evaluates the whole expression
    // (RT, ^, ×, ÷, +, - groups in that order, each left-to-right); a plain
    // update() just re-renders the in-progress entry.
    update(updates) {
        if (updates == "update") {
            this.compute();
            this.words.pop();
            if (this.currentOperand){
                this.words.push(this.currentOperand);
            }
            let result = this.words;
            this.finalParagraph = this.words.toString().replace(/,/g, "");
            let numbers =[];
            // Each block below collapses one operator group in place:
            // splice(i-1, 3, newN) replaces "a op b" with its value, then the
            // scan restarts from the beginning (i = 0).
            if (result.includes("<h6>RT</h6>")){
                for (let i = 0; i <result.length -1; i++){
                    if (result[i] == "<h6>RT</h6>") {
                        let newN = parseFloat(result[i-1]).toFixed(parseFloat(result[i+1]));
                        numbers = parseFloat(result.splice(i-1, 3, newN));
                        i = 0;
                    }
                    else { }
                }
            }
            if (result.includes("<h6>^</h6>")){
                for (let i = 0; i <result.length -1; i++){
                    if (result[i] == "<h6>^</h6>") {
                        let newN = Math.pow(parseFloat(result[i-1]), parseFloat(result[i+1]));
                        if (newN % 1) {
                            newN = this.roundIt(newN);
                        }
                        numbers = parseFloat(result.splice(i-1, 3, newN));
                        i = 0;
                    }
                    else { }
                }
            }
            if (result.includes("<small>×</small>")){
                for (let i = 0; i <result.length -1; i++){
                    if (result[i] == "<small>×</small>") {
                        let newN = parseFloat(result[i-1]) * parseFloat(result[i+1]);
                        if (newN % 1) {
                            newN = this.roundIt(newN);
                        }
                        numbers = parseFloat(result.splice(i-1, 3, newN));
                        i = 0;
                    }
                    else { }
                }
            }
            if (result.includes("<small>÷</small>")){
                for (let i = 0; i <result.length -1; i++){
                    if (result[i] == "<small>÷</small>") {
                        let newN = parseFloat(result[i-1]) / parseFloat(result[i+1]);
                        if (newN % 1) {
                            newN = this.roundIt(newN);
                        }
                        numbers = parseFloat(result.splice(i-1, 3, newN));
                        i = 0;
                    }
                    else { }
                }
            }
            if (result.includes("<small>+</small>")){
                for (let i = 0; i <result.length -1; i++){
                    if (result[i] == "<small>+</small>") {
                        let newN = parseFloat(result[i-1]) + parseFloat(result[i+1]);
                        if (newN % 1) {
                            newN = this.roundIt(newN);
                        }
                        numbers = parseFloat(result.splice(i-1, 3, newN));
                        i = 0;
                    }
                    else { }
                }
            }
            if (result.includes("<small>-</small>")){
                for (let i = 0; i <result.length -1; i++){
                    if (result[i] == "<small>-</small>") {
                        let newN = parseFloat(result[i-1]) - parseFloat(result[i+1]);
                        if (newN % 1) {
                            newN = this.roundIt(newN);
                        }
                        numbers = parseFloat(result.splice(i-1, 3, newN));
                        i = 0;
                    }
                    else { }
                }
            }
            if (!this.finalParagraph || !result){ }
            else {
                // result[0] now holds the fully reduced value; render and reset.
                if (Number(result[0]) % 1) {
                    result[0] = this.roundIt(Number(result[0]));
                }
                this.previousOperandText.innerHTML = `${this.finalParagraph}<small>=</small>${this.getDisplay(result[0])}`;
                this.currentOperandText.innerHTML = this.getDisplay(result);
                this.words = [];
                this.currentOperand = parseFloat(result[0]);
                this.previousOperand = '';
                this.operation = undefined;
            }
        }
        else {
            // Non-evaluating refresh: cache numeric operands and repaint.
            this.prevy = parseFloat(this.previousOperand);
            this.curry = parseFloat(this.currentOperand);
            this.currentOperandText.innerHTML = this.currentOperand;
            if (this.operation !== undefined) {
                (this.words[0])? this.previousOperandText.innerHTML = this.words.toString().replace(/,/g, "") : this.previousOperandText.innerHTML = this.previousOperand + this.operation;
            }
        }
    }
}
// --- DOM wiring -------------------------------------------------------------
const dataClear = document.querySelector("[data-clear]");
const dataDelete = document.querySelector("[data-delete]");
const dataOperation = document.querySelectorAll('[data-operation]');
const instanceOperation = document.querySelectorAll('[data-instance-operation]');
const dataNumber = document.querySelectorAll('[data-number]');
const dataEquals = document.querySelector("[data-equals]");
const previousOperandText = document.querySelector("[data-previous-operand]");
const currentOperandText = document.querySelector("[data-current-operand]");

const calculator = new Calculator(previousOperandText, currentOperandText);

// Digit buttons append to the current entry.
dataNumber.forEach(number => {
    number.addEventListener("click", (num)=>{
        num.preventDefault();
        calculator.appendNum(number.innerText);
        calculator.update();
    })
});
// Unary operations (sin, cos, n!, ...) apply immediately.
instanceOperation.forEach(button => {
    button.addEventListener("click", function(but){
        but.preventDefault();
        calculator.instanceOperation(this.innerText);
        calculator.update();
    })
});
// Binary operators are queued until '=' is pressed.
dataOperation.forEach(button => {
    button.addEventListener("click", function(but){
        but.preventDefault();
        calculator.chooseOperation(this.innerText);
        calculator.update();
    })
});
dataClear.addEventListener("click", (cl)=>{
    cl.preventDefault();
    calculator.clear();
    calculator.update();
});
dataDelete.addEventListener("click", (del)=>{
    del.preventDefault();
    calculator.delete();
    calculator.update();
});
// '=' triggers full evaluation of the accumulated expression.
dataEquals.addEventListener("click", (eq)=>{
    eq.preventDefault();
    calculator.update("update");
});
// Keyboard support: digits (top row 48-57, numpad 96-105), + - * /, Enter, Delete.
// NOTE(review): e.keyCode is deprecated -- consider migrating to e.key/e.code.
window.addEventListener('keydown', (e)=>{
    if ((e.keyCode >= 48 && e.keyCode <= 57) || (e.keyCode >= 96 && e.keyCode <= 105)){
        e.preventDefault();
        calculator.appendNum(e.key);
        calculator.update();
    };
    if (e.key == "+" || e.key == "-" || e.key == "/" || e.key == "*"){
        e.preventDefault();
        calculator.chooseOperation(e.key);
        calculator.update();
    };
    if (e.key == "Enter"){
        e.preventDefault();
        calculator.update("update");
    };
    if (e.key == "Delete"){
        e.preventDefault();
        calculator.delete();
        calculator.update();
    };
});
#!/usr/bin/env bash
# Pre-version hook: stage and stash any local changes, clean the repo, and run
# the test suite before the version bump proceeds.
set -e

echo '--- PREVERSION --------------------'

echo ' -- Add any changes to git...'
# TODO prompt about stashing
git add .

echo ' -- Stash any changes...'
stash_output=$(git stash -m LUME_CLI_STASH)
if [[ $stash_output == *'No local changes'* ]]; then
  echo ' - No changes to stash.'
else
  echo ' - Changes stashed.'
fi

echo ' -- Clean repo...'
# TODO don't run clean if there's no clean script.
npm run clean

echo ' -- Run tests...'
npm test

# undo any changes in case test build output is non-deterministic
git checkout .

echo '--- PREVERSION DONE --------------------'
|
def binary_search(arr, target):
    """Return the index of ``target`` in the sorted sequence ``arr``, or -1.

    Classic iterative binary search: halve the [lo, hi] window until the
    target is found or the window is empty.
    """
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        value = arr[mid]
        if value == target:
            return mid
        if value > target:
            hi = mid - 1        # target, if present, lies to the left
        else:
            lo = mid + 1        # target, if present, lies to the right
    return -1
#!/bin/bash
#/**
# * remove all vagrant dependencies
# *
# * @category devops
# */
# WARNING: destructive -- removes VirtualBox, Vagrant, Ruby/rvm, Python dev
# tooling, Chef and the NFS server from this machine.
BASEDIR=$(dirname "${0}")
. ${BASEDIR}/../tools/colors.sh

# Stop a running Vagrant VM first; VirtualBox cannot be removed while in use.
if [ -n "$(vagrant status|grep 'running')" ]; then
    {
        echo_warning 'Vagrant running, attempting to stop'
        echo_info 'vagrant halt'
        vagrant halt
    } || {
        echo_error 'Stopping Vagrant failed'
    }
fi

# Only remove VirtualBox once no VM is running.
if [ -z "$(vagrant status|grep 'running')" ]; then
    # uninstall virtualbox
    echo_caption 'uninstall virtualbox'
    echo_info 'sudo apt-get remove --assume-yes virtualbox-5.2'
    sudo apt-get remove --assume-yes virtualbox-5.2
    echo_info 'sudo groupdel vboxusers'
    sudo groupdel vboxusers
fi

# uninstall vagrant
echo_caption 'uninstall vagrant'
echo_info 'sudo rm -rf /opt/vagrant'
sudo rm -rf /opt/vagrant
echo_info 'sudo rm -f /usr/bin/vagrant'
sudo rm -f /usr/bin/vagrant
echo_info 'sudo rm -rf ~/.vagrant.d'
sudo rm -rf ~/.vagrant.d
echo_info 'sudo apt-get remove --assume-yes --autoremove vagrant'
sudo apt-get remove --assume-yes --autoremove vagrant
echo_info 'sudo dpkg -r vagrant'
sudo dpkg -r vagrant

# uninstall ruby
echo_caption 'uninstall ruby'
echo_info 'rvm remove ruby'
rvm remove ruby
echo_info 'sudo apt-get autoremove -y --autoremove ruby'
sudo apt-get autoremove -y --autoremove ruby

# uninstall rvm
echo_caption 'uninstall rvm'
# This will remove the rvm/ directory and all the rubies built within it.
echo_info 'rvm implode'
rvm implode
# In order to remove the final trace of rvm, you need to remove the rvm gem, too:
echo_info 'gem uninstall rvm'
gem uninstall rvm

# uninstall python
echo_caption 'uninstall python'
echo_info 'sudo apt-get remove --assume-yes --autoremove python-pip python-dev'
sudo apt-get remove --assume-yes --autoremove python-pip python-dev

# uninstall chefdk
echo_caption 'uninstall chefdk'
echo_info 'sudo dpkg -P chefdk'
sudo dpkg --purge chefdk

# uninstall chef-solo
echo_caption 'uninstall chef-solo'
echo_info 'sudo dpkg -P chef-solo'
sudo dpkg --purge chef
# berks and chef doesn't uninstall properly
echo_info 'rm -rf /opt/chef'
rm -rf /opt/chef
echo_info 'rm -rf ~/.berkshelf'
rm -rf ~/.berkshelf

# stopping nfs-kernel-server
echo_info 'sudo service nfs-kernel-server stop'
sudo service nfs-kernel-server stop

# uninstall essentials
echo_caption 'uninstall essentials'
echo_info 'sudo apt-get remove --assume-yes --autoremove autoconf'
sudo apt-get remove --assume-yes --autoremove autoconf
echo_info 'sudo apt-get remove --assume-yes --autoremove build-essential'
sudo apt-get remove --assume-yes --autoremove build-essential
echo_info 'sudo apt-get remove --assume-yes --autoremove libssl-dev'
sudo apt-get remove --assume-yes --autoremove libssl-dev
echo_info 'sudo apt-get remove --assume-yes --autoremove libxml2-dev'
sudo apt-get remove --assume-yes --autoremove libxml2-dev
echo_info 'sudo apt-get remove --assume-yes --autoremove libxslt-dev'
sudo apt-get remove --assume-yes --autoremove libxslt-dev
echo_info 'sudo apt-get remove --assume-yes --autoremove ruby-dev'
sudo apt-get remove --assume-yes --autoremove ruby-dev

# uninstall nfs server
echo_caption 'uninstall nfs'
echo_info 'sudo dpkg --purge nfs-kernel-server'
sudo dpkg --purge nfs-kernel-server
echo_info 'sudo dpkg --purge nfs-common'
sudo dpkg --purge nfs-common

# Finally drop any now-orphaned packages.
echo_info 'sudo apt-get --assume-yes autoremove'
sudo apt-get --assume-yes autoremove
|
/*
* Copyright (c) 2016 Oracle and/or its affiliates. All rights reserved. This
* code is released under a tri EPL/GPL/LGPL license. You can use it,
* redistribute it and/or modify it under the terms of the:
*
* Eclipse Public License version 1.0
* GNU General Public License version 2
* GNU Lesser General Public License version 2.1
*/
package org.jruby.truffle.language.arguments;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.frame.FrameSlot;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.source.SourceSection;
import org.jruby.truffle.RubyContext;
import org.jruby.truffle.core.Layouts;
import org.jruby.truffle.language.RubyNode;
import org.jruby.truffle.language.locals.ReadFrameSlotNode;
import org.jruby.truffle.language.locals.ReadFrameSlotNodeGen;
import org.jruby.truffle.language.locals.WriteFrameSlotNode;
import org.jruby.truffle.language.locals.WriteFrameSlotNodeGen;
/**
 * Helper node used when a block declares keyword arguments: it reads the
 * packed argument array out of a frame slot, delegates the keyword-argument
 * extraction to a Ruby-level helper, and writes the remaining positional
 * arguments back into the same slot.
 */
public class RunBlockKWArgsHelperNode extends RubyNode {

    @Child private ReadFrameSlotNode readArrayNode;    // reads the packed args array from the slot
    @Child private WriteFrameSlotNode writeArrayNode;  // writes the remaining args back to the slot

    // Name of the **kwrest parameter; passed through to the Ruby helper.
    private final Object kwrestName;

    public RunBlockKWArgsHelperNode(RubyContext context, SourceSection sourceSection, FrameSlot arrayFrameSlot, Object kwrestName) {
        super(context, sourceSection);
        // Both child nodes operate on the same frame slot.
        readArrayNode = ReadFrameSlotNodeGen.create(arrayFrameSlot);
        writeArrayNode = WriteFrameSlotNodeGen.create(arrayFrameSlot);
        this.kwrestName = kwrestName;
    }

    @Override
    public Object execute(VirtualFrame frame) {
        // Deliberately excluded from compilation -- see the bailout message.
        CompilerDirectives.bailout("blocks with kwargs are not optimized yet");

        final Object array = readArrayNode.executeRead(frame);

        // Delegate kwargs splitting to a Ruby helper, giving it access to the
        // current frame via a materialized binding.
        final Object remainingArray = ruby("Truffle::Primitive.load_arguments_from_array_kw_helper(array, kwrest_name, binding)", "array", array, "kwrest_name", kwrestName, "binding", Layouts.BINDING.createBinding(coreLibrary().getBindingFactory(), frame.materialize()));

        writeArrayNode.executeWrite(frame, remainingArray);

        return nil();
    }
}
|
<reponame>yafraorg/yafra-database<gh_stars>0
-- SQL*Plus export script: spools every ROOT.* table into a flat file named
-- mpul_<n>. Formatting below disables headers and pagination so the files
-- contain raw rows only. The mpul_<n> numbering is not sequential in places
-- (e.g. mpul_9 before mpul_6) -- presumably load-order dependent; do not reorder.
set head off
set linesize 500
set numwidth 11
set colsep ""
set pagesize 20000
set echo off
SPOOL mpul_1
select * from ROOT.MAXIMA;
SPOOL OFF
SPOOL mpul_2
select * from ROOT.SPRACHEN;
SPOOL OFF
SPOOL mpul_3
select * from ROOT.BEZEICHNUNG;
SPOOL OFF
-- STATUS_WERTE: the KOND column is formatted explicitly so the numeric value
-- round-trips with full precision instead of the default numwidth.
SPOOL mpul_4
select sta_id, abk, sta, bez_id, TO_CHAR(kond,'999999999999999.9999999'), typ from ROOT.STATUS_WERTE;
SPOOL OFF
SPOOL mpul_5
select * from ROOT.TEXTE;
SPOOL OFF
SPOOL mpul_9
select * from ROOT.TMP_TEXTE;
SPOOL OFF
SPOOL mpul_6
select * from ROOT.TMP_BEZEICHNUNG;
SPOOL OFF
SPOOL mpul_7
select * from ROOT.AKTIONSTEXTE;
SPOOL OFF
SPOOL mpul_8
select * from ROOT.HISTORY;
SPOOL OFF
SPOOL mpul_10
select * from ROOT.HELP;
SPOOL OFF
SPOOL mpul_12
select * from ROOT.MSG;
SPOOL OFF
SPOOL mpul_11
select * from ROOT.LABEL;
SPOOL OFF
SPOOL mpul_13
select * from ROOT.SAISON;
SPOOL OFF
SPOOL mpul_14
select * from ROOT.WAEHRUNG;
SPOOL OFF
SPOOL mpul_15
select * from ROOT.LAND;
SPOOL OFF
SPOOL mpul_16
select * from ROOT.ORT;
SPOOL OFF
SPOOL mpul_17
select * from ROOT.LAND_WHR;
SPOOL OFF
SPOOL mpul_18
select * from ROOT.LAND_SPR;
SPOOL OFF
SPOOL mpul_19
select * from ROOT.PERSONEN;
SPOOL OFF
SPOOL mpul_21
select * from ROOT.REISENDER_TYP;
SPOOL OFF
SPOOL mpul_20
select * from ROOT.REISENDER;
SPOOL OFF
SPOOL mpul_22
select * from ROOT.PERSADR;
SPOOL OFF
SPOOL mpul_23
select * from ROOT.KUNDEN_TYP;
SPOOL OFF
SPOOL mpul_24
select * from ROOT.KUNDEN;
SPOOL OFF
SPOOL mpul_25
select * from ROOT.KOLLEKTIV_TYP;
SPOOL OFF
SPOOL mpul_26
select * from ROOT.KOLLEKTIV;
SPOOL OFF
SPOOL mpul_27
select * from ROOT.TRAEGER_TYP;
SPOOL OFF
SPOOL mpul_28
select * from ROOT.KATEGORIE;
SPOOL OFF
SPOOL mpul_29
select * from ROOT.DIENST_TRAEGER;
SPOOL OFF
SPOOL mpul_30
select * from ROOT.ANGEBOTS_TYP;
SPOOL OFF
SPOOL mpul_31
select * from ROOT.AKTIONS_TYP;
SPOOL OFF
SPOOL mpul_32
select * from ROOT.AKTIONSPARA;
SPOOL OFF
SPOOL mpul_33
select * from ROOT.AKTIONEN;
SPOOL OFF
SPOOL mpul_34
select * from ROOT.AKTIONSGRP;
SPOOL OFF
SPOOL mpul_35
select * from ROOT.DIENST_ANGEBOT;
SPOOL OFF
SPOOL mpul_36
select * from ROOT.DLG_PARTS;
SPOOL OFF
SPOOL mpul_37
select * from ROOT.DIENSTLEISTUNG;
SPOOL OFF
SPOOL mpul_38
select * from ROOT.DLG_DLG;
SPOOL OFF
SPOOL mpul_39
select * from ROOT.PROGRAMM;
SPOOL OFF
SPOOL mpul_40
select * from ROOT.ARRANGEMENT;
SPOOL OFF
SPOOL mpul_41
select * from ROOT.ARR_DLG;
SPOOL OFF
SPOOL mpul_42
select * from ROOT.HOST_KONT;
SPOOL OFF
SPOOL mpul_43
select * from ROOT.KONTINGENT;
SPOOL OFF
SPOOL mpul_44
select * from ROOT.KONT_DETAIL;
SPOOL OFF
SPOOL mpul_58
select * from ROOT.MP_PROFIL;
SPOOL OFF
SPOOL mpul_45
select * from ROOT.BUCHUNG;
SPOOL OFF
SPOOL mpul_46
select * from ROOT.BCH_DLN;
SPOOL OFF
SPOOL mpul_47
select * from ROOT.AKT_DETAIL;
SPOOL OFF
SPOOL mpul_48
select * from ROOT.KOSTEN_ART;
SPOOL OFF
SPOOL mpul_49
select * from ROOT.KOMMISSION;
SPOOL OFF
SPOOL mpul_50
select * from ROOT.KOMM_DETAIL;
SPOOL OFF
SPOOL mpul_51
select * from ROOT.RESERVATION;
SPOOL OFF
-- PREIS: price columns (apr/epr/vpr) are formatted explicitly, same reason
-- as STATUS_WERTE above; remaining columns are listed in table order.
SPOOL mpul_52
select pre_id,
TO_CHAR(apr,'999999999999999.9999999'),
TO_CHAR(epr,'999999999999999.9999999'),
TO_CHAR(vpr,'999999999999999.9999999'),
glt, WHR_ID, KBST, KAR_ID, DLTT_ID, KAT_ID,
RKR_ID, DLA_ID, DL_ID, DL_VONDAT, DL_BISDAT,
KONT_ID, STA_ID, TEXTID, HISTORY, SAI_ID
from ROOT.PREIS;
SPOOL OFF
SPOOL mpul_53
select * from ROOT.KAPA;
SPOOL OFF
SPOOL mpul_59
select * from ROOT.MP_ARRKOM;
SPOOL OFF
SPOOL mpul_54
select * from ROOT.GRAPHIK;
SPOOL OFF
SPOOL mpul_55
select * from ROOT.PCX_FILES;
SPOOL OFF
SPOOL mpul_56
select * from ROOT.GRAFORM;
SPOOL OFF
SPOOL mpul_57
select * from ROOT.REGIONS;
SPOOL OFF
-- Script only reads; COMMIT ends the read-consistent transaction cleanly.
COMMIT WORK;
|
import json
from typing import Union
def pythonObj_to_jsonStr(obj: Union[dict, list]) -> str:
    """Serialize a Python dict or list into its JSON string representation.

    Thin wrapper around :func:`json.dumps` using its default settings
    (ASCII-safe output, ``", "`` / ``": "`` separators).
    """
    return json.dumps(obj)
<reponame>bradmccoydev/keptn<filename>cli/cmd/trigger.go
package cmd
import "github.com/spf13/cobra"
// triggerCmd is the parent "keptn trigger" command. It performs no action
// itself; concrete actions (delivery, evaluation) are registered on it as
// subcommands elsewhere in this package.
var triggerCmd = &cobra.Command{
	Use:   "trigger [delivery | evaluation] ",
	Short: "Triggers the execution of an action in keptn",
	Long:  "Triggers the execution of an action in keptn",
}

// init attaches the trigger command group to the CLI root command.
func init() {
	rootCmd.AddCommand(triggerCmd)
}
|
<filename>docs/html/search/classes_0.js
// Doxygen-generated search index fragment: maps the lowercase search key
// 'dcel' to the DCEL struct's documentation page. Auto-generated — do not
// edit by hand; regenerate the docs instead.
var searchData=
[
  ['dcel',['DCEL',['../structDCEL.html',1,'']]]
];
|
<reponame>jinjuan-li/iot-suite-server<filename>iot-suite-server-service/src/main/java/com/tuya/iot/suite/service/idaas/impl/RoleServiceImpl.java
package com.tuya.iot.suite.service.idaas.impl;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tuya.iot.suite.ability.idaas.ability.GrantAbility;
import com.tuya.iot.suite.ability.idaas.ability.IdaasUserAbility;
import com.tuya.iot.suite.ability.idaas.ability.PermissionAbility;
import com.tuya.iot.suite.ability.idaas.ability.RoleAbility;
import com.tuya.iot.suite.ability.idaas.model.*;
import com.tuya.iot.suite.core.constant.ErrorCode;
import com.tuya.iot.suite.core.exception.ServiceLogicException;
import com.tuya.iot.suite.core.model.PageVO;
import com.tuya.iot.suite.core.util.Todo;
import com.tuya.iot.suite.service.dto.PermissionNodeDTO;
import com.tuya.iot.suite.service.dto.RoleCreateReqDTO;
import com.tuya.iot.suite.service.enums.RoleTypeEnum;
import com.tuya.iot.suite.service.idaas.GrantService;
import com.tuya.iot.suite.service.idaas.PermissionTemplateService;
import com.tuya.iot.suite.service.idaas.RoleService;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author <EMAIL>
* @description
* @date 2021/05/31
*/
@Service
@Setter
@Slf4j
public class RoleServiceImpl implements RoleService {

    @Autowired
    private RoleAbility roleAbility;
    @Autowired
    private GrantAbility grantAbility;
    @Autowired
    private IdaasUserAbility idaasUserAbility;
    @Autowired
    private PermissionAbility permissionAbility;
    @Autowired
    private PermissionTemplateService permissionTemplateService;

    /**
     * Creates a role in the given permission space and grants it the
     * permission set defined by the template for its role type.
     *
     * @return true only when both the role creation and the template grant succeed
     */
    @Override
    public Boolean createRole(String spaceId, RoleCreateReqDTO req) {
        // 0. check permission
        checkRoleWritePermission(spaceId, req.getUid(), req.getRoleCode());
        String roleType = RoleTypeEnum.fromRoleCode(req.getRoleCode()).name();
        List<PermissionNodeDTO> perms = permissionTemplateService.getTemplatePermissionFlattenList(roleType, Locale.CHINESE.getLanguage());
        // 1. create role
        boolean createRoleRes = roleAbility.createRole(spaceId, IdaasRoleCreateReq.builder()
                .roleCode(req.getRoleCode())
                .roleName(req.getRoleName())
                .remark(req.getRemark()).build());
        if (!createRoleRes) {
            return false;
        }
        // 2. grant permissions from template
        // if grant failure, need not rollback
        return grantAbility.grantPermissionsToRole(RoleGrantPermissionsReq.builder()
                .spaceId(spaceId)
                .roleCode(req.getRoleCode())
                .permissionCodes(perms.stream().map(it ->
                        it.getPermissionCode()).collect(Collectors.toList())
                ).build());
    }

    // Rejects any write targeting an 'admin' role.
    private void checkRoleWritePermission(String spaceId, String operatorUid, String targetRoleCode) {
        Assert.isTrue(!RoleTypeEnum.fromRoleCode(targetRoleCode).isAdmin(), "can not write a 'admin' role!");
        // Data-permission check: verify the operator holds a higher role than
        // the target. TODO: not implemented yet.
    }

    // Batch variant of the admin-role write guard.
    private void checkRoleWritePermission(String spaceId, String operatorUid, Collection<String> targetRoleCodes) {
        targetRoleCodes.forEach(targetRoleCode ->
                Assert.isTrue(!RoleTypeEnum.fromRoleCode(targetRoleCode).isAdmin(), "can not write a 'admin' role!"));
    }

    /**
     * Renames a role. NOTE(review): only roleName is forwarded; any remark on
     * the incoming request is dropped — confirm whether that is intentional.
     */
    @Override
    public Boolean updateRole(String spaceId, String operatorUid, String roleCode, RoleUpdateReq req) {
        checkRoleWritePermission(spaceId, operatorUid, roleCode);
        return roleAbility.updateRole(spaceId, roleCode, RoleUpdateReq.builder()
                .roleName(req.getRoleName()).build());
    }

    /**
     * Deletes a role, refusing when any user is still assigned to it.
     *
     * @throws ServiceLogicException when users are still attached to the role
     */
    @Override
    public Boolean deleteRole(String spaceId, String operatorUid, String roleCode) {
        checkRoleWritePermission(spaceId, operatorUid, roleCode);
        // The underlying API does not check for users still linked to the role
        // on deletion, so that check is implemented here.
        IdaasPageResult<IdaasUser> pageResult = idaasUserAbility.queryUserPage(spaceId, IdaasUserPageReq.builder()
                .roleCode(roleCode).build());
        if (pageResult.getTotalCount() > 0) {
            throw new ServiceLogicException(ErrorCode.ROLE_DEL_FAIL_FOR_RELATED_USERS);
        }
        return roleAbility.deleteRole(spaceId, roleCode);
    }

    /** Fetches a single role by code. */
    @Override
    public IdaasRole getRole(String spaceId, String operatorUid, String roleCode) {
        //checkRoleReadPermission(spaceId,operatorUid,roleCode);
        return roleAbility.getRole(spaceId, roleCode);
    }

    /** Lists the roles currently assigned to a user. */
    @Override
    public List<IdaasRole> queryRolesByUser(String spaceId, String uid) {
        //need check read permission?
        return roleAbility.queryRolesByUser(spaceId, uid);
    }

    /** Pages through the roles of a permission space. */
    @Override
    public PageVO<IdaasRole> queryRolesPagination(String spaceId, RolesPaginationQueryReq req) {
        IdaasPageResult<IdaasRole> pageResult = roleAbility.queryRolesPagination(spaceId, req);
        List<IdaasRole> list = pageResult.getResults();
        return PageVO.builder().pageNo(pageResult.getPageNumber())
                .pageSize(pageResult.getPageSize())
                .total(pageResult.getTotalCount())
                .data((List) list).build();
    }

    /**
     * Deletes several roles; true only when every deletion reports success.
     */
    @Override
    public boolean deleteRoles(String permissionSpaceId, String uid, Collection<String> roleCodes) {
        checkRoleWritePermission(permissionSpaceId, uid, roleCodes);
        // BUG FIX: the previous implementation used map(...).count(), which
        // always equals roleCodes.size() (so the method always returned true)
        // and, since Java 9, count() may even skip executing the map stage
        // entirely — meaning the deletions could silently never happen. Only
        // successful deletions are counted now, and the terminal filter forces
        // each deleteRole call to run.
        long successCount = roleCodes.stream()
                .map(roleCode -> roleAbility.deleteRole(permissionSpaceId, roleCode))
                .filter(Boolean::booleanValue)
                .count();
        return successCount == roleCodes.size();
    }

    /**
     * Re-synchronizes a role's permissions with its template: revokes
     * permissions no longer in the template and grants the missing ones.
     */
    @Override
    public Boolean resetRolePermissionsFromTemplate(String spaceId, String operatorUid, String roleCode) {
        // 0. check permission
        checkRoleWritePermission(spaceId, operatorUid, roleCode);
        Set<String> existPermSet = permissionAbility.queryPermissionsByRoleCodes(spaceId, PermissionQueryByRolesReq.builder()
                .roleCodeList(Lists.newArrayList(roleCode))
                .build()).stream().flatMap(it -> it.getPermissionList().stream()).map(it -> it.getPermissionCode())
                .collect(Collectors.toSet());
        RoleTypeEnum roleType = RoleTypeEnum.fromRoleCode(roleCode);
        // 1. get permissions from template
        List<String> templatePerms = permissionTemplateService.getTemplatePermissionFlattenList(roleType.name(), Locale.CHINESE.getLanguage())
                .stream().map(it -> it.getPermissionCode())
                .collect(Collectors.toList());
        Set<String> templatePermSet = Sets.newHashSet(templatePerms);
        // Set differences give the two delta lists to apply.
        List<String> permsToRevoke = Lists.newArrayList(Sets.difference(existPermSet, templatePermSet));
        List<String> permsToGrant = Lists.newArrayList(Sets.difference(templatePermSet, existPermSet));
        // 2. delete permissions if need
        if (!permsToRevoke.isEmpty()) {
            boolean delRes = grantAbility.revokePermissionsFromRole(RoleRevokePermissionsReq.builder()
                    .spaceId(spaceId)
                    .permissionCodes(permsToRevoke)
                    .roleCode(roleCode)
                    .build());
            if (!delRes) {
                log.info("revoke permissions from role failed");
                return false;
            }
        }
        // 3. add permissions if need
        if (!permsToGrant.isEmpty()) {
            boolean addRes = grantAbility.grantPermissionsToRole(RoleGrantPermissionsReq.builder()
                    .spaceId(spaceId)
                    .permissionCodes(permsToGrant)
                    .roleCode(roleCode)
                    .build());
            if (!addRes) {
                log.info("grant permissions to role failed");
                return false;
            }
        }
        return true;
    }

    /**
     * Computes which of the requested roleCodes are new for the user and,
     * when removeOld is set, revokes the user's current roles that are not
     * in the requested list. Admin roles may be neither updated nor granted.
     *
     * @return the subset of roleCodes the user does not yet hold
     */
    @Override
    public List<String> checkAndRemoveOldRole(String spaceId, String uid, List<String> roleCodes, boolean removeOld) {
        List<IdaasRole> userRoles = roleAbility.queryRolesByUser(spaceId, uid);
        // code -> name of the roles the user currently holds
        Map<String, String> roleMap = new HashMap<>();
        if (!CollectionUtils.isEmpty(userRoles)) {
            for (IdaasRole userRole : userRoles) {
                if (RoleTypeEnum.isAdminRoleCode(userRole.getRoleCode())) {
                    throw new ServiceLogicException(ErrorCode.ADMIN_CANT_NOT_UPDATE);
                }
                roleMap.put(userRole.getRoleCode(), userRole.getRoleName());
            }
        }
        List<String> newRoles = new ArrayList<>();
        if (!CollectionUtils.isEmpty(roleCodes)) {
            for (String roleCode : roleCodes) {
                if (RoleTypeEnum.isAdminRoleCode(roleCode)) {
                    throw new ServiceLogicException(ErrorCode.ADMIN_CANT_NOT_GRANT);
                }
                if (!roleMap.containsKey(roleCode)) {
                    newRoles.add(roleCode);
                } else {
                    // already held: drop from roleMap so it survives removeOld
                    roleMap.remove(roleCode);
                }
            }
        }
        if (removeOld) {
            // Revoke roles the user holds that were not requested again.
            roleMap.keySet().stream().forEach(e -> {
                Boolean removeRole = grantAbility.revokeRoleFromUser(spaceId, e, uid);
                if (removeRole) {
                    log.info("移除了uid={} 的角色roleCode={}", uid, e);
                }
            });
        }
        return newRoles;
    }

    /** Resolves the highest-privilege role type held by the given user. */
    @Override
    public RoleTypeEnum userOperateRole(String spaceId, String operatUserId) {
        List<IdaasRole> operatorRoles = roleAbility.queryRolesByUser(spaceId, operatUserId);
        return userOperateRole(spaceId, operatUserId, operatorRoles.stream().map(e -> e.getRoleCode()).collect(Collectors.toList()));
    }

    /**
     * Resolves the highest-privilege role type among the given role codes,
     * defaulting to 'normal' when none is present.
     */
    @Override
    public RoleTypeEnum userOperateRole(String spaceId, String operatUserId, List<String> roleCodes) {
        if (roleCodes != null) {
            Set<RoleTypeEnum> types = roleCodes.stream().map(it ->
                    RoleTypeEnum.fromRoleCode(it)
            ).collect(Collectors.toSet());
            // Ordered from most to least privileged; first match wins.
            RoleTypeEnum[] orderedTypes = new RoleTypeEnum[]{RoleTypeEnum.admin, RoleTypeEnum.manager, RoleTypeEnum.normal};
            for (RoleTypeEnum type : orderedTypes) {
                if (types.contains(type)) {
                    return type;
                }
            }
        }
        return RoleTypeEnum.normal;
    }
}
|
<reponame>mutru/smart_session_store
require 'active_record'
require 'base64'
require 'pp'
# +SmartSessionStore+ is a session store that strives to correctly handle session storage in the face of multiple
# concurrent actions accessing the session. It is derived from Stephen Kaes' +SqlSessionStore+, a stripped down,
# optimized for speed version of class +ActiveRecordStore+.
#
# This version is the one used for rails > 2.3
class SmartSessionStore < ActionController::Session::AbstractStore
  include SessionSmarts

  # The class to be used for creating, retrieving and updating sessions.
  # Defaults to SmartSessionStore::Session, which is derived from +ActiveRecord::Base+.
  #
  # In order to achieve acceptable performance you should implement
  # your own session class, similar to the one provided for Myqsl.
  #
  # Only functions +find_session+, +create_session+,
  # +update_session+ and +destroy+ are required. See file +mysql_session.rb+.
  cattr_accessor :session_class
  @@session_class = SqlSession

  # Rack env key under which the loaded session record is cached for the
  # duration of the request, so set_session can reuse it without a re-fetch.
  SESSION_RECORD_KEY = 'rack.session.record'.freeze

  private

  # Loads (or lazily creates an id for) the session identified by +sid+.
  # Returns [sid, data_hash]; the record itself is cached in the Rack env.
  # ActiveRecord::Base.silence suppresses SQL logging for these queries.
  def get_session(env, sid)
    ActiveRecord::Base.silence do
      sid ||= generate_sid
      session = find_session(sid)
      env[SESSION_RECORD_KEY] = session
      [sid, unmarshalize(session.data)]
    end
  end

  # Persists +session_data+ for +sid+, reusing the request-cached record when
  # present. save_session comes from SessionSmarts. Always returns true.
  def set_session(env, sid, session_data)
    ActiveRecord::Base.silence do
      record = env[SESSION_RECORD_KEY] ||= find_session(sid)
      data, session = save_session(record, session_data)
      env[SESSION_RECORD_KEY] = session
    end

    return true
  end

  # Finds an existing session row by id, or builds a fresh (unsaved) one with
  # an empty marshalled data payload.
  def find_session(id)
    @@session_class.find_by_session_id(id) ||
      @@session_class.new(:session_id => id, :data => marshalize({}))
  end
end
__END__
# This software is released under the MIT license
# Copyright (c) 2007-2009 <NAME>
# Copyright (c) 2005,2006 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
package de.ids_mannheim.korap.web.controller;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.time.ZonedDateTime;
import java.util.Map;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.SecurityContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import com.nimbusds.oauth2.sdk.AccessTokenResponse;
import com.nimbusds.oauth2.sdk.ParseException;
import com.nimbusds.oauth2.sdk.ResponseMode;
import com.nimbusds.oauth2.sdk.TokenRequest;
import com.nimbusds.oauth2.sdk.http.HTTPRequest.Method;
import com.nimbusds.oauth2.sdk.id.State;
import com.sun.jersey.spi.container.ResourceFilters;
import de.ids_mannheim.korap.constant.OAuth2Scope;
import de.ids_mannheim.korap.exceptions.KustvaktException;
import de.ids_mannheim.korap.oauth2.openid.OpenIdConfiguration;
import de.ids_mannheim.korap.oauth2.openid.OpenIdHttpRequestWrapper;
import de.ids_mannheim.korap.oauth2.openid.service.JWKService;
import de.ids_mannheim.korap.oauth2.openid.service.OpenIdAuthorizationService;
import de.ids_mannheim.korap.oauth2.openid.service.OpenIdConfigService;
import de.ids_mannheim.korap.oauth2.openid.service.OpenIdTokenService;
import de.ids_mannheim.korap.oauth2.service.OAuth2ScopeService;
import de.ids_mannheim.korap.security.context.TokenContext;
import de.ids_mannheim.korap.web.OpenIdResponseHandler;
import de.ids_mannheim.korap.web.filter.APIVersionFilter;
import de.ids_mannheim.korap.web.filter.AuthenticationFilter;
import de.ids_mannheim.korap.web.filter.BlockingFilter;
import de.ids_mannheim.korap.web.utils.MapUtils;
/**
* Describes OAuth2 webAPI with OpenId Connect implementation, an
* additional authentication protocol allowing clients to verify
* user authentication data represented by ID tokens.
*
* @author margaretha
*
*/
@Controller
@Path("{version}/oauth2/openid")
@ResourceFilters({ APIVersionFilter.class })
public class OAuth2WithOpenIdController {

    @Autowired
    private OpenIdAuthorizationService authzService;
    @Autowired
    private OpenIdTokenService tokenService;
    @Autowired
    private JWKService jwkService;
    @Autowired
    private OpenIdConfigService configService;
    @Autowired
    private OAuth2ScopeService scopeService;
    @Autowired
    private OpenIdResponseHandler openIdResponseHandler;

    /**
     * Required parameters for OpenID authentication requests:
     *
     * <ul>
     * <li>scope: MUST contain "openid" for OpenID Connect
     * requests</li>
     * <li>response_type: only "code" is supported</li>
     * <li>client_id: client identifier given by Kustvakt during
     * client registration</li>
     * <li>redirect_uri: MUST match a pre-registered redirect uri
     * during client registration</li>
     * </ul>
     *
     * Other parameters:
     *
     * <ul>
     * <li>state (recommended): Opaque value used to maintain state
     * between the request and the callback.</li>
     * <li>response_mode (optional) : mechanism to be used for
     * returning parameters, only "query" is supported</li>
     * <li>nonce (optional): String value used to associate a Client
     * session with an ID Token,
     * and to mitigate replay attacks. </li>
     * <li>display (optional): specifies how the Authorization Server
     * displays the authentication and consent user interface
     * pages. Options: page (default), popup, touch, wap. This
     * parameter is more relevant for Kalamar. </li>
     * <li>prompt (optional): specifies if the Authorization Server
     * prompts the End-User for reauthentication and consent. Defined
     * values: none, login, consent, select_account </li>
     * <li>max_age (optional): maximum Authentication Age.</li>
     * <li>ui_locales (optional): preferred languages and scripts for
     * the user interface represented as a space-separated list of
     * BCP47 [RFC5646] </li>
     * <li>id_token_hint (optional): ID Token previously issued by the
     * Authorization Server being passed as a hint</li>
     * <li>login_hint (optional): hint to the Authorization Server
     * about the login identifier the End-User might use to log
     * in</li>
     * <li>acr_values (optional): requested Authentication Context
     * Class Reference values. </li>
     * </ul>
     *
     * @see "OpenID Connect Core 1.0 specification"
     *
     * @param request
     * @param context
     * @param form
     * @return a redirect to client redirect uri
     */
    @POST
    @Path("authorize")
    @ResourceFilters({ AuthenticationFilter.class, BlockingFilter.class })
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    @Produces(MediaType.APPLICATION_JSON + ";charset=utf-8")
    public Response requestAuthorizationCode (
            @Context HttpServletRequest request,
            @Context SecurityContext context,
            MultivaluedMap<String, String> form) {
        TokenContext tokenContext = (TokenContext) context.getUserPrincipal();
        String username = tokenContext.getUsername();
        ZonedDateTime authTime = tokenContext.getAuthenticationTime();

        Map<String, String> map = MapUtils.toMap(form);
        State state = authzService.retrieveState(map);
        ResponseMode responseMode = authzService.retrieveResponseMode(map);

        // A request is treated as OpenID authentication (as opposed to plain
        // OAuth2 authorization) iff its scope contains "openid".
        boolean isAuthentication = false;
        if (map.containsKey("scope") && map.get("scope").contains("openid")) {
            isAuthentication = true;
        }

        URI uri = null;
        try {
            scopeService.verifyScope(tokenContext, OAuth2Scope.AUTHORIZE);
            if (isAuthentication) {
                // redirect_uri is mandatory for OpenID requests only.
                authzService.checkRedirectUriParam(map);
            }
            uri = authzService.requestAuthorizationCode(form, username,
                    isAuthentication, authTime);
        }
        catch (ParseException e) {
            return openIdResponseHandler.createErrorResponse(e, state);
        }
        catch (KustvaktException e) {
            // Error is reported back to the client's redirect uri when possible.
            return openIdResponseHandler.createAuthorizationErrorResponse(e,
                    isAuthentication, e.getRedirectUri(), state, responseMode);
        }
        ResponseBuilder builder = Response.temporaryRedirect(uri);
        return builder.build();
    }

    // Exchanges an authorization grant for an access token (OpenID token endpoint).
    @POST
    @Path("token")
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    @Produces(MediaType.APPLICATION_JSON + ";charset=utf-8")
    public Response requestAccessToken (
            @Context HttpServletRequest servletRequest,
            MultivaluedMap<String, String> form) {

        Map<String, String> map = MapUtils.toMap(form);
        Method method = Method.valueOf(servletRequest.getMethod());

        URL url = null;
        try {
            url = new URL(servletRequest.getRequestURL().toString());
        }
        catch (MalformedURLException e) {
            // TODO Auto-generated catch block
            // NOTE(review): this swallows the exception and continues with
            // url == null, which will surface later as an obscure failure;
            // should be converted into a proper error response.
            e.printStackTrace();
        }

        try {
            OpenIdHttpRequestWrapper httpRequest =
                    new OpenIdHttpRequestWrapper(method, url);
            httpRequest.toHttpRequest(servletRequest, (Map<String, List<String>>) form);

            TokenRequest tokenRequest = TokenRequest.parse(httpRequest);
            AccessTokenResponse tokenResponse =
                    tokenService.requestAccessToken(tokenRequest);
            return openIdResponseHandler.createResponse(tokenResponse,
                    Status.OK);
        }
        catch (ParseException e) {
            return openIdResponseHandler.createErrorResponse(e, null);
        }
        catch (KustvaktException e) {
            return openIdResponseHandler.createTokenErrorResponse(e);
        }
    }

    /**
     * Retrieves Kustvakt public keys of JWK (Json Web Key) set
     * format.
     *
     * @return json string representation of the public keys
     *
     * @see "RFC 8017 regarding RSA specifications"
     * @see "RFC 7517 regarding JWK (Json Web Key) and JWK Set"
     */
    @GET
    @Path("jwks")
    @Produces(MediaType.APPLICATION_JSON + ";charset=utf-8")
    public String requestPublicKeys () {
        return jwkService.generatePublicKeySetJson();
    }

    /**
     * When supporting discovery, must be available at
     * {issuer_uri}/.well-known/openid-configuration
     *
     * @return the server's OpenID provider configuration document
     */
    @GET
    @Path("config")
    @Produces(MediaType.APPLICATION_JSON + ";charset=utf-8")
    public OpenIdConfiguration requestOpenIdConfig () {
        return configService.retrieveOpenIdConfigInfo();
    }
}
|
<reponame>Karmadon/geo_fencer<filename>drivers/s2.go
package drivers
import (
"github.com/golang/geo/s2"
"github.com/karmadon/geo"
)
// S2fence indexes geo features by S2 cells at a fixed resolution so that a
// point-in-fence query only has to inspect the covers registered for the
// single cell containing the point.
type S2fence struct {
	resolution int
	covers     map[s2.CellID][]cover
}

// NewS2fence returns an empty fence index operating at the given S2 level.
func NewS2fence(resolution int) *S2fence {
	return &S2fence{
		resolution: resolution,
		covers:     make(map[s2.CellID][]cover),
	}
}

// Add indexes every shape of f. For each shape it computes a boundary cover
// and an interior cover; cells fully inside the shape are recorded so Get can
// skip the exact containment test for them.
func (s *S2fence) Add(f *geo.Feature) {
	coverer := NewFlatCoverer(s.resolution)
	for _, shape := range f.Geometry {
		// S2 loops must be counter-clockwise; reverse clockwise shapes.
		if shape.IsClockwise() {
			shape.Reverse()
		}
		points := make([]s2.Point, len(shape.Coordinates))
		for i, c := range shape.Coordinates[:] {
			points[i] = s2.PointFromLatLng(s2.LatLngFromDegrees(c.Lat, c.Lon))
		}
		region := s2.Region(LoopRegionFromPoints(points))
		bounds := coverer.Covering(region)
		if len(bounds) < 1 {
			continue
		}
		interiors := coverer.InteriorCovering(region)
		c := cover{
			interior: make(map[s2.CellID]bool, len(interiors)),
			feature:  f,
		}
		for _, cellID := range interiors {
			c.interior[cellID] = true
		}
		for _, cellID := range bounds {
			s.covers[cellID] = append(s.covers[cellID], c)
		}
	}
}

// Get returns all features containing the coordinate. Interior cells match
// immediately; boundary cells fall back to an exact containment test.
func (s *S2fence) Get(coordinate geo.Coordinate) (matches []*geo.Feature) {
	cellID := s2.CellIDFromLatLng(s2.LatLngFromDegrees(coordinate.Lat, coordinate.Lon)).Parent(s.resolution)
	for _, cover := range s.covers[cellID] {
		if _, ok := cover.interior[cellID]; ok {
			matches = append(matches, cover.feature)
		} else if cover.feature.Contains(coordinate) {
			matches = append(matches, cover.feature)
		}
	}
	return
}

// cover associates one feature with the set of cells fully inside it.
type cover struct {
	feature  *geo.Feature
	interior map[s2.CellID]bool
}
// LoopRegion adapts s2.Loop to the s2.Region interface by supplying the
// CapBound/ContainsCell/IntersectsCell methods the coverer needs.
type LoopRegion struct {
	*s2.Loop
}

// LoopRegionFromPoints builds a LoopRegion from an ordered list of points
// (expected counter-clockwise; see S2fence.Add).
func LoopRegionFromPoints(points []s2.Point) *LoopRegion {
	loop := s2.LoopFromPoints(points)
	return &LoopRegion{loop}
}

// CapBound returns a bounding cap derived from the loop's rect bound.
func (l *LoopRegion) CapBound() s2.Cap {
	return l.RectBound().CapBound()
}

// ContainsCell reports whether all four cell vertices lie inside the loop.
// NOTE(review): vertex containment alone does not rule out a loop edge
// crossing the cell; presumably acceptable at the resolutions used — confirm.
func (l *LoopRegion) ContainsCell(cell s2.Cell) bool {
	for i := 0; i < 4; i++ {
		v := cell.Vertex(i)
		if !l.ContainsPoint(v) {
			return false
		}
	}
	return true
}

// IntersectsCell reports whether any loop edge crosses a cell edge, falling
// back to full containment when no edge crossing is found.
func (l *LoopRegion) IntersectsCell(cell s2.Cell) bool {
	for i := 0; i < 4; i++ {
		crosser := s2.NewChainEdgeCrosser(cell.Vertex(i), cell.Vertex((i+1)%4), l.Vertex(0))
		for _, verticle := range l.Vertices()[1:] {
			if crosser.EdgeOrVertexChainCrossing(verticle) {
				return true
			}
		}
		if crosser.EdgeOrVertexChainCrossing(l.Vertex(0)) { //close the loop
			return true
		}
	}
	return l.ContainsCell(cell)
}
// FlatCoverer is an s2.RegionCoverer pinned to a single level, so every cell
// it emits has exactly the fence's resolution ("flat" covering).
type FlatCoverer struct {
	*s2.RegionCoverer
}

// NewFlatCoverer builds a coverer with MinLevel == MaxLevel == level.
func NewFlatCoverer(level int) *FlatCoverer {
	return &FlatCoverer{&s2.RegionCoverer{
		MinLevel: level,
		MaxLevel: level,
		LevelMod: 0,
		MaxCells: 1 << 12,
	}}
}

// Covering returns the cells (at the fixed level) that intersect r, computed
// by filtering a fast covering of r's bounding cap.
func (c *FlatCoverer) Covering(r s2.Region) (cover s2.CellUnion) {
	cellUnions := c.FastCovering(r.CapBound())
	for _, cellID := range cellUnions {
		cell := s2.CellFromCellID(cellID)
		if r.IntersectsCell(cell) {
			cover = append(cover, cellID)
		}
	}
	return cover
}

// CellUnion returns the intersecting cells as a normalized CellUnion.
func (c *FlatCoverer) CellUnion(region s2.Region) s2.CellUnion {
	cover := c.Covering(region)
	cover.Normalize()
	return cover
}

// InteriorCovering returns only the cells fully contained in the region.
func (c *FlatCoverer) InteriorCovering(region s2.Region) (cover s2.CellUnion) {
	cids := c.FastCovering(region.CapBound())
	for _, cid := range cids {
		cell := s2.CellFromCellID(cid)
		if region.ContainsCell(cell) {
			cover = append(cover, cid)
		}
	}
	return cover
}

// InteriorCellUnion returns the interior cells as a normalized CellUnion.
func (c *FlatCoverer) InteriorCellUnion(region s2.Region) s2.CellUnion {
	cover := c.InteriorCovering(region)
	cover.Normalize()
	return cover
}
|
from django.urls import path, include
from . import views
# URL routes for this app. The route names are used by reverse() and the
# {% url %} template tag; "delete" captures the stock id as a path segment.
urlpatterns = [
    path("", views.home, name="home"),
    path("about", views.about, name="about"),
    path("add_stock", views.add_stock, name="add_stock"),
    path("delete/<stock_id>", views.delete_stock, name="delete"),
]
<reponame>JavalParel/AEM<filename>Gauss-Seidal.c
#include<stdio.h>
/*
 * Solves a 3x3 linear system with the Gauss-Seidel iterative method.
 *
 * Reads the coefficients of
 *     a1*x + b1*y + c1*z = d1
 *     a2*x + b2*y + c2*z = d2
 *     a3*x + b3*y + c3*z = d3
 * from stdin and prints 60 iterations of the solution estimate.
 *
 * BUG FIX: the original updated xn/yn/zn only AFTER computing all three
 * components, which is the Jacobi method, not Gauss-Seidel. Each freshly
 * computed value is now used immediately within the same sweep, which is
 * what Gauss-Seidel requires (and typically converges faster).
 *
 * NOTE(review): convergence assumes a diagonally dominant system; no
 * dominance check or divide-by-zero guard on a[0], b[1], c[2] is performed.
 */
int main(){
    int a[5], b[5], c[5], d[5];
    float x[100], y[100], z[100], xn, yn, zn;
    xn = yn = zn = 0;  /* initial guess: origin */

    printf("Eg\na1*x+b1*y+c1*z=d1\na2*x+b2*y+c2*z=d2\na3*x+b3*y+c3*z=d3\n");
    for (int i = 0; i < 3; i++)
    {
        printf("Enter a%d :- ", i + 1);
        scanf("%d", &a[i]);
        printf("Enter b%d :- ", i + 1);
        scanf("%d", &b[i]);
        printf("Enter c%d :- ", i + 1);
        scanf("%d", &c[i]);
        printf("Enter d%d :- ", i + 1);
        scanf("%d", &d[i]);
    }

    /* Echo the system back to the user. */
    for (int i = 0; i < 3; i++)
    {
        printf("%d*x+%d*y+%d*z=%d\n", a[i], b[i], c[i], d[i]);
    }

    for (int i = 0; i < 60; i++)
    {
        /* Gauss-Seidel sweep: each component uses the newest values. */
        x[i] = (d[0] - b[0] * yn - c[0] * zn) / a[0];
        xn = x[i];
        y[i] = (d[1] - a[1] * xn - c[1] * zn) / b[1];
        yn = y[i];
        z[i] = (d[2] - a[2] * xn - b[2] * yn) / c[2];
        zn = z[i];
        printf("x%d = %f \ty%d = %f \tz%d = %f\n", i + 1, x[i], i + 1, y[i], i + 1, z[i]);
    }
    return 0;
}
#!/bin/bash
#$ -cwd
#$ -pe local 4
#$ -l mem_free=5G,h_vmem=6G,h_fsize=100G
################################################
# Shell script to run debris simulation scenario
################################################
# These scripts run the selected demultiplexing tool (e.g. cellSNP/Vireo) for a
# given debris simulation scenario (VCF file, dataset, percent doublets, percent
# debris).
# Requires the modified BAM file from the previous script "parse_BAM_X_debris.sh".
# start runtime
start=`date +%s`
# ----------------------------------------------------------
# Scenario: 1000 Genomes Project VCF filtered, cellSNP/Vireo
# ----------------------------------------------------------
# run cellSNP
mkdir -p ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo
# using recommended parameters for cellSNP
# -p 4 matches the 4 slots requested via "#$ -pe local 4" above.
cellsnp-lite \
-s ../../../supplementary_debris/scenarios/HGSOC/20pc/bam_merged_HGSOC_doublets20pc_debris10pc.bam \
-b ../../../supplementary_debris/scenarios/HGSOC/20pc/barcodes_merged_HGSOC_doublets20pc_debris10pc.tsv \
-O ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo/cellSNP \
-R ../../../data/cellSNP/genome1K.phase3.SNP_AF5e2.chr1toX.hg38.threeUTRs.vcf \
-p 4 \
--minMAF=0.1 \
--minCOUNT=20 \
--gzip
# end runtime
end=`date +%s`
runtime=`expr $end - $start`
# save runtime
mkdir -p ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo/runtimes
echo runtime: $runtime seconds > ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo/runtimes/runtime_1000GenomesFilt_cellSNPVireo_cellSNP_HGSOC_doublets20pc_debris10pc.txt
# start runtime (Vireo is timed separately from cellSNP)
start=`date +%s`
# run Vireo
# note parameter for known number of samples (3 for HGSOC dataset, 6 for lung dataset)
# --randSeed fixes the random seed so runs are reproducible.
vireo \
-c ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo/cellSNP \
-N 3 \
-o ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo/vireo \
--randSeed=123
# end runtime
end=`date +%s`
runtime=`expr $end - $start`
# save runtime
mkdir -p ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo/runtimes
echo runtime: $runtime seconds > ../../../supplementary_debris/scenarios/HGSOC/20pc/debris10pc/1000GenomesFilt_cellSNPVireo/runtimes/runtime_1000GenomesFilt_cellSNPVireo_vireo_HGSOC_doublets20pc_debris10pc.txt
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.