text stringlengths 1 1.05M |
|---|
package com.atjl.dbtiming.service;
import com.atjl.dbtiming.helper.TimingLockHelper;
import org.junit.*;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.transaction.TransactionConfiguration;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
/**
 * Integration-test skeleton for {@link TimingLockHelper}, wired through the
 * Spring test context defined in {@code test-service.xml}.  Tests run inside
 * a transaction that is NOT rolled back ({@code defaultRollback = false}),
 * so they hit the real database state.
 *
 * NOTE(review): every test body below is an empty placeholder — no behavior
 * is actually asserted yet.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@TransactionConfiguration(defaultRollback = false, transactionManager = "transactionManager")
@ContextConfiguration(locations = {"classpath:test-service.xml"})
@Transactional
public class TimingLockHelperTest {
// Helper under test, injected from the Spring context.
@Resource
TimingLockHelper timingLockHelper;
// TODO: exercise hasLockForMutex(mutex, tm) — currently an empty placeholder.
@Test
public void testHasLockForMutexMutexTm() throws Exception {
}
// TODO: exercise hasLock(mutex, tm) — currently an empty placeholder.
@Test
public void testHasLockMutexTm() throws Exception {
}
// TODO: exercise lock() — currently an empty placeholder.
@Test
public void testLock() throws Exception {
}
// TODO: exercise unlock() — currently an empty placeholder.
@Test
public void testUnlock() throws Exception {
}
// Per-test setup hook (currently a no-op).
@Before
public void before() throws Exception {
}
// Per-test teardown hook (currently a no-op).
@After
public void after() throws Exception {
}
// One-time setup before the whole class (currently a no-op).
@BeforeClass
public static void beforeClass() throws Exception{
}
// Rule allowing tests to declare expected exceptions; none declared yet.
@Rule
public final ExpectedException expectedException = ExpectedException.none();
}
|
<gh_stars>0
import React from 'react';
// material ui
import { Checkbox, ExpansionPanel, ExpansionPanelDetails, ExpansionPanelSummary, IconButton } from '@material-ui/core';
import { ArrowDropDown as ArrowDropDownIcon, ArrowDropUp as ArrowDropUpIcon } from '@material-ui/icons';
import { DEVICE_STATES } from '../../constants/deviceConstants';
import ExpandedDevice from './expanded-device';
const DeviceListItem = props => {
const { columnHeaders, device, expandable = true, expanded, globalSettings, onClick, onRowSelect, selectable, selected } = props;
const id_attribute = globalSettings.id_attribute !== 'Device ID' ? (device.identity_data || {})[globalSettings.id_attribute] : device.id;
const columnWidth = `${(selectable ? 90 : 100) / columnHeaders.length}%`;
return expandable ? (
<ExpansionPanel className="deviceListItem" square expanded={expanded} onChange={onClick}>
<ExpansionPanelSummary style={{ padding: '0 12px' }}>
{selectable ? <Checkbox checked={selected} onChange={onRowSelect} /> : null}
<div style={Object.assign({ width: columnHeaders[0].width || columnWidth, padding: '0 24px' }, columnHeaders[0].style)}>{id_attribute}</div>
{/* we'll skip the first column, since this is the id and that gets resolved differently in the lines above */}
{columnHeaders.slice(1).map((item, index) => (
<div
key={`column-${index}`}
style={Object.assign(
{ width: item.width || columnWidth, padding: '0 24px', overflow: 'hidden', wordBreak: 'break-all', maxHeight: 48 },
item.style
)}
>
{item.render(device)}
</div>
))}
<IconButton className="expandButton">{expanded ? <ArrowDropUpIcon /> : <ArrowDropDownIcon />}</IconButton>
</ExpansionPanelSummary>
<ExpansionPanelDetails>
{expanded ? (
<ExpandedDevice
{...props}
className="expandedDevice"
id_attribute={(globalSettings || {}).id_attribute}
id_value={id_attribute}
device={device}
attrs={device.attributes}
unauthorized={device.status !== DEVICE_STATES.accepted}
device_type={device.attributes ? device.attributes.device_type : null}
/>
) : (
<div />
)}
</ExpansionPanelDetails>
</ExpansionPanel>
) : (
<div className="deviceListItem flexbox" style={{ padding: '0px 12px', alignItems: 'center' }}>
{selectable ? <Checkbox checked={selected} onChange={onRowSelect} /> : null}
<div style={Object.assign({ width: columnHeaders[0].width || columnWidth, padding: '0 24px' }, columnHeaders[0].style)}>{id_attribute}</div>
{/* we'll skip the first column, since this is the id and that gets resolved differently in the lines above */}
{columnHeaders.slice(1).map((item, index) => (
<div
key={`column-${index}`}
style={Object.assign({ width: item.width || columnWidth, padding: '0 24px', overflow: 'hidden', wordBreak: 'break-all', maxHeight: 48 }, item.style)}
>
{item.render(device)}
</div>
))}
</div>
);
};
export default DeviceListItem;
|
package com.iterlife.zeus.spring.context;
/**
 * Marker interface for application event listeners.
 *
 * NOTE(review): declares no methods in the visible source — presumably
 * implementations are discovered by type elsewhere in the framework;
 * confirm against the registration code before relying on this.
 */
public interface ApplicationListener {
}
|
package io.opensphere.mantle.data.geom.style.dialog;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Font;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import javax.imageio.ImageIO;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.SwingConstants;
import org.apache.log4j.Logger;
import io.opensphere.core.Toolbox;
import io.opensphere.core.common.collapsablepanel.CollapsiblePanel;
import io.opensphere.core.event.EventListener;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.concurrent.ProcrastinatingExecutor;
import io.opensphere.core.util.concurrent.SuppressableRejectedExecutionHandler;
import io.opensphere.core.util.lang.NamedThreadFactory;
import io.opensphere.core.util.swing.EventQueueUtilities;
import io.opensphere.mantle.controller.event.AbstractRootDataGroupControllerEvent;
import io.opensphere.mantle.controller.event.impl.ActiveDataGroupsChangedEvent;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.MapVisualizationType;
import io.opensphere.mantle.data.geom.style.FeatureVisualizationStyle;
import io.opensphere.mantle.data.geom.style.InterpolatedTileVisualizationStyle;
import io.opensphere.mantle.data.geom.style.VisualizationStyleDatatypeChangeEvent;
import io.opensphere.mantle.data.geom.style.dialog.DataTypeNodeUserObject.NodeListener;
import io.opensphere.mantle.data.geom.style.dialog.DataTypeNodeUserObject.NodeType;
import io.opensphere.mantle.data.geom.style.dialog.ShowTypeVisualizationStyleEvent.StyleAction;
import io.opensphere.mantle.data.geom.style.impl.VisualizationStyleRegistryChangeAdapter;
import io.opensphere.mantle.util.MantleToolboxUtils;
/**
* The Class VisualizationStyleDataTypeTreePanel.
*/
@SuppressWarnings("PMD.GodClass")
public class VisualizationStyleDataTypeTreePanel extends JPanel implements NodeListener
{
/**
* The {@link Logger} instance used to capture output.
*/
private static final Logger LOG = Logger.getLogger(VisualizationStyleDataTypeTreePanel.class);
/**
* The scheduled executor service for executing rebuilds of the
* visualization trees.
*/
private static final ScheduledExecutorService EXECUTOR = ProcrastinatingExecutor.protect(new ScheduledThreadPoolExecutor(1,
new NamedThreadFactory("VisualizationStyleTree"), SuppressableRejectedExecutionHandler.getInstance()));
/** The Constant NODE_HEIGHT_PIXELS. */
static final int NODE_HEIGHT_PIXELS = 25;
/**
* serialVersionUID.
*/
private static final long serialVersionUID = 1L;
/** The Active data groups changed listener. */
private final transient EventListener<ActiveDataGroupsChangedEvent> myActiveDataGroupsChangedListener;
/** The Collapse icon. */
private ImageIcon myCollapseIcon;
/** The Data group controller listener. */
@SuppressWarnings("PMD.SingularField")
private final transient EventListener<AbstractRootDataGroupControllerEvent> myDataGroupControllerListener;
/** The Expand icon. */
private ImageIcon myExpandIcon;
/** The type of data this panel will hold. */
private final VisualizationStyleGroup myGroupType;
/** The Node key to node obj map. */
private final transient Map<String, DataTypeNodeUserObject> myNodeKeyToNodeObjMap;
/** The node obj to leaf node panel map. */
private final transient Map<DataTypeNodeUserObject, LeafNodePanel> myNodeObjToLeafNodePanelMap;
/**
* A procrastinating executor for rebuilds of the visualization trees. Since
* there is an update for each added layer at startup, this prevents the
* tree from being potentially rebuilt dozens of times and causing the
* application from becoming unresponsive.
*/
private final transient ProcrastinatingExecutor myRebuildExecutor = new ProcrastinatingExecutor(EXECUTOR, 500);
/** The Registry change listener. */
private transient VisualizationStyleRegistryChangeAdapter myRegistryStyleChangeListener;
/** The Scroll pane. */
@SuppressWarnings("PMD.SingularField")
private final JScrollPane myScrollPane;
/** The Style data type tree listener. */
private final transient StyleDataTypeTreeListener myStyleDataTypeTreeListener;
/** The Style manager controller. */
private final transient StyleManagerController myStyleManagerController;
/** The Toolbox. */
private final transient Toolbox myToolbox;
/** The Feature label group. */
private final transient SelectableLabelGroup myTreeNodeLabelGroup;
/** The Feature tree panel. */
@SuppressWarnings("PMD.SingularField")
private final JPanel myTreePanel;
/** The Feature tree panel. */
@SuppressWarnings("PMD.SingularField")
private final JPanel myTypePanel;
/**
* Instantiates a new visualization style data type tree panel.
*
* @param tb the {@link Toolbox}
* @param styleManagerController the style manager controller
* @param listener the {@link StyleDataTypeTreeListener}
* @param groupType the type of data this panel is configured to contain.
*/
@SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
public VisualizationStyleDataTypeTreePanel(Toolbox tb, StyleManagerController styleManagerController,
StyleDataTypeTreeListener listener, VisualizationStyleGroup groupType)
{
super();
myStyleManagerController = styleManagerController;
myGroupType = groupType;
myStyleDataTypeTreeListener = listener;
myToolbox = tb;
myTypePanel = new JPanel(new BorderLayout());
myTreeNodeLabelGroup = new SelectableLabelGroup();
myNodeKeyToNodeObjMap = New.map();
myNodeObjToLeafNodePanelMap = New.map();
try
{
myExpandIcon = new ImageIcon(ImageIO.read(VisualizationStyleDataTypeTreePanel.class.getResource("/images/down.png")));
}
catch (IOException e)
{
myExpandIcon = null;
}
try
{
myCollapseIcon = new ImageIcon(
ImageIO.read(VisualizationStyleDataTypeTreePanel.class.getResource("/images/right.png")));
}
catch (IOException e)
{
myCollapseIcon = null;
}
myTreePanel = new JPanel();
myTreePanel.setLayout(new BoxLayout(myTreePanel, BoxLayout.Y_AXIS));
myScrollPane = new JScrollPane(myTreePanel);
myTypePanel.add(myScrollPane, BorderLayout.CENTER);
setLayout(new BorderLayout());
JLabel title = new JLabel("Data Types");
title.setFont(title.getFont().deriveFont(Font.BOLD, title.getFont().getSize() + 4));
title.setHorizontalAlignment(SwingConstants.CENTER);
add(title, BorderLayout.NORTH);
add(myTypePanel, BorderLayout.CENTER);
myDataGroupControllerListener = event -> handleDataGroupInfoChangeEvent(event);
tb.getEventManager().subscribe(AbstractRootDataGroupControllerEvent.class, myDataGroupControllerListener);
myActiveDataGroupsChangedListener = event -> handleActiveDataGroupsChangedEvent(event);
tb.getEventManager().subscribe(ActiveDataGroupsChangedEvent.class, myActiveDataGroupsChangedListener);
MantleToolboxUtils.getMantleToolbox(tb).getVisualizationStyleRegistry()
.addVisualizationStyleRegistryChangeListener(getStyleRegistryChangeListener());
rebuildFromModel();
}
/**
* {@inheritDoc}
*
* @see DataTypeNodeUserObject.NodeListener#nodeCheckStateChanged(DataTypeNodeUserObject,
* boolean)
*/
@Override
public void nodeCheckStateChanged(DataTypeNodeUserObject node, boolean checked)
{
fireDataTypeCheckStateChanged(node);
}
/**
 * {@inheritDoc}
 *
 * @see DataTypeNodeUserObject.NodeListener#nodeSelectStateChanged(DataTypeNodeUserObject,
 *      boolean)
 */
@Override
public void nodeSelectStateChanged(DataTypeNodeUserObject node, boolean selected)
{
    // Re-resolve the current selection from the node map rather than trusting
    // the event arguments, then notify the listener with the outcome.
    DataTypeNodeUserObject currentSelection = getSelectedNode();
    if (currentSelection != null)
    {
        fireDataTypeSelected(currentSelection);
    }
    else
    {
        fireNoDataTypeSelected();
    }
}
/**
 * Switches this panel's tree state to the data type referenced by the event,
 * applying the event's {@link StyleAction} to the matching leaf panel.
 *
 * @param event the event naming the target data type and the action to apply
 * @return true if a matching data type node with a leaf panel was found
 *         (whether or not any state actually changed), false otherwise
 */
public boolean switchToDataType(ShowTypeVisualizationStyleEvent event)
{
synchronized (myNodeKeyToNodeObjMap)
{
for (Map.Entry<String, DataTypeNodeUserObject> entry : myNodeKeyToNodeObjMap.entrySet())
{
if (entry.getValue().getDataTypeInfo() != null
&& event.getType().getTypeKey().equals(entry.getValue().getDataTypeInfo().getTypeKey()))
{
LeafNodePanel lnp = myNodeObjToLeafNodePanelMap.get(entry.getValue());
if (lnp != null)
{
if (event.getStyleAction() == StyleAction.SHOW_ONLY
|| event.getStyleAction() == StyleAction.ACTIVATE_IF_INACTIVE)
{
if (!lnp.getSelectableLabel().isSelected())
{
lnp.getSelectableLabel().setSelected(true, true);
}
// For case StyleAction.ACTIVATE_IF_INACTIVE: also check the box when it is unchecked.
if (event.getStyleAction() == StyleAction.ACTIVATE_IF_INACTIVE && !lnp.getCheckBox().isSelected())
{
lnp.getCheckBox().doClick();
}
}
else
{
// For case StyleAction.DEACTIVATE_IF_ACTIVE: uncheck the box when it is checked.
if (lnp.getCheckBox().isSelected())
{
lnp.getCheckBox().doClick();
}
}
return true;
}
}
}
}
return false;
}
/**
 * Fire node check state changed.  Notifies the style manager controller, on
 * the EDT, that the "use custom style" flag for the node's data type changed.
 * (Previous javadoc said "select state" — copy/paste error.)
 *
 * @param node the node whose check state changed
 */
protected void fireDataTypeCheckStateChanged(final DataTypeNodeUserObject node)
{
if (myStyleManagerController != null)
{
EventQueueUtilities.runOnEDT(() -> myStyleManagerController.setUseCustomStyleForDataType(node.getNodeKey(),
node.isChecked(), VisualizationStyleDataTypeTreePanel.this));
}
}
/**
 * Fire data type selected.  Notifies the tree listener, on the EDT, that the
 * given node is now the selected data type.
 *
 * @param node the newly selected node
 */
protected void fireDataTypeSelected(final DataTypeNodeUserObject node)
{
if (myStyleDataTypeTreeListener != null)
{
EventQueueUtilities.runOnEDT(() -> myStyleDataTypeTreeListener.dataTypeSelected(node));
}
}
/**
 * Fire force rebuild.  Asks the tree listener, on the EDT, to rebuild the
 * style editor.  (Previous javadoc said "node select state changed" —
 * copy/paste error.)
 */
protected void fireForceRebuild()
{
if (myStyleDataTypeTreeListener != null)
{
EventQueueUtilities.runOnEDT(() -> myStyleDataTypeTreeListener.forceRebuild());
}
}
/**
 * Fire no data type selected.  Notifies the tree listener, on the EDT, that
 * no data type is currently selected.
 */
protected void fireNoDataTypeSelected()
{
if (myStyleDataTypeTreeListener != null)
{
EventQueueUtilities.runOnEDT(() -> myStyleDataTypeTreeListener.noDataTypeSelected());
}
}
/**
* Builds the tree model.
*
* @param dgiList the list of {@link DataGroupInfo}
* @param lbGrp the {@link SelectableLabelGroup} for the tree
* @param nodeKeyToNodeMap the node key to node map
* @param lastSelectedNodeKey the last selected node key
* @param styleGroup the type of data to store in the tree.
* @return the panel with the mock Component tree. {@link SelectableLabel}
*/
private JPanel buildTree(List<DataGroupInfo> dgiList, SelectableLabelGroup lbGrp,
Map<String, DataTypeNodeUserObject> nodeKeyToNodeMap, String lastSelectedNodeKey, VisualizationStyleGroup styleGroup)
{
JPanel rootPanel = new JPanel(new BorderLayout());
BoxLayout bl = new BoxLayout(rootPanel, BoxLayout.Y_AXIS);
rootPanel.setLayout(bl);
// Build the default Node.
rootPanel.add(Box.createVerticalStrut(3));
myNodeObjToLeafNodePanelMap.clear();
NodeType rootNodeType;
switch (styleGroup)
{
case FEATURES:
rootNodeType = NodeType.DEFAULT_ROOT_FEATURE;
break;
case TILES:
rootNodeType = NodeType.DEFAULT_ROOT_TILE;
break;
case HEATMAPS:
rootNodeType = NodeType.DEFAULT_ROOT_HEATMAP;
break;
default:
// fail fast:
throw new UnsupportedOperationException("Unrecognized visualization style group: " + styleGroup.name());
}
DataTypeNodeUserObject aNode = new DataTypeNodeUserObject("Default Styles", rootNodeType, this);
if (aNode.getNodeKey().equals(lastSelectedNodeKey))
{
aNode.setSelectedNoEvent(true);
}
nodeKeyToNodeMap.put(aNode.getNodeKey(), aNode);
LeafNodePanel lnp = new LeafNodePanel(lbGrp, aNode, false);
rootPanel.add(lnp);
myNodeObjToLeafNodePanelMap.put(aNode, lnp);
rootPanel.add(Box.createVerticalStrut(3));
// Now go through each of the DataGroupInfo and build a CollapsablePanel
// to represent its folder. Fill each panel with our LeafNodePanels for
// each leaf node.
Map<DataGroupInfo, List<DataTypeNodeUserObject>> topParentToLeafNodeMap = determineGroupsAndNodes(dgiList,
lastSelectedNodeKey, styleGroup);
List<DataGroupInfo> dgiKeyList = New.list(topParentToLeafNodeMap.keySet());
Collections.sort(dgiKeyList, (o1, o2) -> o1.getDisplayName().compareTo(o2.getDisplayName()));
Map<DataGroupInfo, CollapsiblePanel> topParentToNodeMap = New.map();
for (DataGroupInfo topNode : dgiKeyList)
{
List<DataTypeNodeUserObject> nodeList = topParentToLeafNodeMap.get(topNode);
if (!nodeList.isEmpty())
{
Collections.sort(nodeList, (o1, o2) -> o1.getDisplayName().compareTo(o2.getDisplayName()));
CollapsiblePanel topParentNode = topParentToNodeMap.get(topNode);
if (topParentNode == null)
{
JPanel subPanel = new JPanel();
subPanel.setLayout(new BoxLayout(subPanel, BoxLayout.Y_AXIS));
topParentNode = new CollapsiblePanel(topNode.getDisplayName(), subPanel, 10);
topParentNode.setExpandedIcon(myExpandIcon);
topParentNode.setCollapsedIcon(myCollapseIcon);
rootPanel.add(topParentNode);
topParentToNodeMap.put(topNode, topParentNode);
}
// Build up the sub-labels.
int numAdded = 0;
for (DataTypeNodeUserObject node : nodeList)
{
nodeKeyToNodeMap.put(node.getNodeKey(), node);
numAdded += createAndAddLeafPanel(styleGroup, lbGrp, node, topParentNode);
}
// Size the collapsible panels so it looks right in its parent
// panel.
topParentNode.setMaximumSize(new Dimension(1000, numAdded * NODE_HEIGHT_PIXELS + NODE_HEIGHT_PIXELS));
}
}
rootPanel.add(new JPanel());
return rootPanel;
}
/**
 * Process data type info to create a leaf node panel and add it to the
 * parent's collapsible panel, also registering it in the node-to-panel map.
 *
 * @param groupType the group type for which the leaf panel will be created
 *            (currently unused by this method).
 * @param labelGroup the group to which the label will be assigned.
 * @param node the node to embed into the panel.
 * @param topParentNode the parent node to which the leaf will be added.
 * @return the number of items added to the parentNode (always 1; callers
 *         accumulate it to size the parent panel).
 */
private int createAndAddLeafPanel(VisualizationStyleGroup groupType, SelectableLabelGroup labelGroup,
DataTypeNodeUserObject node, CollapsiblePanel topParentNode)
{
LeafNodePanel leafNodePanel = new LeafNodePanel(labelGroup, node, true);
myNodeObjToLeafNodePanelMap.put(node, leafNodePanel);
((JPanel)topParentNode.getComponent()).add(leafNodePanel);
return 1;
}
/**
* Organizes the supplied {@link List} of data groups, only selecting items
* that correspond with the supplied {@link VisualizationStyleGroup} for
* inclusion into the generated dictionary.
*
* @param dgiList the list of data groups to organize.
* @param lastSelectedNodeKey the last selected node key
* @param groupType the type of data to be organized into the dictionary.
* @return the map
*/
private Map<DataGroupInfo, List<DataTypeNodeUserObject>> determineGroupsAndNodes(List<DataGroupInfo> dgiList,
String lastSelectedNodeKey, VisualizationStyleGroup groupType)
{
DataTypeNodeUserObject aNode;
Map<DataGroupInfo, List<DataTypeNodeUserObject>> topParentToLeafNodeMap = New.map();
if (dgiList != null && !dgiList.isEmpty())
{
for (DataGroupInfo dgi : dgiList)
{
DataGroupInfo topParent = dgi.getTopParent();
List<DataTypeNodeUserObject> nodeList = topParentToLeafNodeMap.get(topParent);
if (nodeList == null)
{
nodeList = New.list();
topParentToLeafNodeMap.put(topParent, nodeList);
}
for (DataTypeInfo dti : dgi.getMembers(false))
{
String name = dti.getDisplayName();
if (dti.getMapVisualizationInfo() != null)
{
MapVisualizationType type = dti.getMapVisualizationInfo().getVisualizationType();
boolean add = false;
NodeType nodeType = null;
switch (groupType)
{
case FEATURES:
if (type.isMapDataElementType())
{
add = true;
nodeType = NodeType.FEATURE_TYPE_LEAF;
}
break;
case TILES:
if (type.isImageTileType() || type.isImageType())
{
add = true;
nodeType = NodeType.TILE_TYPE_LEAF;
}
break;
case HEATMAPS:
if (type.isHeatmapType())
{
add = true;
nodeType = NodeType.HEATMAP_TYPE_LEAF;
}
break;
default:
// note the failure:
LOG.info("Unable to set selected style group type " + groupType);
add = false;
}
if (add)
{
aNode = new DataTypeNodeUserObject(name, nodeType, dgi, dti, this);
if (Objects.equals(aNode.getNodeKey(), lastSelectedNodeKey))
{
aNode.setSelectedNoEvent(true);
}
if (myStyleManagerController.isTypeUsingCustom(aNode.getNodeKey()))
{
aNode.setChecked(true, false);
}
nodeList.add(aNode);
}
}
}
}
}
return topParentToLeafNodeMap;
}
/**
 * Finds the node currently marked selected, if any.
 *
 * @return the selected {@link DataTypeNodeUserObject}, or {@code null} when
 *         no node is selected
 */
private DataTypeNodeUserObject getSelectedNode()
{
    synchronized (myNodeKeyToNodeObjMap)
    {
        // Scan the registered nodes and return the first selected one.
        for (DataTypeNodeUserObject candidate : myNodeKeyToNodeObjMap.values())
        {
            if (candidate.isSelected())
            {
                return candidate;
            }
        }
        return null;
    }
}
/**
 * Gets the key of the currently selected node.
 *
 * @return the selected node's key, or {@code null} when nothing is selected
 */
private String getSelectedNodeKey()
{
    DataTypeNodeUserObject selected = getSelectedNode();
    if (selected == null)
    {
        return null;
    }
    return selected.getNodeKey();
}
/**
* Gets the style registry change listener.
*
* @return the style registry change listener
*/
private VisualizationStyleRegistryChangeAdapter getStyleRegistryChangeListener()
{
if (myRegistryStyleChangeListener == null)
{
myRegistryStyleChangeListener = new VisualizationStyleRegistryChangeAdapter()
{
@Override
public void visualizationStyleDatatypeChanged(VisualizationStyleDatatypeChangeEvent evt)
{
handleVisualizationStyleDatatypeChangeEvent(evt);
}
};
}
return myRegistryStyleChangeListener;
}
/**
 * Handle active data groups changed event by rebuilding the tree from the
 * model.  (Previous javadoc said "data group info change event" — copy/paste
 * error from the sibling handler.)
 *
 * @param event the event
 */
private void handleActiveDataGroupsChangedEvent(ActiveDataGroupsChangedEvent event)
{
rebuildFromModel();
}
/**
* Handle data group info change event.
*
* @param event the event
*/
private void handleDataGroupInfoChangeEvent(AbstractRootDataGroupControllerEvent event)
{
rebuildFromModel();
}
/**
* Handle visualization style datatype change event.
*
* @param evt the evt
*/
private void handleVisualizationStyleDatatypeChangeEvent(final VisualizationStyleDatatypeChangeEvent evt)
{
if (!Utilities.sameInstance(evt.getSource(), this))
{
boolean isFeatureType = FeatureVisualizationStyle.class.isAssignableFrom(evt.getNewStyle().getClass());
boolean isHeatmapType = InterpolatedTileVisualizationStyle.class.isAssignableFrom(evt.getNewStyle().getClass());
if (isFeatureType && myGroupType == VisualizationStyleGroup.FEATURES
|| isHeatmapType && myGroupType == VisualizationStyleGroup.HEATMAPS)
{
rebuildFromModel(true);
}
}
}
/**
* Rebuild from model.
*/
private void rebuildFromModel()
{
rebuildFromModel(false);
}
/**
* Rebuild from model.
*
* @param forceRebuildOfEditor a flag used to force reselect last selected
* item after a rebuild is complete.
*/
private void rebuildFromModel(final boolean forceRebuildOfEditor)
{
EventQueueUtilities.runOnEDT(() -> rebuildFromModelImpl(forceRebuildOfEditor));
}
/**
* Rebuilds the model.
*
* @param forceRebuildOfEditor a flag used to force reselect last selected
* item after a rebuild is complete.
*/
private void rebuildFromModelImpl(final boolean forceRebuildOfEditor)
{
synchronized (myNodeKeyToNodeObjMap)
{
String oldSelectedNodeKey = getSelectedNodeKey();
myNodeKeyToNodeObjMap.clear();
myTreeNodeLabelGroup.removeAllLabels();
rebuildFromModelInternal(myTreePanel, myTreeNodeLabelGroup, myNodeKeyToNodeObjMap, oldSelectedNodeKey, myGroupType);
myScrollPane.revalidate();
if (forceRebuildOfEditor)
{
fireForceRebuild();
}
}
}
/**
* Rebuild from model internal (assumes working in AWT event thread).
* Actually does all the work to rebuild the selection trees.
*
* @param treePanel the tree panel to be rebuild.
* @param lbGrp the {@link SelectableLabelGroup} for the labels in the tree.
* @param nodeKeyToNodeMap the node key to node map
* @param lastSelectedNodeKey the last selected node key
* @param styleGroup the type of data to be contained by the tree.
*/
private void rebuildFromModelInternal(final JPanel treePanel, final SelectableLabelGroup lbGrp,
final Map<String, DataTypeNodeUserObject> nodeKeyToNodeMap, final String lastSelectedNodeKey,
final VisualizationStyleGroup styleGroup)
{
myRebuildExecutor.execute(() -> EventQueueUtilities
.runOnEDT(() -> rebuildTreePanel(treePanel, lbGrp, nodeKeyToNodeMap, lastSelectedNodeKey, styleGroup)));
}
/**
* Rebuilds the tree on the panel.
*
* @param pTreePanel the panel to rebuild.
* @param pLabelGroup the {@link SelectableLabelGroup} for the tree used to
* organized labels.
* @param pNodeKeyToNodeMap a dictionary of node objects, using the node's
* key as the hash key for fast lookups.
* @param pLastSelectedNodeKey the key that was selected on before the
* rebuild was initiated.
* @param pStyleGroup the type of data to be contained by the tree.
*/
private void rebuildTreePanel(final JPanel pTreePanel, final SelectableLabelGroup pLabelGroup,
final Map<String, DataTypeNodeUserObject> pNodeKeyToNodeMap, final String pLastSelectedNodeKey,
final VisualizationStyleGroup pStyleGroup)
{
pTreePanel.removeAll();
List<DataGroupInfo> dgiList = MantleToolboxUtils.getMantleToolbox(myToolbox).getDataGroupController()
.createGroupList(null, new VisualizationStyleDataGroupTreeFilter(myToolbox));
pTreePanel.add(buildTree(dgiList, pLabelGroup, pNodeKeyToNodeMap, pLastSelectedNodeKey, pStyleGroup));
pTreePanel.revalidate();
pTreePanel.repaint();
}
}
|
# Runs before each test: work from the directory containing this .bats file
# so werf finds its project config via relative paths.
setup() {
cd $BATS_TEST_DIRNAME
}
# Runs after each test: remove the stages the test built from the local
# stages storage so test runs don't accumulate images.
teardown() {
werf stages purge -s :local --force
}
# Regression test: the build should succeed without redefining a non-standard
# PATH inside the stapel build container.
@test "Non standard PATH should not be redefined in stapel build container (https://github.com/werf/werf/issues/1836)" {
werf build -s :local
}
|
# Serializers
from rest_framework import serializers
from account.models import Account
class AccountSerializer(serializers.ModelSerializer):
    """Full serializer for Account records.

    Writable fields cover profile and permission flags; identity fields,
    counters, timestamps and email-verification state are read-only.
    """

    class Meta:
        model = Account
        depth = 0
        fields = [
            'id',
            'username',
            'lang',
            'solved',
            'submit_time',
            'email',
            'is_staff',
            'is_superuser',
            'is_active',
            'is_judger',
            'date_joined',
            'last_login',
            'email_verified',
            'avatar_url',
        ]
        read_only_fields = [
            'id',
            'solved',
            'submit_time',
            'date_joined',
            'last_login',
            'email',
            'email_verified'
        ]
class AccountIntroductionSerializer(serializers.ModelSerializer):
    """Narrow serializer exposing only the account's introduction text."""

    class Meta:
        model = Account
        depth = 0
        fields = [
            'id',
            'introduction'
        ]
        read_only_fields = ['id']
class AccountExtraDataSerializer(serializers.ModelSerializer):
    """Narrow serializer exposing only the account's extra_data payload."""

    class Meta:
        model = Account
        depth = 0
        fields = [
            'id',
            'extra_data'
        ]
        read_only_fields = ['id']
|
# Link dotfiles into place and run one-time migration steps.
bash link.sh
bash migrate.sh
# Install vim-plug into vim's autoload directory.
curl -fLo ~/.vim/autoload/plug.vim --create-dirs \
https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
# Powerline fonts (system package) and powerline-shell prompt (pip).
sudo apt-get install fonts-powerline
pip install powerline-shell
|
def input_required(message, args):
    """Return a single-item result prompting the user with ``message``.

    ``args`` is forwarded to ``get_context`` (defined elsewhere) to recover
    the current input value; the result type reflects whether any input has
    been entered yet.

    NOTE(review): the branch looks inverted — "success" is returned when
    ``input_value`` is None (nothing typed yet) and "items" otherwise;
    confirm against result()'s contract before changing.
    """
    context = get_context(args)
    cmdstr = context.input_value
    if cmdstr is None:
        result_type = "success"
    else:
        result_type = "items"
    return result([{"label": "", "description": message}], cmdstr, type=result_type)
/*
8 - Ask for a positive integer (validate it), compute the factorial and display it.
*/
#include <iostream>
using namespace std;

// Reads a non-negative integer (re-prompting while the input is negative),
// then prints the factor chain and the factorial, e.g. "3 x 2 x 1 = 6".
// Fixes vs. original: no trailing " x " before "=", and a 64-bit accumulator
// so results past 12! no longer overflow int.
int main() {
    int num = 0;
    cout << "Calcular fatorial. Consistir se numero negativo \n";
    cout << "Digite um numero inteiro ";
    cin >> num;
    // Validate: keep asking while the number is negative.
    while (num < 0) {
        cout << "Digite um numero inteiro ";
        cin >> num;
    }
    long long aux = 1;
    for (int i = num; i > 0; --i) {
        aux *= i;
        cout << i;
        if (i > 1) {
            cout << " x ";  // separator only BETWEEN factors
        }
    }
    // For num == 0 the loop is skipped and 0! = 1 is printed directly.
    cout << " = " << aux << endl;
    return 0;
}
|
<filename>materialySzkoleniowe/operatory.js
// NOTE: Operators
/*
// NOTE: Arithmetic
+ - addition
- - subtraction
/ - division
* - multiplication
% - modulo division (remainder of division)
++ - increment
-- - decrement
// NOTE: LOGICAL / COMPARISON
== - equal
=== - identical (also compares the variable's type)
!= - not equal
< <= - less than, less than or equal
> >= - greater than, greater than or equal
! - logical negation
&& - conjunction of logical values (something "and" something)
|| - alternative of logical values (something "or" something)
// NOTE: CONDITIONAL (ternary)
condition ? value : value
let memberNumber = 2
console.log("The number is " + (memberNumber === 2 ? "$2.00" : "$10.00"))
// NOTE: ASSIGNMENT
= - assigns a value
+= - increases the variable's value
-= - decreases the variable's value
*= - multiplies the variable's value
/= %= - divides / divides modulo the variable
Example Equivalent to Result
= x = y x = y x = 5
+= x += y x = x + y x = 15
-= x -= y x = x - y x = 5
*= x *= y x = x * y x = 50
/= x /= y x = x / y x = 2
%= x %= y x = x % y x = 0
*/
// NOTE: Example
// let memberNumber = "Krzysztof"
// console.log("The number is " + (memberNumber === "Krzysztof" ? "$2.00" : "$10.00"))
// let a = 2
// console.log(a -= 3)
#!/bin/sh
# Deploy script: build, publish to npm (stable tags vX.Y.Z go to the default
# dist-tag plus a coverage upload; everything else goes to "next"), then cut
# a GitHub release (best-effort).
echo "Deploy for tag: '$1'";
date +'%D %T %:z (%Z)'
npm run build
if printf '%s\n' "$1" | grep -Eq "^v[0-9]+\.[0-9]+\.[0-9]+$"; then
    # Stable release tag: publish normally and push coverage.
    npm publish ./dist;
    npm run coveralls;
else
    # Anything else is a pre-release: publish under the "next" dist-tag.
    npm publish ./dist --tag next;
fi
npm run github-release || true;
|
#!/usr/bin/env bash
set -ex
source utils/logging.sh
sudo yum install -y libselinux-utils
if selinuxenabled ; then
sudo setenforce permissive
sudo sed -i "s/=enforcing/=permissive/g" /etc/selinux/config
fi
# Update to latest packages first
sudo yum -y update
# Install EPEL required by some packages
if [ ! -f /etc/yum.repos.d/epel.repo ] ; then
if grep -q "Red Hat Enterprise Linux" /etc/redhat-release ; then
sudo yum -y install http://mirror.centos.org/centos/7/extras/x86_64/Packages/epel-release-7-11.noarch.rpm
else
sudo yum -y install epel-release --enablerepo=extras
fi
fi
# Work around a conflict with a newer zeromq from epel
if ! grep -q zeromq /etc/yum.repos.d/epel.repo; then
sudo sed -i '/enabled=1/a exclude=zeromq*' /etc/yum.repos.d/epel.repo
fi
# Install required packages
# python-{requests,setuptools} required for tripleo-repos install
sudo yum -y install \
crudini \
curl \
dnsmasq \
figlet \
golang \
NetworkManager \
nmap \
patch \
psmisc \
python-pip \
python-requests \
python-setuptools \
vim-enhanced \
wget
# We're reusing some tripleo pieces for this setup so clone them here
cd
if [ ! -d tripleo-repos ]; then
git clone https://git.openstack.org/openstack/tripleo-repos
fi
pushd tripleo-repos
sudo python setup.py install
popd
# Needed to get a recent python-virtualbmc package
sudo tripleo-repos current-tripleo
# There are some packages which are newer in the tripleo repos
sudo yum -y update
# Setup yarn and nodejs repositories
sudo curl -sL https://dl.yarnpkg.com/rpm/yarn.repo -o /etc/yum.repos.d/yarn.repo
curl -sL https://rpm.nodesource.com/setup_10.x | sudo bash -
# make sure additional requirments are installed
sudo yum -y install \
ansible \
bind-utils \
jq \
libguestfs-tools \
libvirt \
libvirt-devel \
libvirt-daemon-kvm \
nodejs \
podman \
python-ironicclient \
python-ironic-inspector-client \
python-lxml \
python-netaddr \
python-openstackclient \
python-virtualbmc \
qemu-kvm \
virt-install \
unzip \
yarn
# Install python packages not included as rpms
sudo pip install \
lolcat \
yq
# Install minikube, the kvm2 machine driver, and kubectl if missing.
# Use `command -v` with both streams silenced: the original `which X 2>/dev/null`
# leaked the resolved path to stdout and the three checks redirected
# inconsistently (stderr-only, stdout-only, stderr-only).
if ! command -v minikube >/dev/null 2>&1 ; then
    curl -Lo minikube https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64 \
        && chmod +x minikube && sudo mv minikube /usr/local/bin/.
fi
if ! command -v docker-machine-driver-kvm2 >/dev/null 2>&1 ; then
    curl -LO https://storage.googleapis.com/minikube/releases/latest/docker-machine-driver-kvm2 \
        && sudo install docker-machine-driver-kvm2 /usr/local/bin/
fi
if ! command -v kubectl >/dev/null 2>&1 ; then
    curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl \
        && chmod +x kubectl && sudo mv kubectl /usr/local/bin/.
fi
|
#include <iostream>
using namespace std;

// Reads a decimal integer and prints its binary representation.
// Fixes vs. original: input 0 no longer hangs forever (while(b!=1) never
// terminated for b == 0); unused variables a, s, d and <math.h> removed.
int main()
{
    cout << "enter a decimal no";
    int b;
    cin >> b;
    if (b <= 0)
    {
        // 0 -> "0".  Negative input is not supported (the original looped
        // forever on it too); print 0 and stop rather than hang.
        cout << 0;
        return 0;
    }
    // Collect bits least-significant first, then print them in reverse.
    int e[100];
    int count = 0;
    while (b > 0)
    {
        e[count++] = b % 2;
        b /= 2;
    }
    for (int j = count - 1; j >= 0; --j)
    {
        cout << e[j];
    }
    return 0;
}
|
<filename>tests/smoke-testing.ts
import { writeFileSync } from 'fs'
import OvonicMQTT from '../src/ovonic-mqtt'
import { QoS } from 'mqtt'
/**
 * emqx supports connection-authentication plugins:
 * emqx_auth_clientid: clientId authentication
 * emqx_auth_username: username/password authentication plugin
 * emqx_auth_jwt: JWT authentication / access control.  This example uses
 * JWT connection authentication; secret: "secret"
 */
const clientId = 'test_client_b'

// Connection options for the test client.
const options1 = {
  connectTimeout: 4000,
  // credentials
  username: 'secret',
  password: '<KEY>',
  clientId,
  keepalive: 60,
  clean: true,
  // Last-will message the broker publishes if this client drops unexpectedly.
  will: {
    topic: 'MQTT_DISCONNET',
    payload: clientId,
    qos: 1 as (QoS),
    retain: false,
  },
}
// const options2 = {
//   connectTimeout: 4000,
//   // credentials
//   username: 'secret',
//   password: '<KEY>',
//   clientId,
//   keepalive: 60,
//   clean: true,
// }

// Reconnect attempts since the last successful connect; reset on 'connect'.
let tryReconnect = 0
const MQTT_URL = process.env.MQTT_URL || 'mqtt://mqtt.ldmxxz.com:1883'
// Client-id of the server side used for request/response calls.
const serverId = 'server_57290af3acbe1be60db61e20ef54f055'
const test = async () => {
const ovonic1 = new OvonicMQTT()
ovonic1.on('reconnect', async (data) => {
console.log('reconnect', data)
if (tryReconnect++ > 5) {
ovonic1.client.end()
}
})
ovonic1.on('connect', () => {
tryReconnect = 0
})
ovonic1.on('error', (error) => {
console.log('mqtt error', error)
})
ovonic1.on('close', () => {
console.log('mqtt close')
})
ovonic1.on('disconnect', () => {
console.log('mqtt disconnect')
})
ovonic1.on('offline', () => {
console.log('mqtt offline')
})
try {
await ovonic1.connect(MQTT_URL, options1)
ovonic1.on(clientId, async (message) => {
console.log(message)
})
const result = await ovonic1.request(serverId, {
userId: '1',
msgId: 'WXInitialize_Api12',
message: JSON.stringify({ uuid: '2a2612f6-f64c-4848-bf1f-53c535b139a11' }),
apiName: 'WXInitialize',
// responseClient: clientId,
})
console.log('result: ', result)
const qrcode = await ovonic1.request(serverId, {
userId: '1',
msgId: 'WXGetQRCode_Api12',
message: '',
apiName: 'WXGetQRCode',
// responseClient: clientId,
})
const qrData = JSON.parse(qrcode.message)
writeFileSync('./qrcode.png', Buffer.from(qrData.qr_code || '', 'base64'))
console.log('qrcode', JSON.parse(qrcode.message))
const timeId = setInterval(async () => {
const msgId = 'WXCheckQrCode_Api111' + Math.floor(Math.random() * 100)
const checkQrcode = await ovonic1.request(serverId, {
userId: '1',
msgId,
message: '',
apiName: 'WXCheckQRCode',
})
const data = JSON.parse(checkQrcode.message)
switch (data.status) {
case 0: // 等待扫码
break;
case 1: // 已扫码
break;
case 2: // 已授权
clearInterval(timeId)
const res = await ovonic1.request(serverId, {
userId: '1',
msgId: 'WXQRCodeLogin_APi112',
message: JSON.stringify({
user_name: data.user_name,
password: <PASSWORD>,
}),
apiName: 'WXQRCodeLogin',
})
console.log(res)
break;
case -106:
case -2007:
case 3: // 已过期
case 4: // 取消登陆
clearInterval(timeId)
break;
}
}, 1000)
} catch (error) {
console.log('test error', error)
}
}
test()
|
<reponame>kilinmao/sarl_star<filename>navigation/arena_local_planner/learning_based/arena_local_planner_drl/scripts/custom_policy.py<gh_stars>0
import os
from typing import Callable, Dict, List, Optional, Tuple, Type, Union
import gym
import rospkg
import torch as th
import yaml
from torch import nn
from stable_baselines3.common.policies import ActorCriticPolicy
from stable_baselines3.common.torch_layers import BaseFeaturesExtractor
_RS = 2  # robot state size

# Resolve the robot model description shipped with the simulator setup package.
ROBOT_SETTING_PATH = rospkg.RosPack().get_path('simulator_setup')
yaml_ROBOT_SETTING_PATH = os.path.join(ROBOT_SETTING_PATH, 'robot', 'myrobot.model.yaml')

# Derive the number of laser beams (_L) from the Laser plugin configuration.
_L = None
with open(yaml_ROBOT_SETTING_PATH, 'r') as fd:
    robot_data = yaml.safe_load(fd)
    for plugin in robot_data['plugins']:
        if plugin['type'] == 'Laser':
            laser_angle_min = plugin['angle']['min']
            laser_angle_max = plugin['angle']['max']
            laser_angle_increment = plugin['angle']['increment']
            _L = int(round((laser_angle_max - laser_angle_min) / laser_angle_increment) + 1)  # num of laser beams
            break
if _L is None:
    # BUGFIX: previously _L was silently left undefined when no Laser plugin
    # existed, causing a confusing NameError later when the feature
    # extractors were built.  Fail early with a clear message instead.
    raise ValueError("no 'Laser' plugin found in %s" % yaml_ROBOT_SETTING_PATH)
class MLP_ARENA2D(nn.Module):
    """
    Custom multilayer perceptron producing separate policy and value latents.

    Architecture was taken as reference from:
    https://github.com/ignc-research/arena2D/tree/master/arena2d-agents.

    :param feature_dim: dimension of the features extracted with the
        features_extractor (e.g. features from a CNN)
    :param last_layer_dim_pi: (int) units in the final policy-network layer
    :param last_layer_dim_vf: (int) units in the final value-network layer
    """

    def __init__(self, feature_dim: int, last_layer_dim_pi: int = 32, last_layer_dim_vf: int = 32):
        super().__init__()

        # Output dimensions, read by the policy to create the distributions.
        self.latent_dim_pi = last_layer_dim_pi
        self.latent_dim_vf = last_layer_dim_vf

        # Shared trunk followed by one small head per output.
        self.body_net = nn.Sequential(
            nn.Linear(feature_dim, 64), nn.ReLU(),
            nn.Linear(64, 64), nn.ReLU(),
        )
        self.policy_net = nn.Sequential(nn.Linear(64, last_layer_dim_pi), nn.ReLU())
        self.value_net = nn.Sequential(nn.Linear(64, last_layer_dim_vf), nn.ReLU())

    def forward(self, features: th.Tensor) -> Tuple[th.Tensor, th.Tensor]:
        """
        Return ``(latent_policy, latent_value)`` for the given features.
        If all layers are shared, then ``latent_policy == latent_value``.
        """
        shared = self.body_net(features)
        return self.policy_net(shared), self.value_net(shared)
class MLP_ARENA2D_POLICY(ActorCriticPolicy):
    """Actor-critic policy whose latent extractor is the custom MLP_ARENA2D."""

    def __init__(
        self,
        observation_space: gym.spaces.Space,
        action_space: gym.spaces.Space,
        lr_schedule: Callable[[float], float],
        net_arch: Optional[List[Union[int, Dict[str, List[int]]]]] = None,
        activation_fn: Type[nn.Module] = nn.ReLU,
        *args,
        **kwargs,
    ):
        super().__init__(
            observation_space,
            action_space,
            lr_schedule,
            net_arch,
            activation_fn,
            *args,
            **kwargs,
        )
        # Enable orthogonal weight initialization.
        self.ortho_init = True

    def _build_mlp_extractor(self) -> None:
        # Swap the default extractor for the custom shared/policy/value MLP.
        self.mlp_extractor = MLP_ARENA2D(self.features_dim)
class DRL_LOCAL_PLANNER(BaseFeaturesExtractor):
    """
    Convolutional feature extractor serving the policy and value networks.

    Architecture was taken as reference from: https://arxiv.org/abs/1808.03841

    :param observation_space: (gym.Space)
    :param features_dim: (int) number of features extracted; corresponds to
        the number of units of the last layer.
    """

    def __init__(self, observation_space: gym.spaces.Box, features_dim: int = 128):
        super().__init__(observation_space, features_dim)

        self.cnn = nn.Sequential(
            nn.Conv1d(1, 32, 5, 2),
            nn.ReLU(),
            nn.Conv1d(32, 32, 3, 2),
            nn.ReLU(),
            nn.Flatten(),
        )

        # Run one dummy laser scan through the CNN to find its output width.
        with th.no_grad():
            dummy_scan = th.randn(1, 1, _L)
            n_flatten = self.cnn(dummy_scan).shape[1]

        # Leave room for the _RS robot-state values appended after the CNN.
        self.fc_1 = nn.Sequential(nn.Linear(n_flatten, 256 - _RS), nn.ReLU())
        self.fc_2 = nn.Sequential(nn.Linear(256, features_dim), nn.ReLU())

    def forward(self, observations: th.Tensor) -> th.Tensor:
        """
        :return: (th.Tensor) features extracted from observations laid out as
            [laser scan | robot state].
        """
        scan = observations[:, :-_RS]
        state = observations[:, -_RS:]
        cnn_features = self.fc_1(self.cnn(th.unsqueeze(scan, 1)))
        return self.fc_2(th.cat((cnn_features, state), 1))
"""
Global constant to be passed as an argument to the PPO of Stable-Baselines3 in order to build both the policy
and value network.
:constant policy_drl_local_planner: (dict)
"""
policy_kwargs_drl_local_planner = dict(features_extractor_class=DRL_LOCAL_PLANNER,
features_extractor_kwargs=dict(features_dim=128))
class CNN_NAVREP(BaseFeaturesExtractor):
    """
    Nature-CNN-style convolutional feature extractor ahead of the policy and
    value head.

    Architecture was taken as reference from: https://github.com/ethz-asl/navrep

    :param observation_space: (gym.Space)
    :param features_dim: (int) number of features extracted; corresponds to
        the number of units of the last layer.
    """

    def __init__(self, observation_space: gym.spaces.Box, features_dim: int = 32):
        super().__init__(observation_space, features_dim)

        self.cnn = nn.Sequential(
            nn.Conv1d(1, 32, 8, 4),
            nn.ReLU(),
            nn.Conv1d(32, 64, 9, 4),
            nn.ReLU(),
            nn.Conv1d(64, 128, 6, 4),
            nn.ReLU(),
            nn.Conv1d(128, 256, 4, 4),
            nn.ReLU(),
            nn.Flatten(),
        )

        # One dummy forward pass determines the flattened CNN output width.
        with th.no_grad():
            dummy_scan = th.randn(1, 1, _L)
            n_flatten = self.cnn(dummy_scan).shape[1]

        # The final _RS output slots are reserved for the raw robot state.
        self.fc = nn.Sequential(nn.Linear(n_flatten, features_dim - _RS))

    def forward(self, observations: th.Tensor) -> th.Tensor:
        """
        :return: (th.Tensor) features extracted from observations laid out as
            [laser scan | robot state].
        """
        scan = observations[:, :-_RS]
        state = observations[:, -_RS:]
        cnn_features = self.fc(self.cnn(th.unsqueeze(scan, 1)))
        return th.cat((cnn_features, state), 1)
"""
Global constant to be passed as an argument to the PPO of Stable-Baselines3 in order to build both the policy
and value network.
:constant policy_kwargs_navrep: (dict)
"""
policy_kwargs_navrep = dict(features_extractor_class=CNN_NAVREP,
features_extractor_kwargs=dict(features_dim=32),
net_arch=[dict(vf=[64, 64], pi=[64, 64])], activation_fn=th.nn.ReLU)
|
// See license.txt, BSD
// Copyright 2011 Google, Inc. author: <EMAIL>
/*
* Uglify AST walker
*
*/
define([], function UglyWalker() {

  var UglyWalker = {
    // Ancestors of the node currently being visited, root first; reversed
    // it gives the parents of the node at the time it is found.
    namingStack: []
  };

  // Returns the Uglify node type (cell 0) as a string, or null for untyped
  // arrays.  If embed_tokens === true, type is a NodeWithToken.
  UglyWalker.getType = function(uglyArray) {
    return uglyArray[0] ? uglyArray[0].toString() : null;
  };

  UglyWalker.getStack = function() {
    return this.namingStack;
  };

  UglyWalker.pushParent = function(uglyArray) {
    this.namingStack.push(uglyArray);
  };

  UglyWalker.popParent = function() {
    // (the popped value is intentionally discarded)
    this.namingStack.pop();
  };

  // ----------------------------------------------------------------------------------------------
  // BranchActions must all return something > 0 (incl true) or the walker will stop.
  UglyWalker.branchActions = {};

  // Leaf value: nothing to recurse into.
  UglyWalker.branchActions.LITERAL = function(val) {
    return true;
  };

  // Single child node: recurse into it.
  UglyWalker.branchActions.NODE = function(val) {
    return this.seekInStatement(val);
  };

  // Array of child nodes: recurse into each entry.
  UglyWalker.branchActions.ARRAY = function(val) {
    return this.seekInStatements(val);
  };

  // Object-literal entries; pairs are [[name, node]...].
  UglyWalker.branchActions.PAIRS = function(statements) {
    var rc = true;
    for (var i = 0; i < statements.length; i++) {
      var statement = statements[i];
      // Normalize to ['pair', LITERAL, NODE] so the dispatch table applies.
      rc = this.seekInStatement(['pair', statement[0], statement[1]]);
      if (rc <= 0) {
        break;
      }
    }
    return rc;
  };

  UglyWalker.branchActions.CATCH = function(val) {
    // TODO rewrite as ['catch', 'name', 'block']
    this.pushParent(this.getType(val));
    var rc = this.seekInStatements(val[1]);
    this.popParent();
    return rc;
  };

  // Switch cases: rewrite each [condition, block] pair into a synthetic
  // 'case' node so it flows through the normal dispatch table.
  UglyWalker.branchActions.CASES = function(cases) {
    var caseStatements = [];
    for (var i = 0; i < cases.length; i++) {
      var aCase = cases[i];
      // ['case', 'condition', 'block']
      var caseStatement = ['case', aCase[0], ['block', aCase[1]]];
      caseStatements.push(caseStatement);
      if (aCase.length > 2) {
        // BUGFIX: Error() ignores extra arguments, so the offending node is
        // folded into the message instead of being passed separately.
        throw new Error("Nonymous a case statement with more than 2 sub nodes: " + aCase);
      }
    }
    return this.seekInStatements(caseStatements);
  };

  // Declaration lists, eg: var Baz = Bar = function() {};
  UglyWalker.branchActions.DECLS = function(declarations) {
    var rc = true;
    for (var i = 0; i < declarations.length; i++) {
      var declaration = declarations[i];
      var statement = declaration[1]; // the initializer, if any
      if (statement) {
        var decl = ['decl'].concat(declaration);
        rc = this.seekInStatement(decl);
        if (rc <= 0) {
          break;
        }
      } // else declaration with no initializer, eg var foo;
    }
    return rc;
  };

  //-----------------------------------------------------------------------------------------------
  // We walk the syntax tree from the root downward.
  // For each node in the syntax tree we look up the node type (cell 0) in the typeToBranch table
  // and then operate on the node's branch data (cell 1,...) based on the entry. Each entry has two
  // values for each branch data value: a label for the branch and an action for that branch.
  // Blocks, array entries, declaration lists, switch cases, and catch clauses are all special
  // cases. The listy special cases are array of arrays (.length but no type).
  // This entire approach copies https://github.com/joehewitt/transformjs/lib/transformjs.js
  UglyWalker.createTypeToBranch = function(branchActions) {
    var LITERAL = branchActions.LITERAL;
    var ARRAY = branchActions.ARRAY;
    var PAIRS = branchActions.PAIRS;
    var NODE = branchActions.NODE;
    var DECLS = branchActions.DECLS;
    var CATCH = branchActions.CATCH;
    var CASES = branchActions.CASES;
    return { // BranchName/BranchAction for each Uglify statement type
      'num': ['value', LITERAL],
      'string': ['value', LITERAL],
      'regexp': ['value', LITERAL, 'flags', LITERAL],
      'array': ['items', ARRAY],
      'object': ['items', PAIRS],
      'name': ['name', LITERAL],
      'stat': ['expr', NODE],
      'block': ['statements', ARRAY],
      'var': ['decls', DECLS],
      'decl': ['left', LITERAL, 'right', NODE],
      'pair': ['left', LITERAL, 'right', NODE],
      'assign': ['um', LITERAL, 'left', NODE, 'right', NODE],
      'unary-prefix': ['op', LITERAL, 'expr', NODE],
      'unary-postfix': ['op', LITERAL, 'expr', NODE],
      'binary': ['op', LITERAL, 'left', NODE, 'right', NODE],
      'conditional': ['condition', NODE, 'ifBlock', NODE, 'elseBlock', NODE],
      'call': ['left', NODE, 'args', ARRAY],
      'new': ['expr', NODE, 'args', ARRAY],
      'dot': ['left', NODE, 'right', LITERAL],
      'sub': ['left', NODE, 'right', NODE],
      'defun': ['name', LITERAL, 'args', LITERAL, 'block', ARRAY],
      'function': ['name', LITERAL, 'args', LITERAL, 'block', ARRAY],
      'return': ['expr', NODE],
      'continue': [],
      'break': [],
      'if': ['condition', NODE, 'ifBlock', NODE, 'elseBlock', NODE],
      'for-in': ['iter', NODE, 'left', NODE, 'right', NODE, 'block', NODE],
      'for': ['init', NODE, 'condition', NODE, 'increment', NODE, 'block', NODE],
      'while': ['condition', NODE, 'block', NODE],
      'try': ['try', ARRAY, 'catch', CATCH, 'finally', ARRAY],
      'switch': ['expr', NODE, 'cases', CASES],
      'label': ['name', LITERAL],
      'case': ['condition', NODE, 'block', NODE], // ast is dynamically extended to add this node
      'throw': ['expr', NODE],
      'toplevel': ['statements', ARRAY]
    };
  };

  UglyWalker.getTypeToBranch = function(statementType) {
    // BUGFIX: cache the dispatch table on first use.  The original computed
    // "this.typeToBranch || create(...)" but never assigned the result, so
    // the whole table was rebuilt for every statement visited.
    if (!this.typeToBranch) {
      this.typeToBranch = this.createTypeToBranch(this.branchActions);
    }
    return this.typeToBranch[statementType];
  };

  //-----------------------------------------------------------------------------------------------
  UglyWalker.getBranches = function(statement) {
    var statementType = this.getType(statement);
    var branches = this.getTypeToBranch(statementType);
    if (!branches) {
      throw new Error("Nonymous ERROR: no branches for "+statementType+" statement "+statement);
    }
    return branches;
  };

  // Hook for dependents: called once per visited statement with the stack of
  // its ancestors.  Return <= 0 to stop the walk.
  UglyWalker.processStatement = function(statement, namingStack) {
    // To be implemented by dependent
    return true;
  };

  // ----------------------------------------------------------------------------------------------
  // We work from root to leaf from statements to branches looking for functions
  // As we descend we record the path back to the root. When reversed this gives the
  // parents of the function node at the time we find it. That way we don't need to
  // make a pass over the tree to set the parent pointers and we don't have to store them.
  //
  UglyWalker.seekInStatement = function(statement) {
    var rc = this.processStatement(statement, this.getStack());
    if (rc < 0) { // the goal is behind us
      return rc;
    } // else the goal is within or ahead of us
    var branches = this.getBranches(statement);
    if (!branches) {
      return true;
    }
    // establish the current parent for the branch processing
    this.pushParent(statement);
    // iterate the branches and statement parts in tandem; branches is a
    // flat [name, action, name, action, ...] list, statement[0] is the type
    var statementPartIndex = 1;
    rc = true;
    for (var i = 0; i < branches.length; i += 2) {
      var statementPart = statement[statementPartIndex++];
      if (statementPart) {
        var branchAction = branches[i + 1]; // eg NODE for first branch of 'if'
        rc = branchAction.apply(this, [statementPart]);
        if (rc <= 0) {
          break;
        }
      } // else null function name or var without initiailizer...
    }
    // all branches at this level are done.
    this.popParent();
    return rc;
  };

  // Blocks, decls, case switch statements
  UglyWalker.seekInStatements = function(statements) {
    var rc = true;
    for (var i = 0; i < statements.length; i++) {
      var statement = statements[i];
      rc = this.seekInStatement(statement);
      if (rc <= 0) {
        break;
      }
    }
    return rc;
  };

  return UglyWalker;
}); // end of define
|
/**
 * Environment/configuration values required by the MedMe JSON-RPC client:
 * service endpoints plus debug switches.
 */
export interface IMedMeJsonRpcEnv {
  /** Endpoint of the core business API. */
  CORE_API_ENDPOINT: string
  /** Endpoint of the CRAC slots API. */
  CRAC_SLOTS_API_ENDPOINT: string
  /** Endpoint of the CRAC API. */
  CRAC_API_ENDPOINT: string
  /** Endpoint of the CRAC v3 API. */
  CRAC3_API_ENDPOINT: string
  /** OAuth endpoint that sends a one-time password. */
  OAUTH_OTP_SEND: string
  /** OAuth endpoint that verifies a one-time password. */
  OAUTH_OTP_VERIFY: string
  /** OAuth endpoint for web login via one-time password. */
  OAUTH_OTP_WEBLOGIN: string
  /** Enables debug output for JSON-RPC requests. */
  JSONRPC_REQUEST_DEBUG: boolean
  /** Enables debug output for OTP requests. */
  OTP_REQUEST_DEBUG: boolean
}
|
<filename>CPP/winsock-codes/tcpip-simple-server.cpp
/***
* I refered the below youtube link.
* https://www.youtube.com/watch?v=WDn-htpBlnU&list=PLZo2FfoMkJeEogzRXEJeTb3xpA2RAzwCZ&index=7
***/
#include <iostream>
#include <ws2tcpip.h>
#include <string>
#pragma comment(lib, "ws2_32.lib")
using namespace std;
int main() {
WSADATA wsaDATA;
WORD ver = MAKEWORD(2,2);
int wsOK = WSAStartup(ver, &wsaDATA);
if(wsOK != 0){
cerr << "Could not initialize winsock" << endl;
return -1;
}
SOCKET listening = socket(AF_INET, SOCK_STREAM, 0);
if(listening == INVALID_SOCKET) {
cerr << "Can not create listening socket" << endl;
return -1;
}
sockaddr_in hint;
hint.sin_family = AF_INET;
hint.sin_port = htons(54000);
hint.sin_addr.S_un.S_addr = INADDR_ANY;
bind(listening, (sockaddr*)&hint, sizeof(hint));
listen(listening, SOMAXCONN);
sockaddr_in client;
int clientSize = sizeof(client);
SOCKET clientSocket = accept(listening, (sockaddr*)&client, &clientSize);
char host[NI_MAXHOST];
char service[NI_MAXSERV];
ZeroMemory(host, NI_MAXHOST);
ZeroMemory(service, NI_MAXSERV);
if(getnameinfo((sockaddr*)&client, sizeof(client), host, NI_MAXHOST, service, NI_MAXSERV, 0) == 0){
cout << host <<" connected on port " << service << endl;
} else {
inet_ntop(AF_INET, &client.sin_addr, host, NI_MAXHOST);
cout << host << " conneceted on port " << ntohs(client.sin_port) << endl;
}
char buf[4096];
while(true) {
ZeroMemory(buf, 4096);
int bytesReceived = recv(clientSocket, buf, 4096, 0);
if (bytesReceived == INVALID_SOCKET) {
cerr << "Error in recv(), Quitting.. " << endl;
return -2;
} else if(bytesReceived == 0) {
cout << "Client Disconnected" << endl;
break;
} else {
cout << string(buf, 0, bytesReceived) << endl;
send(clientSocket, buf, bytesReceived + 1, 0);
}
}
closesocket(clientSocket);
WSACleanup();
return 0;
}
|
import random
random_number = random.randint(0, 5) |
// NOTE(review): this fragment appears to be the body of an Activity's
// onCreate/setup method — confirm the enclosing context before reuse.

// Setup the view for the list of books
RecyclerView bookList = findViewById(R.id.books_list);
bookList.setLayoutManager(new LinearLayoutManager(this));

// Create the books adapter and connect it to the views
BooksAdapter adapter = new BooksAdapter();
bookList.setAdapter(adapter);

// Get the books from a remote API (presumably asynchronous, populating the
// adapter when the response arrives — verify in getBooksFromAPI)
getBooksFromAPI(adapter);

// Set an onTextChangedListener to the search bar so the list filters live
// while the user types
EditText searchBar = findViewById(R.id.edit_text_search);
searchBar.addTextChangedListener(new TextWatcher(){
@Override
public void onTextChanged(CharSequence s, int start, int before, int count){
// Pass the text to the adapter to enable filtering
adapter.filter(s.toString());
}
});

// Set an onItemSelectedListener to the spinner so picking a category
// re-filters the list
Spinner filterSpinner = findViewById(R.id.spinner_filter);
filterSpinner.setOnItemSelectedListener(new OnItemSelectedListener(){
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id){
// Pass the selection to the adapter to enable filtering
adapter.filterBySelection(position);
}
});
<filename>src/colors/animate.js
import React from 'react';
import {Spring} from 'react-motion';
export function animateTo(n, t, color){
const node = document.getElementById(n);
const target = document.getElementById(t);
if(node && target){
const nodeRect = node.getBoundingClientRect();
const targetRect = target.getBoundingClientRect();
const container = document.createElement('div');
document.body.appendChild(container);
React.render(
<Spring
defaultValue={{ val: {
left: nodeRect.left + nodeRect.width / 2,
top: nodeRect.top + nodeRect.height / 2,
opacity: 1
}}}
endValue={{val: {
left: targetRect.left + targetRect.width / 2,
top: targetRect.top + targetRect.height / 2,
opacity: 0
}}}>
{interpolated =>
<div className="color-ball" style={{
backgroundColor: `rgb(${color.r},${color.g},${color.b})`,
left: interpolated.val.left,
top: interpolated.val.top,
opacity: interpolated.val.opacity
}} />
}
</Spring>
, container);
setTimeout(function(){
container.remove();
}, 1000);
}
}
|
def count_words(phrase):
    """Return the number of whitespace-separated words in *phrase*."""
    words = phrase.split()
    return len(words)


print(count_words("I love programming")) # prints 3
#include<stdio.h>
/* Find and print the longest strictly increasing contiguous subarray. */
int main()
{
    int arr[] = {1, 2, 3, 4, 5, 6, 7, 8, 1, 2};
    int n = sizeof(arr)/sizeof(arr[0]);

    /* curr_length: length of the increasing run ending at the current index */
    int curr_length = 1;

    /* best_length: longest run seen so far, together with the index,
     * best_idx, of its last element */
    int best_length = 1;
    int best_idx = 0;

    /*
     * Scan adjacent pairs once.  BUGFIX: the original jumped
     * i += curr_length whenever a run ended, which skips the start of the
     * next increasing run (e.g. {1,2,1,2,3} reported "1 2" instead of
     * "1 2 3").  A new run can begin at any index, so advance by one and
     * reset the current length instead.
     */
    for (int i = 0; i < n - 1; i++) {
        if (arr[i] < arr[i + 1]) {
            curr_length++;
            if (curr_length > best_length) {
                best_length = curr_length;
                best_idx = i + 1;
            }
        } else {
            curr_length = 1;
        }
    }

    /* Print the longest increasing subarray */
    printf("The longest increasing subarray is: \n");
    for (int j = best_idx - best_length + 1; j <= best_idx; j++ )
        printf("%d ",arr[j]);
    return 0;
}
# Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/cloud/asset/v1p5beta1/asset_service.proto for package 'google.cloud.asset.v1p5beta1'
# Original file comments:
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/cloud/asset/v1p5beta1/asset_service_pb'
module Google
  module Cloud
    module Asset
      module V1p5beta1
        module AssetService
          # Asset service definition.
          # NOTE: this file is generated by the protocol buffer compiler
          # (see the header); edits will be lost on regeneration.
          class Service

            include ::GRPC::GenericService

            # How request/response messages are (de)serialized on the wire.
            self.marshal_class_method = :encode
            self.unmarshal_class_method = :decode
            # Fully-qualified gRPC service name.
            self.service_name = 'google.cloud.asset.v1p5beta1.AssetService'

            # Lists assets with time and resource types and returns paged results in
            # response.
            rpc :ListAssets, ::Google::Cloud::Asset::V1p5beta1::ListAssetsRequest, ::Google::Cloud::Asset::V1p5beta1::ListAssetsResponse
          end

          # Client stub class for calling AssetService.
          Stub = Service.rpc_stub_class
        end
      end
    end
  end
end
|
<reponame>VasilStoyanov/holiday-extras-tasks
// Left-to-right function composition: pipe(f, g)(x) === g(f(x)).
const pipe = (...fns) => initial => fns.reduce((value, fn) => fn(value), initial);

// Same, but each step may be async; the result is always a promise once any
// step returns one.
const asyncPipe = (...fns) => initial => fns.reduce(async (pending, fn) => fn(await pending), initial);

module.exports = { pipe, asyncPipe };
|
package br.com.controle.financeiro.model.entity;
import java.io.Serializable;
import java.util.UUID;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import org.springframework.data.jpa.domain.AbstractPersistable;
@Entity(name = "client")
public class Client extends AbstractPersistable<UUID> implements Serializable {
private String name;
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.DETACH)
@JoinColumn(name = "id_user")
private UserEntity owner;
public Client() {
super();
}
public Client(final UUID id, final String name, final UserEntity owner) {
super();
this.setId(id);
this.name = name;
this.owner = owner;
}
public String getName() {
return this.name;
}
public void setName(final String name) {
this.name = name;
}
public UserEntity getOwner() {
return owner;
}
public void setOwner(UserEntity owner) {
this.owner = owner;
}
}
|
import requests
# Fetch the example page over HTTP.
url = "http://www.example.com"
response = requests.get(url)

# Print the response body.
print(response.text)
package com.example.admin.bluetoothrwdemo.ui.fragment;
import android.support.v4.app.Fragment;
public abstract class BaseFragment extends Fragment {
public void runOnUiThread(Runnable runnable) {
if (getActivity() == null) {
return;
}
getActivity().runOnUiThread(runnable);
}
}
|
#!/bin/bash
set -o nounset
set -o errexit
shopt -s nullglob

mkdir -p logs/versions

# Record which machine and conjure version produced these logs.
(scutil --get ComputerName || hostname || echo "unknown host") 2> /dev/null | tee logs/versions/computer_name.txt
conjure --version | tee logs/versions/conjure_version.txt

# BUGFIX: under nounset a missing $1 previously died with an unhelpful
# "1: unbound variable"; fail with an explicit usage message instead.
nb_cores="${1:?usage: $0 <nb_cores>}"

scripts/modelling/gen_conjure_commands.sh
scripts/modelling/gen_models.sh "$nb_cores"
|
class Row:
    """A record whose column names are supplied externally via ``_columns``."""

    def __init__(self):
        # Column names stay unset until a subclass or loader assigns them.
        self._columns = None

    def to_dict(self):
        """Map each declared column name to the matching attribute value.

        Raises ``ValueError`` when no columns have been defined yet.
        """
        if self._columns is None:
            raise ValueError("Columns are not defined")
        return {column: getattr(self, column) for column in self._columns}
<filename>GradientDescent/gradient_descent.py
import numpy as np
def magnitude(array):
    """Return the Euclidean (L2) norm of ``array``."""
    return ((np.sum(array * array))**0.5)


def gradient_descent(gradient_func, x_init, neeta, tolerance=0.0001):
    """Minimize a function by following its gradient downhill.

    :param gradient_func: callable returning the gradient at a point
    :param x_init: starting point (scalar or numpy array); left unmodified
    :param neeta: learning rate
    :param tolerance: stop once the gradient magnitude drops to this or below
        (default 0.0001, matching the original hard-coded threshold)
    :return: the point where the gradient magnitude fell below ``tolerance``

    Note: like the original, this does not terminate if the iteration
    diverges for the given learning rate.
    """
    x = x_init
    while True:
        # Computing the gradient using the given function
        grad = gradient_func(x)
        if magnitude(grad) <= tolerance:
            return x
        # BUGFIX: rebind instead of the original in-place "x_init -= ...",
        # which silently mutated the caller's numpy array.
        x = x - (neeta * grad)
|
#!/bin/env bash

## Record the start time
start=`date +%s`

## Record the host being run on
echo "Hostname: $(eval hostname)"

# If not already on the path
#export PATH=/share/workshop/adv_scrnaseq/$USER/HTStream/bin:$PATH

## Set the parameters for the run
basepath=/share/workshop/adv_scrnaseq/$USER/scrnaseq_processing
resources=${basepath}'/resources'
fastqpath=${basepath}'/00-RawData'
echo $basepath
echo $resources
echo $fastqpath

# Create the output directory once, if needed.
[[ -d ${basepath}/01-HTStream ]] || mkdir ${basepath}/01-HTStream

# Run the HTStream preprocessing pipeline for every sample in samples.txt.
# The pipeline is assembled below as a single quoted command string (so no
# comments can go inside it) and does, in order: initial stats, PhiX screen,
# read overlapping, barcode/UMI extraction, polyA trimming, N removal,
# quality-window trimming, length filtering, adapter-dimer screen, rRNA
# counting, barcode/UMI re-insertion, and final stats.
for sample in `cat samples.txt`
do
  outpath=${basepath}/01-HTStream/${sample}_htstream/${sample}_htstream
  [[ -d ${basepath}/01-HTStream/${sample}_htstream ]] || mkdir ${basepath}/01-HTStream/${sample}_htstream
  echo "SAMPLE: ${sample}"
  echo "OUTPUT: ${outpath}"
  call="hts_Stats -L ${outpath}_scRNA.log -N 'compute stats on original dataset' \
  -1 ${fastqpath}/${sample}/${sample}_S*_R1_001.fastq.gz \
  -2 ${fastqpath}/${sample}/${sample}_S*_R2_001.fastq.gz | \
  hts_SeqScreener -A ${outpath}_scRNA.log -N 'screen for PhiX because I always do' \
  --check-read-2 | \
  hts_Overlapper -A ${outpath}_scRNA.log -N 'overlap reads' | \
  extract_BC-UMI.py --extract --read 1 --length 28 | \
  hts_PolyATTrim -A ${outpath}_scRNA.log -N 'trim 3 prime plolyA' \
  --skip_polyA \
  --no-right -x 100 | \
  hts_NTrimmer -A ${outpath}_scRNA.log -N 'Remove any N characters' | \
  hts_QWindowTrim -A ${outpath}_scRNA.log -N 'Quality window trim' | \
  hts_LengthFilter -A ${outpath}_scRNA.log -N 'Removed any read shorter than 50bp' \
  -m 50 -s | \
  hts_SeqScreener -A ${outpath}_scRNA.log -N 'Screen out any potential adapter dimers' \
  --check-read-2 \
  -s ${resources}/screen.fa | \
  hts_SeqScreener -A ${outpath}_scRNA.log -N 'count the number of rRNA reads'\
  -r \
  --check-read-2 \
  -s ${resources}/mouse_rrna.fasta | \
  extract_BC-UMI.py --insert --read 1 | \
  hts_Stats -A ${outpath}_scRNA.log -N 'final stats' \
  -f ${outpath} -F"
  # Echo the fully expanded command for the log, then run it.
  echo $call
  eval $call
done

# Report total wall-clock runtime in seconds.
end=`date +%s`
runtime=$((end-start))
echo $runtime
|
<gh_stars>1-10
package client
import (
"fmt"
"net/url"
"strings"
)
// extractHostFromRemoteURL converts Gitaly-style URL addresses of the form
// scheme://host:port to the "host:port" addresses used by `grpc.Dial`.
func extractHostFromRemoteURL(rawAddress string) (hostAndPort string, err error) {
	parsed, parseErr := url.Parse(rawAddress)
	if parseErr != nil {
		return "", fmt.Errorf("failed to parse remote addresses: %w", parseErr)
	}

	// A path or a missing host both indicate a malformed remote address.
	switch {
	case parsed.Path != "":
		return "", fmt.Errorf("remote addresses should not have a path: %q", parsed.Path)
	case parsed.Host == "":
		return "", fmt.Errorf("remote addresses should have a host")
	}

	return parsed.Host, nil
}
// extractPathFromSocketURL converts Gitaly-style URL addresses of the form
// unix:/path/to/socket into file paths: `/path/to/socket`.
const unixPrefix = "unix:"

func extractPathFromSocketURL(rawAddress string) (socketPath string, err error) {
	if !strings.HasPrefix(rawAddress, unixPrefix) {
		return "", fmt.Errorf("invalid socket address: %s", rawAddress)
	}
	// The prefix was verified above, so slicing it off is equivalent to
	// strings.TrimPrefix.
	return rawAddress[len(unixPrefix):], nil
}
|
<reponame>lburgazzoli/Chronicle-Queue<filename>transport-demo/src/main/java/net/openhft/chronicle/queue/transport/demo/package-info.java
/**
* Created by peter on 16/03/16.
*/
package net.openhft.chronicle.queue.transport.demo; |
package com.donler.gym.service;
import com.donler.gym.expection.AttrValidateException;
import com.donler.gym.expection.AuthValidateException;
import com.donler.gym.expection.TokenExpiredException;
import com.donler.gym.model.Config;
import com.donler.gym.model.Token;
import com.donler.gym.model.User;
import com.donler.gym.repo.TokenRepo;
import com.donler.gym.repo.UserRepo;
import com.donler.gym.util.NullCheckUtils;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Date;
/**
 * Created by jason on 4/15/16.
 *
 * Issues signed JWT tokens and resolves stored tokens back to their users.
 */
@Service
public class TokenService {

    @Autowired
    private Config config;

    @Autowired
    private TokenRepo tokenRepo;

    @Autowired
    private UserRepo userRepo;

    /**
     * Generates a signed JWT token.
     *
     * @param str the subject to embed in the token
     * @return the compacted JWT, signed with HS512 using the app secret
     */
    public String encodeToken(String str) {
        String afterStr = Jwts.builder().setSubject(str).signWith(SignatureAlgorithm.HS512, config.getAppSecret()).compact();
        return afterStr;
    }

    /**
     * Looks up a stored token, validates it, and returns its owning user.
     *
     * @param tokenStr the raw token string supplied by the client
     * @return the user the token belongs to
     * @throws AuthValidateException when the token is unknown/empty
     *         (message: "token must not be empty")
     * @throws TokenExpiredException when the token has expired
     * @throws AttrValidateException when no user matches the token
     */
    public User getUserFromCheckingToken(String tokenStr) {
        Token token = tokenRepo.findTokenByToken(tokenStr);
        if (NullCheckUtils.isNullOrEmpty(token)) {
            throw new AuthValidateException("token不能为空");
        }
        /**
         * TODO: needs testing (expiry comparison below)
         */
        if (token.getExpiredTime().before(new Date())) {
            throw new TokenExpiredException("token已过期,请重新登录");
        }
        User user = userRepo.findUserById(token.getUserId());
        if (NullCheckUtils.isNullOrEmpty(user)) {
            throw new AttrValidateException("未找到该用户");
        }
        return user;
    }
}
|
#
# Tests for bash's type flags on cells. Hopefully we don't have to implement
# this, but it's good to know the behavior.
#
# OSH follows a Python-ish model of types carried with values/objects, not
# locations.
#
# See https://github.com/oilshell/oil/issues/26
# Case: += on a plain string appends, on a -i cell it adds arithmetically;
# a non-numeric value in a -i cell is treated as zero.
#### declare -i with +=
declare s
s='1 '
s+=' 2 ' # string append
declare -i i
i='1 '
i+=' 2 ' # arith add
declare -i j
j=x # treated like zero
j+=' 2 ' # arith add
echo "[$s]"
echo $i
echo $j
## STDOUT:
[1 2 ]
3
2
## END

# Case: assigning a string to a -i cell evaluates it as arithmetic,
# including array subscript expressions (relied on by Nix's setup.sh).
#### declare -i with arithmetic inside strings (Nix, issue 864)
# example
# https://github.com/NixOS/nixpkgs/blob/master/pkgs/stdenv/generic/setup.sh#L379
declare -i s
s='1 + 2'
echo s=$s
declare -a array=(1 2 3)
declare -i item
item='array[1+1]'
echo item=$item
## STDOUT:
s=3
item=3
## END

# Case: inside (( )), += is always arithmetic regardless of declare flags.
#### append in arith context
declare s
(( s='1 '))
(( s+=' 2 ')) # arith add
declare -i i
(( i='1 ' ))
(( i+=' 2 ' ))
declare -i j
(( j='x ' )) # treated like zero
(( j+=' 2 ' ))
echo "$s|$i|$j"
## stdout: 3|3|2

# Case: declare parses its first argument dynamically: with -a a
# parenthesized value becomes an array literal, with +a it stays a string.
#### declare array vs. string: mixing -a +a and () ''
# dynamic parsing of first argument.
declare +a 'xyz1=1'
declare +a 'xyz2=(2 3)'
declare -a 'xyz3=4'
declare -a 'xyz4=(5 6)'
argv.py "${xyz1}" "${xyz2}" "${xyz3[@]}" "${xyz4[@]}"
## stdout: ['1', '(2 3)', '4', '5', '6']

# Case: [a]=b in an indexed-array literal evaluates 'a' as subscript 0, so
# index 0 is assigned twice; in an associative array they are distinct keys.
#### declare array vs. associative array
# Hm I don't understand why the array only has one element. I guess because
# index 0 is used twice?
declare -a 'array=([a]=b [c]=d)'
declare -A 'assoc=([a]=b [c]=d)'
argv.py "${#array[@]}" "${!array[@]}" "${array[@]}"
argv.py "${#assoc[@]}" "${!assoc[@]}" "${assoc[@]}"
## stdout-json: "['1', '0', 'd']\n['2', 'a', 'c', 'b', 'd']\n"
|
#!/usr/bin/env bash

# constants of the universe: deterministic timezone and locale
export TZ='UTC' LC_ALL='C'
# group-writable defaults for everything this script creates
umask 0002

# Absolute directory containing this script (symlinks resolved).
scriptsDir="$(dirname "$(readlink -f "$BASH_SOURCE")")"
# Name this script was invoked as; used in usage/help output.
self="$(basename "$0")"
#getFeatures() {
# featureDir="$thisDir/../features"
# features="$(echo $1 | tr "," "\n")"
#
# if [ "$1" = "full" ]; then
# features=($(ls $featureDir | grep -v '^_' | grep -v '.dpkg-'))
# else
# IFS=', ' read -r -a features <<< "$1"
#
# i=0
# processed=
# exclude=
# while [ i -lt ${#feature[@]} ]; do
# i=$(echo "$features" | head -n1)
#
# [[ " ${array[@]} " =~ " $i " ]] && continue
# processsed+=( $i )
#
# [ ! -d $featureDir/$i] && continue
#
# [ -s $featureDir/$i/feature.include ] && for i in $(cat $featureDir/$i/include); do include+=( $i ); done
# [ -s $featureDir/$i/feature.exclude ] && for i in $(cat $featureDir/$i/exclude); do include+=( $i ); done
# done
#}
options="$(getopt -n "$BASH_SOURCE" -o '+' --long 'flags:,flags-short:,help:,usage:,sample:' -- "$@")"
dFlags='help,version'
dFlagsShort='h?'
dHelp=
dUsage=
# Consume the meta-options captured in $options and populate the derived
# definitions: $dFlags/$dFlagsShort (accepted flags), $dHelp and $dUsage
# (help text). Exits with status 1 if any long or short flag is defined twice.
__cgetopt() {
	eval "set -- $options" # in a function since otherwise "set" will overwrite the parent script's positional args too
	unset options

	local usagePrefix='usage:'
	local samplePrefix=' eg.:'
	while true; do
		local flag="$1"; shift
		case "$flag" in
			--flags) dFlags="${dFlags:+$dFlags,}$1"; shift ;;
			--flags-short) dFlagsShort="${dFlagsShort}$1"; shift ;;
			--help) dHelp+="$1"$'\n'; shift ;;
			--usage) dUsage+="$usagePrefix $self${1:+ $1}"$'\n'
				usagePrefix='      '
				samplePrefix=' eg.:'
				shift
				;;
			--sample) dUsage+="$samplePrefix $self${1:+ $1}"$'\n'
				samplePrefix='     '
				usagePrefix=$'\n''usage:'
				shift
				;;
			--) break ;;
			*) echo >&2 "error: unexpected $BASH_SOURCE flag '$flag'"; exit 1 ;;
		esac
	done

	# Definition errors go to stderr so they cannot pollute captured stdout.
	local dup="$(sort <<< "${dFlags//,/$'\n'}" | uniq -d)"
	[ -n "$dup" ] && { echo >&2 "error: duplicate in flags definition \"${dup//$'\n'/\" \"}\""; exit 1; }
	dup="$(grep -o . <<< "${dFlagsShort}" | sort | uniq -d)"
	[ -n "$dup" ] && { echo >&2 "error: duplicate in flags-short definition \"${dup//$'\n'/\" \"}\""; exit 1; }

	return 0
}
__cgetopt
# Print the short usage banner: the collected --usage/--sample lines (if any)
# followed by the script name and build version.
usage() {
	if [ -n "$dUsage" ]; then
		printf '%s\n' "$dUsage"
	fi
	echo "$self: gardenlinux build version $($scriptsDir/garden-version)"
}
# Print the extended help: version banner, the usage block, then the
# accumulated --help text.
xusage() {
	printf '%s\n\n' "$self: gardenlinux build version $($scriptsDir/garden-version)"
	[ -z "$dUsage" ] || printf '%s\n' "$dUsage"
	echo "$dHelp"
}
# Print an optional error message and the usage banner to stderr, then exit 1.
eusage() {
	[ "$#" -eq 0 ] || echo >&2 "error: $*"
	echo >&2
	usage >&2
	exit 1
}
_dgetopt() {
getopt -n "error" \
-o "+$dFlagsShort" \
--long "$dFlags" \
-- "$@" \
|| eusage
}
dgetopt='options="$(_dgetopt "$@")"; eval "set -- $options"; unset options'
# Handle the flags every script shares (-h/-?/--help and --version); any other
# flag falls through for the caller's own case statement to process.
dgetopt-case() {
	local flag="$1"
	shift
	if [ "$flag" = '-h' ] || [ "$flag" = '-?' ] || [ "$flag" = '--help' ]; then
		xusage
		exit 0
	elif [ "$flag" = '--version' ]; then
		echo "version: $($scriptsDir/garden-version)"
		exit 0
	fi
}
|
<filename>first-example-application/src/main/java/com/github/robindevilliers/onlinebankingexample/model/StandingOrder.java<gh_stars>10-100
package com.github.robindevilliers.onlinebankingexample.model;
import java.util.Date;
public class StandingOrder {
private Integer id;
private Date dueDate;
private String description;
private String reference;
private StandingOrderPeriod period;
private Integer amount;
private String accountNumber;
private String sortCode;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Date getDueDate() {
return dueDate;
}
public void setDueDate(Date dueDate) {
this.dueDate = dueDate;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getReference() {
return reference;
}
public void setReference(String reference) {
this.reference = reference;
}
public StandingOrderPeriod getPeriod() {
return period;
}
public void setPeriod(StandingOrderPeriod period) {
this.period = period;
}
public Integer getAmount() {
return amount;
}
public void setAmount(Integer amount) {
this.amount = amount;
}
public String getAccountNumber() {
return accountNumber;
}
public void setAccountNumber(String accountNumber) {
this.accountNumber = accountNumber;
}
public String getSortCode() {
return sortCode;
}
public void setSortCode(String sortCode) {
this.sortCode = sortCode;
}
}
|
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
import { Interpreter } from "../interpreter/interpreter";
import { TestInfo } from "./test_runner";
import { ValueOps } from "../interpreter/value";
import { MIRAssembly, PackageConfig } from "../compiler/mir_assembly";
import { MIREmitter } from "../compiler/mir_emitter";
const corelib_test = `
namespace NSTestCoreLibraries;
entrypoint function mathAbs(): Float {
var n: Float = '-2.0'@Float;
return Math::abs(n);
}
entrypoint function mathAcos(): Float {
var x: Float = '0.0'@Float;
return Math::acos(x);
}
entrypoint function mathAsin(): Float {
var x: Float = '1.0'@Float;
return Math::asin(x);
}
entrypoint function mathAtan(): Float {
var x: Float = '3.0'@Float;
return Math::atan(x);
}
entrypoint function mathAtan2(): Float {
var y: Float = '3.0'@Float;
var x: Float = '5.0'@Float;
return Math::atan2(y, x);
}
entrypoint function mathCeil(): Float {
var n: Float = '6.4'@Float;
return Math::ceil(n);
}
entrypoint function mathCos(): Float {
var x: Float = '90.0'@Float;
return Math::cos(x);
}
entrypoint function mathFloor(): Float {
var n: Float = '3.1'@Float;
return Math::floor(n);
}
entrypoint function mathLog(): Float {
var n: Float = '4.0'@Float;
return Math::log(n);
}
entrypoint function mathPow(): Float {
var b: Float = '3.0'@Float;
var e: Float = '3.0'@Float;
return Math::pow(b, e);
}
entrypoint function mathRound(): Float {
var n: Float = '3.4'@Float;
return Math::round(n);
}
entrypoint function mathSin(): Float {
var x: Float = '90.0'@Float;
return Math::sin(x);
}
entrypoint function mathSqrt(): Float {
var x: Float = '36.0'@Float;
return Math::sqrt(x);
}
entrypoint function mathTan(): Float {
var x: Float = '60.0'@Float;
return Math::tan(x);
}
entrypoint function literalFooBarReverse(): String {
var foobar: String = "foobar";
return foobar->reverse();
}
entrypoint function literalUpCase(): String {
var foobar: String = "foobar";
return foobar->upperCase();
}
entrypoint function literalDownCase(): String {
var foobar: String = "FOOBAR";
return foobar->lowerCase();
}
`;
const corelib_tests: TestInfo[] = [
{ name: "mathAbs", input: ["mathAbs"], expected: "2" },
{ name: "mathAcos", input: ["mathAcos"], expected: "1.5707963267948966" },
{ name: "mathAsin", input: ["mathAsin"], expected: "1.5707963267948966" },
{ name: "mathAtan", input: ["mathAtan"], expected: "1.2490457723982544" },
{ name: "mathAtan2", input: ["mathAtan2"], expected: "0.5404195002705842" },
{ name: "mathCeil", input: ["mathCeil"], expected: "7" },
{ name: "mathCos", input: ["mathCos"], expected: "-0.4480736161291702" },
{ name: "mathFloor", input: ["mathFloor"], expected: "3" },
{ name: "mathLog", input: ["mathLog"], expected: "1.3862943611198906" },
{ name: "mathPow", input: ["mathPow"], expected: "27" },
{ name: "mathRound", input: ["mathRound"], expected: "3" },
{ name: "mathSin", input: ["mathSin"], expected: "0.8939966636005579" },
{ name: "mathSqrt", input: ["mathSqrt"], expected: "6" },
{ name: "mathTan", input: ["mathTan"], expected: "0.320040389379563" },
{ name: "stringReverse", input: ["literalFooBarReverse"], expected: "\"raboof\"" },
{ name: "stringUpCase", input: ["literalUpCase"], expected: "\"FOOBAR\"" },
{ name: "stringDownCase", input: ["literalDownCase"], expected: "\"foobar\"" }
];
// Compile the provided core-library sources plus the corelib test snippet
// into a MIR assembly (or a list of compile errors).
function corelib_setup(core: { relativePath: string, contents: string }[]): { masm: MIRAssembly | undefined, errors: string[] } {
    const testFile = { relativePath: "corelib_tests.bsq", contents: corelib_test };
    return MIREmitter.generateMASM(new PackageConfig(), [...core, testFile]);
}
// Evaluate the named entrypoint in the corelib test namespace and render the
// result with the diagnostic printer.
function corelib_action(assembly: MIRAssembly, args: any[]): any {
    const interp = new Interpreter(assembly, true, true, true);
    const result = interp.evaluateRootNamespaceCall("NSTestCoreLibraries", args[0], []);
    return ValueOps.diagnosticPrintValue(result);
}

// Suite descriptor consumed by the test runner.
const testCoreLibs = { name: "CoreLibs", setup: corelib_setup, action: corelib_action, tests: corelib_tests, xmlid: "CoreLibUnitTests" };
const collectionlib_test = `
namespace NSTestCollections;
entrypoint function findLastMatchingElementInList(): { f: Int, b: Int } {
return List[{ f: Int, b: Int }]@{ @{ f = 1, b = 2 }, @{ f = 2, b = 3 }, @{ f = 2, b = 4 } }->findLast(fn(x) => x.f == 2);
}
entrypoint function tryFindLastMatchingElementInList1(): { f: Int, b: Int } | None {
return List[{ f: Int, b: Int }]@{ @{ f = 1, b = 2 }, @{ f = 2, b = 3 }, @{ f = 2, b = 4 } }->tryFindLast(fn(x) => x.f == 3);
}
entrypoint function tryFindLastMatchingElementInList2(): { f: Int, b: Int } | None {
return List[{ f: Int, b: Int }]@{ @{ f = 1, b = 2 }, @{ f = 2, b = 3 }, @{ f = 2, b = 4 } }->tryFindLast(fn(x) => x.f == 2);
}
entrypoint function fillList(): List[Int] {
var list: List[Int] = List[Int]@{1,2,3,4,5};
return list->fill(1);
}
`;
const collectionlib_tests: TestInfo[] = [
{ name: "findLastMatchingElementInList", input: ["findLastMatchingElementInList"], expected: "@{ b=4, f=2 }" },
{ name: "tryFindLastMatchingElementInList1", input: ["tryFindLastMatchingElementInList1"], expected: "none" },
{ name: "tryFindLastMatchingElementInList2", input: ["tryFindLastMatchingElementInList2"], expected: "@{ b=4, f=2 }" },
{ name: "fillList", input: ["fillList"], expected: "NSCore::List[T=NSCore::Int]@{ 1, 1, 1, 1, 1 }" }
];
// Compile the provided core-library sources plus the collections test snippet
// into a MIR assembly (or a list of compile errors).
function collectionlib_setup(core: { relativePath: string, contents: string }[]): { masm: MIRAssembly | undefined, errors: string[] } {
    const testFile = { relativePath: "collectionlib_tests.bsq", contents: collectionlib_test };
    return MIREmitter.generateMASM(new PackageConfig(), [...core, testFile]);
}
// Evaluate the named entrypoint in the collections test namespace and render
// the result with the diagnostic printer.
function collectionlib_action(assembly: MIRAssembly, args: any[]): any {
    const interp = new Interpreter(assembly, true, true, true);
    const result = interp.evaluateRootNamespaceCall("NSTestCollections", args[0], []);
    return ValueOps.diagnosticPrintValue(result);
}

// Suite descriptor consumed by the test runner.
const testCollectionLibs = { name: "CollectionLibs", setup: collectionlib_setup, action: collectionlib_action, tests: collectionlib_tests, xmlid: "CollectionLibUnitTests" };

export { testCoreLibs, testCollectionLibs };
|
#!/bin/sh
#go build --tags "icu json1 fts5 secure_delete" -ldflags='-s -w' -o ~/Public/gnote-linux-amd64

# Build gnote for Ubuntu 18.04 inside a docker image, publish the binary to
# ~/Public and upload the artifacts to S3.
ARCH=$(uname -m)
docker run --rm -v "$(pwd):/work" --entrypoint /usr/local/go/bin/go --workdir /work golang-ubuntu1804-build:latest build --tags "json1 fts5 secure_delete" -ldflags='-s -w' -o "gnote-ubuntu1804-${ARCH}"
mv "gnote-ubuntu1804-${ARCH}" ~/Public/
#go build --tags "json1 fts5 secure_delete" -ldflags='-s -w' -o ~/Public/gnote-linux-amd64

# Abort if the publish directory is missing; the copy/upload steps below must
# not run from the wrong working directory.
cd ~/Public || exit 1
echo "Quit current gnote to allow update. Build windows version and manually sync to Public. Then hit enter"
read _junk
cp -a "gnote-ubuntu1804-${ARCH}" ~/gnote/gnote
for f in gnote-bundle-windows-amd64.7z "gnote-ubuntu1804-${ARCH}"; do
    aws s3 cp "$f" s3://xvt-public-repo/pub/devops/ --profile xvt_aws
done
|
const config = {
botName: '20TIN_BOT',
ownerName: '20TIN',
}
|
<reponame>kevanantha/radix<gh_stars>10-100
export { wrapPageElement } from './src/wrapPageElement';
|
// Modal overlay showing the generated embed code for the current wheel
// configuration in a read-only textarea. Clicking the backdrop dismisses it.
class OverlayView {
    private _codeTextArea: HTMLTextAreaElement;

    // Embed snippet; {{sections}} / {{config}} are filled in by updateCode().
    private readonly codeTemplate =
        '<canvas id="wheel-of-fortune"></canvas>\n'+
        '<button id="wheel-of-fortune--spin">Spin</button>\n'+
        '<output id="wheel-of-fortune--output"></output>\n'+
        '<script>\n'+
        '\tvar wheelOfFortune = new WheelOfFortune(document.getElementById(\'wheel-of-fortune\'), {{sections}}, {{config}});\n'+
        '\twheelOfFortune.setWinCallback(function(section) {\n'+
        '\t\tdocument.getElementById(\'wheel-of-fortune--output\').value = section.text;\n' +
        '\t});\n'+
        '\tdocument.getElementById(\'wheel-of-fortune--spin\').addEventListener(\'click\', function() {wheelOfFortune.spin()});\n'+
        '</script>\n';

    constructor(sections: SectionData[], config: WheelOfFortuneConfig) {
        const backdrop = document.createElement('div');
        backdrop.classList.add('overlay--wrapper');
        // Dismiss only when the backdrop itself (not a child) is clicked.
        backdrop.addEventListener('click', (event) => {
            if (event.target === backdrop) {
                document.body.removeChild(backdrop);
            }
        });

        const panel = document.createElement('div');
        panel.classList.add('container-fluid', 'overlay');

        this._codeTextArea = document.createElement('textarea');
        this._codeTextArea.classList.add('form-control');
        this._codeTextArea.readOnly = true;

        panel.appendChild(this._codeTextArea);
        backdrop.appendChild(panel);
        document.body.appendChild(backdrop);

        this.updateCode(sections, config);
    }

    // Re-render the embed snippet for the given wheel sections and config.
    public updateCode(sections: SectionData[], config: WheelOfFortuneConfig) {
        const sectionsJson = JSON.stringify(sections);
        const configJson = JSON.stringify(config);
        this._codeTextArea.value = this.codeTemplate
            .replace('{{sections}}', sectionsJson)
            .replace('{{config}}', configJson);
    }
}
|
#!/usr/bin/env bash
# -----------------------------------------------------------------------------
# This file is part of the xPack distribution.
# (https://xpack.github.io)
# Copyright (c) 2020 Liviu Ionescu.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose is hereby granted, under the terms of the MIT license.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Safety settings (see https://gist.github.com/ilg-ul/383869cbb01f61a51c4d).
if [[ ! -z ${DEBUG} ]]
then
set ${DEBUG} # Activate the expand mode if DEBUG is anything but empty.
else
DEBUG=""
fi
set -o errexit # Exit if command failed.
set -o pipefail # Exit if pipe failed.
set -o nounset # Exit if variable not set.
# Remove the initial space and instead use '\n'.
IFS=$'\n\t'
# -----------------------------------------------------------------------------
# Identify the script location, to reach, for example, the helper scripts.
script_path="$0"
if [[ "${script_path}" != /* ]]
then
# Make relative path absolute.
script_path="$(pwd)/$0"
fi
script_name="$(basename "${script_path}")"
script_folder_path="$(dirname "${script_path}")"
script_folder_name="$(basename "${script_folder_path}")"
# =============================================================================
scripts_folder_path="$(dirname $(dirname $(dirname "${script_folder_path}")))/scripts"
helper_folder_path="${scripts_folder_path}/helper"
# -----------------------------------------------------------------------------
source "${scripts_folder_path}/defs-source.sh"
# Helper functions
source "${helper_folder_path}/common-functions-source.sh"
source "${helper_folder_path}/common-apps-functions-source.sh"
source "${helper_folder_path}/test-functions-source.sh"
# Reuse the test functions defined in the build scripts.
source "${scripts_folder_path}/common-apps-functions-source.sh"
# Common native & docker functions (like run_tests()).
source "${scripts_folder_path}/tests/common-functions-source.sh"
# -----------------------------------------------------------------------------
# Command-line parsing. The usage line lists every option handled below
# (the previously undocumented --image included).
if [ $# -lt 1 ]
then
  echo "usage: $(basename "$0") [--32] [--image NAME] [--version X.Y.Z] --base-url URL"
  exit 1
fi

force_32_bit=""
image_name=""
RELEASE_VERSION="${RELEASE_VERSION:-$(get_current_version)}"
BASE_URL="${BASE_URL:-release}"

while [ $# -gt 0 ]
do
  case "$1" in

    --32)
      force_32_bit="y"
      shift
      ;;

    --image)
      image_name="$2"
      shift 2
      ;;

    --version)
      RELEASE_VERSION="$2"
      shift 2
      ;;

    --base-url)
      BASE_URL="$2"
      shift 2
      ;;

    --*)
      echo "Unsupported option $1."
      exit 1
      ;;

    *)
      echo "Unsupported arg $1."
      exit 1
      ;;

  esac
done

echo "BASE_URL=${BASE_URL}"
# -----------------------------------------------------------------------------
if [ -f "/.dockerenv" ]
then
if [ -n "${image_name}" ]
then
# When running in a Docker container, update it.
update_image "${image_name}"
else
echo "No image defined, quit."
exit 1
fi
else
# Not inside a Docker; perhaps a GitHub Actions VM.
if [ "${GITHUB_ACTIONS:-""}" == "true" -a "${RUNNER_OS:-""}" == "Linux" ]
then
# Currently "ubuntu20".
update_image "github-actions-${ImageOS}"
fi
fi
# -----------------------------------------------------------------------------
detect_architecture
prepare_env "$(dirname "${scripts_folder_path}")"
if [ "${BASE_URL}" == "release" ]
then
BASE_URL=https://github.com/xpack-dev-tools/${APP_LC_NAME}-xpack/releases/download/${RELEASE_VERSION}/
fi
install_archive
run_tests
good_bye
# Completed successfully.
exit 0
# -----------------------------------------------------------------------------
|
<reponame>kporten/template-react
import '@cypress/code-coverage/support';
import '@testing-library/cypress/add-commands';
// Force a deterministic browser locale before each page load so that
// i18n-dependent assertions behave the same in every environment.
Cypress.on('window:before:load', (win) => {
  const nav = win.navigator;
  Object.defineProperty(nav, 'language', { value: 'en' });
  Object.defineProperty(nav, 'languages', { value: ['en'] });
});
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Configure built-in history.
__set_at_least() {
# Ensure that the environment variable named in $1 is greater than or equal
# to $2.
eval "[[ $2 -gt \${$1:-0} ]] && $1=$2"
}
HISTCONTROL=ignoredups
__set_at_least HISTSIZE $((10 * 1000))
__set_at_least HISTFILESIZE $((100 * 1000))
shopt -s histappend
unset -f __set_at_least
# Public function to temporarily enable/disable history recording.
histctl() {
  # Drop the histctl invocation itself from the history list (only possible
  # while history is still enabled).
  if shopt -oq history; then
    history -d -1
  fi
  if [ "$*" = 'on' ]; then
    shopt -so history
  elif [ "$*" = 'off' ]; then
    shopt -uo history
  else
    printf '%s\n' 'Usage: histctl on|off' >&2
    return 1
  fi
}
# Configure custom history logs.
__history_prompt_index=-1
__history_command=
__history_fill_common_args() {
args+=(
"pid=${$}"
"prompt_index=${__history_prompt_index}"
"shell=${BASH:-bash}"
"pwd=${PWD}"
"jobs=$(jobs -l)"
"command=${__history_command}"
)
}
# Runs via bash-preexec before each prompt: log the end of the previous
# command (exit statuses, or the background pid when it was backgrounded),
# then advance the prompt index.
# Renamed from the misspelled "__history_preecmd" to the conventional
# precmd spelling; the registration below is updated to match.
__history_precmd() {
  if [[ -n "$__history_command" ]]; then
    local args
    __history_fill_common_args
    args+=("stage=command_end")
    if [[ -n "$BPE_LAST_COMMAND_DID_BG" ]]; then
      args+=("command_pid=${!}")
    else
      args+=("command_retvals=${BP_PIPESTATUS[*]}")
    fi
    shell-history append "${args[@]}"
    __history_command=
  fi
  # Assignment form rather than `let ++x`: let returns status 1 whenever the
  # result is 0 (i.e. on the very first prompt), which would make this hook
  # appear to fail.
  __history_prompt_index=$((__history_prompt_index + 1))
}
precmd_functions+=(__history_precmd)
__history_preexec() {
shopt -oq history || return 0
local histctl_regex='^[[:space:]]*histctl([[:space:]]|$)'
[[ "$1" =~ $histctl_regex ]] && return 0
__history_command="$1"
local args
__history_fill_common_args
args+=("stage=command_begin")
shell-history append "${args[@]}"
return 0 # Don't prevent the command from running when history saving fails.
}
preexec_functions+=(__history_preexec)
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve the real location of this script; readlink -f is not available on
# every platform (e.g. macOS), hence the probe-and-fallback.
if readlink -f "$0" > /dev/null 2>&1
then
    SERVER_BIN=$(readlink -f "$0")
else
    SERVER_BIN="$0"
fi

BIN_HOME=$(dirname "${SERVER_BIN}")
JAFKA_HOME=$(dirname "${BIN_HOME}")
export JAFKA_HOME

# Pull in the shared environment (classpath, JVM options).
. "${BIN_HOME}/env.sh"

# "$@" (not $*) so that arguments containing whitespace reach the tool intact.
java io.jafka.console.GetOffsetShell "$@"
|
/*
* dynamic_conf.c
*
* Created on: Dec 27, 2017
* by: <NAME>
*/
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include "dynamic_conf.h"
#include "../../configure.h"
#include "../../context.h"
#include "../../configure_override.h"
#include "../../lib/memory.h"
#include "mmt_bus.h"
#include "server.h"
static int _receive_message( const char *message, size_t message_size, void *user_data ){
const command_t *cmd = (command_t *) message;
pid_t *pid = (pid_t *) user_data;
int i;
size_t size = conf_get_identities( NULL );
command_param_t params[ size ];
probe_conf_t *config;
bool is_need_to_restart;
ASSERT( pid != NULL, "Must not be NULL" );
switch( cmd->id ){
case DYN_CONF_CMD_START:
//it is running
if( *pid > 0 ){
return DYN_CONF_CMD_REPLY_CHILD_RUNNING;
}else{
*pid = 0; //once the value is changed to 0, the main process will (re)create the processing process
return DYN_CONF_CMD_REPLY_OK;
}
break;
case DYN_CONF_CMD_STOP:
if( *pid <= 0 )
return DYN_CONF_CMD_REPLY_CHILD_STOPPING;
else{
//send a Ctrl+C signal to the processing process
kill( *pid, SIGINT );
return DYN_CONF_CMD_REPLY_OK;
}
break;
//The main process must always take into account the modifications of these parameters.
// these modifications will be transfered to its child when restarting
case DYN_CONF_CMD_UPDATE:
//As the message was validated and parsed summarily by server.c, we do not need to check it again
size = parse_command_parameters( cmd->parameter, cmd->parameter_length, params, size );
config = get_context()->config;
//do we need to restart the processing process to take into account the parameters ???
is_need_to_restart = false;
for( i=0; i<size; i++ ){
//update the config in the main process.
//this update will be transfered to its children once they are created (by using fork)
conf_override_element_by_id( config, params[i].ident, params[i].data );
//do we need to restart the processing process to take into account this parameter??
if( dynamic_conf_need_to_restart_to_update( params[i].ident ) )
is_need_to_restart = true;
}
//the child processing process is running
//if need to restart the child to take into account the parameters
if( *pid > 0 && is_need_to_restart ){
//tell it to restart
kill( *pid, SIGRES );
//take in charge the command
return DYN_CONF_CMD_REPLY_OK;
}
//the processing process is not running => this process will take in charge the command
if( *pid < 0 ){
//take in charge the command
return DYN_CONF_CMD_REPLY_OK;
}
break;
default: //must not happen
log_write( LOG_ERR, "Command is not supported: %d", cmd->id );
}
return DYN_CONF_CMD_REPLY_DO_NOTHING;
}
/*
 * Create the shared mmt_bus and register _receive_message as a subscriber.
 * `processing_pid` is the shared pid slot handed through to the handler.
 * Returns whether the bus was created successfully (subscription is attempted
 * regardless; the caller checks the return value).
 */
bool dynamic_conf_alloc_and_init( pid_t *processing_pid ){
	const bool created = mmt_bus_create();
	mmt_bus_subscribe( _receive_message, processing_pid );
	return created;
}
void dynamic_conf_release(){
mmt_bus_release();
}
void dynamic_conf_check(){
mmt_bus_subcriber_check();
}
pid_t dynamcic_conf_create_new_process_to_receive_command( const char * unix_socket_domain_descriptor_name, void (*clean_resource)() ){
//duplicate the current process into 2 different processes
pid_t child_pid = fork();
if( child_pid < 0 ) {
ABORT( "Fork error: %s", strerror(errno) );
return EXIT_FAILURE;
}
if (child_pid == 0) {
//we are in child process
log_write( LOG_INFO, "Create a new sub-process %d for dynamic configuration server", getpid() );
dynamic_conf_server_start_processing( unix_socket_domain_descriptor_name );
//clean resource
clean_resource();
return EXIT_SUCCESS;
}
//in parent process
return child_pid;
}
|
<reponame>smagill/opensphere-desktop
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.2-147
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.26 at 02:04:22 PM MST
//
package net.opengis.gml._311;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* Position error estimate (or accuracy) data.
*
* <p>Java class for AbstractPositionalAccuracyType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="AbstractPositionalAccuracyType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://www.opengis.net/gml}measureDescription" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AbstractPositionalAccuracyType", propOrder = {
"measureDescription"
})
@XmlSeeAlso({
RelativeInternalPositionalAccuracyType.class,
CovarianceMatrixType.class,
AbsoluteExternalPositionalAccuracyType.class
})
public abstract class AbstractPositionalAccuracyType {
protected CodeType measureDescription;
/**
* Gets the value of the measureDescription property.
*
* @return
* possible object is
* {@link CodeType }
*
*/
public CodeType getMeasureDescription() {
return measureDescription;
}
/**
* Sets the value of the measureDescription property.
*
* @param value
* allowed object is
* {@link CodeType }
*
*/
public void setMeasureDescription(CodeType value) {
this.measureDescription = value;
}
}
|
ALTER TABLE users
ADD email VARCHAR(255) NOT NULL AFTER name; |
import React, { Component } from 'react';
import Card from '@material-ui/core/Card';
import Typography from '@material-ui/core/Typography';
import Grid from '@material-ui/core/Grid';
class NoteList extends Component {
state = {
notes: [
{ name: 'Note 1' },
{ name: 'Note 2' },
{ name: 'Note 3' },
// ... additional notes
]
};
render() {
return (
<div>
<Grid container spacing={3}>
{this.state.notes.map((note, index) => {
return (
<Grid item key={index}>
<Card style={{ height: 200 }} className="selectable-project">
<Typography variant="button" color="secondary" noWrap gutterBottom>{note.name}</Typography>
</Card>
</Grid>
);
})}
</Grid>
</div>
);
}
}
export default NoteList; |
<reponame>idcodeoverflow/TravelApplication<filename>cblibrary/src/main/java/cbedoy/cblibrary/services/RestService.java
package cbedoy.cblibrary.services;
import android.os.AsyncTask;
import android.os.Build;
import android.util.Log;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONTokener;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import cbedoy.cblibrary.business.InjectionManager;
import cbedoy.cblibrary.interfaces.IRestService;
import cbedoy.cblibrary.utils.Utils;
/**
* Created by <NAME> on 28/12/2014.
*
* Mobile App Developer
* CBLibrary
*
* E-mail: <EMAIL>
* Facebook: https://www.facebook.com/carlos.bedoy
* Github: https://github.com/cbedoy
*/
public class RestService implements IRestService {
private int mPort;
private String mUrl;
@Override
public void setURL(String url) {
mUrl = url;
}
@Override
public void setPort(int port) {
mPort = port;
}
@Override
@SuppressWarnings("unchecked")
public void request(String url, HashMap<String, Object> parameters, IRestCallback callback) {
HashMap<String, Object> request = new HashMap<String, Object>();
request.put("url", url);
request.put("callback", callback);
request.put("parameters", parameters);
AsyncCall call = new AsyncCall();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
call.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, request);
} else {
call.execute(request);
}
}
private class AsyncCall extends AsyncTask<HashMap<String, Object>, Void, HashMap<String, Object>> {
@Override
@SuppressWarnings("unchecked")
protected HashMap<String, Object> doInBackground(HashMap<String, Object>... params) {
HashMap<String, Object> request = params[0];
String url = request.get("url").toString();
HashMap<String, Object> parameters = (HashMap<String, Object>) request.get("parameters");
if (parameters == null) {
parameters = new HashMap<String, Object>();
}
HttpResponse httpResponse;
HttpUriRequest httpUriRequest;
HttpClient defaultHttpClient = new DefaultHttpClient();
HashMap<String, Object> response;
try {
if (InjectionManager.getInstance().isProduction())
{
ArrayList<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
for (Map.Entry<String, Object> entry : parameters.entrySet())
{
NameValuePair pair = new BasicNameValuePair(entry.getKey(), entry.getValue().toString());
nameValuePairs.add(pair);
}
httpUriRequest = new HttpPost(mUrl + mPort + url);
UrlEncodedFormEntity body = new UrlEncodedFormEntity(nameValuePairs);
((HttpPost) httpUriRequest).setEntity(body);
}
else
{
String query = Utils.mapToUrlDjangoString(url, parameters);
httpUriRequest = new HttpGet(mUrl +":"+ mPort + (query.length() > 0 ? ("" + query) : ""));
Log.e("Request", mUrl +":"+ mPort + (query.length() > 0 ? ("" + query) : ""));
}
httpResponse = defaultHttpClient.execute(httpUriRequest);
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"));
StringBuilder builder = new StringBuilder();
for (String line; (line = bufferedReader.readLine()) != null; )
{
builder.append(line);
}
JSONTokener jsonTokener = new JSONTokener(builder.toString());
JSONArray jsonArray = new JSONArray(jsonTokener);
if(jsonArray.length()>1)
{
HashMap<String, Object> objects = new HashMap<String, Object>();
for(int i=0; i<jsonArray.length(); i++)
{
objects.put("row_"+(i+1), Utils.toMap(jsonArray.getJSONObject(i)));
}
response = objects;
}
else
{
response = (HashMap<String, Object>) Utils.toMap(jsonArray.getJSONObject(0));
}
} catch (UnsupportedEncodingException uee) {
response = new HashMap<String, Object>();
response.put("status", false);
response.put("error", "char_encoding");
response.put("message", "Character Conversion Unavailable");
} catch (ClientProtocolException cpe) {
response = new HashMap<String, Object>();
response.put("status", false);
response.put("error", "http_protocol");
response.put("message", "HTTP Error Protocol");
} catch (IOException ioe) {
response = new HashMap<String, Object>();
response.put("status", false);
response.put("error", "io_exception");
response.put("message", "Connection Un Available");
} catch (JSONException jsone) {
response = new HashMap<String, Object>();
response.put("status", false);
response.put("error", "json_exception");
response.put("message", "Incorrect JSON Format");
}
HashMap<String, Object> result = new HashMap<String, Object>();
result.put("callback", request.get("callback"));
result.put("response", response);
return result;
}
@Override
@SuppressWarnings("unchecked")
protected void onPostExecute(HashMap<String, Object> result) {
IRestCallback callback = (IRestCallback) result.get("callback");
HashMap<String, Object> response = (HashMap<String, Object>) result.get("response");
callback.run(response);
super.onPostExecute(result);
}
}
}
|
./dehydrated -c -t dns-01 -k 'hooks/letsencrypt-cloudflare-hook/hook.py'
|
/* Always returns the integer constant 2. */
int test2(void) { return 2; }
|
import os
import OpenSSL.crypto
def validate_certificate(cert_file: str) -> bool:
    """Return True if *cert_file* holds a PEM X.509 certificate with a subject CN.

    The original parsed the certificate twice (once to check PEM validity and
    again to inspect it) and caught FileNotFoundError after an explicit
    existence check; a single parse covers both concerns.

    Args:
        cert_file: Path to the candidate certificate file.

    Returns:
        True when the file exists, parses as a PEM certificate, and its
        subject has a non-empty Common Name; False otherwise.
    """
    # Verify the file exists before trying to read it.
    if not os.path.exists(cert_file):
        return False
    try:
        with open(cert_file, 'rb') as f:
            cert_data = f.read()
        # One parse both validates the PEM framing and yields the cert object.
        cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_data)
    except (OpenSSL.crypto.Error, OSError):
        # OSError covers FileNotFoundError and read failures (e.g. races after
        # the existence check above).
        return False
    # A valid certificate for our purposes must carry a subject Common Name.
    return bool(cert.get_subject().CN)
// Re-export rx-js-light's reactive comparison operators under short
// `$$`-suffixed aliases (eq$$, neq$$, gt$$, gte$$, lt$$, lte$$).
export {
  reactiveEqual as eq$$,
  reactiveNotEqual as neq$$,
  reactiveGreaterThan as gt$$,
  reactiveGreaterThanOrEqual as gte$$,
  reactiveLowerThan as lt$$,
  reactiveLowerThanOrEqual as lte$$,
} from '@lifaon/rx-js-light';
|
<filename>api/modules/middleware/has-session.ts
import type { NextFunction, Request, Response } from 'express';
import AppError from '../../util/app-error';
import getDeviceID from '../../util/device-id';
import UserService from '../user/service';
/**
* Validates whether a user is authenticated or not (via sessions).
* Also performs a check as to whether the user's session ID exists in Redis.
*
* @param req - Express.js's request object.
* @param _ - Express.js's response object.
* @param next - Express.js's next function.
*/
const hasSession = async (req: Request, _: Response, next: NextFunction) => {
  const { userID } = req.session;

  // Validates whether the session exists or not.
  if (!userID) {
    next(new AppError('You are not logged in yet! Please log in first!', 401));
    return;
  }

  try {
    // Check in an unlikely scenario: a user has already deleted his account
    // but their session is still active.
    const user = await UserService.getUser({ userID });
    if (!user) {
      next(new AppError('User belonging to this session does not exist.', 400));
      return;
    }

    // Verifies if the user is not banned (isActive is true).
    if (!user.isActive) {
      next(new AppError('User is not active. Please contact the admin.', 403));
      return;
    }

    // Refresh session data to contain the new session information.
    req.session.lastActive = Date.now().toString();
    req.session.sessionInfo = getDeviceID(req);

    // Go to the next middleware.
    next();
  } catch (err) {
    // Express 4 does not catch rejected promises from async middleware; an
    // unexpected failure in getUser previously left the request hanging.
    // Forward it to the error-handling middleware instead.
    next(err);
  }
};

export default hasSession;
|
#!/bin/bash
# This script relies on bash-only features ([[ ]] tests and `read -p`), so the
# original `#!/bin/sh` shebang broke on systems where /bin/sh is dash.
echo "Make sure you have connected device with usb, you can exec <adb devices> to do it."
# Extract the last "[port]" value from the AndroidGodEye log line, if any.
port_running=`adb logcat -d | grep 'AndroidGodEye monitor is running at port' | tail -1 | cut -d '[' -f2|cut -d ']' -f1`
if [[ ! -n "$port_running" ]] ;then
    echo "Can not find which port AndroidGodEye monitor is running at."
    # Fall back to the default monitor port.
    port_running=5390
else
    echo "AndroidGodEye monitor is running at ${port_running}"
fi
# Let the user override the detected/default port.
read -p "Input monitor port, press 'Enter' for default ${port_running}: " MONITOR_PORT
if [[ ! -n "$MONITOR_PORT" ]] ;then
    MONITOR_PORT=${port_running}
fi
echo "Use port $MONITOR_PORT"
# Forward the device port to the host and open the dashboard in a browser.
adb forward tcp:${MONITOR_PORT} tcp:${MONITOR_PORT} && open "http://localhost:$MONITOR_PORT/index.html" && echo "If it is always loading, try close and open the browser tab again." && read -p "Press any key to continue..."
package com.shareyi.molicode.common.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
/**
* 系统命令调用工具
*/
public class SystemInvoker {

    /**
     * Executes a system command, fire-and-forget: the process output is not
     * read and the process is not waited on. No-op in headless mode.
     *
     * @param command the command line to run
     */
    public static void executeWithOutReturn(String command) {
        if (Profiles.instance.isHeadLess()) {
            return;
        }
        try {
            Runtime.getRuntime().exec(command);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Executes a command and returns its standard output, one line per "\n".
     * Blocks until the command's stdout is exhausted, so a command that never
     * terminates will hang the caller (as the original comment warned).
     * NOTE(review): stderr is not consumed; a command producing a lot of
     * stderr could block on a full pipe buffer — confirm acceptable.
     *
     * @param command the command line to run
     * @return the captured standard output
     * @throws IOException if the process cannot be started or read
     */
    public static String executeWithReturn(String command) throws IOException {
        if (Profiles.instance.isHeadLess()) {
            return "headless下,不允许执行本地命令!";
        }
        StringBuilder output = new StringBuilder();
        Process process = Runtime.getRuntime().exec(command);
        // try-with-resources: the original never closed the reader, leaking the
        // process's stdout stream on every call.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = br.readLine()) != null) {
                // append separately instead of concatenating a temp string
                output.append(line).append('\n');
            }
        }
        return output.toString();
    }
}
|
package io.opensphere.core.units.angle;
import org.junit.Test;
import org.junit.Assert;
/**
* Test for {@link DecimalDegrees}.
*/
public class DecimalDegreesTest
{
    /**
     * Test {@link DecimalDegrees#clone()}.
     */
    @Test
    public void testClone()
    {
        Coordinates ang = new DecimalDegrees(34.54512);
        Assert.assertEquals(ang.getMagnitude(), ang.clone().getMagnitude(), 0.);
    }

    /**
     * Test {@link Coordinates#compareTo(Coordinates)}.
     */
    @Test
    public void testCompareTo()
    {
        Coordinates ang = new DecimalDegrees(34.54512);
        Assert.assertEquals(0, ang.compareTo(new DecimalDegrees(34.54512)));
        Assert.assertTrue(ang.compareTo(new DecimalDegrees(34.54513)) < 0);
        Assert.assertTrue(ang.compareTo(new DecimalDegrees(34.54511)) > 0);
    }

    /**
     * Test {@link #equals(Object)}.
     */
    @Test // was missing: without @Test, JUnit 4 silently skipped this method
    public void testEquals()
    {
        Assert.assertTrue(new DecimalDegrees(34.54512).equals(new DecimalDegrees(34.54512)));
        Assert.assertFalse(new DecimalDegrees(34.54512).equals(new DecimalDegrees(34.545125)));
    }

    /**
     * Test {@link #hashCode()}.
     */
    @Test
    public void testHashCode()
    {
        Coordinates ang = new DecimalDegrees(34.54512);
        Assert.assertEquals(ang.hashCode(), new DecimalDegrees(34.54512).hashCode());
        Assert.assertFalse(ang.hashCode() == new DecimalDegrees(34.545125).hashCode());
    }

    /**
     * Test the label and toString methods.
     */
    @Test
    public void testLabels()
    {
        Coordinates ang = new DecimalDegrees(34.54512);
        Assert.assertTrue(ang.getLongLabel().length() > 0);
        Assert.assertTrue(ang.getShortLabel().length() > 0);
        Assert.assertTrue(ang.toString().length() > 0);
    }

    /**
     * Test {@link Coordinates#toShortLabelString()}.
     */
    @Test
    public void testToShortLabelString()
    {
        Assert.assertEquals("  34.545120\u00B0", new DecimalDegrees(34.54512).toShortLabelString());
        Assert.assertEquals(" 134.545120\u00B0", new DecimalDegrees(134.54512).toShortLabelString());
        Assert.assertEquals(" 134.545125\u00B0", new DecimalDegrees(134.545125).toShortLabelString());
        Assert.assertEquals(" 134.545126\u00B0", new DecimalDegrees(134.5451255).toShortLabelString());
        Assert.assertEquals("-34.545120\u00B0", new DecimalDegrees(-34.54512).toShortLabelString());
        Assert.assertEquals("-134.545120\u00B0", new DecimalDegrees(-134.54512).toShortLabelString());
        Assert.assertEquals(" -0.000000\u00B0", new DecimalDegrees(-0.0000004).toShortLabelString());
    }

    /**
     * Test {@link Coordinates#toShortLabelString(char, char)}.
     */
    @Test
    public void testToShortLabelStringCharChar()
    {
        Assert.assertEquals(" 34.545120\u00B0N", new DecimalDegrees(34.54512).toShortLabelString('N', 'S'));
        Assert.assertEquals(" 134.545120\u00B0N", new DecimalDegrees(134.54512).toShortLabelString('N', 'S'));
        Assert.assertEquals(" 134.545125\u00B0N", new DecimalDegrees(134.545125).toShortLabelString('N', 'S'));
        Assert.assertEquals(" 134.545126\u00B0N", new DecimalDegrees(134.5451255).toShortLabelString('N', 'S'));
        Assert.assertEquals(" 34.545120\u00B0S", new DecimalDegrees(-34.54512).toShortLabelString('N', 'S'));
        Assert.assertEquals(" 134.545120\u00B0S", new DecimalDegrees(-134.54512).toShortLabelString('N', 'S'));
        Assert.assertEquals(" 0.000000\u00B0S", new DecimalDegrees(-0.).toShortLabelString('N', 'S'));
    }
}
|
#
# Name of File : DVD_auto_copy.sh
# DVD Copy shell script
#
clear
echo "Knowledge is Power"
# Close the DVD tray and give the drive time to mount the disc.
eject -t cdrom
sleep 30
# Copy disc contents; $1 suffixes the destination DVD folder name.
# NOTE(review): "/<path to Save>/" is a placeholder — replace with a real path.
rsync -avzt --progress /media/cdrom/ /<path to Save>/DVD$1/
notify-send "Video Backup"
# fixed user-facing typo: "Compleated" -> "Completed"
echo "DVD Completed copy"
eject cdrom
exit 0
|
<filename>app/src/main/java/com/codernauti/sweetie/BaseActivity.java
package com.codernauti.sweetie;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.Toast;
import com.google.android.gms.auth.api.Auth;
import com.google.android.gms.auth.api.signin.GoogleSignInOptions;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.codernauti.sweetie.chat.MessagesMonitorService;
import com.codernauti.sweetie.couple.CoupleActivity;
import com.codernauti.sweetie.registration.RegisterActivity;
import com.codernauti.sweetie.utils.SharedPrefKeys;
import com.codernauti.sweetie.utils.Utility;
/**
* BaseActivity class for every activity of app, her responsibilities are:
* - Manage the login status
* - Manage the user relationship status
* if app in foreground onSharedPreferenceChange() is trigger
* if app in background onStart() check if couple_uid changed
*/
public class BaseActivity extends AppCompatActivity implements
        GoogleApiClient.OnConnectionFailedListener,
        SharedPreferences.OnSharedPreferenceChangeListener, GoogleApiClient.ConnectionCallbacks {
    private static final String BASE_TAG = "BaseActivity";

    // utils fields for the correct working of all activity
    // mUserUid: the logged-in user's uid from SharedPreferences.
    protected String mUserUid;
    // mCoupleUid: defaults to mUserUid when no couple is stored (see onCreate).
    protected String mCoupleUid;
    protected String mPartnerUid;

    private GoogleApiClient mGoogleApiClient;
    private FirebaseAuth mFirebaseAuth;
    // Created lazily in onStart() and only for non-RegisterActivity screens;
    // onPause() mirrors that condition when removing it.
    private FirebaseAuth.AuthStateListener mAuthListener;

    @Override
    protected void onCreate(Bundle savedInstanceState){
        super.onCreate(savedInstanceState);
        /* Setup the Google API object to allow Google logins */
        GoogleSignInOptions gso = new GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
                .requestIdToken(getString(R.string.server_client_ID))
                .requestEmail()
                .build();
        /* Setup the Google API object to allow Google+ logins */
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .enableAutoManage(this, this /* OnConnectionFailedListener */)
                .addConnectionCallbacks(this)
                .addApi(Auth.GOOGLE_SIGN_IN_API, gso)
                .addApi(LocationServices.API)
                .build();
        mFirebaseAuth = FirebaseAuth.getInstance();
        // Getting utils data for all activity
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        mUserUid = sp.getString(SharedPrefKeys.USER_UID, SharedPrefKeys.DEFAULT_VALUE);
        // Note: couple uid falls back to the user's own uid (single user).
        mCoupleUid = sp.getString(SharedPrefKeys.COUPLE_UID, mUserUid);
        mPartnerUid = sp.getString(SharedPrefKeys.PARTNER_UID, SharedPrefKeys.DEFAULT_VALUE);
    }

    @Override
    protected void onStart() {
        super.onStart();
        Log.d(BASE_TAG, "onStart()");
        // No stored uid means there is no valid login state: clear everything.
        if (mUserUid.equals(SharedPrefKeys.DEFAULT_VALUE)) {
            signOutAutomatically();
        }
        // The background service flags relationship changes while the app was
        // not in the foreground; react to them here.
        boolean coupleUidChanged = Utility.getBooleanPreference(this, SharedPrefKeys.USER_RELATIONSHIP_STATUS_CHANGED);
        if (coupleUidChanged) {
            checkUserRelationshipStatus();
        }
        final SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        sp.registerOnSharedPreferenceChangeListener(this);
        // RegisterActivity manages its own auth flow, so no listener there.
        if (!(this instanceof RegisterActivity)) {
            mAuthListener = new FirebaseAuth.AuthStateListener() {
                @Override
                public void onAuthStateChanged(@NonNull FirebaseAuth firebaseAuth) {
                    FirebaseUser user = firebaseAuth.getCurrentUser();
                    if (user == null) { // user sign out
                        stopServices();
                        signOutAutomatically();
                        takeUserToLoginScreenOnUnAuth();
                    }
                }
            };
            mFirebaseAuth.addAuthStateListener(mAuthListener);
        }
        if (!isConnected()) {
            Toast.makeText(this, "No connection detect", Toast.LENGTH_LONG).show();
        }
    }

    // Stops every background monitor service started by the app.
    private void stopServices() {
        stopService(new Intent(this, UserMonitorService.class));
        stopService(new Intent(this, MessagesMonitorService.class));
        stopService(new Intent(this, GeogiftMonitorService.class));
    }

    // Clears local state, signs out of Firebase and revokes Google access.
    private void signOutAutomatically() {
        Utility.clearSharedPreferences(BaseActivity.this);
        FirebaseAuth.getInstance().signOut();
        signOutFromGoogleAPIClient();
    }

    private void checkUserRelationshipStatus() {
        // Reset coupleUidChanged
        Utility.saveBooleanPreference(this, SharedPrefKeys.USER_RELATIONSHIP_STATUS_CHANGED, false);
        int userRelationshipStatus = Utility.getUserRelationshipStatus(this,
                SharedPrefKeys.USER_RELATIONSHIP_STATUS);
        String partnerUsername = Utility.getStringPreference(this, SharedPrefKeys.PARTNER_USERNAME);
        String partnerImageUri = Utility.getStringPreference(this, SharedPrefKeys.PARTNER_IMAGE_URI);
        mCoupleUid = Utility.getStringPreference(this, SharedPrefKeys.COUPLE_UID);
        Log.d(BASE_TAG, "onSharedPreferenceChanged() - USER_RELATIONSHIP_STATUS = " + userRelationshipStatus);
        // BREAK_SINGLE: partner left — drop partner data and notify the user.
        if (userRelationshipStatus == UserMonitorService.BREAK_SINGLE) {
            removePartnerPreferenceValues();
            startCoupleActivity(getString(R.string.break_up_notification) + partnerUsername, partnerImageUri, true);
        }
        // COUPLED: a new pairing completed — show the couple screen.
        else if (userRelationshipStatus == UserMonitorService.COUPLED) {
            startCoupleActivity(getString(R.string.couple_notification) + partnerUsername, partnerImageUri, false);
        }
    }

    // Removes every partner-related entry from SharedPreferences.
    private void removePartnerPreferenceValues() {
        Utility.removePreference(this, SharedPrefKeys.PARTNER_UID);
        Utility.removePreference(this, SharedPrefKeys.PARTNER_USERNAME);
        Utility.removePreference(this, SharedPrefKeys.FUTURE_PARTNER_PAIRING_REQUEST);
    }

    private void startCoupleActivity(String messageToShow, String uriImage, boolean coupleBreak) {
        // start activity and clean the Task (back stack of activity)
        Intent intent = new Intent(this, CoupleActivity.class);
        intent.putExtra(CoupleActivity.FIRST_MESSAGE_KEY, messageToShow);
        intent.putExtra(CoupleActivity.IMAGE_PARTNER_KEY, uriImage);
        intent.putExtra(CoupleActivity.BREAK_KEY, coupleBreak);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
        startActivity(intent);
        finish();
    }

    private void takeUserToLoginScreenOnUnAuth() {
        // TODO move user to MainActivity or RegisterActivity?
        Intent intent = new Intent(this, MainActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
        startActivity(intent);
        finish();
    }

    // True when the device has (or is acquiring) any network connection.
    private boolean isConnected() {
        ConnectivityManager cm =
                (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
        return activeNetwork != null && activeNetwork.isConnectedOrConnecting();
    }

    @Override
    protected void onPause() {
        super.onPause();
        Log.d(BASE_TAG, "onPause()");
        // Mirror of the registration condition in onStart().
        if (!(this instanceof RegisterActivity)) {
            mFirebaseAuth.removeAuthStateListener(mAuthListener);
        }
        // remove shared preferences listener
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        sp.unregisterOnSharedPreferenceChangeListener(this);
    }

    // Google API listener
    @Override
    public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
        Log.d(BASE_TAG, "onConnectionFailed:" + connectionResult);
        Toast.makeText(this, "Google Play Services error.", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onConnected(@Nullable Bundle bundle) {
        // nothing, for Dashboard
    }

    @Override
    public void onConnectionSuspended(int i) {
    }

    // Register
    public void signOutFromGoogleAPIClient() {
        if (mGoogleApiClient.isConnected()) {
            //Auth.GoogleSignInApi.signOut(mGoogleApiClient); generate nullpointerex if mGoogleApiClient is not ready
            Auth.GoogleSignInApi.revokeAccess(mGoogleApiClient);
        }
    }

    // com_sweetcompany_sweetie_preferences.xml listener
    @Override
    public void onSharedPreferenceChanged(SharedPreferences sp, String key) {
        if (key.equals(SharedPrefKeys.USER_RELATIONSHIP_STATUS)) {
            checkUserRelationshipStatus();
        } else if (key.equals(SharedPrefKeys.USER_UID)) {
            signOutAutomatically();
        }
    }

    // True when no partner is associated with the current user.
    public boolean userIsSingle() {
        return mCoupleUid.equals(mUserUid) || mCoupleUid.equals(SharedPrefKeys.DEFAULT_VALUE);
    }
}
|
#!/usr/bin/env bash
# Bootstrap a single-node Kubernetes control plane (v1.17.9) with Weave CNI.
set -e
# Block until the docker daemon reports active.
while [ `systemctl is-active docker` != 'active' ]; do echo 'waiting for docker'; sleep 5; done
kubeadm init --kubernetes-version=v1.17.9 > /root/kubeadm-init.out
# Install the admin kubeconfig for root so kubectl works below.
mkdir -p /root/.kube
cp -f /etc/kubernetes/admin.conf /root/.kube/config
chown $(id -u):$(id -g) /root/.kube/config
# Apply the Weave Net CNI manifest, then restart kubelet and wait for it.
kubectl apply -f /root/weave_v2-6-0.yml
systemctl restart kubelet
while [ `systemctl is-active kubelet` != 'active' ]; do echo 'waiting for kubelet'; sleep 5; done
|
<reponame>srepollock/deno-testing-playground
import { assertEquals } from "./test_deps.ts";
import { add } from "../src/add.ts";

// Each test needs a distinct, descriptive name; both originals were named
// "Add should pass", which made failing runs ambiguous in the report.
Deno.test("add(1, 2) returns 3", function (): void {
  assertEquals(add(1, 2), 3);
});

Deno.test("add(15, 15) returns 30", function (): void {
  assertEquals(add(15, 15), 30);
});
|
package main;
import java.awt.BorderLayout;
import java.awt.GridLayout;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JPanel;
/**
* @author <NAME>
*
*/
public class UsingTheGridLayout extends JFrame
{
    /**
     * Lays out two rows of three buttons: buttons 1-3 in the center region,
     * buttons 4-6 in the south region of a BorderLayout.
     */
    public UsingTheGridLayout()
    {
        setLayout(new BorderLayout());
        add(buildButtonRow(1, 3), BorderLayout.CENTER);
        add(buildButtonRow(4, 6), BorderLayout.SOUTH);
    }

    /** Builds a 1x3 grid panel of buttons labeled "Button first".."Button last". */
    private static JPanel buildButtonRow(int first, int last)
    {
        JPanel row = new JPanel(new GridLayout(1, 3, 5, 0));
        for (int label = first; label <= last; label++)
        {
            row.add(new JButton("Button " + label));
        }
        return row;
    }

    /** Creates the frame, centers it on screen, and shows it. */
    public static void main(String[] args)
    {
        UsingTheGridLayout window = new UsingTheGridLayout();
        window.setTitle("Exercise12_3");
        window.setSize(270, 90);
        window.setLocationRelativeTo(null);
        window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        window.setVisible(true);
    }
}
#!/bin/sh
# Run a recursive CTE against the hierarchy database: expand the edge table
# starting at /usr/lib/grub and write (level, parent, child) rows to q3.txt
# in CSV form. The heredoc body is fed verbatim to the sqlite3 shell.
DB=$HOME/data/hier/hier.db
sqlite3 $DB <<-EoF
.mode csv
.headers on
.output q3.txt
with recursive expand(level, from_id, to_id) AS (
select 1 as level, from_id, to_id
from edge as e
where from_id = '/usr/lib/grub'
union
select x.level+1, e.from_id, e.to_id
from expand x
inner join edge e
on e.from_id = x.to_id
)
select level, from_id as parent, to_id as child from expand
;
.quit
EoF
|
override func restoreUserActivityState(activity: NSUserActivity) {
    // Only react to activities originating from a Core Spotlight search result.
    // The identifier's trailing dot-separated component encodes the movie index.
    guard activity.activityType == CSSearchableItemActionType,
          let userInfo = activity.userInfo,
          let selectedMovieIdentifier = userInfo[CSSearchableItemActivityIdentifier] as? String,
          let selectedMovieIndex = Int(selectedMovieIdentifier.components(separatedBy: ".").last ?? "") else {
        return
    }
    // Show the details screen for the movie the user tapped in Spotlight.
    performSegue(withIdentifier: "idSegueShowMovieDetails", sender: selectedMovieIndex)
}
package com.mounacheikhna.decor;
import android.content.Context;
import android.content.ContextWrapper;
import android.view.LayoutInflater;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Created by cheikhna on 05/04/2015.
*/
public class DecorContextWrapper extends ContextWrapper {
    private DecorLayoutInflater mInflater;
    private List<Decorator> mDecorators;

    /**
     * @param base ContextBase to Wrap.
     */
    public DecorContextWrapper(Context base) {
        super(base);
        mDecorators = new ArrayList<>();
    }

    /**
     * wrap the context
     * @param base ContextBase to Wrap.
     * @return ContextWrapper to pass back to the activity.
     */
    public static DecorContextWrapper wrap(Context base) {
        return new DecorContextWrapper(base);
    }

    /**
     * Registers decorators to apply during view inflation.
     * Return type narrowed from ContextWrapper to DecorContextWrapper
     * (covariant, source-compatible) so calls can keep chaining
     * DecorContextWrapper-specific methods fluently.
     *
     * @param decorators decorators to add
     * @return this wrapper, for chaining
     */
    public DecorContextWrapper with(Decorator... decorators) {
        Collections.addAll(mDecorators, decorators);
        return this;
    }

    @Override
    public Object getSystemService(String name) {
        if(LAYOUT_INFLATER_SERVICE.equals(name)) {
            // Lazily create the decorating inflater on first request.
            if(mInflater == null) {
                mInflater = new DecorLayoutInflater(LayoutInflater.from(getBaseContext()), this, mDecorators, false);
            }
            return mInflater;
        }
        return super.getSystemService(name);
    }
}
|
# Millisecond timestamp: nanoseconds since epoch truncated to 13 digits.
time_stamp=$(date +%s%N | cut -b1-13)
# Snapshot systemd cgroup resource usage into a timestamped log file.
systemd-cgtop > $FAAS_ROOT'/logs/systemd-cgtop_'$time_stamp'.txt'
/**
 * Minimal request handler: replies with HTTP status 233 and a greeting body.
 * The request object is unused (prefixed with `_` by convention).
 */
function handler(_req, res) {
  const payload = { message: 'hello' };
  res.status(233).send(payload);
}

module.exports = handler;
def getMax(arr):
    """Return the largest element of a non-empty sequence.

    Args:
        arr: A non-empty sequence of comparable values.

    Returns:
        The maximum element.

    Raises:
        IndexError: If ``arr`` is empty (unchanged from the original).
    """
    # Named `largest` to avoid shadowing the built-in max().
    largest = arr[0]
    # Compare the remaining elements against the running maximum.
    for value in arr[1:]:
        if value > largest:
            largest = value
    return largest
# Demo: find and report the largest value of a sample array.
arr = [1, 5, 10, 55, 7]
# Get the largest element
max_element = getMax(arr)
# Print the maximum element
print("The largest element of the array is: "+str(max_element))
<filename>src/models/Usuario.model.ts
import { Column, Model, Table, Default, AllowNull, PrimaryKey, AutoIncrement, Unique } from "sequelize-typescript";
import { DataTypes } from "sequelize";
// Sequelize model for the "usuario" table (no createdAt/updatedAt columns).
@Table({ tableName: "usuario", timestamps: false })
export class Usuario extends Model<Usuario>{
    // Auto-incrementing primary key.
    @PrimaryKey
    @AutoIncrement
    @Column
    id_usuario: number;
    // NOTE(review): @Default('N/A') combined with @Unique means two rows left
    // at the default value would collide — confirm this is intended (applies
    // to nombre, dpi and correo below).
    @Default('N/A')
    @Unique
    @Column
    nombre: string;
    @Default('N/A')
    @Unique
    @Column
    dpi: string;
    @Default('N/A')
    @Unique
    @Column
    correo: string;
    // @Default('B\'0\'')
    @Column
    debaja: string;
    // Foreign key to the role table (no association declared here).
    @Column
    id_rol: number;
    @AllowNull(true)
    @Column
    direccion: string;
    @AllowNull(true)
    @Column
    telefono: string;
    @AllowNull(true)
    @Column
    foto: string;
    @AllowNull(true)
    @Unique
    @Column
    carne: string;
    @AllowNull(true)
    @Column
    carrera: string;
    @AllowNull(true)
    @Column
    firma: string;
    // VIRTUAL: held on the instance but never persisted to the table.
    @Column(DataTypes.VIRTUAL)
    password: any;
    @AllowNull(true)
    @Column
    img_seguridad: string;
}
<reponame>intxlog/React-ui-library
import React from 'react'
import { shallow } from 'enzyme'
import LoadingGif from '..'

// Snapshot test: shallow-render LoadingGif and compare against the stored
// snapshot to catch unintended markup changes.
describe('LoadingGif', () => {
  it('renders correctly', () => {
    const wrapper = shallow(<LoadingGif/>)
    expect(wrapper).toMatchSnapshot()
  })
})
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eo pipefail

## Get the directory of the build script
scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}"))
## cd to the parent directory, i.e. the root of the git repo
cd ${scriptDir}/..

# include common functions
source ${scriptDir}/common.sh

# Print out Maven & Java version
mvn -version
echo ${JOB_TYPE}

# attempt to install 3 times with exponential backoff (starting with 10 seconds)
retry_with_backoff 3 10 \
  mvn install -B -V -ntp \
    -DskipTests=true \
    -Dclirr.skip=true \
    -Denforcer.skip=true \
    -Dmaven.javadoc.skip=true \
    -Dgcloud.download.skip=true \
    -T 1C

# if GOOGLE_APPLICATION_CREDENTIALS is specified as a relative path, prepend Kokoro root directory onto it
if [[ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" && "${GOOGLE_APPLICATION_CREDENTIALS}" != /* ]]; then
  export GOOGLE_APPLICATION_CREDENTIALS=$(realpath ${KOKORO_GFILE_DIR}/${GOOGLE_APPLICATION_CREDENTIALS})
fi

# From here on, capture each step's exit code instead of aborting on failure,
# so log coercion and flakybot reporting below always run.
RETURN_CODE=0
set +e

# Dispatch on the Kokoro job type; each branch records its own exit code.
case ${JOB_TYPE} in
test)
  mvn test -B -Dclirr.skip=true -Denforcer.skip=true
  RETURN_CODE=$?
  ;;
lint)
  mvn com.coveo:fmt-maven-plugin:check
  RETURN_CODE=$?
  ;;
javadoc)
  mvn javadoc:javadoc javadoc:test-javadoc
  RETURN_CODE=$?
  ;;
integration)
  mvn -B ${INTEGRATION_TEST_ARGS} \
    -ntp \
    -Penable-integration-tests \
    -DtrimStackTrace=false \
    -Dclirr.skip=true \
    -Denforcer.skip=true \
    -fae \
    verify
  RETURN_CODE=$?
  ;;
graalvm)
  # Run Unit and Integration Tests with Native Image
  mvn -ntp -Pnative -Penable-integration-tests test
  RETURN_CODE=$?
  ;;
samples)
  SAMPLES_DIR=samples
  # only run ITs in snapshot/ on presubmit PRs. run ITs in all 3 samples/ subdirectories otherwise.
  if [[ ! -z ${KOKORO_GITHUB_PULL_REQUEST_NUMBER} ]]
  then
    SAMPLES_DIR=samples/snapshot
  fi
  if [[ -f ${SAMPLES_DIR}/pom.xml ]]
  then
    # Load sample-test secrets exported by Secret Manager, if present.
    for FILE in ${KOKORO_GFILE_DIR}/secret_manager/*-samples-secrets; do
      [[ -f "$FILE" ]] || continue
      source "$FILE"
    done
    pushd ${SAMPLES_DIR}
    mvn -B \
      -ntp \
      -DtrimStackTrace=false \
      -Dclirr.skip=true \
      -Denforcer.skip=true \
      -fae \
      verify
    RETURN_CODE=$?
    popd
  else
    echo "no sample pom.xml found - skipping sample tests"
  fi
  ;;
clirr)
  mvn -B -Denforcer.skip=true clirr:check
  RETURN_CODE=$?
  ;;
*)
  ;;
esac

if [ "${REPORT_COVERAGE}" == "true" ]
then
  bash ${KOKORO_GFILE_DIR}/codecov.sh
fi

# fix output location of logs
bash .kokoro/coerce_logs.sh

# Report flaky test results to flakybot when enabled for this job.
if [[ "${ENABLE_FLAKYBOT}" == "true" ]]
then
  chmod +x ${KOKORO_GFILE_DIR}/linux_amd64/flakybot
  ${KOKORO_GFILE_DIR}/linux_amd64/flakybot -repo=googleapis/java-dialogflow-cx
fi

# Propagate the recorded build-step status as the script's exit code.
echo "exiting with ${RETURN_CODE}"
exit ${RETURN_CODE}
|
#ifndef __ASMARM_ARCH_TIMER_H
#define __ASMARM_ARCH_TIMER_H
#include <asm/barrier.h>
#include <asm/errno.h>
#include <linux/clocksource.h>
#include <linux/init.h>
#include <linux/types.h>
#include <clocksource/arm_arch_timer.h>
#ifdef CONFIG_ARM_ARCH_TIMER
int arch_timer_arch_init(void);
/*
* These register accessors are marked inline so the compiler can
* nicely work out which register we want, and chuck away the rest of
* the code. At least it does so with a recent GCC (4.6.3).
*/
/*
 * Write an architected timer register through CP15. `access` selects the
 * physical (c14/c2) vs virtual (c14/c3) timer bank; the trailing isb()
 * ensures the write has taken effect before returning.
 */
static __always_inline
void arch_timer_reg_write_cp15(int access, enum arch_timer_reg reg, u32 val)
{
	if (access == ARCH_TIMER_PHYS_ACCESS) {
		switch (reg) {
		case ARCH_TIMER_REG_CTRL:
			asm volatile("mcr p15, 0, %0, c14, c2, 1" : : "r" (val));
			break;
		case ARCH_TIMER_REG_TVAL:
			asm volatile("mcr p15, 0, %0, c14, c2, 0" : : "r" (val));
			break;
		}
	} else if (access == ARCH_TIMER_VIRT_ACCESS) {
		switch (reg) {
		case ARCH_TIMER_REG_CTRL:
			asm volatile("mcr p15, 0, %0, c14, c3, 1" : : "r" (val));
			break;
		case ARCH_TIMER_REG_TVAL:
			asm volatile("mcr p15, 0, %0, c14, c3, 0" : : "r" (val));
			break;
		}
	}
	isb();
}
/*
 * Read an architected timer register through CP15; mirror of the write
 * accessor above. Returns 0 for an unrecognised access/reg combination.
 */
static __always_inline
u32 arch_timer_reg_read_cp15(int access, enum arch_timer_reg reg)
{
	u32 val = 0;
	if (access == ARCH_TIMER_PHYS_ACCESS) {
		switch (reg) {
		case ARCH_TIMER_REG_CTRL:
			asm volatile("mrc p15, 0, %0, c14, c2, 1" : "=r" (val));
			break;
		case ARCH_TIMER_REG_TVAL:
			asm volatile("mrc p15, 0, %0, c14, c2, 0" : "=r" (val));
			break;
		}
	} else if (access == ARCH_TIMER_VIRT_ACCESS) {
		switch (reg) {
		case ARCH_TIMER_REG_CTRL:
			asm volatile("mrc p15, 0, %0, c14, c3, 1" : "=r" (val));
			break;
		case ARCH_TIMER_REG_TVAL:
			asm volatile("mrc p15, 0, %0, c14, c3, 0" : "=r" (val));
			break;
		}
	}
	return val;
}
/*
 * Read the 64-bit compare value (CVAL) of the physical or virtual timer
 * via the 64-bit CP15 transfer; returns 0 for an unknown access kind.
 */
static __always_inline
u64 arch_timer_reg_read_cval(int access)
{
	u64 cval;
	if (access == ARCH_TIMER_PHYS_ACCESS)
		asm volatile("mrrc p15, 2, %Q0, %R0, c14" : "=r" (cval));
	else if (access == ARCH_TIMER_VIRT_ACCESS)
		asm volatile("mrrc p15, 3, %Q0, %R0, c14" : "=r" (cval));
	else
		cval = 0;
	return cval;
}
/* Read CNTFRQ, the architected counter frequency register. */
static inline u32 arch_timer_get_cntfrq(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (val));
	return val;
}
#ifdef CONFIG_ARCH_SUN50I
#define ARCH_PCNT_TRY_MAX_TIME (12)
#define ARCH_PCNT_MAX_DELTA (8)
/*
 * sun50i workaround: the counter can return glitched values, so read it
 * twice and accept only a monotonic pair with a small delta, retrying a
 * bounded number of times before giving up.
 */
static inline u64 arch_counter_get_cntpct(void)
{
	u64 pct0;
	u64 pct1;
	u64 delta;
	u32 retry = 0;
	/* sun50i counter reads may be imprecise;
	 * try to compensate by re-reading.
	 */
	while (retry < ARCH_PCNT_TRY_MAX_TIME) {
		isb();
		asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (pct0));
		isb();
		asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (pct1));
		delta = pct1 - pct0;
		if ((pct1 >= pct0) && (delta < ARCH_PCNT_MAX_DELTA)) {
			/* read valid pcnt */
			return pct1;
		}
		/* pcnt value error, try again */
		retry++;
	}
	/* Do not worry about this, just return the last read value.
	 * arm64 have enabled CONFIG_CLOCKSOURCE_VALIDATE_LAST_CYCLE.
	 */
	return pct1;
}
#else
/* Read CNTPCT, the 64-bit physical counter (non-sun50i path). */
static inline u64 arch_counter_get_cntpct(void)
{
	u64 cval;
	/* isb() orders the read against preceding instructions. */
	isb();
	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (cval));
	return cval;
}
#endif /* CONFIG_ARCH_SUN50I */
#ifdef CONFIG_ARCH_SUN50I
#define ARCH_VCNT_TRY_MAX_TIME (12)
#define ARCH_VCNT_MAX_DELTA (8)
/*
 * sun50i workaround for the virtual counter — same double-read-and-retry
 * scheme as the physical counter accessor above.
 */
static inline u64 arch_counter_get_cntvct(void)
{
	u64 vct0;
	u64 vct1;
	u64 delta;
	u32 retry = 0;
	/* sun50i vcnt maybe imprecise,
	 * we should try to fix this.
	 */
	while (retry < ARCH_VCNT_TRY_MAX_TIME) {
		isb();
		asm volatile("mrrc p15, 1, %Q0, %R0, c14" : "=r" (vct0));
		isb();
		asm volatile("mrrc p15, 1, %Q0, %R0, c14" : "=r" (vct1));
		delta = vct1 - vct0;
		if ((vct1 >= vct0) && (delta < ARCH_VCNT_MAX_DELTA)) {
			/* read valid vcnt */
			return vct1;
		}
		/* vcnt value error, try again */
		retry++;
	}
	/* Do not worry about this, just return the last time vcnt.
	 * arm64 have enabled CONFIG_CLOCKSOURCE_VALIDATE_LAST_CYCLE.
	 */
	return vct1;
}
#else
/* Read CNTVCT, the 64-bit virtual counter (non-sun50i path). */
static inline u64 arch_counter_get_cntvct(void)
{
	u64 cval;
	/* isb() orders the read against preceding instructions. */
	isb();
	asm volatile("mrrc p15, 1, %Q0, %R0, c14" : "=r" (cval));
	return cval;
}
#endif /* CONFIG_ARCH_SUN50I */
/* Read CNTKCTL, the kernel counter/timer access-control register. */
static inline u32 arch_timer_get_cntkctl(void)
{
	u32 cntkctl;
	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}
/* Write CNTKCTL, the kernel counter/timer access-control register. */
static inline void arch_timer_set_cntkctl(u32 cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}
/*
 * Lock userspace out of the counters/timers: clear every user-access
 * enable bit in CNTKCTL, plus the virtual event stream enable.
 */
static inline void __cpuinit arch_counter_set_user_access(void)
{
	u32 cntkctl = arch_timer_get_cntkctl();
	/* Disable user access to both physical/virtual counters/timers */
	/* Also disable virtual event stream */
	cntkctl &= ~(ARCH_TIMER_USR_PT_ACCESS_EN
			| ARCH_TIMER_USR_VT_ACCESS_EN
			| ARCH_TIMER_VIRT_EVT_EN
			| ARCH_TIMER_USR_VCT_ACCESS_EN
			| ARCH_TIMER_USR_PCT_ACCESS_EN);
	arch_timer_set_cntkctl(cntkctl);
}
/*
 * Enable the virtual event stream at the requested frequency divider and
 * advertise the capability via the HWCAP_EVTSTRM ELF hwcap bit.
 */
static inline void arch_timer_evtstrm_enable(int divider)
{
	u32 cntkctl = arch_timer_get_cntkctl();
	cntkctl &= ~ARCH_TIMER_EVT_TRIGGER_MASK;
	/* Set the divider and enable virtual event stream */
	cntkctl |= (divider << ARCH_TIMER_EVT_TRIGGER_SHIFT)
			| ARCH_TIMER_VIRT_EVT_EN;
	arch_timer_set_cntkctl(cntkctl);
	elf_hwcap |= HWCAP_EVTSTRM;
}
#endif
#endif
|
<reponame>ideacrew/pa_edidb
module Protocols
  # Lookup key pairing a model class with an action symbol.
  HandlerKey = Struct.new(:klass, :action)

  # Singleton registry mapping HandlerKey -> list of handler classes.
  class Registry
    include Singleton

    def initialize
      @handlers = {}
      register_handler(Person, :update, ::Protocols::Amqp::PersonUpdateHandler)
    end

    # Instantiates and returns the handlers registered for obj's class and
    # action. Raises KeyError when nothing is registered (same as before).
    def handlers_for(obj, action)
      key = HandlerKey.new(obj.class, action)
      fetch_handlers(key).map(&:new)
    end

    def fetch_handlers(handler_def)
      @handlers.fetch(handler_def)
    end

    def register_handler(cls, action, handler)
      register_handler_key(HandlerKey.new(cls, action), handler)
    end

    # Appends the handler to the key's list, creating the list on first use.
    def register_handler_key(handler_def, handler)
      (@handlers[handler_def] ||= []) << handler
    end

    def self.handlers_for(obj, action)
      instance.handlers_for(obj, action)
    end
  end

  # Fans an update notification out to every registered handler.
  class Notifier
    include Singleton

    def update_notification(obj, delta)
      # Handlers are resolved (and instantiated) before the empty check,
      # matching the original evaluation order.
      handlers = Registry.handlers_for(obj, :update)
      return if delta.empty?
      handlers.each { |handler| handler.handle_update(obj, delta) }
    end

    def self.update_notification(obj, delta)
      instance.update_notification(obj, delta)
    end
  end
end
-- Fetch the five most recent blog posts, newest first.
SELECT *
FROM blog.posts
ORDER BY date DESC
LIMIT 5;
<reponame>sakai-mirror/evaluation
/**
* Copyright 2005 Sakai Foundation Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.sakaiproject.evaluation.tool.producers;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.sakaiproject.authz.api.PermissionsHelper;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.util.ResourceLoader;
import uk.ac.cam.caret.sakai.rsf.helper.HelperViewParameters;
import uk.org.ponder.rsf.components.UICommand;
import uk.org.ponder.rsf.components.UIContainer;
import uk.org.ponder.rsf.components.UIOutput;
import uk.org.ponder.rsf.flow.jsfnav.NavigationCase;
import uk.org.ponder.rsf.flow.jsfnav.NavigationCaseReporter;
import uk.org.ponder.rsf.view.ComponentChecker;
import uk.org.ponder.rsf.view.ViewComponentProducer;
import uk.org.ponder.rsf.viewstate.SimpleViewParameters;
import uk.org.ponder.rsf.viewstate.ViewParameters;
import uk.org.ponder.rsf.viewstate.ViewParamsReporter;
/**
* AssignPermissionsProducer handles the server side operations to ready the permissions helper
*
* @author chasegawa
*/
public class AssignPermissionsProducer implements ViewComponentProducer, ViewParamsReporter, NavigationCaseReporter {
/**
* @see uk.org.ponder.rsf.view.ComponentProducer#fillComponents(uk.org.ponder.rsf.components.UIContainer,
* uk.org.ponder.rsf.viewstate.ViewParameters, uk.org.ponder.rsf.view.ComponentChecker)
*/
public void fillComponents(UIContainer tofill, ViewParameters viewparams, ComponentChecker checker) {
ToolSession session = sessionManager.getCurrentToolSession();
session.setAttribute(PermissionsHelper.TARGET_REF, site.getReference());
session.setAttribute(PermissionsHelper.PREFIX, "eval.");
ResourceLoader resourceLoader = new ResourceLoader("org.sakaiproject.evaluation.tool.bundle.permissions");
HashMap<String, String> permissionsDescriptions = new HashMap<String, String>();
for (Object key : resourceLoader.keySet()) {
permissionsDescriptions.put(key.toString(), (String) resourceLoader.get(key));
}
session.setAttribute("permissionDescriptions", permissionsDescriptions);
UIOutput.make(tofill, HelperViewParameters.HELPER_ID, "sakai.permissions.helper");
UICommand.make(tofill, HelperViewParameters.POST_HELPER_BINDING, "", null);
}
public static final String VIEW_ID = "assign_permissions";
public String getViewID() {
return VIEW_ID;
}
public ViewParameters getViewParameters() {
return new HelperViewParameters();
}
/**
* @see uk.org.ponder.rsf.flow.jsfnav.NavigationCaseReporter#reportNavigationCases()
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public List reportNavigationCases() {
ArrayList result = new ArrayList();
result.add(new NavigationCase(null, new SimpleViewParameters(SummaryProducer.VIEW_ID)));
return result;
}
private SessionManager sessionManager;
public void setSessionManager(SessionManager sessionManager) {
this.sessionManager = sessionManager;
}
private Site site;
public void setSite(Site site) {
this.site = site;
}
} |
<filename>packages/server/src/clientOptions.js<gh_stars>10-100
// Per-service API client configuration, driven by environment variables.

// Main API client: host taken from BASE_URL.
export const clientOptions = {
  apiHostname: process.env.BASE_URL
};

// OAuth client: host taken from OAUTH_URL.
export const oauthClientOptions = {
  apiHostname: process.env.OAUTH_URL
};
|
<gh_stars>0
(function () {
  'use strict';

  // Avaliacoes ("evaluations") controller: manages CRUD for an avaliacoe
  // record, its per-line day/severity entries, image uploads, and the
  // AACPD (area under the disease progress curve) calculation.
  angular
    .module('avaliacoes')
    .controller('AvaliacoesController', AvaliacoesController);

  AvaliacoesController.$inject = ['$scope', '$state', '$timeout', '$interval', '$window', 'Authentication', 'avaliacoeResolve', 'FileUploader', '$mdSidenav', '$mdDialog'];

  // BUGFIX: the parameter list must match $inject positionally. The original
  // signature omitted $mdSidenav, so the $mdDialog parameter silently
  // received the $mdSidenav service.
  function AvaliacoesController($scope, $state, $timeout, $interval, $window, Authentication, avaliacoe, FileUploader, $mdSidenav, $mdDialog) {
    var vm = this;

    vm.authentication = Authentication;
    vm.avaliacoe = avaliacoe;
    vm.error = null;
    vm.success = null;
    vm.form = {};
    vm.remove = remove;
    vm.save = save;
    vm.addLine = addLine;
    vm.rmvLine = rmvLine;
    vm.selectLine = selectLine;

    /* Control variables */
    // Block button: remove line
    vm.blockBtnRmvLine = false;
    vm.blockButtonRmv = blockButtonRmv;
    // Block button: add line
    vm.blockBtnAddLine = true;
    vm.blockButtonAddLine = blockButtonAddLine;
    // Block button: upload image
    vm.blockBtnUp = false;
    vm.blockButtonUp = blockButtonUp;
    // Block button: select image
    vm.blockBtnSlct = true;
    vm.blockButtonSlct = blockButtonSlct;

    vm.count = [0];
    vm.avaliacoe.control = -1;
    vm.lineSelected = null;
    vm.carregando = null; // "loading" flag shown while an upload is running
    vm.controle = null;
    vm.imageLoad = "./modules/avaliacoes/client/img/loader.gif";
    var dir = "./modules/avaliacoes/client/img/";
    var defaultImage = "./modules/avaliacoes/client/img/not-available.jpg";
    vm.iconMenu = "./teste.ico";
    vm.imageDefault = "./modules/avaliacoes/client/img/default";
    vm.imgDefault = [];

    // Initialise per-line arrays: a brand-new record has no severity data yet.
    if (vm.avaliacoe.severidade === undefined) {
      vm.avaliacoe.aacpd = 0;
      vm.avaliacoe.dia = [];
      vm.avaliacoe.severidade = [];
      vm.blockBtnAddLine = false;
    } else {
      if (vm.avaliacoe.severidade.length === 0) {
        vm.count = [0];
        vm.blockBtnAddLine = false;
      } else {
        // One counter entry per existing severity line.
        for (var i = 1; i < vm.avaliacoe.severidade.length; i++) {
          vm.count.push(i);
        }
        vm.avaliacoe.control = vm.count.length - 1;
      }
    }

    // Initial button state.
    vm.blockButtonRmv();

    // Remember which line the user is interacting with (upload target).
    function selectLine(line) {
      vm.lineSelected = line;
    }

    // Append a new (empty) line and refresh the button states.
    function addLine() {
      vm.count.push(vm.count.length);
      vm.blockButtonRmv();
      vm.blockButtonAddLine();
      // Called without an argument on purpose (clears the flag), matching
      // the original behavior.
      vm.blockButtonUp();
      vm.blockButtonSlct(true);
    }

    // Drop the last line (and its pending preview image, if any).
    function rmvLine() {
      vm.count.splice(-1, 1);
      vm.imgDefault.splice(-1, 1);
      vm.blockButtonRmv();
      vm.blockButtonAddLine();
    }

    function blockButtonUp(a) {
      vm.blockBtnUp = a;
    }

    // Disable "remove line" while every visible line already has a saved severity.
    function blockButtonRmv() {
      var ef = 0;
      for (var i = 0; i < vm.avaliacoe.severidade.length; i++) {
        if (vm.avaliacoe.severidade[i] !== null) {
          ef++;
        }
      }
      if (vm.count.length <= ef) {
        vm.blockBtnRmvLine = true;
      } else {
        vm.blockBtnRmvLine = false;
      }
    }

    // Disable "add line" until every current line has a severity value.
    function blockButtonAddLine() {
      var ef = 0;
      for (var i = 0; i < vm.avaliacoe.severidade.length; i++) {
        if (vm.avaliacoe.severidade[i] !== null) {
          ef++;
        }
      }
      if (vm.count.length === ef) {
        vm.blockBtnAddLine = true;
      } else {
        vm.blockBtnAddLine = false;
      }
    }

    function blockButtonSlct(a) {
      vm.blockBtnSlct = a;
    }

    // Remove the existing Avaliacoe after user confirmation.
    function remove() {
      if ($window.confirm('Are you sure you want to delete?')) {
        vm.avaliacoe.$remove($state.go('avaliacoes.list'));
      }
    }

    // Save the Avaliacoe (create or update).
    function save(isValid, image) {
      if (!isValid) {
        $scope.$broadcast('show-errors-check-validity', 'vm.form.avaliacoeForm');
        return false;
      }

      // AACPD: trapezoidal integration of severity over assessment days.
      if (vm.avaliacoe.severidade.length > 1) {
        var len = vm.avaliacoe.severidade.length;
        vm.avaliacoe.aacpd = 0;
        for (var i = 1; i < len; i++) {
          vm.avaliacoe.aacpd = vm.avaliacoe.aacpd +
            (((parseFloat(vm.avaliacoe.severidade[i])) + (parseFloat(vm.avaliacoe.severidade[i - 1]))) / 2) *
            ((vm.avaliacoe.dia[i]) - (vm.avaliacoe.dia[i - 1]));
        }
        // BUGFIX: the original called toFixed(2) and discarded the result
        // (a no-op); the rounded value is now actually stored.
        vm.avaliacoe.aacpd = parseFloat(vm.avaliacoe.aacpd.toFixed(2));
      }

      // Pad the image list with placeholders so it stays aligned with the
      // severity list when some lines were entered without an upload.
      if (!image && vm.avaliacoe.severidade !== undefined && vm.avaliacoe.imagem !== undefined) {
        var severity = vm.avaliacoe.severidade.length;
        var imagem = vm.avaliacoe.imagem.length;
        var dif = severity - imagem;
        if (dif > 0) {
          for (var c = 0; c < dif; c++) {
            vm.avaliacoe.imagem.push(vm.imageDefault);
          }
        }
      }

      // TODO: move create/update logic to service
      if (vm.avaliacoe._id) {
        vm.avaliacoe.$update(successCallbackU, errorCallback);
      } else {
        vm.avaliacoe.$save(successCallback, errorCallback);
      }

      // After creating, continue editing the new record.
      function successCallback(res) {
        $state.go('avaliacoes.edit', {
          avaliacoeId: res._id
        });
      }

      function errorCallback(res) {
        vm.error = res.data.message;
      }

      // After updating, show the record.
      function successCallbackU(res) {
        $state.go('avaliacoes.view', {
          avaliacoeId: res._id
        });
      }
    }

    $scope.successCallback = function (res) {
      // Intentionally empty: nothing to do after a successful image upload.
    };

    $scope.errorCallback = function (res) {
      $scope.error = res.data.message;
    };

    // File uploader instance for the per-line images.
    $scope.uploaderImage = new FileUploader({
      url: '/api/avaliacoes',
      alias: 'newPicture'
    });

    // Only accept common image formats.
    $scope.uploaderImage.filters.push({
      name: 'imageFilter',
      fn: function (item, options) {
        var type = '|' + item.type.slice(item.type.lastIndexOf('/') + 1) + '|';
        return '|jpg|png|jpeg|bmp|gif|'.indexOf(type) !== -1;
      }
    });

    // Preview the picked file on the selected line before uploading.
    $scope.uploaderImage.onAfterAddingFile = function (fileItem) {
      if ($window.FileReader) {
        var fileReader = new FileReader();
        fileReader.readAsDataURL(fileItem._file);
        fileReader.onload = function (fileReaderEvent) {
          $timeout(function () {
            vm.imgDefault[vm.lineSelected] = fileReaderEvent.target.result;
            vm.blockButtonUp(true);
            vm.blockButtonSlct(false);
            vm.controle = true;
          }, 0);
        };
      }
    };

    // Upload succeeded: record the server-side image path and the severity
    // value the server computed for the processed image.
    $scope.uploaderImage.onSuccessItem = function (fileItem, response, status, headers) {
      $scope.success = true;
      vm.carregando = false;
      // BUGFIX: the original used bitwise '&' here; '&&' is the intended
      // logical AND (same result for booleans, but short-circuiting and
      // not a typo waiting to bite).
      if ((vm.avaliacoe.imagem.length === 1) &&
          (vm.avaliacoe.imagem[0] === defaultImage) &&
          (vm.avaliacoe.severidade.length === 0)) {
        vm.avaliacoe.imagem[0] = dir + vm.avaliacoe.user._id + "/" + response[0];
        vm.avaliacoe.severidade[0] = response[1];
      } else {
        vm.avaliacoe.imagem.push(dir + vm.avaliacoe.user._id + "/" + response[0]);
        vm.avaliacoe.severidade.push(response[1]);
      }
      vm.blockButtonAddLine();
      // Clear messages
      $scope.success = vm.error = null;
      // Clear upload buttons
      $scope.cancelUpload();
    };

    // Upload failed: reset the buttons and surface the server error.
    $scope.uploaderImage.onErrorItem = function (fileItem, response, status, headers) {
      $scope.cancelUpload();
      $scope.error = response.message;
    };

    // Kick off the upload of everything in the queue.
    $scope.uploadProfilePicture = function () {
      vm.controle = null;
      vm.carregando = true;
      // Clear messages
      $scope.success = $scope.error = null;
      // Start upload
      $scope.uploaderImage.uploadAll();
    };

    // Cancel the upload process and clear the preview for the given line.
    $scope.cancelUpload = function (line) {
      vm.selectLine(line);
      vm.blockButtonUp(false);
      $scope.uploaderImage.clearQueue();
      vm.imgDefault[vm.lineSelected] = " ";
    };
  }
}());
|
<filename>lib/object_hash_rb/version.rb
# frozen_string_literal: true
# Top-level namespace for the object_hash_rb gem.
module ObjectHash
  # Gem version, bumped on release.
  VERSION = "0.1.3"
end
|
def count_vowels(string):
    """Return the number of vowel characters (a/e/i/o/u, either case) in *string*."""
    vowels = "aeiouAEIOU"
    return sum(1 for char in string if char in vowels)


print(count_vowels("Avengers: Endgame"))
# Output: 6
# Simulate 10 fair coin flips and tally the outcomes.
# One sample() call per iteration, as in the original, so the RNG draw
# sequence is unchanged.
trials <- 10
tails <- 0
heads <- 0
for (i in seq_len(trials)) {
  flip <- sample(c("heads", "tails"), 1)
  if (flip == "heads") {
    heads <- heads + 1
  } else {
    tails <- tails + 1
  }
}
print(paste("Heads:", heads))
print(paste("Tails:", tails))
<reponame>thschue/statistics-service
package api
import (
"encoding/json"
"github.com/gin-gonic/gin"
"github.com/keptn-sandbox/statistics-service/controller"
"github.com/keptn-sandbox/statistics-service/db"
"github.com/keptn-sandbox/statistics-service/operations"
keptn "github.com/keptn/go-utils/pkg/lib"
"net/http"
)
// GetStatistics godoc
// @Summary Get statistics
// @Description get statistics about Keptn installation
// @Tags Statistics
// @Security ApiKeyAuth
// @Accept json
// @Produce json
// @Param from query string false "From"
// @Param to query string false "To"
// @Success 200 {object} operations.Statistics "ok"
// @Failure 400 {object} operations.Error "Invalid payload"
// @Failure 500 {object} operations.Error "Internal error"
// @Router /statistics [get]
func GetStatistics(c *gin.Context) {
	logger := keptn.NewLogger("", "", "statistics-service")
	params := &operations.GetStatisticsParams{}
	// Bind the 'from'/'to' query parameters; reject malformed input.
	if err := c.ShouldBindQuery(params); err != nil {
		c.JSON(http.StatusBadRequest, operations.Error{
			ErrorCode: 400,
			Message:   "Invalid request format",
		})
		return
	}
	// 'from' must not be after 'to'.
	if !validateQueryTimestamps(params) {
		c.JSON(http.StatusBadRequest, operations.Error{
			ErrorCode: 400,
			Message:   "Invalid time frame: 'from' timestamp must not be greater than 'to' timestamp",
		})
		return
	}
	sb := controller.GetStatisticsBucketInstance()
	payload, err := getStatistics(params, sb)
	// An empty result set maps to 404; anything else is a 500.
	if err != nil && err == db.NoStatisticsFoundError {
		c.JSON(http.StatusNotFound, operations.Error{
			Message:   "no statistics found for selected time frame",
			ErrorCode: 404,
		})
		return
	} else if err != nil {
		logger.Error("could not retrieve statistics: " + err.Error())
		c.JSON(http.StatusInternalServerError, operations.Error{
			Message:   "Internal server error",
			ErrorCode: 500,
		})
		return
	}
	c.JSON(http.StatusOK, payload)
}
// getStatistics assembles the statistics for the requested time frame,
// combining the in-memory bucket with persisted data as needed.
// Data after the bucket's cutoff time lives only in memory; data before it
// has been persisted via the repository.
func getStatistics(params *operations.GetStatisticsParams, sb controller.StatisticsInterface) (operations.GetStatisticsResponse, error) {
	var mergedStatistics = operations.Statistics{}
	cutoffTime := sb.GetCutoffTime()
	// check time
	if params.From.After(cutoffTime) {
		// case 1: time frame within "in-memory" interval (e.g. last 30 minutes)
		// -> return in-memory object
		mergedStatistics = *sb.GetStatistics()
	} else {
		var statistics []operations.Statistics
		var err error
		if params.From.Before(cutoffTime) && params.To.Before(cutoffTime) {
			// case 2: time frame outside of "in-memory" interval
			// -> return results from database
			statistics, err = sb.GetRepo().GetStatistics(params.From, params.To)
			if err != nil && err == db.NoStatisticsFoundError {
				return operations.GetStatisticsResponse{}, err
			}
		} else if params.From.Before(cutoffTime) && params.To.After(cutoffTime) {
			// case 3: time frame includes "in-memory" interval
			// -> get results from database and from in-memory and merge them
			// NOTE(review): any database error here (other than an empty result)
			// is silently ignored and only in-memory data is returned — confirm
			// this best-effort behavior is intended.
			statistics, err = sb.GetRepo().GetStatistics(params.From, params.To)
			if statistics == nil {
				statistics = []operations.Statistics{}
			}
			statistics = append(statistics, *sb.GetStatistics())
		}
		mergedStatistics = operations.Statistics{
			From: params.From,
			To:   params.To,
		}
		mergedStatistics = operations.MergeStatistics(mergedStatistics, statistics)
	}
	return convertToGetStatisticsResponse(mergedStatistics)
}
// convertToGetStatisticsResponse round-trips the merged statistics through
// JSON to convert them into the response payload type.
func convertToGetStatisticsResponse(mergedStatistics operations.Statistics) (operations.GetStatisticsResponse, error) {
	var result operations.GetStatisticsResponse
	// BUGFIX: the marshal error was previously discarded with `_`.
	marshal, err := json.Marshal(mergedStatistics)
	if err != nil {
		return result, err
	}
	if err := json.Unmarshal(marshal, &result); err != nil {
		return result, err
	}
	return result, nil
}
// validateQueryTimestamps reports whether the requested time frame is
// well-formed, i.e. 'to' is not earlier than 'from'.
func validateQueryTimestamps(params *operations.GetStatisticsParams) bool {
	return !params.To.Before(params.From)
}
|
package com.oven.netty.study;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import com.oven.netty.study.echo.megpack.MessagePackServer;
import com.oven.netty.study.websocket.WebSocketServer;
@RunWith(SpringRunner.class)
@SpringBootTest
public class StudyServerApplicationTests {
    // Port shared by the TCP demo servers; the WebSocket demo uses 9999 instead.
    private final int port = 8080;
    /**
     * Start the Netty time server that does not handle TCP sticky/split packets.
     */
    @Test
    public void startNettyServer1() throws Exception {
        new com.oven.netty.study.time.demo1.TimeServer().bind(port);
    }
    /**
     * Start the Netty time server that demonstrates the sticky/split packet problem.
     */
    @Test
    public void startNettyServer2() throws Exception {
        new com.oven.netty.study.time.demo2.TimeServer().bind(port);
    }
    /**
     * Start the Netty server that solves sticky/split packets via LineBasedFrameDecoder.
     */
    @Test
    public void startNettyServer3() throws Exception {
        new com.oven.netty.study.time.demo3.TimeServer().bind(port);
    }
    /**
     * Start the Netty echo server that solves sticky/split packets via DelimiterBasedFrameDecoder.
     */
    @Test
    public void startNettyServer4() throws Exception {
        new com.oven.netty.study.echo.delimiter.EchoServer().bind(port);
    }
    /**
     * Start the Netty echo server that solves sticky/split packets via FixedLengthFrameDecoder.
     */
    @Test
    public void startNettyServer5() throws Exception {
        new com.oven.netty.study.echo.fixlength.EchoServer().bind(port);
    }
    /**
     * Start the MessagePack-codec based Netty echo server (sticky/split packets not handled).
     */
    @Test
    public void testMessagePackEchoServer() throws Exception {
        new MessagePackServer().bind(port);
    }
    /**
     * Start the WebSocket server on port 9999.
     */
    @Test
    public void startWebSocketServer() throws Exception {
        new WebSocketServer().run(9999);
    }
}
/*
Copyright 2020-2021 University of Oxford
and Health and Social Care Information Centre, also known as NHS Digital
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
*/
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { SharedModule } from '@mdm/modules/shared/shared.module';
import { MaterialModule } from '@mdm/modules/material/material.module';
import { MergeDiffContainerComponent } from '@mdm/merge-diff/merge-diff-container/merge-diff-container.component';
import { MergeItemSelectorComponent } from './merge-item-selector/merge-item-selector.component';
import { MergeComparisonComponent } from './merge-comparsion/merge-comparsion.component';
import { CatalogueModule } from '@mdm/modules/catalogue/catalogue.module';
import { MergeFilterPipe } from './pipes/merge-filter.pipe';
import { ConflictEditorModalComponent } from './conflict-editor/conflict-editor-modal/conflict-editor-modal.component';
import { StringConflictEditorComponent } from './conflict-editor/string-conflict-editor/string-conflict-editor.component';
import { NumberConflictEditorComponent } from './conflict-editor/number-conflict-editor/number-conflict-editor.component';
/**
 * Angular module bundling the merge/diff UI: the container view, item
 * selector, comparison panel, filter pipe and the conflict editor dialogs.
 * Only MergeDiffContainerComponent is exported for use by other modules.
 */
@NgModule({
  declarations: [
    MergeDiffContainerComponent,
    MergeItemSelectorComponent,
    MergeComparisonComponent,
    MergeFilterPipe,
    ConflictEditorModalComponent,
    StringConflictEditorComponent,
    NumberConflictEditorComponent
  ],
  imports: [
    CommonModule,
    SharedModule,
    MaterialModule,
    CatalogueModule
  ],
  exports: [
    MergeDiffContainerComponent
  ]
})
export class MergeDiffModule { }
|
#!/bin/bash
set -e

# Build and push the refinebio worker, foreman and API Docker images for the
# current CI tag, skipping any image that already exists on Docker Hub.

# Load docker_img_exists function and $CCDL_WORKER_IMAGES
source ~/refinebio/scripts/common.sh

# Github won't set the branch name for us, so do it ourselves.
branch=$(get_master_or_dev "$CI_TAG")

if [[ "$branch" == "master" ]]; then
    DOCKERHUB_REPO=ccdl
elif [[ "$branch" == "dev" ]]; then
    DOCKERHUB_REPO=ccdlstaging
else
    echo "Why in the world was update_docker_img.sh called from a branch other than dev or master?!?!?"
    exit 1
fi

echo "$CI_TAG" > ~/refinebio/common/version

# Create ~/refinebio/common/dist/data-refinery-common-*.tar.gz, which is
# required by the workers and data_refinery_foreman images.
## Remove old common distributions if they exist
rm -f ~/refinebio/common/dist/*
cd ~/refinebio/common && python3 setup.py sdist

# Log into DockerHub
docker login -u "$DOCKER_ID" -p "$DOCKER_PASSWD"

cd ~/refinebio

# build_and_push IMAGE_NAME DOCKERFILE [cleanup]
# Builds IMAGE_NAME:$CI_TAG from DOCKERFILE (unless that tag already exists
# on Docker Hub), pushes it, re-tags it as :latest and pushes that too.
# Pass "cleanup" as the third argument to remove the local tag afterwards.
# We use the CI_TAG as the system version.
build_and_push() {
    local image_name="$1"
    local dockerfile="$2"
    local cleanup="${3:-}"
    if docker_img_exists "$image_name" "$CI_TAG"; then
        echo "Docker image exists, skipping: $image_name:$CI_TAG"
        return 0
    fi
    echo "Building docker image: $image_name:$CI_TAG"
    docker build \
        -t "$image_name:$CI_TAG" \
        -f "$dockerfile" \
        --build-arg SYSTEM_VERSION="$CI_TAG" .
    docker push "$image_name:$CI_TAG"
    # Update latest version
    docker tag "$image_name:$CI_TAG" "$image_name:latest"
    docker push "$image_name:latest"
    if [[ "$cleanup" == "cleanup" ]]; then
        # Save some space when we're through
        docker rmi "$image_name:$CI_TAG"
    fi
}

# Worker images (local tags removed afterwards to save disk space).
for IMAGE in $CCDL_WORKER_IMAGES; do
    build_and_push "$DOCKERHUB_REPO/dr_$IMAGE" "workers/dockerfiles/Dockerfile.$IMAGE" cleanup
done

# Foreman image.
build_and_push "$DOCKERHUB_REPO/dr_foreman" foreman/dockerfiles/Dockerfile.foreman

# API image.
build_and_push "$DOCKERHUB_REPO/dr_api" api/dockerfiles/Dockerfile.api_production
|
class AromaticRingInteractionTest(unittest.TestCase):
    """Tests for fetching AromaticRingInteraction rows by ligand."""

    def fetch_all_by_ligand_id(self, ligand_id):
        """
        Fetches all ring interactions associated with a specific ligand ID from the database.

        Args:
            ligand_id: The ID of the ligand for which ring interactions need to be fetched.

        Returns:
            List of ring interactions associated with the specified ligand ID.
        """
        ring_interactions = models.AromaticRingInteraction.query.filter_by(ligand_id=ligand_id).all()
        return ring_interactions

    def test_fetch_all_by_ligand_id(self):
        """Fetch ring interactions by ligand_id"""
        # NOTE(review): 'expected' and 'actual' below run the exact same query,
        # so this assertion is tautological — it can only fail if the query
        # itself errors. Consider asserting against known fixture data instead.
        ligand = models.Ligand.query.filter_by(ligand_name='STI').first()
        expected_ring_interactions = self.fetch_all_by_ligand_id(ligand.ligand_id)
        actual_ring_interactions = models.AromaticRingInteraction.query.filter_by(ligand_id=ligand.ligand_id).all()
        self.assertEqual(expected_ring_interactions, actual_ring_interactions)
<gh_stars>100-1000
import { AnimatePresence } from 'framer-motion';
import { LookingGlass, Results } from '~/components';
import { useLGMethods } from '~/hooks';
import { Frame } from './frame';
// Top-level layout: shows the results view once the looking-glass form is
// ready, otherwise the (animated) form itself, inside the shared frame.
export const Layout: React.FC = () => {
  const { formReady } = useLGMethods();
  const content = formReady() ? (
    <Results />
  ) : (
    <AnimatePresence>
      <LookingGlass />
    </AnimatePresence>
  );
  return <Frame>{content}</Frame>;
};
|
// Navigation data for a documentation directory page: [label, target page, id].
// Appears to be Doxygen-generated output — verify before editing by hand.
var dir_f234204b1f126bf9b6cfe31d58f18cc7 =
[
    [ "reference", "dir_414f8b02beb3463029aafd015d1bf7a0.xhtml", "dir_414f8b02beb3463029aafd015d1bf7a0" ]
];
#!/bin/sh
##
## Copyright (c) 2014 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
## This file tests the libvpx vpx_temporal_svc_encoder example. To add new
## tests to this file, do the following:
## 1. Write a shell function (this is your test).
## 2. Add the function to vpx_tsvc_encoder_tests (on a new line).
##
. $(dirname $0)/tools_common.sh
# Environment check: $YUV_RAW_INPUT is required.
# Returns non-zero (failing the test run) when the raw YUV test clip is
# missing from LIBVPX_TEST_DATA_PATH.
vpx_tsvc_encoder_verify_environment() {
  if [ ! -e "${YUV_RAW_INPUT}" ]; then
    echo "Libvpx test data must exist in LIBVPX_TEST_DATA_PATH."
    return 1
  fi
}
# Runs vpx_temporal_svc_encoder using the codec specified by $1 and output file
# name by $2. Additional positional parameters are passed directly to
# vpx_temporal_svc_encoder.
vpx_tsvc_encoder() {
  local encoder="${LIBVPX_BIN_PATH}/vpx_temporal_svc_encoder"
  encoder="${encoder}${VPX_TEST_EXE_SUFFIX}"
  local codec="$1"
  local output_file_base="$2"
  local output_file="${VPX_TEST_OUTPUT_DIR}/${output_file_base}"
  # Fixed settings shared by every test: 1/1000 timebase, speed 6,
  # frame-drop threshold 30.
  local timebase_num="1"
  local timebase_den="1000"
  local speed="6"
  local frame_drop_thresh="30"
  shift 2
  # Fail quietly when the example binary was not built.
  [ -x "${encoder}" ] || return 1
  eval "${encoder}" "${YUV_RAW_INPUT}" "${output_file}" "${codec}" \
    "${YUV_RAW_INPUT_WIDTH}" "${YUV_RAW_INPUT_HEIGHT}" \
    "${timebase_num}" "${timebase_den}" "${speed}" "${frame_drop_thresh}" \
    "$@" \
    ${devnull}
}
# Confirms that vpx_temporal_svc_encoder produced one output file per stream.
# The encoder appends "_<stream>.ivf" to the base name it is given; $1 is
# that base name and $2 is the expected number of streams.
files_exist() {
  local base="${VPX_TEST_OUTPUT_DIR}/$1"
  local last_stream="$(($2 - 1))"
  local stream_num
  for stream_num in $(seq 0 ${last_stream}); do
    if [ ! -e "${base}_${stream_num}.ivf" ]; then
      return 1
    fi
  done
}
# Run vpx_temporal_svc_encoder in all supported modes for vp8 and vp9.
#
# All 24 per-mode tests share the same shape, so the common logic lives in
# tsvc_encoder_mode_test; each named wrapper only supplies its codec, the
# expected stream count, the mode number and the per-stream bitrates.
# The wrappers must remain individual named functions because run_tests and
# the output file names are keyed off ${FUNCNAME}.

# $1: codec (vp8 or vp9), $2: test name (also the output file base),
# $3: expected number of output streams; remaining args (mode number and
# per-stream bitrates) are forwarded to vpx_tsvc_encoder.
# Silently does nothing when the codec's encoder is not built in.
tsvc_encoder_mode_test() {
  local codec="$1"
  local test_name="$2"
  local stream_count="$3"
  shift 3
  if [ "$("${codec}_encode_available")" = "yes" ]; then
    vpx_tsvc_encoder "${codec}" "${test_name}" "$@" || return 1
    files_exist "${test_name}" "${stream_count}" || return 1
  fi
}

vpx_tsvc_encoder_vp8_mode_0() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 1 0 200
}

vpx_tsvc_encoder_vp8_mode_1() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 2 1 200 400
}

vpx_tsvc_encoder_vp8_mode_2() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 2 2 200 400
}

vpx_tsvc_encoder_vp8_mode_3() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 3 3 200 400 600
}

vpx_tsvc_encoder_vp8_mode_4() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 3 4 200 400 600
}

vpx_tsvc_encoder_vp8_mode_5() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 3 5 200 400 600
}

vpx_tsvc_encoder_vp8_mode_6() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 3 6 200 400 600
}

vpx_tsvc_encoder_vp8_mode_7() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 5 7 200 400 600 800 1000
}

vpx_tsvc_encoder_vp8_mode_8() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 2 8 200 400
}

vpx_tsvc_encoder_vp8_mode_9() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 3 9 200 400 600
}

vpx_tsvc_encoder_vp8_mode_10() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 3 10 200 400 600
}

vpx_tsvc_encoder_vp8_mode_11() {
  tsvc_encoder_mode_test vp8 "${FUNCNAME}" 3 11 200 400 600
}

vpx_tsvc_encoder_vp9_mode_0() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 1 0 200
}

vpx_tsvc_encoder_vp9_mode_1() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 2 1 200 400
}

vpx_tsvc_encoder_vp9_mode_2() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 2 2 200 400
}

vpx_tsvc_encoder_vp9_mode_3() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 3 3 200 400 600
}

vpx_tsvc_encoder_vp9_mode_4() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 3 4 200 400 600
}

vpx_tsvc_encoder_vp9_mode_5() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 3 5 200 400 600
}

vpx_tsvc_encoder_vp9_mode_6() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 3 6 200 400 600
}

vpx_tsvc_encoder_vp9_mode_7() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 5 7 200 400 600 800 1000
}

vpx_tsvc_encoder_vp9_mode_8() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 2 8 200 400
}

vpx_tsvc_encoder_vp9_mode_9() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 3 9 200 400 600
}

vpx_tsvc_encoder_vp9_mode_10() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 3 10 200 400 600
}

vpx_tsvc_encoder_vp9_mode_11() {
  tsvc_encoder_mode_test vp9 "${FUNCNAME}" 3 11 200 400 600
}

vpx_tsvc_encoder_tests="vpx_tsvc_encoder_vp8_mode_0
vpx_tsvc_encoder_vp8_mode_1
vpx_tsvc_encoder_vp8_mode_2
vpx_tsvc_encoder_vp8_mode_3
vpx_tsvc_encoder_vp8_mode_4
vpx_tsvc_encoder_vp8_mode_5
vpx_tsvc_encoder_vp8_mode_6
vpx_tsvc_encoder_vp8_mode_7
vpx_tsvc_encoder_vp8_mode_8
vpx_tsvc_encoder_vp8_mode_9
vpx_tsvc_encoder_vp8_mode_10
vpx_tsvc_encoder_vp8_mode_11
vpx_tsvc_encoder_vp9_mode_0
vpx_tsvc_encoder_vp9_mode_1
vpx_tsvc_encoder_vp9_mode_2
vpx_tsvc_encoder_vp9_mode_3
vpx_tsvc_encoder_vp9_mode_4
vpx_tsvc_encoder_vp9_mode_5
vpx_tsvc_encoder_vp9_mode_6
vpx_tsvc_encoder_vp9_mode_7
vpx_tsvc_encoder_vp9_mode_8
vpx_tsvc_encoder_vp9_mode_9
vpx_tsvc_encoder_vp9_mode_10
vpx_tsvc_encoder_vp9_mode_11"

run_tests vpx_tsvc_encoder_verify_environment "${vpx_tsvc_encoder_tests}"
|
<filename>src/__init__.py
from .utils.metric_logger import MetricLogger, SmoothedValue |
import { observer } from "mobx-react";
import React from "react";
import { injectFromApp } from "../../App";
import { IPicturesStore } from "../../stores/PicturesStore";
import { PicturesListPictureComponent } from "./PicturesListPictureComponent";
// Props injected from the application store provider.
export interface IPicturesListComponentProps {
    // Optional only because the decorator injects it; present at runtime.
    picturesStore?: IPicturesStore
}
/**
 * Renders the grid of pictures for every id in the injected pictures store.
 */
@injectFromApp<IPicturesListComponentProps>(stores => ({
    picturesStore: stores.picturesStore
}))
@observer
export class PicturesListComponent extends React.Component<IPicturesListComponentProps> {
    render() {
        const { pictureIds } = this.props.picturesStore!;
        return <div className="pure-g">
            {
                // BUGFIX: React requires a stable "key" on list children so
                // reconciliation can track each picture between renders.
                pictureIds.map(pictureId => <PicturesListPictureComponent key={ pictureId } pictureId={ pictureId } />)
            }
        </div>;
    }
}
#!/usr/bin/env bash

# Launch GPU inference (setup 18) for the fafb experiment inside the
# gunpowder Docker image, mounting the relevant project and data directories.

DOCKER_IMAGE="neptunes5thmoon/gunpowder:v0.3-pre6-dask1"
export CONTAINER_NAME=$(basename $PWD-prediction-18)
export USER_ID=${UID}
GUNPOWDER_PATH=$(readlink -f $HOME/Projects/mygunpowder/gunpowder)
SIMPLEFERENCE_PATH=$(readlink -f $HOME/Projects/simpleference)
PRED_PATH=$(readlink -f $HOME/Projects/simpleference/experiments/fafb)
Z_PATH=$(readlink -f $HOME/../papec/Work/my_projects/z5/bld27/python)

# Remove any stale container with the same name before starting.
# BUGFIX: the original referenced an undefined $NAME variable here, so the
# cleanup never matched anything.
nvidia-docker rm -f ${CONTAINER_NAME}

nvidia-docker \
    run --rm \
    --cgroup-parent=$(cat /proc/self/cpuset) \
    --name ${CONTAINER_NAME} \
    -u `id -u $USER`:`id -g $USER` \
    -v $(pwd):/workspace \
    -v /groups/saalfeld:/groups/saalfeld \
    -v /nrs/saalfeld/:/nrs/saalfeld \
    -w /workspace \
    ${DOCKER_IMAGE} \
    /bin/bash -c "export CUDA_VISIBLE_DEVICES=2; export TF_CUDNN_WORKSPACE_LIMIT_IN_MB=2000; export PYTHONPATH=${GUNPOWDER_PATH}:${SIMPLEFERENCE_PATH}:${Z_PATH}:\$PYTHONPATH; python -u ${PRED_PATH}/run_inference.py 18"
wait
|
<reponame>lananh265/social-network
"use strict";

// Icon descriptor for the Material "send to mobile" glyph (24x24 viewBox).
// This looks like transpiler-generated output (Babel-style CommonJS interop:
// defineProperty __esModule + `void 0` pre-declaration) — presumably from an
// icon-kit build step; edit the source icon, not this file, if possible.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_send_to_mobile = void 0;
// Consumers render "children" as an SVG element tree.
var ic_send_to_mobile = {
  "viewBox": "0 0 24 24",
  "children": [{
    // Invisible rect preserving the 24x24 canvas bounds.
    "name": "rect",
    "attribs": {
      "fill": "none",
      "height": "24",
      "width": "24"
    },
    "children": []
  }, {
    // The phone-with-arrow path data.
    "name": "path",
    "attribs": {
      "d": "M17,17h2v4c0,1.1-0.9,2-2,2H7c-1.1,0-2-0.9-2-2V3c0-1.1,0.9-1.99,2-1.99L17,1c1.1,0,2,0.9,2,2v4h-2V6H7v12h10V17z M22,12 l-4-4v3h-5v2h5v3L22,12z"
    },
    "children": []
  }]
};
exports.ic_send_to_mobile = ic_send_to_mobile;
import { MangaEntity } from 'src/manga/entities/manga.entity';
import { UserEntity } from 'src/user/entities/user.entity';
import {
  Entity,
  Column,
  PrimaryGeneratedColumn,
  CreateDateColumn,
  UpdateDateColumn,
  ManyToOne,
} from 'typeorm';

// A user's comment on a manga title, persisted in table "comment-mangas".
@Entity({ name: 'comment-mangas' })
export class CommentMangaEntity {
  // Auto-generated surrogate primary key.
  @PrimaryGeneratedColumn()
  id: number;

  // Comment body, capped at 500 characters at the column level.
  @Column({ length: 500 })
  commentText: string;

  // Whether the comment reveals plot details (callers can blur/hide it).
  @Column()
  spoiler: boolean;

  // Like counter, starts at 0 for new comments.
  @Column({ default: 0 })
  countLikes: number;

  // Author of the comment; eager-loaded with every query of this entity.
  @ManyToOne(() => UserEntity, { eager: true })
  user: UserEntity;

  // Manga the comment belongs to; also eager-loaded.
  @ManyToOne(() => MangaEntity, { eager: true })
  manga: MangaEntity;

  // Set once by TypeORM on insert.
  @CreateDateColumn({ type: 'timestamp' })
  createdAt: Date;

  // Refreshed by TypeORM on every update.
  @UpdateDateColumn({ type: 'timestamp' })
  updatedAt: Date;
}
|
#!/bin/bash
#
#/**
# * Copyright The Apache Software Foundation
# *
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# Fail fast on errors, unset variables, and broken pipelines.
set -e -u -o pipefail

# Script file name (for usage text) and its absolute directory.
SCRIPT_NAME=${0##*/}
SCRIPT_DIR=$(cd `dirname $0` && pwd )
# Print the usage text to stderr and exit with failure status.
print_usage() {
  cat >&2 <<EOT
Usage: $SCRIPT_NAME <options>
Options:
  --kill
    Kill local process-based HBase cluster using pid files.
  --show
    Show HBase processes running on this machine
EOT
  exit 1
}
show_processes() {
  # List HBase daemon processes on this machine. The [H] bracket idiom keeps
  # this grep's own command line from matching, so the extra
  # "grep -v grep" filtering stage is unnecessary.
  ps -ef | grep -P "([H]RegionServer|[H]Master|[H]QuorumPeer) start"
}
cmd_specified() {
  # Reject a second command flag: --kill and --show are mutually exclusive.
  if [ -n "$CMD_SPECIFIED" ]; then
    echo "Only one command can be specified" >&2
    exit 1
  fi
  CMD_SPECIFIED=1
}
list_pid_files() {
  # Print the .pid files written by the process-based local cluster.
  # Paths are quoted so a checkout path containing spaces still works
  # (the original left them unquoted and would word-split).
  LOCAL_CLUSTER_DIR="$SCRIPT_DIR/../../target/local_cluster"
  LOCAL_CLUSTER_DIR=$( cd "$LOCAL_CLUSTER_DIR" && pwd )
  find "$LOCAL_CLUSTER_DIR" -name "*.pid"
}
# ---- Option parsing and command dispatch ------------------------------------
if [ $# -eq 0 ]; then
  print_usage
fi

IS_KILL=""
IS_SHOW=""
CMD_SPECIFIED=""

while [ $# -ne 0 ]; do
  case "$1" in
    -h|--help)
      print_usage ;;
    --kill)
      IS_KILL=1
      cmd_specified ;;
    --show)
      IS_SHOW=1
      cmd_specified ;;
    *)
      echo "Invalid option: $1" >&2
      exit 1
  esac
  shift
done

if [ "$IS_KILL" ]; then
  # Kill every process whose pid was recorded by the local cluster.
  list_pid_files | \
    while read F; do
      PID=`cat "$F"`
      echo "Killing pid $PID from file $F"
      # Kill may fail but that's OK, so turn off error handling for a moment.
      set +e
      kill -9 $PID
      set -e
    done
elif [ "$IS_SHOW" ]; then
  # Collect all recorded pids into a comma-separated list for ps -p.
  PIDS=""
  for F in `list_pid_files`; do
    PID=`cat "$F"`
    if [ -n "$PID" ]; then
      if [ -n "$PIDS" ]; then
        PIDS="$PIDS,"
      fi
      PIDS="$PIDS$PID"
    fi
  done
  # Bug fix: the original ran "ps -p $PIDS" unconditionally; with no pid
  # files $PIDS is empty and ps fails with a usage error, which set -e
  # turns into an abort with a confusing message.
  if [ -n "$PIDS" ]; then
    ps -p $PIDS
  else
    echo "No local HBase cluster pid files found" >&2
  fi
else
  echo "No command specified" >&2
  exit 1
fi
|
#!/bin/bash
# Copyright 2019 Proyectos y Sistemas de Mantenimiento SL (eProsima).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Print the help text and exit with the status code given in ${1}.
# Fixes two typos in the user-facing text: "Scritp" -> "Script" and
# "dabatase" -> "database".
print_usage()
{
    echo "------------------------------------------------------------------------"
    echo "Script to remove old results from a performance test database."
    echo "------------------------------------------------------------------------"
    echo "REQUIRED ARGUMENTS:"
    echo "   -r [directory]    The directory for the experiments' results"
    echo ""
    echo "OPTIONAL ARGUMENTS:"
    echo "   -h                Print help"
    echo "   -n [number]       The number of old builds to keep [Defaults: 10]"
    echo "   -s [directories]  Colon-separated list of directory results to keep no matter how old"
    echo ""
    echo "EXAMPLE: bash remove_old_executions.bash \\"
    echo "             -r <path_to_results> \\"
    echo "             -n 5 \\"
    echo "             -s <path_1>:<path_2>"
    echo ""
    echo "Note 1: If a directory specified with '-s' is older than the '-n' newer ones, then only"
    echo "        '-n'-1 newer directories are kept. This means that if '-n' is equal to the"
    echo "        number of directories in '-s', and those are all older than the '-n' newer ones,"
    echo "        then only the directories in '-s' are kept."
    echo ""
    echo "Note 2: If the number of directories specified with '-s' is larger than the one specified"
    echo "        with -n, then no directories are deleted."
    exit ${1}
}
# Parse CLI flags into globals consumed by main():
#   EXPERIMENTS_RESULTS_DIR (-r, required), MUST_KEEP (-n, default 10),
#   SKIP (-s, colon-separated list turned into a bash array).
# Exits via print_usage on any validation failure.
parse_options()
{
    # No arguments at all cannot satisfy the mandatory -r flag.
    if (($# == 0))
    then
        print_usage 1
    fi
    RUN_DIR=$(pwd)
    EXPERIMENTS_RESULTS_DIR=""
    MUST_KEEP=10
    SKIP=""
    while getopts ':r:n:s:h' flag
    do
        case "${flag}" in
            # Mandatory args
            r ) EXPERIMENTS_RESULTS_DIR=${OPTARG};;
            # Optional args
            h ) print_usage 0;;
            n ) MUST_KEEP=${OPTARG};;
            s ) SKIP=${OPTARG};;
            # Wrong args
            \?) echo "Unknown option: -$OPTARG" >&2; print_usage 1;;
            : ) echo "Missing option argument for -$OPTARG" >&2; print_usage 1;;
            * ) echo "Unimplemented option: -$OPTARG" >&2; print_usage 1;;
        esac
    done
    # Split the colon-separated -s value into the global SKIP array.
    IFS=':' read -r -a SKIP <<< "${SKIP}"
    # Every skip entry must name an existing directory.
    for DIRECTORY in ${SKIP[@]}
    do
        if [[ ! -d ${DIRECTORY} ]]
        then
            echo "Path ${DIRECTORY} does not specify a directory"
            print_usage 1
        fi
    done
    if [[ ${EXPERIMENTS_RESULTS_DIR} == "" ]]
    then
        echo "No experiments results directory provided"
        print_usage 1
    fi
    # NOTE(review): this check accepts 0 even though the message says
    # "positive"; presumably non-negative is intended — confirm.
    if [ "${MUST_KEEP}" -lt "0" ]
    then
        echo "-n must specify a positive number"
        print_usage 1
    fi
}
# Echo the absolute, symlink-resolved path of the directory named by ${1},
# restoring the caller's working directory afterwards.
# Arguments are now quoted so paths containing spaces don't word-split
# (the original used bare ${1}/${CURRENT}).
full_path ()
{
    local CURRENT=$(pwd)
    cd -P "${1}"
    local FULL_PATH=$(pwd)
    echo "${FULL_PATH}"
    cd "${CURRENT}"
}
# Decide which execution directories to keep (the MUST_KEEP newest plus the
# explicit SKIP list) and delete the rest from EXPERIMENTS_RESULTS_DIR.
main ()
{
    parse_options ${@}
    # Full path of EXPERIMENTS_RESULTS_DIR
    EXPERIMENTS_RESULTS_DIR=$(full_path ${EXPERIMENTS_RESULTS_DIR})
    # Add the skip list to the results to keep (basenames only, since the
    # executions below are listed relative to EXPERIMENTS_RESULTS_DIR).
    KEPT_RESULTS=()
    for ELEMENT in ${SKIP[@]}
    do
        IFS='/' read -r -a ELEMENT <<< "${ELEMENT}"
        KEPT_RESULTS+=(${ELEMENT[-1]})
    done
    # Get all executions, oldest first (ls -tr).
    EXECUTIONS=($(ls -tr ${EXPERIMENTS_RESULTS_DIR}))
    # If enough executions in list, then check if need to delete some
    if [[ ${#EXECUTIONS[@]} -gt ${#KEPT_RESULTS[@]} ]]
    then
        # How many builds to keep besides the skip list.
        MUST_KEEP=$((${MUST_KEEP}-${#KEPT_RESULTS[@]}))
        # Check if can keep more than the skip list
        if [ ${MUST_KEEP} -gt 0 ]
        then
            # Skip directories in ${SKIP} when listing
            SKIP_ARGS=""
            for ELEMENT in ${SKIP[@]}
            do
                IFS='/' read -r -a ELEMENT <<< "${ELEMENT}"
                SKIP_ARGS="${SKIP_ARGS} -I ${ELEMENT[-1]}"
            done
            # Get a list of executions without the skipped ones
            FILTERED_BUILD=($(ls -tr ${EXPERIMENTS_RESULTS_DIR} ${SKIP_ARGS}))
            # Keep the newest MUST_KEEP entries. The list is oldest-first, so
            # walk it backwards. Bug fix: the original loop started at
            # i=${#FILTERED_BUILD[@]} — one past the last valid index — and
            # only produced the right count because the empty expansion of the
            # out-of-range element added nothing while still decrementing the
            # counter; that also breaks outright under `set -u`.
            for (( i=${#FILTERED_BUILD[@]}-1; i>=0; i-- ))
            do
                if [ ${MUST_KEEP} -le 0 ]
                then
                    break
                fi
                KEPT_RESULTS+=(${FILTERED_BUILD[${i}]})
                MUST_KEEP=$((${MUST_KEEP}-1))
            done
        fi
        # Add to REMOVED list the executions that are not to be kept
        REMOVED=()
        for EXECUTION in ${EXECUTIONS[@]}
        do
            # Check if EXECUTION is in the KEPT_RESULTS list
            KEEP_EXECUTION="FALSE"
            for KEPT in ${KEPT_RESULTS[@]}
            do
                if [ "${EXECUTION}" == "${KEPT}" ]
                then
                    # EXECUTION is in the KEPT_RESULTS list
                    KEEP_EXECUTION="TRUE"
                    break
                fi
            done
            # ADD EXECUTION to REMOVED if it is not in the KEPT_RESULTS list
            if [[ ${KEEP_EXECUTION} == "FALSE" ]]
            then
                REMOVED+=(${EXECUTION})
            fi
        done
        # Pretty output for user
        echo "${#EXECUTIONS[@]} executions detected in database. Keeping ${#KEPT_RESULTS[@]}, removing ${#REMOVED[@]}"
        echo "-------------------------------------------------------------------"
        for EXECUTION in ${KEPT_RESULTS[@]}
        do
            echo "Keeping ${EXECUTION}"
        done
        echo "-------------------------------------------------------------------"
        for EXECUTION in ${REMOVED[@]}
        do
            echo "Removing ${EXECUTION}"
            # Actual removal
            rm -rf ${EXPERIMENTS_RESULTS_DIR}/${EXECUTION}
        done
    fi
}
# Entry point: forward all command-line arguments to main.
main ${@}
|
<filename>jhiRoot/plantsMS/src/main/java/fr/syncrase/ecosyst/service/impl/RessemblanceServiceImpl.java
package fr.syncrase.ecosyst.service.impl;
import fr.syncrase.ecosyst.domain.Ressemblance;
import fr.syncrase.ecosyst.repository.RessemblanceRepository;
import fr.syncrase.ecosyst.service.RessemblanceService;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * Service Implementation for managing {@link Ressemblance}.
 *
 * <p>Thin delegation layer over {@link RessemblanceRepository}. All methods
 * run in a transaction (class-level {@code @Transactional}); pure queries are
 * additionally marked {@code readOnly}.
 */
@Service
@Transactional
public class RessemblanceServiceImpl implements RessemblanceService {
    private final Logger log = LoggerFactory.getLogger(RessemblanceServiceImpl.class);
    private final RessemblanceRepository ressemblanceRepository;
    public RessemblanceServiceImpl(RessemblanceRepository ressemblanceRepository) {
        this.ressemblanceRepository = ressemblanceRepository;
    }
    /**
     * Save (insert or update) a ressemblance.
     *
     * @param ressemblance the entity to persist.
     * @return the persisted entity.
     */
    @Override
    public Ressemblance save(Ressemblance ressemblance) {
        log.debug("Request to save Ressemblance : {}", ressemblance);
        return ressemblanceRepository.save(ressemblance);
    }
    /**
     * Partially update a ressemblance: only non-null fields of the given
     * entity (here, just {@code description}) overwrite the stored one.
     *
     * @param ressemblance carries the id and the fields to update.
     * @return the updated entity, or empty if no entity with that id exists.
     */
    @Override
    public Optional<Ressemblance> partialUpdate(Ressemblance ressemblance) {
        log.debug("Request to partially update Ressemblance : {}", ressemblance);
        return ressemblanceRepository
            .findById(ressemblance.getId())
            .map(existingRessemblance -> {
                if (ressemblance.getDescription() != null) {
                    existingRessemblance.setDescription(ressemblance.getDescription());
                }
                return existingRessemblance;
            })
            .map(ressemblanceRepository::save);
    }
    /**
     * Fetch one page of ressemblances.
     *
     * @param pageable page index, size and sort.
     * @return the requested page.
     */
    @Override
    @Transactional(readOnly = true)
    public Page<Ressemblance> findAll(Pageable pageable) {
        log.debug("Request to get all Ressemblances");
        return ressemblanceRepository.findAll(pageable);
    }
    /**
     * Fetch a single ressemblance by id.
     *
     * @param id the entity id.
     * @return the entity, or empty if not found.
     */
    @Override
    @Transactional(readOnly = true)
    public Optional<Ressemblance> findOne(Long id) {
        log.debug("Request to get Ressemblance : {}", id);
        return ressemblanceRepository.findById(id);
    }
    /**
     * Delete the ressemblance with the given id.
     *
     * @param id the entity id.
     */
    @Override
    public void delete(Long id) {
        log.debug("Request to delete Ressemblance : {}", id);
        ressemblanceRepository.deleteById(id);
    }
}
|
#!/bin/bash
# BitBar plugin: switch Spotify to the next track in one click.
#
# by Aleksei Sotnikov (asotnikov.100@gmail.com)
# metadata
# <bitbar.title>Spotify "Next track" button</bitbar.title>
# <bitbar.version>v1.0</bitbar.version>
# <bitbar.author>Aleksei Sotnikov</bitbar.author>
# <bitbar.author.github>alekseysotnikov</bitbar.author.github>
# <bitbar.desc>Switch Spotify to the next track in one click. Or you can easily reconfigure it for switching to the previous track.</bitbar.desc>
# <bitbar.image>https://i.imgur.com/523Eszv.png</bitbar.image>

# When invoked with a track command (BitBar passes it as $1), forward it to
# Spotify via AppleScript and stop; otherwise fall through to print the
# menu-bar button definition.
if [ "$1" = 'previous track' ] || [ "$1" = 'next track' ]; then
  osascript -e "tell application \"Spotify\" to $1"
  exit
fi

echo "▶▶ | bash='$0' param1='next track' terminal=false refresh=false"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.