text
stringlengths 1
1.05M
|
|---|
import React from 'react';
import { Table, TableHeader, TableBody, TableProps } from '@patternfly/react-table';
import {
Bullseye,
EmptyState,
EmptyStateVariant,
EmptyStateIcon,
Title,
EmptyStateBody,
Button
} from '@patternfly/react-core';
import SearchIcon from '@patternfly/react-icons/dist/esm/icons/search-icon';
// This example has been simplified to focus on the empty state. In real usage,
// you may want to derive your rows from typed underlying data and minimal state. See other examples.
export const LegacyTableEmptyState: React.FunctionComponent = () => {
const columns: TableProps['cells'] = ['Repositories', 'Branches', 'Pull requests', 'Workspaces', 'Last commit'];
const rows: TableProps['rows'] = [
{
heightAuto: true,
cells: [
{
props: { colSpan: 8 },
title: (
<Bullseye>
<EmptyState variant={EmptyStateVariant.small}>
<EmptyStateIcon icon={SearchIcon} />
<Title headingLevel="h2" size="lg">
No results found
</Title>
<EmptyStateBody>Clear all filters and try again.</EmptyStateBody>
<Button variant="link">Clear all filters</Button>
</EmptyState>
</Bullseye>
)
}
]
}
];
return (
<Table aria-label="Table text with modifiers" cells={columns} rows={rows}>
<TableHeader />
<TableBody />
</Table>
);
};
|
import React from 'react';
import PropTypes from 'prop-types';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSpinner } from '@fortawesome/free-solid-svg-icons';
const propTypes = {
size: PropTypes.string,
};
const Loading = ({ size } = {}) => <FontAwesomeIcon icon={faSpinner} size={size} spin />;
Loading.propTypes = propTypes;
export default Loading;
|
import { withStyles, TableRow } from '@material-ui/core'
// On narrow (mobile) viewports, lay the table row out as a two-column grid
// instead of the default table-row display.
const mobileRowStyles = {
  '@media (max-width: 500px)': {
    root: {
      display: 'grid',
      gridTemplate: 'auto / 1fr 2fr',
    },
  },
};

const TableRowMobile = withStyles(mobileRowStyles)(TableRow);

export default TableRowMobile;
|
<filename>tools/src/test/scala/com/threerings/nexus/gencode/FactoryGenTest.scala
//
// Nexus Tools - code generators for Nexus framework
// http://github.com/threerings/nexus/blob/master/LICENSE
package com.threerings.nexus.gencode
import javax.annotation.processing.SupportedAnnotationTypes
import javax.lang.model.element.TypeElement
import org.junit.Assert._
import org.junit.Test
/**
* Tests the factory generator.
*/
/**
 * Tests the factory generator: name mangling, source generation, and
 * service metadata extraction.
 */
class FactoryGenTest {

  /** Factory class names prefix the simple class name with "Factory_". */
  @Test def testFactoryName: Unit = {
    assertEquals("Factory_Foo", Generator.factoryName("Foo"))
    assertEquals("foo.bar.Factory_Foo", Generator.factoryName("foo.bar.Foo"))
  }

  /** Generating factory source for a service interface should not blow up. */
  @Test def testService: Unit = {
    val source = FactoryTestCompiler.genSource("TestService.java", """
package foo.bar;
public interface TestService extends com.threerings.nexus.distrib.NexusService {
react.RFuture<Integer> addOne (int value);
void launchMissiles ();
void sendData (byte[] data);
}
""")
    // System.err.println(source)
  }

  /** Extracted metadata should describe every service method and argument. */
  @Test def testMeta: Unit = {
    val meta = FactoryTestCompiler.genMeta("TestService.java", """
public interface TestService extends com.threerings.nexus.distrib.NexusService {
react.RFuture<Integer> addOne (int value);
void launchMissiles ();
}
""")
    assertEquals(meta.serviceName, "TestService")
    assertEquals(2, meta.methods.size)
    assertMethodShape(meta.methods.get(0), "addOne", 1)
    assertArgTypes(meta.methods.get(0).args.get(0), "int", "Integer")
    assertMethodShape(meta.methods.get(1), "launchMissiles", 0)
  }

  // Checks a method's name and argument count.
  private def assertMethodShape (m :ServiceMetadata.Method, name :String, args :Int): Unit = {
    assertEquals(name, m.name)
    assertEquals(args, m.args.size)
  }

  // Checks an argument's declared type and its boxed counterpart.
  private def assertArgTypes (arg :ServiceMetadata.Arg, `type` :String, boxedType :String): Unit = {
    assertEquals(`type`, arg.`type`)
    assertEquals(boxedType, arg.boxedType)
  }
}
/** Compiles in-memory Java sources and runs the factory-generation annotation
  * processors over them, exposing the generated source or extracted metadata. */
object FactoryTestCompiler extends TestCompiler {
  /** Compiles `content` and returns the generated factory source text. */
  def genSource (filename :String, content :String) :String =
    process(new GenSourceProcessor, filename, content)
  @SupportedAnnotationTypes(Array("*"))
  class GenSourceProcessor extends TestProcessor[String] {
    override def result = _source
    override protected def generate (elem :TypeElement, metas :Seq[Metadata]) {
      val out = new java.io.StringWriter
      // only ServiceMetadata entries feed the factory generator
      Generator.generateFactory(elem, metas.collect { case sm :ServiceMetadata => sm }, out)
      _source = out.toString
    }
    // accumulates the generated source across processing rounds
    protected var _source = ""
  }
  /** Compiles `content` and returns the extracted service metadata. */
  def genMeta (filename :String, content :String) :ServiceMetadata =
    process(new GenMetaProcessor, filename, content)
  @SupportedAnnotationTypes(Array("*"))
  class GenMetaProcessor extends TestProcessor[ServiceMetadata] {
    override def result = _meta
    override protected def generate (elem :TypeElement, metas :Seq[Metadata]) {
      // keeps the last ServiceMetadata seen (map used only for its side effect)
      (metas.collect { case sm :ServiceMetadata => sm }).map { m => _meta = m }
    }
    protected var _meta :ServiceMetadata = _
  }
  // Stub declarations for types the test sources reference but do not define.
  override protected def stockObjects = List(nexusServiceObj, rFutureObj)
  private def nexusServiceObj = mkTestObject("NexusService.java", """
package com.threerings.nexus.distrib;
public interface NexusService {}
""")
  private def rFutureObj = mkTestObject("RFuture.java", """
package react;
public interface RFuture<T> {}
""")
}
|
#!/bin/bash
# Copyright (c) 2013 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Remove database files left behind by older clients inside a datadir.
# Sole argument: the datadir to clean.
if [ -d "$1" ]; then
  cd "$1" || exit 1
else
  echo "Usage: $0 <datadir>" >&2
  echo "Removes obsolete Unit database files" >&2
  exit 1
fi

# Probe marker files to identify which client generation wrote this datadir;
# later (newer) probes override earlier matches.
LEVEL=0
if [ -f wallet.dat ] && [ -f addr.dat ] && [ -f blkindex.dat ] && [ -f blk0001.dat ]; then LEVEL=1; fi
if [ -f wallet.dat ] && [ -f peers.dat ] && [ -f blkindex.dat ] && [ -f blk0001.dat ]; then LEVEL=2; fi
if [ -f wallet.dat ] && [ -f peers.dat ] && [ -f coins/CURRENT ] && [ -f blktree/CURRENT ] && [ -f blocks/blk00000.dat ]; then LEVEL=3; fi
if [ -f wallet.dat ] && [ -f peers.dat ] && [ -f chainstate/CURRENT ] && [ -f blocks/index/CURRENT ] && [ -f blocks/blk00000.dat ]; then LEVEL=4; fi

case $LEVEL in
  0)
    echo "Error: no Unit datadir detected."
    exit 1
    ;;
  1)
    echo "Detected old Unit datadir (before 0.7)."
    echo "Nothing to do."
    exit 0
    ;;
  2)
    echo "Detected Unit 0.7 datadir."
    ;;
  3)
    echo "Detected Unit pre-0.8 datadir."
    ;;
  4)
    echo "Detected Unit 0.8 datadir."
    ;;
esac

# Accumulate the obsolete files/dirs for this level; echo expands the globs.
FILES=""
DIRS=""
if [ $LEVEL -ge 3 ]; then FILES=$(echo $FILES blk????.dat blkindex.dat); fi
if [ $LEVEL -ge 2 ]; then FILES=$(echo $FILES addr.dat); fi
if [ $LEVEL -ge 4 ]; then DIRS=$(echo $DIRS coins blktree); fi

for f in $FILES; do
  if [ -f $f ]; then
    echo "Deleting: $f"
    rm -f $f
  fi
done

for d in $DIRS; do
  if [ -d $d ]; then
    echo "Deleting: $d/"
    rm -rf $d
  fi
done

echo "Done."
|
import string
import random

# Character class required at each position; the pattern cycles
# uppercase, lowercase, digit, punctuation.
_CLASSES = (string.ascii_uppercase, string.ascii_lowercase,
            string.digits, string.punctuation)


def generate_password(pwd_length=8):
    """Generate a password of exactly ``pwd_length`` characters.

    Position ``i`` is drawn from the class at ``i % 4`` (upper, lower,
    digit, punctuation) — the pattern the original loop aimed for.

    Bug fixed: the original drew each character from the *full* alphabet
    and silently discarded it when it missed the expected class, so the
    result was almost always shorter than the requested length (often
    only 1-3 characters).  Drawing directly from the required class
    guarantees the full length.

    NOTE(review): ``random`` is not cryptographically secure; switch to
    the ``secrets`` module if these passwords protect anything real.

    :param pwd_length: number of characters to generate (default 8)
    :return: the generated password string
    """
    return "".join(random.choice(_CLASSES[i % 4]) for i in range(pwd_length))


if __name__ == "__main__":
    print("Random generated password: ", generate_password())
|
<reponame>mineMineGo/Javascript
/**
 * Hyperscript helper: build a virtual-DOM node from a tag/component type,
 * a props object, and a child list.
 */
export function h(type, props, children) {
  const vnode = { type: type, props: props, children: children };
  return vnode;
}
|
#!/bin/bash
# Installation script for mobileinsight-core on macOS
# It installs package under /usr/local folder
# Author : Zengwen Yuan
# Date : 2018-04-12
# Version : 3.1
# set -e
# set -u
#
# Review fixes: several checks used `[[ $(cmd) ]]`, which tests cmd's *stdout*,
# not its exit status.  `which -s` and `cp` print nothing, so the "not
# installed" / non-sudo branches could never fire.  All such checks now test
# the exit status directly.

echo "** Installer Script for mobileinsight-core on macOS **"
echo " "
echo " Author : Zengwen Yuan (zyuan [at] cs.ucla.edu)"
echo " Date : 2018-04-12"
echo " Rev : 3.1"
echo " Usage : ./install-macos.sh"
echo " "

echo "Upgrading MobileInsight..."
yes | ./uninstall.sh

# Wireshark version to install
ws_ver=2.0.13

# Use local library path
PREFIX=/usr/local
MOBILEINSIGHT_PATH=$(pwd)
WIRESHARK_SRC_PATH=${MOBILEINSIGHT_PATH}/wireshark-${ws_ver}
PYTHON=python2
PIP=pip2

GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color

# Install up-to-date Homebrew and use default prefixes
echo -e "${GREEN}[INFO]${NC} Checking if you have Homebrew installed..."
# FIX: `which -s` is silent, so the old `[[ $(which -s brew) ]]` was always
# false and Homebrew was never auto-installed; test the exit status instead.
if ! which -s brew ; then
    echo -e "${YELLOW}[WARNING]${NC} It appears that Homebrew is not installed on your computer, installing..."
    ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
else
    echo "Updating Homebrew"
    brew update
fi

echo -e "${GREEN}[INFO]${NC} Consulting Homebrew doctor for potential issues..."
echo -e "Output from Homebrew doctor:\n"
if ! brew doctor ; then
    echo -e "${YELLOW}[WARNING]${NC} MobileInsight relies on correctly configured Python environment by Homebrew (not the Python that comes with macOS)."
    echo "It appears that Homebrew reported some issues in your environment. Please carefully review and fix them if necessary before proceeding."
    echo "If the Python environment is not correctly configured, MobileInsight may not work properly."
    echo "If you feel that the warning reported by Homebrew is not related, you may ignore it."
    echo ""
    read -r -p "Are you sure that you want to ignore the warnings reported by Homebrew and continue? [y/N] " response
    case "$response" in
        [yY][eE][sS]|[yY])
            echo "Proceed to MobileInsight installation ignoring Homebrew reported warning."
            ;;
        *)
            echo "MobileInsight installation canceled due to Homebrew reported warning. Please fix those issues and run MobileInsight installation again."
            echo "Exiting with status 2."
            exit 2
            ;;
    esac
else
    echo "All clear."
fi

echo -e "${GREEN}[INFO]${NC} Checking Python 2.7 environment..."
# `brew ls --versions` prints nothing when the formula is absent, so testing
# its output is valid here.
if [[ $(brew ls --versions ${PYTHON}) ]] ; then
    echo "Python 2.7 is installed by Homebrew and ready."
else
    echo -e "${YELLOW}[WARNING]${NC} It appears that you do not have a Python 2.7 version installed via Homebrew."
    echo "Installing formulae python@2 via Homebrew."
    brew install python@2
fi

# FIX: same `which -s` exit-status issue as the brew check above.
if ! which -s ${PIP} ; then
    echo -e "${YELLOW}[WARNING]${NC} It appears that pip2 is not installed on your computer, installing it..."
    brew install python
fi

# Check if Wireshark is installed
brew_ws_ver=$(brew ls --versions wireshark)
if [[ -z ${brew_ws_ver} ]]; then
    echo -e "${YELLOW}[WARNING]${NC} You don't have a Wireshark installed by Homebrew, installing..."
    # Wireshark is not installed, install dependencies
    brew install pkg-config cmake
    brew install glib gnutls libgcrypt dbus
    brew install geoip c-ares
    # Install Wireshark stable version 2.0.x using our own formulae
    brew install ./wireshark.rb
elif [[ $brew_ws_ver == *${ws_ver}* ]]; then
    echo -e "${GREEN}[INFO]${NC} You have a Wireshark version ${ws_ver} installed, continuing..."
else
    echo -e "${YELLOW}[WARNING]${NC} You have a Wireshark other than current version installed."
    echo "Installing Wireshark version ${ws_ver}..."
    brew remove wireshark
    brew install ./wireshark.rb
    brew link --overwrite wireshark
fi

echo -e "${GREEN}[INFO]${NC} Installing dependencies for compiling Wireshark libraries"
brew install wget gettext libffi

echo -e "${GREEN}[INFO]${NC} Checking Wireshark sources to compile ws_dissector"
if [ ! -d "${WIRESHARK_SRC_PATH}" ]; then
    echo -e "${GREEN}[INFO]${NC} You do not have source codes for Wireshark version ${ws_ver}, downloading..."
    wget https://www.wireshark.org/download/src/all-versions/wireshark-${ws_ver}.tar.bz2
    tar xf wireshark-${ws_ver}.tar.bz2
    rm wireshark-${ws_ver}.tar.bz2
fi

echo -e "${GREEN}[INFO]${NC} Configuring Wireshark sources for ws_dissector compilation..."
cd ${WIRESHARK_SRC_PATH}
if ! ./configure --disable-wireshark > /dev/null 2>&1 ; then
    echo -e "${YELLOW}[WARNING]${NC} Error when executing '${WIRESHARK_SRC_PATH}/configure --disable-wireshark'."
    echo "You need to manually fix it before continuation. Exiting with status 3."
    exit 3
fi

echo -e "${GREEN}[INFO]${NC} Compiling Wireshark dissector for MobileInsight..."
cd ${MOBILEINSIGHT_PATH}/ws_dissector
if [[ -f "ws_dissector" ]]; then
    rm ws_dissector
fi
g++ ws_dissector.cpp packet-aww.cpp -o ws_dissector `pkg-config --libs --cflags glib-2.0` \
-I"${WIRESHARK_SRC_PATH}" -L"${PREFIX}/lib" -lwireshark -lwsutil -lwiretap
strip ws_dissector

echo -e "${GREEN}[INFO]${NC} Installing Wireshark dissector to ${PREFIX}/bin"
# FIX: `cp` prints nothing on success, so `[[ $(cp ...) ]]` always fell through
# to the sudo branch; test the exit status and only escalate on failure.
if cp ws_dissector ${PREFIX}/bin 2>/dev/null ; then
    chmod 755 ${PREFIX}/bin/ws_dissector
else
    sudo mkdir -p ${PREFIX}/bin
    sudo cp ws_dissector ${PREFIX}/bin
    sudo chmod 755 ${PREFIX}/bin/ws_dissector
fi

echo -e "${GREEN}[INFO]${NC} Installing mobileinsight-core..."
cd ${MOBILEINSIGHT_PATH}
# FIX: judge success by exit status, not by whether setup.py printed anything.
if ${PYTHON} setup.py install ; then
    echo "Congratulations! mobileinsight-core is successfully installed!"
else
    echo "Installing mobileinsight-core using sudo, your password may be required..."
    sudo ${PYTHON} setup.py install
fi

echo -e "${GREEN}[INFO]${NC} Installing GUI for MobileInsight..."
cd ${MOBILEINSIGHT_PATH}
# FIX: `mkdir -p` is silent on success, so the old output test always chose
# the sudo branch; try without sudo first and escalate only on failure.
if mkdir -p ${PREFIX}/share/mobileinsight/ 2>/dev/null ; then
    cp -r gui/* ${PREFIX}/share/mobileinsight/
    ln -s ${PREFIX}/share/mobileinsight/mi-gui ${PREFIX}/bin/mi-gui
else
    sudo mkdir -p ${PREFIX}/share/mobileinsight/
    sudo cp -r gui/* ${PREFIX}/share/mobileinsight/
    sudo ln -s ${PREFIX}/share/mobileinsight/mi-gui ${PREFIX}/bin/mi-gui
fi

echo -e "${GREEN}[INFO]${NC} Installing dependencies for MobileInsight GUI..."
echo -e "${GREEN}[INFO]${NC} Installing wxPython..."
brew install wxpython
if [[ ! -f ${PREFIX}/bin/${PYTHON} ]] ; then
    echo -e "${YELLOW}[WARNING]${NC} Cannot find python installed by Homebrew under /usr/local"
    echo "Your Homebrew prefix is:"
    echo $(brew config | grep -i prefix)
    echo -e "${YELLOW}[WARNING]${NC} The MobileInsight GUI will likely not functioning if you do not use the default /usr/local prefix."
fi
# FIX: exit-status test instead of stdout test.
if ${PIP} install pyserial matplotlib ; then
    echo "PySerial and matplotlib are successfully installed!"
else
    echo "Installing PySerial and matplotlib using sudo, your password may be required..."
    sudo ${PIP} install pyserial matplotlib
fi

echo -e "${GREEN}[INFO]${NC} Testing the MobileInsight offline analysis example."
cd ${MOBILEINSIGHT_PATH}/examples
if ${PYTHON} offline-analysis-example.py ; then
    echo "Successfully ran the offline analysis example!"
else
    echo -e "${YELLOW}[WARNING]${NC} Failed to run offline analysis example!"
    echo "Exiting with status 4."
    exit 4
fi

echo -e "${GREEN}[INFO]${NC} Testing MobileInsight GUI (you need to be in a graphic session)..."
if mi-gui ; then
    echo "Successfully ran MobileInsight GUI!"
    echo "The installation of mobileinsight-core is finished!"
else
    echo "There are issues running MobileInsight GUI, you need to fix them manually"
    echo "The installation of mobileinsight-core is finished!"
fi
|
<reponame>mkralik3/syndesis-qe
package io.syndesis.qe.steps.integrations.importt;
import io.syndesis.qe.TestConfiguration;
import io.syndesis.qe.pages.integrations.Integrations;
import io.syndesis.qe.pages.integrations.importt.ImportIntegration;
import io.syndesis.qe.utils.TestUtils;
import java.io.File;
import io.cucumber.java.en.When;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ImportIntegrationSteps {

    // Page objects driving the integrations list and the import screen.
    // NOTE(review): `integrations` is never referenced in this class — confirm
    // whether it can be removed or is kept for future steps.
    private Integrations integrations = new Integrations();
    private ImportIntegration importIntegrationPage = new ImportIntegration();

    /**
     * Imports a previously exported integration by name via the import page.
     *
     * @param integrationName name of the integration export to import
     */
    @When("^import integration \"([^\"]*)\"$")
    public void importIntegration(String integrationName) {
        importIntegrationPage.importIntegration(integrationName);
        // allow slower CI (Jenkins) runs extra time for the import to settle
        TestUtils.sleepForJenkinsDelayIfHigher(3);
    }

    /**
     * Imports an exported integration by dropping its file onto the
     * drag-and-drop area of the import page.
     *
     * @param integrationName name of the integration export to drop
     */
    @When("^drag exported integration \"([^\"]*)\" file to drag and drop area$")
    public void importIntegrationViaDragAndDrop(String integrationName) {
        importIntegrationPage.importIntegrationViaDragAndDrop(integrationName);
    }

    /**
     * Imports an integration from an export file addressed by a path
     * relative to the working directory.
     *
     * @param stringPathToFile relative path to the exported integration file
     */
    @When("^import integration from relative file path \"([^\"]*)\"$")
    public void importIntegrationFromFile(String stringPathToFile) {
        importIntegrationPage.importIntegration(new File(stringPathToFile));
        //give jenkins more time so the integration shows up in the list
        TestUtils.sleepIgnoreInterrupt(TestConfiguration.getJenkinsDelay());
    }
}
|
#!/bin/sh
set -eu
# NOTE: deployment is intentionally short-circuited — the bare `exit` below
# stops the script, and everything after it is dead code kept around for when
# automated deploys are re-enabled.
>&2 echo "Automated deploy currently disabled"
>&2 echo "Would have deployed $(cat .dockertag) if enabled"
exit
# Read the image tag produced by the build step.
tag=$(cat .dockertag)
echo "Running remote SSH-script"
# Run the service's deploy script for this tag on the target host; the heredoc
# body is executed remotely by /bin/bash (note: $tag expands locally).
ssh -o StrictHostKeyChecking=no core@coreos-1.foreningenbs.no /bin/bash << EOF
set -eu
cd /data/drift/services/users-api
./deploy.sh $tag
EOF
echo "Deploy finished"
|
package com.platform.cache;
import com.platform.dao.SysRegionDao;
import com.platform.entity.MyRegion;
import com.platform.entity.SysRegionEntity;
import com.platform.utils.SpringContextUtils;
import org.springframework.beans.factory.InitializingBean;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * In-memory cache over the region table (loaded via {@link SysRegionDao}).
 *
 * <p>Region {@code type} codes, as used by the lookup methods below:
 * 0 = country, 1 = province, 2 = city, 3 = district.</p>
 *
 * <p>NOTE(review): all state lives in public static mutable fields rebuilt by
 * {@link #init()} with no synchronization — reads during a rebuild can observe
 * partial data. Confirm this is acceptable for the deployment.</p>
 *
 * @author lipengjun
 * @email <EMAIL>
 * @date 2017-11-04 11:19:31
 */
public class RegionCacheUtil implements InitializingBean {
    // Flat list of raw region rows as loaded from the database; null until init() runs.
    public static List<SysRegionEntity> sysRegionEntityList;
    // Flat list of MyRegion copies (no child links populated).
    public static List<MyRegion> myRegionList;
    // Province-level (type == 1) nodes of the region tree built by init().
    public static List<MyRegion> myRegionLevelList;
    // Region id -> tree node with its child list populated.
    public static Map<Integer ,MyRegion > regionMap=new HashMap();

    /**
     * Loads all region rows and builds both the flat lists and the
     * parent/child tree held in {@link #regionMap}.
     */
    public static void init() {
        SysRegionDao regionDao = SpringContextUtils.getBean(SysRegionDao.class);
        myRegionList=new ArrayList();
        myRegionLevelList=new ArrayList();
        if (null != regionDao) {
            sysRegionEntityList = regionDao.queryList(new HashMap<String, Object>());
            for(SysRegionEntity sysRegionEntity:sysRegionEntityList) {
                // Tree node for this row; children are attached as they are seen.
                MyRegion myRegion=new MyRegion();
                myRegion.setId(sysRegionEntity.getId());
                myRegion.setName(sysRegionEntity.getName());
                myRegion.setPid(sysRegionEntity.getParentId());
                myRegion.setType(sysRegionEntity.getType());
                myRegion.setChildList(new ArrayList());
                // Independent flat copy for myRegionList (child list left unset).
                MyRegion myRegion2=new MyRegion();
                myRegion2.setId(sysRegionEntity.getId());
                myRegion2.setName(sysRegionEntity.getName());
                myRegion2.setPid(sysRegionEntity.getParentId());
                myRegion2.setType(sysRegionEntity.getType());
                myRegionList.add(myRegion2);
                // Attach this node to its parent, creating a placeholder parent
                // if the parent row has not been seen yet.
                MyRegion pmyRegion=regionMap.get(sysRegionEntity.getParentId());
                if(pmyRegion!=null) {
                    pmyRegion.getChildList().add(myRegion);
                }else {
                    pmyRegion=new MyRegion();
                    pmyRegion.setId(sysRegionEntity.getParentId());
                    pmyRegion.setChildList(new ArrayList());
                    // NOTE(review): the current node is never added to this new
                    // placeholder's child list, so rows seen before their parent
                    // appear to be dropped from the tree — verify intent.
                    regionMap.put(pmyRegion.getId(), pmyRegion);
                }
                // If a placeholder was created earlier for this id, inherit the
                // children it already collected.
                MyRegion tmpmyRegion=regionMap.get(sysRegionEntity.getId());
                if(tmpmyRegion==null) {
                    regionMap.put(myRegion.getId(), myRegion);
                }else {
                    myRegion.setChildList(tmpmyRegion.getChildList());
                    regionMap.put(myRegion.getId(), myRegion);
                }
            }
            // Collect the province-level (type == 1) nodes of the finished tree.
            Collection<MyRegion> collection= regionMap.values();
            for(MyRegion myRegion:collection) {
                if(myRegion.getType()!=null&&myRegion.getType().equals(1)) {
                    myRegionLevelList.add(myRegion);
                }
            }
        }
    }

    /**
     * Gets all countries (rows with type == 0).
     *
     * @return the matching regions; empty if the cache is not loaded
     */
    public static List<SysRegionEntity> getAllCountry() {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getType().equals(0)) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets all provinces (rows with type == 1).
     *
     * @return the matching regions; empty if the cache is not loaded
     */
    public static List<SysRegionEntity> getAllProvice() {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getType().equals(1)) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets the flat list of all regions.
     *
     * @return the flat region list (no child links)
     */
    public static List<MyRegion> getAll() {
        return myRegionList;
    }

    /**
     * Gets the province-level nodes of the region tree (with children).
     *
     * @return the province-level tree nodes
     */
    public static List<MyRegion> getAllLevel() {
        return myRegionLevelList;
    }

    /**
     * Gets all cities (rows with type == 2).
     *
     * @return the matching regions; empty if the cache is not loaded
     */
    public static List<SysRegionEntity> getAllCity() {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getType().equals(2)) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets all provinces under the given country id.
     *
     * @param areaId parent country id
     * @return the matching provinces; empty when areaId is null or nothing matches
     */
    public static List<SysRegionEntity> getAllProviceByParentId(Integer areaId) {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null == areaId) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (null != areaVo.getParentId() && areaVo.getType().equals(1) && areaId.equals(areaVo.getParentId())) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets the child cities of the given region id.
     *
     * @param areaId parent region id
     * @return the matching cities; empty when areaId is null or nothing matches
     */
    public static List<SysRegionEntity> getChildrenCity(Integer areaId) {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null == areaId) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (null != areaVo.getParentId() && areaVo.getType().equals(2) && areaId.equals(areaVo.getParentId())) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets the child cities of the province with the given name.
     *
     * @param proviceName parent province name (matched against parentName)
     * @return the matching cities; empty when the name is null or nothing matches
     */
    public static List<SysRegionEntity> getChildrenCity(String proviceName) {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null == proviceName) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (null != areaVo.getParentId() && areaVo.getType().equals(2) && proviceName.equals(areaVo.getParentName())) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets the child districts of the given region id.
     *
     * @param areaId parent region id
     * @return the matching districts; empty when areaId is null or nothing matches
     */
    public static List<SysRegionEntity> getChildrenDistrict(Integer areaId) {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null == areaId) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (null != areaVo.getParentId() && areaVo.getType().equals(3) && areaId.equals(areaVo.getParentId())) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets the child districts addressed by province name and city name.
     *
     * @param provinceName province name (matched against the city's parentName)
     * @param cityName     city name (matched against the district's parentName)
     * @return the matching districts; empty when either name is null or nothing matches
     */
    public static List<SysRegionEntity> getChildrenDistrict(String provinceName, String cityName) {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null == provinceName || null == cityName) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                // a district matches when its parent city carries cityName and
                // that city's own parent carries provinceName
                if (null != areaVo.getParentId() && areaVo.getType().equals(3)
                    && cityName.equals(areaVo.getParentName())
                    && null != getAreaByAreaId(areaVo.getParentId())
                    && provinceName.equals(getAreaByAreaId(areaVo.getParentId()).getParentName())) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets all direct children of the given parent region, of any type.
     *
     * @param parentId parent region id
     * @return the matching regions; empty when parentId is null or nothing matches
     */
    public static List<SysRegionEntity> getChildrenByParentId(Integer parentId) {
        List<SysRegionEntity> resultObj = new ArrayList<SysRegionEntity>();
        if (null == parentId) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (null != areaVo.getParentId() && parentId.equals(areaVo.getParentId())) {
                    resultObj.add(areaVo);
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets the name of the region with the given id.
     *
     * @param areaId region id
     * @return the region name; the id rendered as text when the id is unknown,
     *         or "" when areaId is null
     */
    public static String getAreaNameByAreaId(Integer areaId) {
        if (null == areaId) {
            return "";
        }
        String resultObj = areaId.toString();
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getId().equals(areaId)) {
                    resultObj = areaVo.getName();
                    break;
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets the region with the given id.
     *
     * @param areaId region id
     * @return the region, or null when areaId is null or unknown
     */
    public static SysRegionEntity getAreaByAreaId(Integer areaId) {
        SysRegionEntity resultObj = null;
        if (null == areaId) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getId().equals(areaId)) {
                    resultObj = areaVo;
                    break;
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets a province's id by its name.
     *
     * <p>NOTE(review): {@code getType() == 1} auto-unboxes the Integer and
     * throws NPE when type is null — the list methods above guard with
     * equals() instead; verify type can never be null here.</p>
     *
     * @param areaName province name
     * @return the province id, or null when not found or areaName is null
     */
    public static Integer getProvinceIdByName(String areaName) {
        Integer resultObj = null;
        if (null == areaName) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getType() == 1 && areaVo.getName().equals(areaName)) {
                    resultObj = areaVo.getId();
                    break;
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets a city's id by its name within the given province.
     *
     * @param provinceId id of the province the city must belong to
     * @param areaName   city name
     * @return the city id, or null when not found or areaName is null
     */
    public static Integer getCityIdByName(Integer provinceId, String areaName) {
        Integer resultObj = null;
        if (null == areaName) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getType() == 2 && areaVo.getName().equals(areaName)
                    && areaVo.getParentId().equals(provinceId)) {
                    resultObj = areaVo.getId();
                    break;
                }
            }
        }
        return resultObj;
    }

    /**
     * Gets a district's id by its name within the given province and city.
     *
     * @param provinceId id of the province the district's city must belong to
     * @param cityId     id of the city the district must belong to
     * @param areaName   district name
     * @return the district id, or null when not found or areaName is null
     */
    public static Integer getDistrictIdByName(Integer provinceId, Integer cityId, String areaName) {
        Integer resultObj = null;
        if (null == areaName) {
            return resultObj;
        }
        if (null != sysRegionEntityList) {
            for (SysRegionEntity areaVo : sysRegionEntityList) {
                if (areaVo.getType() == 3 && areaVo.getName().equals(areaName)
                    && areaVo.getParentId().equals(cityId)
                    && null != getAreaByAreaId(areaVo.getParentId())
                    && null != getAreaByAreaId(areaVo.getParentId()).getParentId()
                    && getAreaByAreaId(areaVo.getParentId()).getParentId().equals(provinceId)) {
                    resultObj = areaVo.getId();
                    break;
                }
            }
        }
        return resultObj;
    }

    /**
     * Spring lifecycle hook: populate the cache once the bean is constructed.
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        init();
    }
}
|
<reponame>Hannah-Abi/python-pro-21
import unittest
from unittest.mock import patch
from tmc import points, reflect
from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import os.path
import textwrap
from random import choice, randint
from datetime import date
# Module under test, loaded dynamically by the TMC harness.
exercise = 'src.shortest_in_room'

def f(attr: list):
    """Render the items of ``attr`` as a comma-separated string."""
    return ",".join(map(str, attr))
class ShortestInRoomTest(unittest.TestCase):
    """TMC grader for the Person/Room exercise in ``src.shortest_in_room``.

    Fix over the original: three ``self.fail`` messages used ``{e}`` inside a
    plain (non-f) string literal, so the caught exception was printed as the
    literal text ``{e}`` instead of its value; those literals are now proper
    f-strings.
    """

    @classmethod
    def setUpClass(cls):
        # Any call to input() inside the student's module is treated as an error.
        with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected.")]):
            cls.module = load_module(exercise, 'en')

    @points('9.shortest_in_room_part1')
    def test_0a_main_program_ok(self):
        # Top-level test code must live under a __main__ guard.
        ok, line = check_source(self.module)
        message = """The code for testing the functions should be placed inside
if __name__ == "__main__":
block. The following row should be moved:
"""
        self.assertTrue(ok, message+line)

    @points('9.shortest_in_room_part1')
    def test_1_classes_exists(self):
        # Both classes must be constructible with their expected signatures.
        try:
            from src.shortest_in_room import Person
            r = Person("Ken",150)
        except Exception as e:
            self.fail(f'Calling constructor Person("Ken",150) threw an error \n{e}\n' +
                'Have you changed the implementation of Person class?')
        try:
            from src.shortest_in_room import Room
            r = Room()
        except Exception as e:
            self.fail(f'Calling constructor Room() threw an error \n{e}\n' +
                'Make sure, that class exists.')

    @points('9.shortest_in_room_part1')
    def test_2_room_methods_exists1(self):
        # add / is_empty / print_contents must exist and not raise.
        from src.shortest_in_room import Room, Person
        room = Room()
        try:
            room.add(Person("Ken", 150))
        except Exception as e:
            self.fail(f'Method call add(Person("Ken", 150)) threw an error\n{e}')
        try:
            val = room.is_empty()
            taip = str(type(val)).replace("<class '","").replace("'>","")
            self.assertTrue(type(val) == bool, f'Method is_empty() should return a value, ' +
                f'which is of type bool, now it returns a value {val}, which is of type {taip}.')
        except Exception as e:
            self.fail(f'Method call is_empty threw an error\n{e}')
        try:
            room.print_contents()
        except Exception as e:
            self.fail(f'Method call print_contents threw an error\n{e}\nwhen the room is empty')

    @points('9.shortest_in_room_part1')
    def test_3_add_person_print_contents(self):
        # After adds, is_empty flips and print_contents lists every person.
        from src.shortest_in_room import Room, Person
        room = Room()
        self.assertTrue(room.is_empty(), f'Method is_empty() should return True, when not a single person has been added in the room ' +
            'Now it returned False.')
        test_cases = [("Grace", 180), ("Jan", 175), ("Lisa", 150), ("Paul", 204), ("Jana", 171), ("Ruth", 149)]
        persons = ""
        tested = []
        for test_case in test_cases:
            tested.append(test_case)
            room.add(Person(test_case[0], test_case[1]))
            persons += f"\n{test_case[0]} ({test_case[1]} cm)"
            self.assertFalse(room.is_empty(), f'"Method is_empty() should return False, when there have been added persons in the room ' +
                f'- now it returned True.' +
                f'\nwhen the following persons has been added in the room:\n{persons}\n')
            room.print_contents()
            output = get_stdout()
            for t in tested:
                self.assertTrue(t[0] in output and str(t[1]) in output, f'The following strings should be found out from the printout\n' +
                    f'{t[0]} and {t[1]}\nwhen the following persons has been added in the room:\n{persons}\n' +
                    f'now the print out is\n{output}')

    @points('9.shortest_in_room_part2')
    def test_4_method_shortest_exists(self):
        # shortest(): None for an empty room, a Person otherwise.
        from src.shortest_in_room import Room, Person
        room = Room()
        try:
            val = room.shortest()
            taip = str(type(val)).replace("<class '","").replace("'>","")
            self.assertTrue(val is None, f'Method shortest should return a value None, ' +
                f'when the room is empty, now it returns a value {val}, which is of type {taip}.')
        except Exception as e:
            # FIX: was a plain string, so {e} never showed the exception.
            self.fail(f'Method call shortest threw an error\n{e}\n' +
                'when the room is empty.')
        try:
            room.add(Person("Ann",120))
            room.add(Person("Tim",150))
            val = room.shortest()
            taip = str(type(val)).replace("<class '","").replace("'>","")
            self.assertTrue("Person" in str(type(val)), f'Method shortest should return a value, ' +
                f'which is of type Person, now it returns a value {val}, which is of type {taip}.' +
                f'when the following persons has been added in the room:\n' +
                'Person("Ann",120)\n' +
                'Person("Tim",150)')
        except Exception as e:
            self.fail(f'Method call shortest threw an error\n{e}\n'+
                f'when the following persons has been added in the room:\n' +
                'Person("Ann",120)\n' +
                'Person("Tim",150)')

    @points('9.shortest_in_room_part2')
    def test_5_test_shortest(self):
        # shortest() must return the person with the smallest height.
        from src.shortest_in_room import Room, Person
        room = Room()
        test_cases = [("Grace", 180), ("Jan", 175), ("Lisa", 150), ("Paul", 204), ("Jana", 171), ("Ruth", 149)]
        persons = ""
        tested = []
        for test_case in test_cases:
            tested.append(test_case)
            room.add(Person(test_case[0], test_case[1]))
            persons += f"\n{test_case[0]} ({test_case[1]} cm)"
            shortest = room.shortest()
            taip = str(type(shortest)).replace("<class '","").replace("'>","")
            self.assertTrue("Person" in str(type(shortest)), f'Method shortest should return a value, ' +
                f'which is of type Person, now it returns a value {shortest}, which is of type {taip}.' +
                f'when the following persons has been added in the room:\n{persons}')
            try:
                val = shortest.name
            except:
                self.fail(f'Method shortest() should return Person object\nNow it returns ' +
                    f'returns {shortest}, which is of type {type(shortest)} when the following persons has been added:{persons}.')
            corr = min(tested, key = lambda x : x[1])[0]
            self.assertEqual(val, corr, f'Method shortest() should return a person, whose name is {corr}. \nNow it ' +
                f'returns a person, whose name is {val}, when the following persons has been added:{persons}.')

    @points('9.shortest_in_room_part3')
    def test_6_method_remove_shortest_exists(self):
        # remove_shortest(): None for an empty room, a Person otherwise.
        from src.shortest_in_room import Room, Person
        room = Room()
        try:
            val = room.remove_shortest()
            taip = str(type(val)).replace("<class '","").replace("'>","")
            self.assertTrue(val is None, f'Method remove_shortest should return a value None, ' +
                f'when the room is empty, now it returns a value {val}, which is of type {taip}.')
        except Exception as e:
            # FIX: was a plain string, so {e} never showed the exception.
            self.fail(f'Method call remove_shortest threw an error\n{e}' +
                'when the room is empty.')
        try:
            room.add(Person("Ann",120))
            room.add(Person("Tim",150))
            val = room.remove_shortest()
            taip = str(type(val)).replace("<class '","").replace("'>","")
            self.assertTrue("Person" in str(type(val)), f'Method remove_shortest should return a value, ' +
                f'which is of type Person, now it returns a value {val}, which is of type {taip}.' +
                f'when the following persons has been added in the room:\n' +
                'Person("Ann",120)\n' +
                'Person("Tim",150)')
        except Exception as e:
            # FIX: was a plain string, so {e} never showed the exception.
            self.fail(f'Method call remove_shortest threw an error\n{e}\n'+
                f'when the following persons has been added in the room:\n' +
                'Person("Ann",120)\n' +
                'Person("Tim",150)')

    @points('9.shortest_in_room_part3')
    def test_7_test_remove_shortest(self):
        # Repeated remove_shortest() drains the room in height order.
        from src.shortest_in_room import Room, Person
        room = Room()
        test_cases = [("Grace", 180), ("Jan", 175), ("Lisa", 150), ("Paul", 204), ("Jana", 171), ("Ruth", 149)]
        tested = []
        persons = ""
        for test_case in test_cases:
            room.add(Person(test_case[0], test_case[1]))
            persons += f"\n{test_case[0]} ({test_case[1]} cm)"
            tested.append(test_case)
        prev_output = ""
        for i in range(len(test_cases)):
            val = room.remove_shortest()
            corr = min(tested, key = lambda x : x[1])
            self.assertEqual(val.name, corr[0], f'Method remove_shortest() should return a person, whose name is {corr[0]}. \nNow it' +
                f'returns a person, whose name is {val.name}, when the following persons were in the room {tested}.')
            tested.remove(corr)
            room.print_contents()
            # get_stdout() is cumulative; strip what was already checked.
            output = get_stdout().replace(prev_output, "")
            prev_output += output
            output_list = [x for x in output.split("\n") if len(x.strip()) > 0 and not x.startswith("There are")]
            self.assertEqual(len(output_list), len(tested), f'In the room should be now {len(tested)} persons, when the following persons were added to the room:' +
                f'{persons}\nand method remove_shortest have been called {i + 1} times.\nNow, however the method print_contents prints out\n{output}')

if __name__ == '__main__':
    unittest.main()
|
from typing import List
def count_anime_store_statistics_endpoints(url_patterns: List[str]) -> int:
    """Count how many URL patterns belong to the anime store.

    A pattern belongs to the anime store when the substring "anime-store"
    appears anywhere in it.

    :param url_patterns: list of URL pattern strings to inspect
    :return: number of patterns containing "anime-store"
    """
    total = 0
    for pattern in url_patterns:
        if "anime-store" in pattern:
            total += 1
    return total
|
#!/usr/bin/env bash
# Bumps the pinned image versions (cloudwatch-agent, fluentd, fluent-bit) and
# the k8s manifest version string across the ECS / Kubernetes deployment
# templates, then regenerates the combined "quickstart" manifests.
#
# Pattern used throughout: `sed -i'.bak' ... && rm *.bak` keeps the in-place
# edit portable between GNU sed and BSD/macOS sed (macOS requires a suffix).
cd "$(dirname "$0")"
k8sDirPrefix="./k8s-deployment-manifest-templates/deployment-mode/daemonset/container-insights-monitoring"
ecsDirPrefix="./ecs-task-definition-templates/deployment-mode/daemon-service/cwagent-ecs-instance-metric"
# Versions being pinned; edit these when releasing.
newK8sVersion="k8s/1.3.0"
agentVersion="amazon/cloudwatch-agent:1.247346.0b249609"
fluentdVersion="fluent/fluentd-kubernetes-daemonset:v1.7.3-debian-cloudwatch-1.0"
fluentBitVersion="amazon/aws-for-fluent-bit:2.10.0"
k8sPrometheusDirPrefix="./k8s-deployment-manifest-templates/deployment-mode/service/cwagent-prometheus"
ecsPrometheusDirPrefix="./ecs-task-definition-templates/deployment-mode/replica-service/cwagent-prometheus"
# replace agent version for ECS Prometheus
sed -i'.bak' "s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*\(-prometheus\)\?|${agentVersion}|g" ${ecsPrometheusDirPrefix}/cwagent-prometheus-task-definition.json
rm ${ecsPrometheusDirPrefix}/cwagent-prometheus-task-definition.json.bak
sed -i'.bak' "s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*\(-prometheus\)\?|${agentVersion}|g" ${ecsPrometheusDirPrefix}/cloudformation-quickstart/cwagent-ecs-prometheus-metric-for-awsvpc.yaml
rm ${ecsPrometheusDirPrefix}/cloudformation-quickstart/cwagent-ecs-prometheus-metric-for-awsvpc.yaml.bak
sed -i'.bak' "s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*\(-prometheus\)\?|${agentVersion}|g" ${ecsPrometheusDirPrefix}/cloudformation-quickstart/cwagent-ecs-prometheus-metric-for-bridge-host.yaml
rm ${ecsPrometheusDirPrefix}/cloudformation-quickstart/cwagent-ecs-prometheus-metric-for-bridge-host.yaml.bak
# replace agent and k8s version for K8s Prometheus
sed -i'.bak' "s|k8s/[0-9]*\.[0-9]*\.[0-9a-z]*\(-prometheus\)\?|${newK8sVersion}|g;s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*\(-prometheus\)\?|${agentVersion}|g" ${k8sPrometheusDirPrefix}/prometheus-eks.yaml
rm ${k8sPrometheusDirPrefix}/prometheus-eks.yaml.bak
sed -i'.bak' "s|k8s/[0-9]*\.[0-9]*\.[0-9a-z]*\(-prometheus\)\?|${newK8sVersion}|g;s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*\(-prometheus\)\?|${agentVersion}|g" ${k8sPrometheusDirPrefix}/prometheus-k8s.yaml
rm ${k8sPrometheusDirPrefix}/prometheus-k8s.yaml.bak
# replace agent version for ECS
sed -i'.bak' "s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*|${agentVersion}|g" ${ecsDirPrefix}/cwagent-ecs-instance-metric.json
rm ${ecsDirPrefix}/cwagent-ecs-instance-metric.json.bak
sed -i'.bak' "s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*|${agentVersion}|g" ${ecsDirPrefix}/cloudformation-quickstart/cwagent-ecs-instance-metric-cfn.json
rm ${ecsDirPrefix}/cloudformation-quickstart/cwagent-ecs-instance-metric-cfn.json.bak
# replace agent, fluentD and fluent-bit version for K8s
sed -i'.bak' "s|k8s/[0-9]*\.[0-9]*\.[0-9a-z]*|${newK8sVersion}|g;s|amazon/cloudwatch-agent:[0-9]*\.[0-9]*\.[0-9a-z]*|${agentVersion}|g" ${k8sDirPrefix}/cwagent/cwagent-daemonset.yaml
rm ${k8sDirPrefix}/cwagent/cwagent-daemonset.yaml.bak
sed -i'.bak' "s|k8s/[0-9]*\.[0-9]*\.[0-9a-z]*|${newK8sVersion}|g;s|fluent/fluentd-kubernetes-daemonset:.*|${fluentdVersion}|g" ${k8sDirPrefix}/fluentd/fluentd.yaml
rm ${k8sDirPrefix}/fluentd/fluentd.yaml.bak
sed -i'.bak' "s|k8s/[0-9]*\.[0-9]*\.[0-9a-z]*|${newK8sVersion}|g;s|amazon/aws-for-fluent-bit.*|${fluentBitVersion}|g" ${k8sDirPrefix}/fluent-bit/fluent-bit.yaml
rm ${k8sDirPrefix}/fluent-bit/fluent-bit.yaml.bak
sed -i'.bak' "s|k8s/[0-9]*\.[0-9]*\.[0-9a-z]*|${newK8sVersion}|g;s|amazon/aws-for-fluent-bit.*|${fluentBitVersion}|g" ${k8sDirPrefix}/fluent-bit/fluent-bit-compatible.yaml
rm ${k8sDirPrefix}/fluent-bit/fluent-bit-compatible.yaml.bak
# generate quickstart manifest for K8s
# The quickstart files are the individual manifests concatenated with "---"
# document separators; the sed below injects an "agent" region block ahead of
# the "logs" section of the configmap.
OUTPUT=${k8sDirPrefix}/quickstart/cwagent-fluentd-quickstart.yaml
OUTPUT_FLUENT_BIT=${k8sDirPrefix}/quickstart/cwagent-fluent-bit-quickstart.yaml
cat ${k8sDirPrefix}/cloudwatch-namespace.yaml > ${OUTPUT}
echo -e "\n---\n" >> ${OUTPUT}
cat ${k8sDirPrefix}/cwagent/cwagent-serviceaccount.yaml >> ${OUTPUT}
echo -e "\n---\n" >> ${OUTPUT}
cat ${k8sDirPrefix}/cwagent/cwagent-configmap.yaml | sed "s|\"logs|\"agent\": {\\
\"region\": \"{{region_name}}\"\\
},\\
\"logs|g" >> ${OUTPUT}
echo -e "\n---\n" >> ${OUTPUT}
cat ${k8sDirPrefix}/cwagent/cwagent-daemonset.yaml >> ${OUTPUT}
echo -e "\n---\n" >> ${OUTPUT}
cat ${k8sDirPrefix}/fluentd/fluentd-configmap.yaml >> ${OUTPUT}
echo -e "\n---\n" >> ${OUTPUT}
cat ${k8sDirPrefix}/fluentd/fluentd.yaml >> ${OUTPUT}
cat ${k8sDirPrefix}/cloudwatch-namespace.yaml > ${OUTPUT_FLUENT_BIT}
echo -e "\n---\n" >> ${OUTPUT_FLUENT_BIT}
cat ${k8sDirPrefix}/cwagent/cwagent-serviceaccount.yaml >> ${OUTPUT_FLUENT_BIT}
echo -e "\n---\n" >> ${OUTPUT_FLUENT_BIT}
cat ${k8sDirPrefix}/cwagent/cwagent-configmap.yaml | sed "s|\"logs|\"agent\": {\\
\"region\": \"{{region_name}}\"\\
},\\
\"logs|g" >> ${OUTPUT_FLUENT_BIT}
echo -e "\n---\n" >> ${OUTPUT_FLUENT_BIT}
cat ${k8sDirPrefix}/cwagent/cwagent-daemonset.yaml >> ${OUTPUT_FLUENT_BIT}
echo -e "\n---\n" >> ${OUTPUT_FLUENT_BIT}
cat ${k8sDirPrefix}/fluent-bit/fluent-bit-configmap.yaml >> ${OUTPUT_FLUENT_BIT}
echo -e "\n---\n" >> ${OUTPUT_FLUENT_BIT}
cat ${k8sDirPrefix}/fluent-bit/fluent-bit.yaml >> ${OUTPUT_FLUENT_BIT}
|
<filename>public/assets/js/main.js<gh_stars>0
// Page-wide UI wiring: preloader, mobile menu, counters, slick carousels,
// isotope project grid and accordion card headers. Everything runs inside an
// IIFE so jQuery is bound to `$` without polluting the global scope.
(function ($) {
  "use strict";
  // Pre loader: fade out the overlay once all assets have loaded, then
  // restore page scrolling and the meanmenu bar.
  $(window).on("load", function () {
    $(".preloader").fadeOut(2000),
    $("body").css({"overflow-y":"inherit"});
    $(".mean-container .mean-bar").css({"display":"inherit"})
  });
  // Responsive mobile menu (meanmenu) shown below 992px.
  jQuery('#mobile-menu').meanmenu({
    meanMenuContainer: '.mobile-menu',
    meanScreenWidth: "992"
  });
  // Animated number counters (counterUp plugin).
  $('.counter').counterUp({
    delay: 10,
    time: 1000
  });
  // Reviews carousel: 3-up, collapsing to 2 below 1200px and 1 below 768px.
  $('.review-slider').slick({
    autoplay: true,
    slidesToShow: 3,
    slidesToScroll: 1,
    dots: false,
    infinite: true,
    nextArrow: "<button class=\"slide-next\"><i class=\"fas fa-long-arrow-alt-right\"></i></button>",
    prevArrow: "<button class=\"slide-prev\"><i class=\"fas fa-long-arrow-alt-left\"></i></button>",
    responsive: [
      {
        breakpoint: 1200,
        settings: {
          slidesToShow: 2,
        }
      },
      {
        breakpoint: 768,
        settings: {
          slidesToShow: 1,
        }
      }
    ]
  });
  // Client logo strip: auto-playing, arrow-less, 4-up shrinking to 1-up.
  $('.companies-logo').slick({
    autoplay: true,
    slidesToShow: 4,
    slidesToScroll: 1,
    dots: false,
    infinite: true,
    arrows: false,
    responsive: [
      {
        breakpoint: 991,
        settings: {
          slidesToShow: 3,
        }
      },
      {
        breakpoint: 576,
        settings: {
          slidesToShow: 2,
        }
      },
      {
        breakpoint: 376,
        settings: {
          slidesToShow: 1,
        }
      },
    ]
  });
  // Main hero slider (auto-playing, slow 3s transition).
  $('.home-slider').slick({
    autoplay: true,
    slidesToShow: 1,
    slidesToScroll: 1,
    dots: false,
    infinite: true,
    arrows: true,
    speed: 3000,
    prevArrow: "<button class=\"slick-prev slick-arrow\"><i class=\"fas fa-long-arrow-alt-left\"></i></button>",
    nextArrow: "<button class=\"slick-next slick-arrow\"><i class=\"fas fa-long-arrow-alt-right\"></i></button>"
  });
  // Alternate hero slider (manual navigation only).
  $('.home-slider-2').slick({
    autoplay: false,
    slidesToShow: 1,
    slidesToScroll: 1,
    dots: false,
    infinite: true,
    arrows: true,
    speed: 1000,
    prevArrow: "<button class=\"slick-prev slick-arrow\"><i class=\"fas fa-long-arrow-alt-left\"></i></button>",
    nextArrow: "<button class=\"slick-next slick-arrow\"><i class=\"fas fa-long-arrow-alt-right\"></i></button>"
  });
  // Masonry grid for the project gallery (isotope plugin).
  var grid = $('.grid').isotope({
    itemSelector: '.grid-item',
    percentPosition: true,
    masonry: {
      // use outer width of grid-sizer for columnWidth
      columnWidth: '.grid-item'
    }
  })
  // Filter the grid by the clicked button's data-filter selector.
  $('.project-menu').on('click', 'button', function () {
    var filterValue = $(this).attr('data-filter');
    grid.isotope({ filter: filterValue });
  });
  // for project-menu active class
  $('.project-menu button').on('click', function (event) {
    $(this).siblings('.active').removeClass('active');
    $(this).addClass('active');
    event.preventDefault();
  })
  // Toggle an "expanded" style on accordion card headers; two separate
  // bindings cover anchors that start collapsed and ones that start open.
  $('.card-header a[aria-expanded="false"]').click(function () {
    $(this).parent().parent().toggleClass('card-header-expanded')
  });
  $('.card-header a[aria-expanded="true"]').click(function () {
    $(this).parent().parent().toggleClass('card-header-expanded')
  });
})(jQuery);
|
#!/usr/bin/env bash
# Two-stage training on the VIGOR dataset (single node, distributed launch).
# Stage 1: train 50 epochs from scratch (lr 5e-5, SAM/ASAM rho=2.5, hard-example mining).
python -u train.py --lr 0.00005 --batch-size 16 --dist-url 'tcp://localhost:10001' --multiprocessing-distributed --world-size 1 --rank 0 --epochs 50 --save_path ./result_vigor --op sam --wd 0.03 --mining --dataset vigor --cos --dim 1000 --asam --rho 2.5
# Stage 2: resume from the stage-1 checkpoint and fine-tune at 10x lower lr
# with higher-resolution satellite input and cropping (--sat_res 400 --crop).
python -u train.py --lr 0.000005 --batch-size 16 --dist-url 'tcp://localhost:10001' --multiprocessing-distributed --world-size 1 --rank 0 --epochs 50 --resume ./result_vigor/checkpoint.pth.tar --save_path ./result_vigor --op sam --wd 0.03 --mining --dataset vigor --cos --dim 1000 --asam --rho 2.5 --sat_res 400 --crop
|
import React, { Component } from "react";
import PropTypes from "prop-types";
import _ from "lodash";
import { View, Keyboard, Text, ScrollView } from "react-native";
import baseTheme from "./theme";
import TextInputField from "./fields/textInput";
import LongTextInputField from "./fields/longTextInput";
import SwitchField from "./fields/switch";
import DateField from "./fields/date";
import PickerField from "./fields/picker";
import SelectField from "./fields/select";
import ImageField from "./fields/image";
import SignatureField from "./fields/signature";
import LocationField from "./fields/location";
import FormField from "./fields/form";
import SubForm from "./fields/subForm";
import Lookupfield from "./fields/lookup";
import CurrencyField from "./fields/currency";
import StatusPicker from "./fields/statusPicker";
import CustomDataComponent from "./fields/customDataView";
import SimpleGridView from "./fields/simplegrid";
import CollaboratorField from "./fields/collaborator";
import AssigneeField from "./fields/assignee";
import ChecklistField from "./fields/checklist";
import UserDirectoryField from "./fields/userDirectory";
import DocumentField from './fields/document';
import {
autoValidate,
getInitialState,
getDefaultValue,
getResetValue,
customValidateData,
customFieldCalculations
} from "./utils/helper";
import { isEmpty } from "./utils/validators";
const DefaultErrorComponent = (props) => {
const attributes = props.attributes;
const theme = props.theme;
if (attributes.error) {
return (
<Text
style={{
color: theme.errorMsgColor,
paddingStart: [
'select',
'user_directory',
'checklist',
'lookup',
'simple-grid',
'customDataView',
'product-catalog-sale'
].includes(attributes['type'])
? 0
: 5,
}}
>
{attributes.errorMsg}
</Text>
);
}
return null;
};
export default class Form0 extends Component {
static propTypes = {
fields: PropTypes.array,
theme: PropTypes.object,
formData: PropTypes.object,
errorComponent: PropTypes.func,
autoValidation: PropTypes.bool,
autoValidation: PropTypes.bool,
customValidation: PropTypes.func,
onValueChange: PropTypes.func,
};
constructor(props) {
super(props);
//This gets all the field defintions an an arrary and store in state
const initialState = getInitialState(props.fields);
this.state = {
...initialState,
errorStatus: false,
};
this.getValues = this.getValues.bind(this);
this.generateFields = this.generateFields.bind(this);
this.resetForm = this.resetForm.bind(this);
this.onSummitTextInput = this.onSummitTextInput.bind(this);
this.onValidateFields = this.onValidateFields.bind(this);
// Invoked every time whenever any fields's value changes
this.onValueChange = this.onValueChange.bind(this);
this.onAddNewFields = this.onAddNewFields.bind(this);
this.getValue = this.getValue.bind(this);
}
componentDidMount() {
const { formData } = this.props;
this.setValues(formData);
}
componentDidUpdate(prevProps) {
const { formData } = this.props;
if (!_.isEqual(prevProps, this.props)) {
this.setValues(formData);
}
}
getValue(fieldName) {
for (let i = 0; i < Object.values(this.state).length; i++) {
let fieldObj = Object.values(this.state)[i];
let fieldVal = fieldObj["value"];
if (typeof fieldVal !== "undefined" && fieldVal !== null) {
if (fieldObj["name"] === fieldName && typeof fieldVal === "string") {
return fieldVal;
} else if (
typeof fieldVal === "object" &&
fieldObj["name"] !== fieldName
) {
let index = _.indexOf(Object.keys(fieldVal), fieldName);
if (index !== -1) return Object.values(fieldVal)[index];
}
}
}
}
onValidateFields() {
const newFields = {};
Object.keys(this.state).forEach((fieldName) => {
const field = this.state[fieldName];
if (field) {
if (field.required !== undefined && field.required) {
let validate = autoValidate(field);
field.error = validate.error;
field.errorMsg = validate.errorMsg;
}
if (field.type === "number") {
let validate = customValidateData(field);
field.error = validate.error;
field.errorMsg = validate.errorMsg;
}
newFields[field.name] = field;
}
});
this.setState({ ...newFields });
}
onAddNewFields(name, newObj) {
let fieldObj = this.state[name];
if (fieldObj) {
if (fieldObj.type === "sub-form") {
if (
typeof fieldObj.value === "undefined" ||
fieldObj.value === null ||
fieldObj.value.length === 0
) {
fieldObj.value = [newObj];
} else {
let gIndex = _.indexOf(Object.keys(this.state), fieldObj.name);
let newValue;
if (gIndex !== -1) {
let preValue = Object.values(this.state)[gIndex].value;
let oIndex = _.findIndex(
preValue,
(item) => item._id === newObj._id
);
if (oIndex !== -1) {
preValue[oIndex] = newObj;
newValue = preValue;
} else {
newValue = _.concat(newObj, preValue);
}
} else {
newValue = [newObj];
}
fieldObj.value = newValue;
}
const newField = {};
newField[fieldObj.name] = fieldObj;
this.setState({ ...newField });
}
}
}
getLookupSubsciberFields = (name) => {
const lookupSubscriberFields = _.filter(this.props.fields, (field) => {
if (
typeof field["data-pub"] !== "undefined" &&
field["data-pub"] === name
) {
return field;
}
});
return lookupSubscriberFields;
};
getLocationFieldState = () => {
const locationFields = [];
const keys = Object.keys(this.state);
keys.map((item) => {
if (
item &&
this.state[item] &&
this.state[item]["type"] &&
this.state[item]["type"] === "location"
) {
locationFields.push(this.state[item]);
}
});
return locationFields;
};
handleOnValueChange = (valueObj, value) => {
valueObj.value = value;
//autovalidate the fields
if (this.props.autoValidation === undefined || this.props.autoValidation) {
Object.assign(valueObj, autoValidate(valueObj));
}
// apply some custom logic for validation
if (
this.props.customValidation &&
typeof this.props.customValidation === "function"
) {
Object.assign(valueObj, this.props.customValidation(valueObj));
}
if (
valueObj.type === "location" &&
typeof this.props.calculateProximityBeacon === "function"
) {
const locationFields = this.getLocationFieldState();
this.props.calculateProximityBeacon(valueObj, value, locationFields);
}
// update state value
const newField = {};
newField[valueObj.name] = valueObj;
if (
valueObj &&
valueObj["expr_field"] &&
valueObj["expr_field"].length > 0
) {
const res = customFieldCalculations(valueObj, value, this.state);
if (res && res.length > 0) {
res.forEach((item) => {
newField[item.name] = item;
});
}
}
if (
this.props.onValueChange &&
typeof this.props.onValueChange === "function"
) {
this.setState({ ...newField }, () => this.props.onValueChange());
} else {
this.setState({ ...newField });
}
};
onValueChange(name, value) {
const valueObj = this.state[name];
if (valueObj) {
const type = valueObj["type"];
switch (type) {
case "sub-form":
break;
case "lookup":
const lookupSubscriberFields = this.getLookupSubsciberFields(name);
const pk = valueObj["primaryKey"];
const lk = valueObj["labelKey"];
if (lookupSubscriberFields.length) {
_.forEach(lookupSubscriberFields, (field) => {
const key = field["name"];
let val = null;
if (typeof value[key] === "number") {
val = value[key];
} else {
val = value[key] || "";
}
this.handleOnValueChange(field, val);
});
}
const lookupValue = _.pick(value, [pk, lk, "instance_id"]);
this.handleOnValueChange(valueObj, lookupValue);
break;
default:
this.handleOnValueChange(valueObj, value);
}
}
}
onSummitTextInput(name) {
const index = Object.keys(this.state).indexOf(name);
if (
index !== -1 &&
this[Object.keys(this.state)[index + 1]] &&
this[Object.keys(this.state)[index + 1]].textInput
) {
this[Object.keys(this.state)[index + 1]].textInput._root.focus();
} else {
Keyboard.dismiss();
}
}
getFieldReturnValue = (field) => {
if (
field.type &&
(field.type.match(/number/i) || field.type.match(/auto-incr-number/i))
)
return parseFloat(field.value);
else if (
field.type &&
field.type === "picker" &&
field.value === "-Select-"
)
return "";
else if (field.type && field.type === "document") {
const updateValue = !isEmpty(field.value)
? field.value.map((item) => {
return {
name: item["name"],
file_path: item["filePath"]
? item["filePath"]
: item["file_path"]
? item["file_path"]
: "",
content_type: item["type"]
? item["type"]
: item["content_type"]
? item["content_type"]
: "",
};
})
: [];
return updateValue;
} else if (field.type === "longtext") {
return !isEmpty(field.value) ? field.value.trim() : field.value;
} else return field.value;
};
getValues() {
this.onValidateFields();
const values = {};
let isValidFields = true;
Object.keys(this.state).forEach((fieldName) => {
const field = this.state[fieldName];
if (field) {
if (field.error !== undefined && field.error) {
isValidFields = false;
}
values[field.name] = this.getFieldReturnValue(field);
}
});
if (isValidFields) {
console.log(values);
return values;
} else {
return null;
}
}
resetForm() {
const newFields = {};
Object.keys(this.state).forEach((fieldName) => {
const field = this.state[fieldName];
if (field) {
field.value =
field.editable !== undefined && !field.editable
? getDefaultValue(field)
: getResetValue(field);
field.error = false;
field.errorMsg = "";
if (field.type === "group") {
this[field.name].group.resetForm();
}
newFields[field.name] = field;
}
});
this.setState({ ...newFields });
}
// Helper function for setValues
getFieldValue(fieldObj, value) {
const field = fieldObj;
if (field.type === "group") {
const subFields = {};
Object.keys(value).forEach((fieldName) => {
subFields[fieldName] = value[fieldName];
});
this[field.name].group.setValues(subFields);
field.value = this[field.name].group.getValues();
// Remaing thing is error Handling Here
} else {
field.value = value;
//Validate and check for errors
if (
this.props.autoValidation === undefined ||
this.props.autoValidation
) {
Object.assign(field, autoValidate(field));
}
// Validate through customValidation if it is present in props
if (
this.props.customValidation &&
typeof this.props.customValidation === "function"
) {
Object.assign(field, this.props.customValidation(field));
}
}
return field;
}
setValues(...args) {
if (args && args.length && args[0]) {
const newFields = {};
Object.keys(args[0]).forEach((fieldName) => {
/**
* In update form, if any field value changes
* image is greyed out, to avoid we are using deep clone object
*/
const field =
this.state &&
this.state[fieldName] &&
this.state[fieldName].type === "image"
? _.cloneDeep(this.state[fieldName])
: this.state[fieldName];
if (field) {
newFields[field.name] = this.getFieldValue(field, args[0][fieldName]);
}
});
this.setState({ ...newFields });
}
}
generateFields() {
const theme = Object.assign(baseTheme, this.props.theme);
const { customComponents, errorComponent } = this.props;
let formKeys = Object.keys(this.state);
const renderFields = formKeys.map((fieldName, index) => {
const field = this.state[fieldName];
if (!field.hidden) {
const commonProps = {
key: index,
theme,
attributes:
field.type === "image"
? _.cloneDeep(this.state[field.name])
: this.state[field.name],
updateValue: this.onValueChange,
onAddNewFields: this.onAddNewFields,
getValue: this.getValue,
ErrorComponent: errorComponent || DefaultErrorComponent,
navigation: this.props["navigation"] || null,
};
switch (field.type) {
case "text":
case "email":
case "number":
case "url":
case "password":
case "phone":
case "calculated":
case "auto-incr-number":
return (
<TextInputField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
onSummitTextInput={this.onSummitTextInput}
{...this.props}
/>
);
case "longtext":
return (
<LongTextInputField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "currency":
return (
<CurrencyField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "switch":
return (
<SwitchField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "date":
return (
<DateField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "picker":
return (
<PickerField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "status_picker":
return (
<StatusPicker
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "select":
return (
<SelectField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "checklist":
return (
<ChecklistField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "image":
return (
<ImageField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "signature":
return (
<SignatureField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "document":
return (
<DocumentField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "location":
return (
<LocationField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "group":
return (
<FormField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "sub-form":
return (
<SubForm
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "lookup":
return (
<Lookupfield
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "customDataView":
case "product-catalog-sale":
return (
<CustomDataComponent
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "simple-grid":
return (
<SimpleGridView
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "collaborator":
return (
<CollaboratorField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "assignee":
return (
<AssigneeField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
case "user_directory":
return (
<UserDirectoryField
ref={(c) => {
this[field.name] = c;
}}
{...commonProps}
{...this.props}
/>
);
default:
break;
}
}
});
return renderFields;
}
render() {
return (
<ScrollView keyboardShouldPersistTaps={"handled"}>
<View>{this.generateFields()}</View>
</ScrollView>
);
}
}
|
<filename>src/components/list/__tests__/CdrList.spec.js
import { mount } from '@vue/test-utils';
import CdrList from 'componentdir/list/CdrList';

// Snapshot coverage for CdrList's two rendering modes.
describe('CdrList', () => {
  // Default render: unordered list.
  test('renders correctly', () => {
    const wrapper = mount(CdrList);
    expect(wrapper.element).toMatchSnapshot();
  });

  // `tag` prop switches the root element to an ordered list.
  // Fixed: this case used `it` while its sibling used `test`; the suite now
  // uses `test` consistently.
  test('renders an ol', () => {
    const wrapper = mount(CdrList, {
      propsData: {
        tag: 'ol',
      }
    });
    expect(wrapper.element).toMatchSnapshot();
  });
});
|
# Build the Thrift ping server: compile the generated stubs and server.cpp
# separately, then link everything against libthrift.
#g++ -g -Wall -std=c++11 -DHAVE_INTTYPES_H -DHAVE_NETINET_IN_H -I/usr/local/include/thrift -I/usr/include/boost cppgen/*.cpp server.cpp -L/usr/local/lib -lthrift -o server
g++ -Wall -I/usr/local/include/thrift -c cppgen/PingService.cpp -o PingService.o
g++ -Wall -I/usr/local/include/thrift -c cppgen/ping_constants.cpp -o constants.o
g++ -Wall -I/usr/local/include/thrift -c cppgen/ping_types.cpp -o types.o
# NOTE(review): only server.cpp gets -g and -std=c++11; the generated sources
# above compile with the compiler's default standard — confirm this is intended.
g++ -g -Wall -std=c++11 -I/usr/local/include/thrift -c server.cpp -o server.o
g++ -L/usr/local/lib *.o -o server -lthrift
|
#!/bin/sh
# conda-build script for StringTie: compiles the stringtie binary and installs
# the prepDE.py helper script into $PREFIX/bin.
export C_INCLUDE_PATH=$PREFIX/include
export CPLUS_INCLUDE_PATH=$PREFIX/include
export CFLAGS="-I$PREFIX/include"
export LDFLAGS="-L$PREFIX/lib"
export CPATH=${PREFIX}/include
# Fetch the count-matrix helper script shipped alongside StringTie.
wget http://ccb.jhu.edu/software/stringtie/dl/prepDE.py
make release
mkdir -p $PREFIX/bin
mv stringtie $PREFIX/bin
# On Python 3 builds, convert the (Python 2) helper in place.
# Fixed: `==` is a bash extension; POSIX sh's test builtin requires `=`,
# and this script runs under #!/bin/sh.
if [ "$PY3K" = 1 ]; then
    2to3 -w prepDE.py
fi
# Point the shebang at the environment's python (sed -i.bak keeps this
# portable across GNU and BSD sed).
sed -i.bak 's|/usr/bin/env python2|/usr/bin/env python|' prepDE.py
mv prepDE.py $PREFIX/bin
chmod +x $PREFIX/bin/prepDE.py
|
console.log('Demo!');
|
#!/bin/bash
# Starts a garbd (Galera arbitrator) node that requests an SST backup stream
# from a healthy PXC cluster member and feeds it into run_backup.sh.
set -o errexit
set -o xtrace

GARBD_OPTS=""

# Print the short hostname of a donor candidate: the last (after sort) pod
# that peer-list reports as Synced in a Primary cluster.
function get_backup_source() {
    peer-list -on-start=/usr/bin/get-pxc-state -service=$PXC_SERVICE 2>&1 \
        | grep wsrep_ready:ON:wsrep_connected:ON:wsrep_local_state_comment:Synced:wsrep_cluster_status:Primary \
        | sort \
        | tail -1 \
        | cut -d : -f 2 \
        | cut -d . -f 1
}
# If a CA plus a key/cert pair can be located (preferring the internal SSL
# secret, then the external one, then the service-account CA), prepend the
# matching socket.ssl_* options to GARBD_OPTS so garbd talks TLS.
function check_ssl() {
    # Default CA: the pod's service-account CA (or OpenShift's service CA).
    CA=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt
    if [ -f /var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt ]; then
        CA=/var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt
    fi
    # Cluster-provided certs override the defaults when mounted.
    SSL_DIR=${SSL_DIR:-/etc/mysql/ssl}
    if [ -f ${SSL_DIR}/ca.crt ]; then
        CA=${SSL_DIR}/ca.crt
    fi
    SSL_INTERNAL_DIR=${SSL_INTERNAL_DIR:-/etc/mysql/ssl-internal}
    if [ -f ${SSL_INTERNAL_DIR}/ca.crt ]; then
        CA=${SSL_INTERNAL_DIR}/ca.crt
    fi
    KEY=${SSL_DIR}/tls.key
    CERT=${SSL_DIR}/tls.crt
    # Prefer the internal key/cert pair when both files exist.
    if [ -f ${SSL_INTERNAL_DIR}/tls.key -a -f ${SSL_INTERNAL_DIR}/tls.crt ]; then
        KEY=${SSL_INTERNAL_DIR}/tls.key
        CERT=${SSL_INTERNAL_DIR}/tls.crt
    fi
    # Only enable TLS when all three pieces are present.
    if [ -f "$CA" -a -f "$KEY" -a -f "$CERT" ]; then
        GARBD_OPTS="socket.ssl_ca=${CA};socket.ssl_cert=${CERT};socket.ssl_key=${KEY};socket.ssl_cipher=;pc.weight=1;${GARBD_OPTS}"
    fi
}
# Ask the chosen donor to stream an SST backup to this pod; garbd hands the
# stream to run_backup.sh via --recv-script.
function request_streaming() {
    local LOCAL_IP=$(hostname -i)
    local NODE_NAME=$(get_backup_source)
    if [ -z "$NODE_NAME" ]; then
        # Dump peer-list state for diagnostics before bailing out.
        peer-list -on-start=/usr/bin/get-pxc-state -service=$PXC_SERVICE
        echo "[ERROR] Cannot find node for backup"
        exit 1
    fi
    echo '[INFO] garbd was started'
    garbd \
        --address "gcomm://$NODE_NAME.$PXC_SERVICE?gmcast.listen_addr=tcp://0.0.0.0:4567" \
        --donor "$NODE_NAME" \
        --group "$PXC_SERVICE" \
        --options "$GARBD_OPTS" \
        --sst "xtrabackup-v2:$LOCAL_IP:4444/xtrabackup_sst//1" \
        --recv-script="/usr/bin/run_backup.sh"
    # NOTE(review): with `set -o errexit` a non-zero garbd exit terminates the
    # script before this line runs, so EXID_CODE only ever captures 0 —
    # confirm whether garbd failures should instead be tolerated here.
    EXID_CODE=$?
    echo '[INFO] garbd was finished'
    exit $EXID_CODE
}
check_ssl
request_streaming
exit 0
|
# Log the current timestamp to the console and return it (as POSIXct) so
# callers can keep the exact value that was logged.
#
# Fixed: cat() strips the POSIXct class and would print the timestamp as raw
# numeric seconds-since-epoch; format() renders it as a readable date-time.
logValue <- function() {
  lsval <- Sys.time()
  cat("The most recently displayed value was logged at", format(lsval), "\n")
  return(lsval)
}
|
<filename>7-assets/past-student-repos/LambdaSchool-master/m4/42e1/data/seeds/05-taskscontexts.js
// Seed: link tasks 1-3 to contexts 1-3 one-to-one.
// NOTE(review): the file path suggests a "taskscontexts" join table, yet the
// insert targets 'projectsresources', and there is no preceding .del() to
// clear stale rows (the usual knex seed pattern) — confirm both before use.
exports.seed = function(knex) {
  return knex('projectsresources').insert([
    {
      taskid: 1,
      contextid: 1
    },
    {
      taskid: 2,
      contextid: 2
    },
    {
      taskid: 3,
      contextid: 3
    }
  ]);
};
|
/// <summary>
/// Bow weapon: spawns arrow projectiles from a fire point while ammunition
/// remains, and keeps a reference to the most recently fired arrow.
/// </summary>
public class BowScript : WeaponScript
{
    // Most recently instantiated arrow (null until the first successful shot).
    public GameObject ActivArrow { get; set; }
    // Transform whose position/rotation seed each new arrow.
    // NOTE(review): "FierPoint" looks like a typo for "FirePoint", but renaming
    // a public field would break serialized Inspector references — confirm first.
    public Transform FierPoint;
    // Arrow prefab to instantiate.
    public GameObject arrow;
    // Remaining shots; an int field defaults to 0 until ReloadAmmunition is called.
    private int ammunitionCount;

    /// <summary>
    /// Fires one arrow from the fire point if ammunition remains; otherwise
    /// only logs. Decrements the ammunition count on a successful shot.
    /// </summary>
    public void FireArrow()
    {
        if (ammunitionCount > 0)
        {
            GameObject newArrow = Instantiate(arrow, FierPoint.position, FierPoint.rotation);
            ActivArrow = newArrow;
            ammunitionCount--;
        }
        else
        {
            Debug.Log("Out of ammunition!");
        }
    }

    /// <summary>
    /// Adds <paramref name="amount"/> shots. NOTE(review): a negative amount
    /// would reduce ammunition — confirm callers never pass one.
    /// </summary>
    public void ReloadAmmunition(int amount)
    {
        ammunitionCount += amount;
    }
}
|
#!/usr/bin/env bash
# Install dependencies and run the Artillery load-test suite.
cd artillery-load-testing
npm install
npm run test
|
// Auto-generated Doxygen navigation-index data for
// NeonSoftmaxBaseWorkload.cpp: [symbol name, target page anchor, children].
// Do not edit by hand — regenerate the documentation instead.
var _neon_softmax_base_workload_8cpp =
[
    [ "NeonSoftmaxWorkloadValidate", "_neon_softmax_base_workload_8cpp.xhtml#a4077a9771ba9c551f4ce61863f65e798", null ]
];
|
<gh_stars>0
// Initialise Zurb Foundation plugins for the page.
$(document).foundation();
// App namespace for page-level behaviours.
var app = {};
$(function () {
    /*$(document).foundation(); */
    app.topMenu();
});
// Top menu: animate Foundation dropdown menus open/closed instead of the
// default instant show/hide.
app.topMenu = function () {
    // Hide the panel first so slideDown animates it into view.
    $(".dropdown").on('show.zf.dropdownmenu', function (ev, $el) {
        $el.css({
            "display": "none"
        })
        .slideDown(400);
    });
    // Slide the inner list up when Foundation hides the menu.
    $(".dropdown").on('hide.zf.dropdownmenu', function (ev, $el) {
        $el.children("ul")
            .css('display', 'inherit')
            .slideUp(200);
    });
};
// Default dropdown timing settings (applied globally).
Foundation.DropdownMenu.defaults.closingTime = 100;
Foundation.DropdownMenu.defaults.hoverDelay = 200;
/* Open the sidenav: expand the overlay to full width. */
function openNav() {
    document.getElementById("mySidenav").style.width = "100%";
}
/* Close/hide the sidenav by collapsing its width to zero. */
function closeNav() {
    document.getElementById("mySidenav").style.width = "0";
}
// Back to top
/*window.onscroll = function() {scrollFunction()};
function scrollFunction() {
if (document.body.scrollTop > 20 || document.documentElement.scrollTop > 20) {
document.getElementById("back-to-top").style.opacity = "1";
} else {
document.getElementById("back-to-top").style.opacity = "0";
}
}
document.getElementById('back-to-top').onclick = function () {
scrollTo(document.body, 0, 100);
}*/
// Animated scroll of `element` to offset `to` over roughly `duration` ms,
// recursing every 10ms tick until the duration is spent.
// NOTE(review): this shadows the built-in window.scrollTo on this page —
// confirm nothing else relies on the native function.
function scrollTo(element, to, duration) {
    if (duration < 0) return;
    var difference = to - element.scrollTop;
    var perTick = difference / duration * 2;
    setTimeout(function() {
        element.scrollTop = element.scrollTop + perTick;
        scrollTo(element, to, duration - 2);
    }, 10);
}
/*Cartype slider*/
// 1-based index of the currently visible slide.
var slideIndex = 1;
//showSlides(slideIndex);
// Advance the slideshow by n slides (negative n goes backwards).
function plusSlides(n) {
    showSlides(slideIndex += n);
}
// Jump directly to slide n.
function currentSlide(n) {
    showSlides(slideIndex = n);
}
// Show slide n (wrapping at both ends) and highlight its navigation dot.
function showSlides(n) {
    var i;
    var slides = document.getElementsByClassName("mySlides");
    var dots = document.getElementsByClassName("dot");
    // Nothing to do on pages without the slideshow markup.
    if(slides.length == 0 || dots.length == 0) return;
    if (n > slides.length) {slideIndex = 1}
    if (n < 1) {slideIndex = slides.length}
    for (i = 0; i < slides.length; i++) {
        slides[i].style.display = "none";
    }
    for (i = 0; i < dots.length; i++) {
        dots[i].className = dots[i].className.replace(" active", "");
    }
    slides[slideIndex-1].style.display = "block";
    dots[slideIndex-1].className += " active";
}
/*
$('.count').each(function () {
$(this).prop('Counter',0).animate({
Counter: $(this).text()
}, {
duration: 1000,
easing: 'swing',
step: function (now) {
$(this).text(Math.ceil(now));
}
});
});
*/
//Brand slider
/* var flkty = new Flickity( '#brandCarousel', {
contain: true,
pageDots: false,
wrapAround: true,
freeScroll: true,
autoPlay: 3000
});
*/
// Flickity carousel for .carousel elements.
// Fixed: Flickity options are camelCase — the original "cellalign" key was
// silently ignored and the carousel fell back to the default alignment
// ("center"); it is now correctly "cellAlign".
var $carousel = $('.carousel').flickity({
    cellAlign: 'left',
    contain: true,
    prevNextButtons: false,
    pageDots: false
});
// Re-measure cell sizes (content may change size after initial layout).
$carousel.flickity('reloadCells')
// Toggle the slide-deck panel when any deck link is clicked.
$(document).on('click', '.deck-link', function(event) {
    event.preventDefault();
    $('.slide-deck').toggleClass('show-deck');
});
// App-install smart banner (jQuery.smartbanner plugin): shows a native-style
// install prompt and pins the menu logo below it while visible.
jQuery(function () {
    jQuery.smartbanner({
        daysHidden: 15, // days to hide banner after close button is clicked (defaults to 15)
        daysReminder: 90, // days to hide banner after "VIEW" button is clicked (defaults to 90)
        appStoreLanguage: 'us', // language code for the App Store (defaults to user's browser language)
        title: 'Careem',
        author: 'Careem - Transportation',
        button: 'View',
        speedIn: 100, // Show animation speed of the banner
        speedOut: 100, // Close animation speed of the banner
        //appendToSelector: 'header',
        hideOnInstall: true,
        inAppStore: 'On the App Store',
        inGooglePlay: 'In Google Play',
        onClose: function() {
            // Un-pin the menu logo once the banner goes away.
            jQuery('#smartbanner').addClass('hide');
            jQuery('.menu-list').removeClass('fixLogo');
            //jQuery('.ab-pos').removeClass('abFix');
        },
    });
    // While the banner is visible, keep the menu logo pinned below it.
    if(jQuery('#smartbanner').length > 0){
        jQuery('.menu-list').addClass('fixLogo');
        //jQuery('.ab-pos').addClass('abFix');
    }
});
/*
jQuery(function () {
if(jQuery('.fare-calculator').length < 1)
{
jQuery('.fare-estimator').showLoading();
}
});
*/
// Add the height-limiting class to the car-types section.
// NOTE(review): "50vh" is an unusual class name — CSS class selectors cannot
// start with a digit without escaping; confirm the stylesheet actually
// targets it.
function setHeight(){
    document.getElementById("cartypes").classList.add("50vh");
};
// Remove the height-limiting class again.
function unsetHeight(){
    document.getElementById("cartypes").classList.remove("50vh");
};
// True when the page language is Arabic (right-to-left). `language_code` is
// a global set elsewhere on the page.
function rtlCheck(){
    if (language_code == "ar"){
        return true;
    } else {
        return false
    }
}
// Initialise the product-image slick sliders: a main 1-up slider paired with
// a 5-up thumbnail nav strip. The .not('.slick-initialized') guards make this
// idempotent so it can be re-run on window resize.
function sliderInit(){
    jQuery('.slider-for').not('.slick-initialized').slick({
        slidesToShow: 1,
        rtl: rtlCheck(),
        slidesToScroll: 1,
        fade: false,
        asNavFor: '.slider-nav',
        dots: false,
        arrows: true,
        appendArrows: '.pr_images',
        prevArrow: '<div class="arrow feat-prev">‹‹</div>',
        nextArrow: '<div class="arrow feat-next">››</div>'
    });
    // Thumbnail strip, kept in sync with the main slider via asNavFor.
    jQuery(".slider-nav").not('.slick-initialized').slick({
        slidesToShow: 5,
        slidesToScroll: 1,
        rtl: rtlCheck(),
        infinite: true,
        asNavFor: '.slider-for',
        dots: false,
        arrows: false,
        centerMode: true,
        centerPadding: '20px',
        focusOnSelect: true,
        responsive: [
            {
                breakpoint: 1024,
                settings: {
                    slidesToShow: 3,
                    slidesToScroll: 3,
                    infinite: true,
                    dots: true
                }
            },
            {
                breakpoint: 600,
                settings: {
                    slidesToShow: 2,
                    slidesToScroll: 2
                }
            },
            {
                breakpoint: 480,
                settings: {
                    slidesToShow: 2,
                    slidesToScroll: 1,
                    centerPadding: '0px',
                }
            }
            // You can unslick at a given breakpoint now by adding:
            // settings: "unslick"
            // instead of a settings object
        ]
    });
};
// Re-run slider setup on resize; sliderInit's .not('.slick-initialized')
// guard keeps repeated calls harmless.
window.onresize = function(event) {
    sliderInit();
};
|
from Jumpscale import j
from .JSBase import JSBase
class ThreeBotActorBase(JSBase):
    """Base class for ThreeBot actors: binds an actor to its package/BCDB
    and exposes a lazily created scheduler."""

    def _init_actor(self, **kwargs):
        # Scheduler is created on first access (see the `scheduler` property).
        self._scheduler = None
        self._schemas = {}
        # An actor must always be constructed in the context of a package.
        assert "package" in kwargs
        self.package = kwargs["package"]
        self.bcdb = self.package.bcdb

    @property
    def scheduler(self):
        # Lazily obtain a scheduler from the current server rack, named after
        # this actor. NOTE(review): timeout=0 presumably means "no timeout" —
        # confirm against j.servers.rack semantics.
        if not self._scheduler:
            name = self._name
            self._scheduler = j.servers.rack.current.scheduler_get(name, timeout=0)
        return self._scheduler
|
import os
from tests.fixtures import * # noqa
def test_create_project_via_cmd(new_project_via_cmd):  # noqa
    """The project created by the CLI fixture must exist as a directory on disk."""
    project_dir = os.path.dirname(new_project_via_cmd.__file__)
    assert os.path.isdir(project_dir)
|
package de.akquinet.engineering.vaadinator.example.address;
import java.util.HashMap;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import com.vaadin.addon.touchkit.server.TouchKitServlet;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinServlet;
import de.akquinet.engineering.vaadinator.example.address.dao.AddressDaoPlain;
import de.akquinet.engineering.vaadinator.example.address.dao.TeamDaoPlain;
import de.akquinet.engineering.vaadinator.example.address.service.AddressService;
import de.akquinet.engineering.vaadinator.example.address.service.AddressServicePlain;
import de.akquinet.engineering.vaadinator.example.address.service.TeamService;
import de.akquinet.engineering.vaadinator.example.address.service.TeamServicePlain;
import de.akquinet.engineering.vaadinator.example.address.ui.std.presenter.PresenterFactory;
import de.akquinet.engineering.vaadinator.example.address.ui.std.presenter.PresenterFactoryEx;
import de.akquinet.engineering.vaadinator.example.address.ui.std.view.VaadinViewFactoryEx;
/**
 * Extended addressbook UI that applies a TouchKit theme on mobile and wires
 * up a plain-JPA presenter factory.
 */
public class AddressbookExampleUIEx extends AddressbookExampleUI {

    private static final long serialVersionUID = 1L;

    @Override
    protected void init(VaadinRequest request) {
        // Mobile clients are served through the TouchKit servlet and get a
        // dedicated theme before the regular initialization runs.
        if (VaadinServlet.getCurrent() instanceof TouchKitServlet) {
            setTheme("touchkitexex");
        }
        super.init(request);
    }

    @Override
    protected PresenterFactory obtainPresenterFactory(String contextPath) {
        // Lazily build the factory once; override this method to plug in
        // e.g. Spring / CDI instead.
        if (presenterFactory == null) {
            // The EntityManager is only thread-safe when injected like this.
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("AddressbookExample");
            AddressDaoPlain addressDao = new AddressDaoPlain(emf);
            AddressService addressService = new AddressServicePlain(emf, addressDao);
            TeamDaoPlain teamDao = new TeamDaoPlain(emf);
            TeamService teamService = new TeamServicePlain(emf, teamDao);
            VaadinViewFactoryEx viewFactory = new VaadinViewFactoryEx();
            presenterFactory = new PresenterFactoryEx(new HashMap<String, Object>(), viewFactory,
                    addressService, teamService);
            viewFactory.setPresenterFactory(presenterFactory);
        }
        return presenterFactory;
    }
}
|
import React from 'react';
import { withStyles } from 'material-ui/styles';
import classnames from 'classnames';
import HeartOutlineIcon from '@material-ui/icons/FavoriteBorder';
import HeartIcon from '@material-ui/icons/Favorite';
// JSS styles for the listing info bar; `theme` supplies the brand colors.
const styles = theme => ({
  // Horizontal bar of listing facts, centered, with a hairline bottom border.
  infoBar: {
    display: 'flex',
    justifyContent: 'center',
    alignItems: 'center',
    height: '57px',
    borderBottom: '1px solid rgba(0,0,0,.1)',
    fontSize: '.8rem',
    backgroundColor: '#fff',
    fontWeight: 300,
  },
  // Larger type for the price and square-footage figures.
  infoBarEmphasis: {
    fontSize: '1.5rem',
  },
  infoBarItem: {
    marginRight: '25px',
    '&:last-of-type': {
      marginRight: '0',
    },
  },
  // Pill-shaped "Favorite" button.
  likeItem: {
    display: 'flex',
    justifyContent: 'center',
    alignItems: 'center',
    padding: '5px 10px',
    border: '1px solid rgba(0,0,0,.2)',
    borderRadius: '20px',
    cursor: 'pointer',
  },
  heartIconWrapper: {
    display: 'flex',
    alignItems: 'center',
    justifyContent: 'center',
    marginRight: '5px',
  },
  heartIcon: {
    fontSize: '1rem',
    color: theme.frontEnd.colors.primary.main,
  },
});
// Summary bar rendered under a listing: price (with "/month" for rentals),
// favorite toggle, beds/baths counts, neighborhood and optional square
// footage. Purely presentational; state comes in via `this.props.listing`.
@withStyles(styles)
class InfoBar extends React.Component {
  render() {
    const { classes, listing } = this.props;
    const {
      price,
      rentalOrSale,
      isLiked,
      beds,
      baths,
      neighborhood,
      sqFootage,
    } = listing;
    // Rentals get a "/month" suffix after the price.
    const isRental = rentalOrSale === 'rental';
    return (
      <div className={classes.infoBar}>
        <span className={classes.infoBarItem}>
          <span className={classes.infoBarEmphasis}>${price}</span>
          {isRental ? '/month' : null}
        </span>
        <span className={classnames(classes.infoBarItem, classes.likeItem)}>
          <span className={classes.heartIconWrapper}>
            {isLiked ? (
              <HeartIcon
                color="inherit"
                classes={{ root: classes.heartIcon }}
              />
            ) : (
              <HeartOutlineIcon
                color="inherit"
                classes={{ root: classes.heartIcon }}
              />
            )}
          </span>
          Favorite
        </span>
        <span className={classes.infoBarItem}>
          {beds} {beds > 1 ? 'beds' : 'bed'}
        </span>
        <span className={classes.infoBarItem}>
          {baths} {baths > 1 ? 'baths' : 'bath'}
        </span>
        <span className={classes.infoBarItem}>{neighborhood}</span>
        {sqFootage && Number(sqFootage) ? (
          <span className={classes.infoBarItem}>
            <span className={classes.infoBarEmphasis}>
              {Number(sqFootage).toLocaleString()}
            </span>{' '}
            Sq. Ft.
          </span>
        ) : null}
      </div>
    );
  }
}

export default InfoBar;
|
#!/bin/bash -xe
# Install and (re)start the OpenSSH client/server, then make sure the current
# user has an authorized_keys file with the permissions sshd requires.
sudo apt-get install -y openssh-client openssh-server
#sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.original
#sudo chmod a-w /etc/ssh/sshd_config.original
sudo /etc/init.d/ssh restart
# sshd rejects keys when ~/.ssh or authorized_keys are group/world accessible,
# hence the strict 700/600 modes.
mkdir -p ~/.ssh
chmod 700 ~/.ssh
touch ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys
package cn.pasteme.common.mapper.handler;
import cn.pasteme.common.enumeration.ValueEnum;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;
import javax.validation.constraints.NotNull;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* @author Lucien
* @version 1.0.0
*/
/**
 * MyBatis type handler that persists {@code ValueEnum} implementations as
 * their integer value and converts database integers back into the concrete
 * enum constant.
 *
 * @author Lucien
 * @version 1.0.0
 */
public class ValueEnumTypeHandler<E extends Enum<?> & ValueEnum> extends BaseTypeHandler<ValueEnum> {

    /** Concrete enum class this handler instance converts. */
    private Class<E> type;

    public ValueEnumTypeHandler(@NotNull(message = "Param type can not be null") Class<E> type) {
        this.type = type;
    }

    /** Linear scan over the enum constants for one whose value matches; null when absent. */
    private static <E extends Enum<?> & ValueEnum> E getEnumByClassValue(Class<E> enumClass, int value) {
        for (E candidate : enumClass.getEnumConstants()) {
            if (candidate.getValue() == value) {
                return candidate;
            }
        }
        return null;
    }

    @Override
    public void setNonNullParameter(PreparedStatement preparedStatement, int columnIndex, ValueEnum parameter, JdbcType jdbcType) throws SQLException {
        // Store the enum as its integer database value.
        preparedStatement.setInt(columnIndex, parameter.getValue());
    }

    @Override
    public E getNullableResult(ResultSet resultSet, String columnName) throws SQLException {
        int raw = resultSet.getInt(columnName);
        return resultSet.wasNull() ? null : valueOf(raw);
    }

    @Override
    public E getNullableResult(ResultSet resultSet, int columnIndex) throws SQLException {
        int raw = resultSet.getInt(columnIndex);
        return resultSet.wasNull() ? null : valueOf(raw);
    }

    @Override
    public E getNullableResult(CallableStatement callableStatement, int columnIndex) throws SQLException {
        int raw = callableStatement.getInt(columnIndex);
        return callableStatement.wasNull() ? null : valueOf(raw);
    }

    /**
     * Converts a raw database value into the enum constant. Any runtime
     * failure during the lookup (e.g. a null {@code type}) is wrapped in an
     * {@link IllegalArgumentException}; an unmatched value yields null.
     */
    private E valueOf(int value) {
        try {
            return getEnumByClassValue(type, value);
        } catch (Exception e) {
            throw new IllegalArgumentException(String.format("Cannot convert value: %d to %s by value.", value, type.getSimpleName()), e);
        }
    }
}
|
def extract_unique_keys(config_dict):
    """Collect every distinct key appearing anywhere in a nested configuration.

    Nested dicts are walked recursively. As a backward-compatible
    generalization, dicts nested inside lists/tuples (a common config shape,
    e.g. ``{'servers': [{'host': ...}]}``) are walked too; for purely
    dict-nested input the result is identical to the original behavior.

    :param config_dict: mapping to inspect
    :return: list of unique keys (order unspecified, since a set is used)
    """
    unique_keys = set()

    def extract_keys_recursive(value):
        # Record keys of mappings and keep descending into their values.
        if isinstance(value, dict):
            for key, child in value.items():
                unique_keys.add(key)
                extract_keys_recursive(child)
        # Look inside sequences so dicts hiding in lists contribute keys too.
        elif isinstance(value, (list, tuple)):
            for item in value:
                extract_keys_recursive(item)

    extract_keys_recursive(config_dict)
    return list(unique_keys)
|
<!-- Added DOCTYPE, lang and charset: without a DOCTYPE browsers render in
     quirks mode, and without a charset declaration encoding is guessed. -->
<!DOCTYPE html>
<html lang="en">
<head>
	<meta charset="utf-8">
	<title>Database Table Example</title>
</head>
<body>
	<h1>Database Table Example</h1>
	<table>
		<thead>
			<tr>
				<th>ID</th>
				<th>Name</th>
				<th>Age</th>
				<th>Address</th>
			</tr>
		</thead>
		<tbody>
			<tr>
				<td>1</td>
				<td>John</td>
				<td>32</td>
				<td>123 Main St</td>
			</tr>
			<tr>
				<td>2</td>
				<td>Jane</td>
				<td>27</td>
				<td>456 Park Ave</td>
			</tr>
			<tr>
				<td>3</td>
				<td>Tom</td>
				<td>44</td>
				<td>789 Second St</td>
			</tr>
		</tbody>
	</table>
</body>
</html>
|
#!/bin/bash
# Build the LiveSite JavaScript bundles with lsn-jsc.
root="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
source "$LSN_COMMON/functions"
# FIX: quote expansions throughout so paths and arguments containing spaces
# survive word splitting ($root, the build_files array and "$@").
cd "$root"
# Warn (and allow aborting) when a compiler instance is already running.
proc=$(ps -fC lsn-jsc)
if [ "$?" == "0" ]; then
  echo "$proc"
  if (! ask_yn 'Run anyway?'); then
    exit 0;
  fi
fi
build_files=(
  specs/ecma.hf
  specs/livesite.hf
  specs/hub.hf
  specs/app.hf
  specs/lsn-adaptors.hf
  src/share/web/res/tabset/js/build.hf
  src/share/web/res/html/photo/build.hf
)
lsn-jsc "${build_files[@]}" "$@"
#
# For debugging, this prompts (pauses) before building each file
#
# for file in ${build_files[@]}; do
#   if (ask_yn "Build $file?"); then
#     $root/src/bin/lsn-jsc $file $@
#   fi
# done
#
|
#include <stdio.h>

/* Computes and prints the arithmetic mean of three fixed integers. */
int main()
{
    const int values[] = {7, 10, 22};
    int sum = 0;
    float average;
    int i;

    for (i = 0; i < 3; i++) {
        sum += values[i];
    }
    /* Divide by 3.0 so the division happens in floating point. */
    average = sum / 3.0;
    printf("Average of three numbers: %f", average);
    return 0;
}
|
<reponame>typekev/react-mk
import { Action } from './types';
import getKeyPressDelay from './getKeyPressDelay';
import { defaultKeyPressDelay } from './constants';
/**
 * Returns a delay in milliseconds based on a given `action` and `delayRange`.
 *
 * @param action - Either an explicit delay in milliseconds, or text whose
 *   length scales the delay (one key-press delay per character)
 * @param delayRange - Spread into `getKeyPressDelay`; presumably a
 *   [min, max] pair like `defaultKeyPressDelay` — TODO confirm shape
 * @returns A millisecond delay
 */
const getDelay = (action: Action, delayRange = defaultKeyPressDelay) =>
  typeof action === 'number' ? action : action.length * getKeyPressDelay(...delayRange);

export default getDelay;
|
import { sanitizeHtml } from "./sanitizer";
import { ParsedRequest } from "./types";
const twemoji = require("twemoji");
// Render emoji as SVG sprites so they scale cleanly in the generated card.
const twOptions = { folder: "svg", ext: ".svg" };
const emojify = (text: string) => twemoji.parse(text, twOptions);
/**
 * Stylesheet for the generated OG-image page. The page is rendered at a
 * fixed large viewport and screenshotted, so sizes are absolute pixels.
 * Comments live outside the template literal so the emitted CSS is unchanged.
 */
function getCss() {
  return `
    @import url('https://fonts.googleapis.com/css2?family=Poppins:wght@700&display=swap');

    body {
        background: #1f1f1f;
        background-size: 100px 100px;
        height: 100vh;
        display: flex;
        text-align: center;
        justify-content: center;
        align-items: center;
        flex-direction: column;
        width: 100%;
    }

    code {
        color: #D400FF;
        font-family: 'Vera';
        white-space: pre-wrap;
        letter-spacing: -5px;
    }

    code:before, code:after {
        content: '\`';
    }

    .items-wrapper {
        display: grid;
        grid-template-columns: repeat(8, 208px);
        grid-template-rows: repeat(2, 208px);
        grid-gap: 36px;
        align-self: stretch;
        margin: 72px auto 36px;
    }

    .item {
        background: #262626;
        border-radius: 12px;
        width: 100%;
        height: 100%;
        display: flex;
        justify-content: center;
        align-items: center;
    }

    .spacer {
        margin: 150px;
    }

    .emoji {
        height: 1em;
        width: 1em;
        margin: 0 .05em 0 .1em;
        vertical-align: -0.1em;
    }

    .class-title-wrapper {
        display: flex;
        align-items: center;
        max-width: 100%;
        padding: 0 48px;
    }

    .class-image {
        width: 96px;
        height: 96px;
        margin-right: 32px;
        flex: 0 0 96px;
    }

    .tags-wrapper {
        margin-top: 48px;
        display: flex;
        align-items: center;
    }

    .tag-image {
        height: 72px;
        width: auto;
    }

    .tag-image:not(:last-child) {
        margin-right: 18px;
    }

    .title-wrapper {
        font-family: 'Poppins', sans-serif;
        font-size: 96px;
        font-style: normal;
        line-height: 1;
        color: white;
        font-weight: 700;
        display: flex;
        align-items: center;
        min-width: 0;
        flex: 1 1 auto;
        white-space: nowrap;
        overflow: hidden;
        text-overflow: ellipsis;
    }

    .dofuslab-logo {
        margin-top: 48px;
        width: 480px;
    }`;
}
// CDN roots for the asset types used on the card.
const ROOT = "https://d2iuiayak06k8j.cloudfront.net";
const ITEM_IMAGE_DIR = `${ROOT}/item/`;
const CLASS_IMAGE_DIR = `${ROOT}/class/face/`;
const TAG_IMAGE_DIR = `${ROOT}/icon/`;

// Builds the CDN URL for an item sprite.
const getItemImageUrl = (itemId: string) => `${ITEM_IMAGE_DIR}${itemId}.png`;

// Builds the CDN URL for a class portrait; any falsy class (null, "") falls
// back to the generic silhouette.
const getClassImageUrl = (dofusClass: string | null) => {
  const fileName = dofusClass ? `${dofusClass}_M.png` : "No_Class.svg";
  return CLASS_IMAGE_DIR + fileName;
};

// Builds the CDN URL for a tag icon.
const getTagImageUrl = (tag: string | null) => `${TAG_IMAGE_DIR}${tag}.svg`;
/**
 * Builds the full HTML document for the OG image: class portrait + title,
 * optional tag icons, and a fixed 16-cell grid of item images.
 *
 * Fixes: (1) `Array(16 - items.length)` threw a RangeError (negative array
 * length) whenever more than 16 items were supplied — now clamped at 0;
 * (2) the tag `src` attribute was emitted unquoted, producing malformed HTML
 * for any URL containing reserved characters — now quoted.
 */
export function getHtml(parsedReq: ParsedRequest) {
  const { text, items, dofusClass, tags } = parsedReq;
  // Pad the grid up to 16 cells with empty placeholders.
  const emptyCellCount = Math.max(0, 16 - items.length);
  return `<!DOCTYPE html>
<html>
    <meta charset="utf-8">
    <title>Generated Image</title>
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <style>
        ${getCss()}
    </style>
    <body>
        <div class="class-title-wrapper">
          <img class="class-image" src="${getClassImageUrl(dofusClass)}">
          <div class="title-wrapper">
            ${emojify(text)}
          </div>
        </div>
        ${
          tags.length
            ? `<div class="tags-wrapper">
                ${tags
                  .map(
                    (tag) =>
                      `<img src="${getTagImageUrl(tag)}" class="tag-image">`
                  )
                  .join("")}
              </div>`
            : ""
        }
        <div class="items-wrapper">
            ${items
              .map(
                (itemId) =>
                  `<div class="item">${getImage(
                    `${getItemImageUrl(itemId)}`
                  )}</div>`
              )
              .join("")}
            ${Array(emptyCellCount)
              .fill(null)
              .map(() => '<div class="item"></div>')
              .join("")}
        </div>
        <img src="${ROOT}/logo/DL-Full_Dark.svg" class="dofuslab-logo">
    </body>
</html>`;
}
// Wraps a sanitized image URL in the fixed-size <img> tag used for item cells.
function getImage(src: string) {
  return `<img
        alt="Generated Image"
        src="${sanitizeHtml(src)}"
        width="170"
        height="170"
    />`;
}
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2018.2 (64-bit)
#
# Filename : c_addsub_1.sh
# Simulator : Aldec Riviera-PRO Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Sat Nov 14 12:05:46 +0800 2020
# SW Build 2258646 on Thu Jun 14 20:03:12 MDT 2018
#
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
# usage: c_addsub_1.sh [-help]
# usage: c_addsub_1.sh [-lib_map_path]
# usage: c_addsub_1.sh [-noclean_files]
# usage: c_addsub_1.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'c_addsub_1.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
echo -e "c_addsub_1.sh - Script generated by export_simulation (Vivado v2018.2 (64-bit)-id)\n"

# Main steps
# Drives the whole flow: validate CLI args, prepare library mappings, then
# compile and simulate.
run()
{
  check_args $# $1
  setup $1 $2
  compile
  simulate
}

# RUN_STEP: <compile>
compile()
{
  # Compile design files
  source compile.do 2>&1 | tee -a compile.log
}

# RUN_STEP: <simulate>
simulate()
{
  runvsimsa -l simulate.log -do "do {simulate.do}"
}

# STEP: setup
# Dispatches on the first CLI switch; the default arm still maps the compiled
# simulation libraries from the hard-coded (or user-supplied) path.
setup()
{
  case $1 in
    "-lib_map_path" )
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./c_addsub_1.sh -help\" for more information)\n"
        exit 1
      fi
      map_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      map_setup_file $2
  esac

  # Add any setup/initialization commands here:-

  # <user specific commands>
}

# Map library.cfg file
# Links the pre-compiled Xilinx simulation libraries into this run directory
# when a library.cfg exists at the given (or default) path.
map_setup_file()
{
  file="library.cfg"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  else
    lib_map_path="E:/Users/benji/OneDrive/FPGA_Project/NCSSK_copy_v1.1.0/NCSSK.cache/compile_simlib/riviera"
  fi

  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    if [[ -e $src_file ]]; then
      vmap -link $lib_map_path
    fi
  fi
}

# Delete generated data from the previous run
reset_run()
{
  files_to_remove=(compile.log elaboration.log simulate.log dataset.asdb work riviera)
  for (( i=0; i<${#files_to_remove[*]}; i++ )); do
    file="${files_to_remove[i]}"
    if [[ -e $file ]]; then
      rm -rf $file
    fi
  done
}

# Check command line arguments
# Rejects unknown single-switch invocations; -help/-h prints usage and exits.
check_args()
{
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./c_addsub_1.sh -help\" for more information)\n"
    exit 1
  fi

  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}

# Script usage
usage()
{
  msg="Usage: c_addsub_1.sh [-help]\n\
Usage: c_addsub_1.sh [-lib_map_path]\n\
Usage: c_addsub_1.sh [-reset_run]\n\
Usage: c_addsub_1.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}

# Launch script
run $1 $2
|
// @ts-nocheck
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/restrict-template-expressions */
import { EventEmitter } from 'events'
import type TypedEmitter from 'typed-emitter'
type Emitter = TypedEmitter<any>

// Elements whose mutations never matter to listeners; skipping them keeps
// the mutation-processing loop cheap.
const IGNORED_HTML_TAGS = new Set([
  'BR',
  'HEAD',
  'LINK',
  'META',
  'SCRIPT',
  'STYLE'
])

// Bookkeeping for one registered selector.
interface ParsedResult {
  partialSelector: string // the single comma-separated part that produced this entry
  selector: string // the full selector string the listener registered with
  options?: {
    attributes?: boolean // also watch attribute changes on the matched node
    useParentNode?: boolean // report the mutated node instead of the match
    useTargetNode?: boolean // report the mutation target instead of the match
  }
}

// Single document-wide MutationObserver, created by the DOMObserver singleton.
let observer: MutationObserver
// Registrations indexed by the leading id / class name of their selector.
const observedIds: Record<string, ParsedResult[]> = Object.create(null)
const observedClassNames: Record<string, ParsedResult[]> = Object.create(null)
// Per-registration attribute observers, keyed by their ParsedResult entry.
const attributeObservers = new Map()
/**
 * Splits a comma-separated selector string into id-based and class-based
 * registrations. For each part, `key` is the bare id/class name of the
 * FIRST simple selector (e.g. '#foo .bar' -> 'foo'); parts that start with
 * neither '#' nor '.' are ignored.
 */
function parseSelector(selector: string) {
  const ids: { key: string; partialSelector: string }[] = []
  const classNames: { key: string; partialSelector: string }[] = []
  for (const partialSelector of selector.split(',').map((part) => part.trim())) {
    const marker = partialSelector.charAt(0)
    if (marker !== '#' && marker !== '.') {
      continue
    }
    const key = partialSelector.split(' ')[0].split(marker)[1]
    const bucket = marker === '#' ? ids : classNames
    bucket.push({ key, partialSelector })
  }
  return { ids, classNames }
}
// Starts a dedicated observer that re-emits the registration's event whenever
// attributes change on (or under) `node`.
function startAttributeObserver(observedType: ParsedResult, emitter: Emitter, node: Element): void {
  const attributeObserver = new MutationObserver(() =>
    emitter.emit(observedType.selector, node, node.isConnected)
  )
  attributeObserver.observe(node, { attributes: true, subtree: true })
  // NOTE(review): any previous observer stored under this registration is
  // overwritten here without being disconnected first — confirm that is
  // intentional (repeated connects would otherwise leak observers).
  attributeObservers.set(observedType, attributeObserver)
}
function stopAttributeObserver(observedType: ParsedResult) {
const attributeObserver = attributeObservers.get(observedType)
if (!attributeObserver) {
return
}
attributeObserver.disconnect()
attributeObservers.delete(observedType)
}
/**
 * Emits events for every registration in `results` that matches `node`.
 * `results` is the list of ParsedResult entries sharing the node's id/class
 * key (may be undefined when nothing is registered for that key).
 */
function processObservedResults(emitter: Emitter, target: Element, node: Element, results: ParsedResult[]) {
  if (!results || results.length === 0) {
    return
  }
  for (const observedType of results) {
    const { partialSelector, selector, options } = observedType
    // Compound selectors ("#a .b") are resolved via querySelector on the
    // node; simple ones match the node itself.
    let foundNode = partialSelector.includes(' ') ? node.querySelector(selector) : node
    if (!foundNode) {
      continue
    }
    if (options && options.useParentNode) {
      foundNode = node
    }
    if (options && options.useTargetNode) {
      foundNode = target
    }
    const { isConnected } = foundNode
    // Attribute watching follows the node's lifecycle: start when it is in
    // the DOM, stop when it has been removed.
    if (options && options.attributes) {
      if (isConnected) {
        startAttributeObserver(observedType, emitter, foundNode)
      } else {
        stopAttributeObserver(observedType)
      }
    }
    emitter.emit(selector, foundNode, isConnected)
  }
}
/**
 * Routes [target, node] pairs collected from MutationObserver records to the
 * registrations keyed by the node's id or any of its class names.
 */
function processMutations(emitter: Emitter, nodes: Node[][]) {
  if (!nodes || nodes.length === 0) {
    return
  }
  for (const [target, node] of nodes) {
    let nodeId = node.id
    if (typeof nodeId === 'string' && nodeId.length > 0) {
      nodeId = nodeId.trim()
      processObservedResults(emitter, target, node, observedIds[nodeId])
    }
    const nodeClassList = node.classList
    if (nodeClassList && nodeClassList.length > 0) {
      // A node can match several class-based registrations at once.
      for (let className of nodeClassList) {
        className = className.trim()
        processObservedResults(emitter, target, node, observedClassNames[className])
      }
    }
  }
}
/**
 * Singleton that watches the whole document for element insertions/removals
 * and re-emits them as events named after the CSS selectors listeners
 * registered with `on()`. Callbacks receive the matched element plus whether
 * it is currently connected to the DOM.
 *
 * Fix: `off()` previously called `splice(index)` with no delete count, which
 * removed every registration from `index` to the end of the bucket — silently
 * dropping unrelated listeners that share the same id/class key. It now
 * removes exactly the one matching entry.
 */
class DOMObserver extends (EventEmitter as new () => Emitter) {
  constructor() {
    super()
    observer = new MutationObserver((mutations) => {
      // Flatten mutation records into [target, node] pairs, including any
      // descendants carrying an id or class (they may match selectors too).
      const pendingNodes: Node[][] = []
      for (const { addedNodes, removedNodes, target } of mutations) {
        if (!addedNodes || !removedNodes || (addedNodes.length === 0 && removedNodes.length === 0)) {
          continue
        }
        // Pass 0 walks addedNodes, pass 1 walks removedNodes.
        for (let i = 0; i < 2; i++) {
          const nodes = i === 0 ? addedNodes : removedNodes
          for (const node of nodes) {
            if (node.nodeType !== Node.ELEMENT_NODE || IGNORED_HTML_TAGS.has(node.nodeName)) {
              continue
            }
            pendingNodes.push([target, node])
            if (node.childElementCount === 0) {
              continue
            }
            for (const childNode of node.querySelectorAll('[id],[class]')) {
              pendingNodes.push([target, childNode])
            }
          }
        }
      }
      if (pendingNodes.length === 0) {
        return
      }
      processMutations(this, pendingNodes)
    })
    observer.observe(document, { childList: true, subtree: true })
  }

  /**
   * Registers `callback` for `selector` and immediately replays any nodes
   * already present in the document that match the selector's id/class key.
   */
  on(
    selector: string,
    callback: (node: Element, isConnected: boolean) => void,
    options?: { attributes?: boolean, useParentNode?: boolean }
  ) {
    const parsedSelector = parseSelector(selector)
    const initialNodes = []
    for (const selectorType of Object.keys(parsedSelector)) {
      let observedSelectorType
      switch (selectorType) {
        case 'ids':
          observedSelectorType = observedIds
          break
        case 'classNames':
          observedSelectorType = observedClassNames
          break
        default:
          break
      }
      for (const { key, partialSelector } of parsedSelector[selectorType]) {
        const currentObservedTypeSelectors = observedSelectorType[key]
        const observedType = { partialSelector, selector, options }
        if (!currentObservedTypeSelectors) {
          observedSelectorType[key] = [observedType]
        } else {
          currentObservedTypeSelectors.push(observedType)
        }
        // Seed with already-present nodes so listeners do not miss elements
        // inserted before registration.
        if (observedSelectorType === observedIds) {
          initialNodes.push(...document.querySelectorAll(`#${key}`))
        } else if (observedSelectorType === observedClassNames) {
          initialNodes.push(...document.getElementsByClassName(key))
        }
      }
    }
    const result = super.on(selector, callback)
    processMutations(this, initialNodes.map((node) => [node.parentElement, node]))
    return result
  }

  /**
   * Unregisters `callback`. When the last listener for `selector` is gone,
   * its bookkeeping entry (and any attribute observer) is torn down.
   */
  off(
    selector: string,
    callback: (node: Element, isConnected: boolean) => void
  ) {
    this.removeListener(selector, callback)
    if (this.listenerCount(selector) > 0) {
      return
    }
    const parsedSelector = parseSelector(selector)
    for (const selectorType of Object.keys(parsedSelector)) {
      let observedSelectorType
      switch (selectorType) {
        case 'ids':
          observedSelectorType = observedIds
          break
        case 'classNames':
          observedSelectorType = observedClassNames
          break
        default:
          break
      }
      for (const { key } of parsedSelector[selectorType]) {
        const currentObservedTypeSelectors = observedSelectorType[key]
        if (!currentObservedTypeSelectors) {
          continue
        }
        const observedTypeIndex = currentObservedTypeSelectors.findIndex(
          (observedType) => observedType.selector === selector
        )
        if (observedTypeIndex === -1) {
          continue
        }
        const observedType = currentObservedTypeSelectors[observedTypeIndex]
        stopAttributeObserver(observedType)
        // BUG FIX: remove exactly one entry (was `splice(observedTypeIndex)`,
        // which truncated the array from this index onward).
        currentObservedTypeSelectors.splice(observedTypeIndex, 1)
        if (currentObservedTypeSelectors.length === 0) {
          delete observedSelectorType[key]
        }
      }
    }
  }
}

export default new DOMObserver()
|
import torch.nn as nn
class ConvolutionLayer(nn.Module):
    """1x1 convolution head mapping `inplanes` channels to `feat_planes`."""

    def __init__(self, inplanes, feat_planes, stride):
        super(ConvolutionLayer, self).__init__()
        # Not read anywhere in this class; presumably consumed by callers —
        # TODO confirm before removing.
        self.final_use = True
        if stride == 1:
            # Plain channel projection, spatial size unchanged.
            self.final = nn.Conv2d(inplanes, feat_planes, 1)
        else:
            # NOTE(review): padding=1 with a 1x1 kernel pads a one-pixel
            # border before the stride-2 subsampling, which is unusual —
            # confirm this is intentional and not copied from a 3x3 config.
            self.final = nn.Conv2d(inplanes, feat_planes, kernel_size=1, stride=2, padding=1)

    def forward(self, x):
        # Apply the projection; output spatial size depends on `stride`.
        return self.final(x)
|
// Define a struct to represent a request
/// A unit of work flowing through the handler: a numeric id plus a payload.
struct Request {
    id: u64,
    data: String,
}
// Define a trait for policies
/// Decides whether a request may be processed; `true` means accepted.
trait Policy<T> {
    fn apply_policy(&self, request: &T) -> bool;
}
// Define a trait for services
/// Processes a request, returning an error message on failure.
trait Service<T> {
    fn process_request(&self, request: &T) -> Result<(), String>;
}
// Define a struct to hold policies and services
/// Owns the registered policies and services used to screen and process requests.
struct RequestHandler<P, S>
where
    P: Policy<Request>,
    S: Service<Request> + Clone,
{
    policies: Vec<P>,
    services: Vec<S>,
}
impl<P, S> RequestHandler<P, S>
where
    P: Policy<Request>,
    S: Service<Request> + Clone,
{
    // Method to add a policy to the collection
    fn add_policy(&mut self, policy: P) {
        self.policies.push(policy);
    }

    // Method to add a service to the collection
    fn add_service(&mut self, service: S) {
        self.services.push(service);
    }

    /// Handles a request: every registered policy must accept it, then the
    /// first registered service processes it.
    ///
    /// Fixes two defects in the original: `self.services.choose()` does not
    /// exist on `Vec` without the `rand` crate's `SliceRandom` trait (the
    /// code did not compile), so selection is now the deterministic
    /// `first()`; and the filtered policy list was computed but never used,
    /// so policies had no effect — rejection is now enforced.
    fn handle_request(&self, request: Request) -> Result<(), String> {
        // Apply policies to the request; any rejection aborts processing.
        if !self.policies.iter().all(|p| p.apply_policy(&request)) {
            return Err("Request rejected by policy".to_string());
        }
        // Select a service and process the request.
        match self.services.first() {
            Some(service) => service.process_request(&request),
            None => Err("No service available to handle the request".to_string()),
        }
    }
}
|
<gh_stars>0
/*
ANSWER
-------------------------------------------------------------------------------------
这道题实在TMD的变态了。我能正常想到的方法是按值进行二分法。
BinarySearch的解释:
[1,3,4,2,2] 这个长度是5,所以一共有4个不同的数(一个重复)
按值BinarySearch,先看比<=2的有多少个 (4的一半是2)
扫描一轮 (分界2, 目标2), 发现1,2,2 三个数满足,这大于目标个数2所以按这个range
扫描二轮 (分界1, 目标1), 发现1 一个数满足,和目标相同,所以选择另外一个range
扫描三轮 (分界2, 目标1), 发现2,2 两个数满足,这大于目标个数、而且[2,2]这有一个数,所以算法结束
复杂度 binarySearch logN * 每次要扫描N个 =~ O(NlogN)
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
但是这个题真正变态的是解法2:也就是这个题我提交的解法, 用作快慢指针,找环的方法。。。
关键在于把这个问题转换成找环:
比如:
[1 3 2 4 5 3] 这个Case,可以做如下转换:
跟所有的 value 标上index.
Index: 0 1 2 3 4 5
Value: [1 3 2 4 5 3]
令初始index=0,一直往下推,直到产生环,那么环的开始节点就是重复的节点
f(0): 1
f(1): 3
f(3): 4
f(4): 5
f(5): 3 --> 产生环
1-3-4-5
| |
└---┘
反思:
1. 为什么环的开始节点就是重复的节点?
因为在value数组中存在两个相同的数,即存在 m!=n 满足 f(m) = f(n)
也就是这个数有两个入度(两个数指向它),这就会产生环。
2. 怎么保证所有的点都会遍历到?
并不能保证,比如这个例子中2就和其他的数构成的图不联通。但这不会影响结论
2--┐
| |
└--┘
3. 为何从index=0开始?
因为value是没法取值0的、所以f(0)的入度为0(也就是没有数可以指向0),
所以0按照道理来说,是没法形成环的。如果形成了环,一定是有重复的数。
具体找环的方法请见q142.
-------------------------------------------------------------------------------------
*/
class Solution {
public:
    // Floyd cycle detection over the implicit functional graph i -> nums[i].
    // Because values lie in [1, n] for an array of size n+1, index 0 has no
    // incoming edge, and the duplicate value is exactly the cycle entry.
    int findDuplicate(vector<int>& nums) {
        if (nums.size() <= 1) {
            return 0;
        }
        // Phase 1: advance a slow (1x) and a fast (2x) pointer until they meet.
        int tortoise = nums[0];
        int hare = nums[nums[0]];
        while (tortoise != hare) {
            tortoise = nums[tortoise];
            hare = nums[nums[hare]];
        }
        // Phase 2: restart one pointer at the origin; moving both at 1x,
        // they meet at the cycle entrance, i.e. the duplicated value.
        tortoise = 0;
        while (tortoise != hare) {
            tortoise = nums[tortoise];
            hare = nums[hare];
        }
        return tortoise;
    }
};
|
const nodemailer = require('nodemailer');

/**
 * Emails the details of a newly added user via Gmail.
 *
 * Fixes: the module previously ended with a top-level `sendEmail(user)` call
 * on an undefined `user`, throwing a ReferenceError as soon as the file was
 * loaded; the function is now exported for callers instead. Credentials are
 * read from the environment rather than being committed as literals.
 *
 * @param {{name: string, email: string, phoneNumber: string}} user
 */
const sendEmail = async (user) => {
  // SECURITY: never hard-code real credentials; the literals below are only
  // placeholders used when the environment variables are not set.
  const transporter = nodemailer.createTransport({
    service: 'Gmail',
    auth: {
      user: process.env.GMAIL_USER || 'yourGmail@gmail.com',
      pass: process.env.GMAIL_PASS || 'yourPassword'
    }
  });

  const options = {
    from: 'myemail@gmail.com',
    to: user.email,
    subject: 'New User Information',
    html: `<p>A new user has been added with the following information:</p>
    <ul>
      <li>Name: ${user.name}</li>
      <li>Email: ${user.email}</li>
      <li>Phone Number: ${user.phoneNumber}</li>
    </ul>`
  };

  await transporter.sendMail(options);
};

module.exports = sendEmail;
|
import random
class LicensePlateGenerator:
    """Generates unique license plates: one province character followed by
    five digits. Uniqueness is tracked per generator instance."""

    def __init__(self):
        # One character per province/region prefix.
        self.provinces = u"京津冀晋蒙辽吉黑沪苏浙皖闽赣鲁豫鄂湘粤桂琼川贵云渝藏陕甘青宁新"
        self.digits = '0123456789'
        # Plates handed out so far by this instance.
        self.used_plates = set()

    def generate_license_plate(self):
        """Return a plate not produced by this instance before.

        Uses an iterative retry loop instead of the original unbounded
        recursion (which could hit the recursion limit as the space fills),
        and raises RuntimeError once every possible plate has been issued
        instead of looping forever.
        """
        capacity = len(self.provinces) * len(self.digits) ** 5
        if len(self.used_plates) >= capacity:
            raise RuntimeError("all possible license plates have been generated")
        while True:
            province = random.choice(self.provinces)
            number = ''.join(random.choice(self.digits) for _ in range(5))
            plate = province + number
            if plate not in self.used_plates:
                self.used_plates.add(plate)
                return plate
# Usage example — plate contents are random, so actual output varies per run.
generator = LicensePlateGenerator()
print(generator.generate_license_plate())  # e.g. '京12345'
print(generator.generate_license_plate())  # e.g. '津67890' (distinct from the first)
|
<filename>src/features/auth/redux/data/reduxFormEntries.ts<gh_stars>1-10
import { makeReduxFormEntry } from 'shared/helpers/redux';
import * as NS from '../../namespace';
// redux-form entry descriptors (registered form name + typed field list) for
// the auth feature. Each entry is parameterized with the matching namespace
// interface so field names stay in sync with the form's type.
export const loginFormEntry = makeReduxFormEntry<NS.ILoginForm>('loginForm',
  ['email', 'password', 'remember']);

export const passwordRecoveryFormEntry = makeReduxFormEntry<NS.IPasswordRecoveryForm>('passwordRecovery',
  ['email', 'captcha']);

export const changePasswordFormEntry = makeReduxFormEntry<NS.IChangePasswordForm>('changePassword',
  ['password', 'passwordConfirm']);

export const registrationFormEntry = makeReduxFormEntry<NS.IRegistrationForm>('registration',
  ['email', 'password', 'passwordConfirm', 'nickname', 'captcha']);

export const twoFactorFormEntry = makeReduxFormEntry<NS.ITwoFactorForm>('twoFactorForm', ['code']);
|
package ru.job4j.search;
import java.util.LinkedList;
/**
 * PriorityQueue - emulates a queue with priorities on top of a basic LinkedList.
 *
 * @author <NAME> (<EMAIL>)
 * @version $Id$
 * @since 0.1
 */
public class PriorityQueue {

    /** Backing LinkedList, kept ordered by ascending task priority. */
    private LinkedList<Task> tasks = new LinkedList<>();

    /**
     * Inserts a task at the position dictated by its priority. Among tasks
     * with equal priority the new one goes last, so equal-priority tasks
     * keep their insertion order (FIFO among equals).
     *
     * @param task - Task.
     */
    public void put(Task task) {
        int position = 0;
        for (Task queued : this.tasks) {
            if (task.getPriority() < queued.getPriority()) {
                break;
            }
            position++;
        }
        this.tasks.add(position, task);
    }

    /**
     * Removes and returns the head of the queue (lowest priority value),
     * or null when the queue is empty.
     *
     * @return - the top element of the Queue.
     */
    public Task take() {
        return this.tasks.poll();
    }
}
|
# Build the ARM Docker images for the Trials stack. The base image is built
# first because the other images derive from it; abort the sequence as soon
# as any build fails instead of building broken dependents.
set -e

docker build -t biglittlechallenge/trials-base-arm -f Dockerfile-trials-base-arm .
docker build -t biglittlechallenge/trials-engine-arm -f Dockerfile-trials-engine-arm .
docker build -t biglittlechallenge/trials-engine-dashboard-arm -f Dockerfile-trials-engine-dashboard-arm .
docker build -t biglittlechallenge/trials-ai-arm -f Dockerfile-trials-ai-arm .
|
import React, { useState } from "react"
import { Link } from "gatsby"
import Lottie from "react-lottie"
import animationData from "../lotties/house"
import Layout from "../components/layout"
import "../components/layout.css"
import SEO from "../components/seo"
import data from "../data"
// Landing page: hero section with a Formspree contact form, featured and
// unique property listings, an about section, category cards, and a closing
// call-to-action with a Lottie animation.
const IndexPage = props => {
  // Controlled contact-form fields plus the submission status flag
  // ("SUCCESS" / "ERROR" / "" before any attempt).
  const [name, setName] = useState("")
  const [email, setEmail] = useState("")
  const [message, setMessage] = useState("")
  const [status, setStatus] = useState("")
  // Properties flagged as featured in the static data module.
  const featured = data.properties.filter(
    property => property.featured === true
  )
  // react-lottie configuration for the looping house animation.
  const defaultOptions = {
    loop: true,
    autoplay: true,
    animationData: animationData,
    rendererSettings: {
      preserveAspectRatio: "xMidYMid slice",
    },
  }
  // Submits the form to Formspree via XMLHttpRequest.
  // NOTE(review): the local `data` (FormData) shadows the imported `data`
  // module inside this handler — consider renaming to avoid confusion.
  const submitForm = ev => {
    ev.preventDefault()
    const form = ev.target
    const data = new FormData(form)
    const xhr = new XMLHttpRequest()
    xhr.open(form.method, form.action)
    xhr.setRequestHeader("Accept", "application/json")
    xhr.onreadystatechange = () => {
      if (xhr.readyState !== XMLHttpRequest.DONE) return
      if (xhr.status === 200) {
        // On success: clear both the DOM form and the controlled state.
        form.reset()
        setStatus("SUCCESS")
        setName("")
        setEmail("")
        setMessage("")
      } else {
        setStatus("ERROR")
      }
    }
    xhr.send(data)
  }
  return (
    <Layout>
      <SEO title="Home" />
      <div className="Hero">
        <div className="HeroGroup">
          <a href="tel:+2330558270333">
            <button className="ContactButton">
              <i style={{ color: "#fff" }} className="material-icons">
                phone_iphone
              </i>
              +233 55 827 0333
            </button>
          </a>
          <h1>FIND YOUR DREAM HOME TODAY</h1>
          <p>
            Contact Axtella for all your Construction & Real Estate Services.
          </p>
          <Link to="/contact">
            <button className="ContactUsButton">Contact us</button>
          </Link>
        </div>
        <form
          className="Form"
          onSubmit={submitForm}
          action="https://formspree.io/xayppdyw"
          method="POST"
        >
          <p>Need Help?</p>
          <h2>MESSAGE US</h2>
          <label>
            <input
              name="name"
              value={name}
              placeholder="<NAME>"
              onChange={e => setName(e.target.value)}
              required
            />
          </label>
          <label>
            <input
              name="_replyto"
              value={email}
              placeholder="Your Email"
              onChange={e => setEmail(e.target.value)}
              required
            />
          </label>
          <label className="Message">
            <textarea
              name="message"
              value={message}
              placeholder="Message"
              onChange={e => setMessage(e.target.value)}
              required
            />
          </label>
          <button>Send Message</button>
          {status === "SUCCESS" ? (
            <p style={{ fontSize: 10, marginTop: 0, paddingTop: 3 }}>Thanks!</p>
          ) : null}
          {status === "ERROR" && <p>Ooops! There was an error.</p>}
        </form>
      </div>
      <div className="Featured">
        <p>FEATURED LISTINGS</p>
        <h1>FIND YOUR PERFECT HOME</h1>
        <ul>
          {featured.map((feature, id) => (
            <li key={id}>
              <Link to={`/app/property-details/${feature.id}`}>
                <img src={feature.image} alt="home1" className="Img" />
              </Link>
              <p className="Tag">{feature.tag}</p>
              <h2>{feature.title}</h2>
              <h5>{feature.address}</h5>
              <p className="Price">{feature.price}</p>
            </li>
          ))}
        </ul>
      </div>
      <div className="AboutUs">
        <div className="AboutGroup">
          <h3>WHO WE ARE</h3>
          <h1>ABOUT US</h1>
          <p>
            <b>Axtella Properties & Investments</b> is a fully legislated Real
            Estate Company based in Accra. <br />
          </p>
          <p>
            We have over 5 years combined experience covering Sales, Letting &
            Property Management.We are specialized in Luxury Properties &
            investment opportunities for foreign investors. Our reputation has
            grown to represent a trust worthy and transparent independent
            company providing an exceptional level of customer service...{" "}
          </p>
          <Link to="/about">
            <button>Read More</button>
          </Link>
        </div>
        <div className="AboutImg"></div>
      </div>
      <div className="Featured">
        <p>FIND YOUR PERFECT HOME</p>
        <h1>UNIQUE LISTINGS</h1>
        <ul>
          {data.properties.map((property, id) => (
            <li key={id}>
              <Link to={`/app/property-details/${property.id}`}>
                <img src={property.image} alt="property" className="Img" />
              </Link>
              <p className="Tag">{property.tag}</p>
              <h2>{property.title}</h2>
              <h5>{property.address}</h5>
              <p className="Price">{property.price}</p>
            </li>
          ))}
        </ul>
      </div>
      <div className="Help">
        <div className="HelpGroup">
          <p>WE'RE HERE TO HELP YOU</p>
          <h1>WHAT ARE YOU LOOKING FOR</h1>
        </div>
        <div className="HelpCard">
          <ul>
            <li>
              <img
                src={require("../images/apartment.png")}
                alt="apartment"
                width="60px"
                height="60px"
              />
              <h3> APARTMENTS</h3>
              <p>
                Find and get Deals on the best and affordable premium apartments
                in ghana.
              </p>
              <Link to="/properties">
                <button>Find Apartments</button>
              </Link>
            </li>
            <li>
              <img
                src={require("../images/house.png")}
                alt="apartment"
                width="60px"
                height="60px"
              />
              <h3> HOUSES</h3>
              <p>
                Check out the latest houses for sale on the real estate market,
                compare prices and own a home.
              </p>
              <Link to="/properties">
                <button>Find Houses</button>
              </Link>
            </li>
            <li>
              <img
                src={require("../images/office.png")}
                alt="apartment"
                width="60px"
                height="60px"
              />
              <h3>OFFICES</h3>
              <p>
                Get access to Offices for lease in the best locations in accra.
              </p>
              <Link to="/properties">
                <button>Find Offices</button>
              </Link>
            </li>
          </ul>
        </div>
      </div>
      <div className="Finder">
        <p>
          <i className="material-icons">phone_iphone</i>
          +233 54 135 6456
        </p>
        <h1>LET'S FIND THE RIGHT PROPERTY FOR YOU</h1>
        <Link to="/contact">
          <button>Contact us</button>
        </Link>
        <Lottie options={defaultOptions} height={200} width={200} />
      </div>
    </Layout>
  )
}
export default IndexPage
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## Contains configuration values for interacting with the Vagrant cluster in test mode
#Set NUM_NODES to minimum required for testing.
NUM_NODES=2
# Resolve the repository root relative to this script, then layer these
# overrides on top of the default Vagrant cluster config.
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/../..
source "${KUBE_ROOT}/cluster/vagrant/config-default.sh"
# Do not register the master kubelet during testing
# NOTE(review): the override reads REGISTER_MASTER (not REGISTER_MASTER_KUBELET)
# from the environment — confirm the source variable name is intentional.
REGISTER_MASTER_KUBELET=${REGISTER_MASTER:-false}
# Optional: if set to true, kube-up will configure the cluster to run e2e tests.
E2E_STORAGE_TEST_ENVIRONMENT=${KUBE_E2E_STORAGE_TEST_ENVIRONMENT:-false}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.keyboard = void 0;
var keyboard = {
"viewBox": "0 0 20 20",
"children": [{
"name": "path",
"attribs": {
"d": "M18.6,4h-17.2C0.629,4,0,4.629,0,5.4V14.6C0,15.369,0.629,16,1.399,16h17.2c0.77,0,1.4-0.631,1.4-1.4V5.4\r\n\tC20,4.629,19.369,4,18.6,4z M11,6h2v2h-2V6z M14,9v2h-2V9H14z M8,6h2v2H8V6z M11,9v2H9V9H11z M5,6h2v2H5V6z M8,9v2H6V9H8z M2,6h2v2\r\n\tH2V6z M5,9v2H3V9H5z M4,14H2v-2h2V14z M15,14H5v-2h10V14z M18,14h-2v-2h2V14z M15,11V9h2v2H15z M18,8h-4V6h4V8z"
}
}]
};
exports.keyboard = keyboard;
|
def sumThree(x, y):
    """Return ``[int(x + y)]`` when the sum is divisible by 3, else ``[]``.

    Mirrors the original behavior: the sum is truncated to int before the
    divisibility check, and the (possibly empty) list is returned.
    """
    total = int(x + y)
    return [total] if total % 3 == 0 else []


if __name__ == "__main__":
    # Demo run. The original module-level call used undefined names x and y,
    # which raised NameError as soon as the module was imported.
    print(sumThree(1, 2))
|
<filename>yupc-frontend/yupc-manage-center/src/main/java/com/github/yupc/center/configuration/GlobalVariableProperties.java<gh_stars>0
package com.github.yupc.center.configuration;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import static com.github.yupc.center.configuration.GlobalVariableProperties.GLOBAL_VARIABLE_PREFIX;
@ConfigurationProperties(prefix = GLOBAL_VARIABLE_PREFIX)
@Data
@NoArgsConstructor
@Configuration
public class GlobalVariableProperties {

    /** Property prefix under which these values are bound (yupc.variable.*). */
    public static final String GLOBAL_VARIABLE_PREFIX = "yupc.variable";

    /** URL prefix of the gateway service. */
    private String gateWayUrlPrefix = "http://gateway.yupc.com:9770";
}
|
<filename>query_builder.go<gh_stars>10-100
package influxquerybuilder
import (
"bytes"
"fmt"
"regexp"
"strings"
)
// Duration builds an InfluxQL group-by time interval, e.g. time(5m).
type Duration interface {
	Nanoseconds(uint) Duration
	Microseconds(uint) Duration
	Milliseconds(uint) Duration
	Second(uint) Duration
	Minute(uint) Duration
	Hour(uint) Duration
	Day(uint) Duration
	Week(uint) Duration
	getDuration() string
}

// DurationType holds a magnitude and its InfluxQL unit suffix.
type DurationType struct {
	unit  string
	value uint
}

// NewDuration returns an empty Duration builder.
func NewDuration() Duration {
	return &DurationType{}
}

// set records the magnitude and unit, returning the builder for chaining.
func (t *DurationType) set(value uint, unit string) Duration {
	t.value = value
	t.unit = unit
	return t
}

// Nanoseconds sets the interval in nanoseconds.
func (t *DurationType) Nanoseconds(d uint) Duration { return t.set(d, "ns") }

// Microseconds sets the interval in microseconds.
func (t *DurationType) Microseconds(d uint) Duration { return t.set(d, "u") }

// Milliseconds sets the interval in milliseconds.
func (t *DurationType) Milliseconds(d uint) Duration { return t.set(d, "ms") }

// Second sets the interval in seconds.
func (t *DurationType) Second(d uint) Duration { return t.set(d, "s") }

// Minute sets the interval in minutes.
func (t *DurationType) Minute(d uint) Duration { return t.set(d, "m") }

// Hour sets the interval in hours.
func (t *DurationType) Hour(d uint) Duration { return t.set(d, "h") }

// Day sets the interval in days.
func (t *DurationType) Day(d uint) Duration { return t.set(d, "d") }

// Week sets the interval in weeks.
func (t *DurationType) Week(d uint) Duration { return t.set(d, "w") }

// getDuration renders the interval as an InfluxQL time() expression.
func (t *DurationType) getDuration() string {
	return fmt.Sprintf("time(%d%s)", t.value, t.unit)
}
// QueryBuilder is the fluent interface for assembling an InfluxQL SELECT
// statement; every setter returns the builder so calls can be chained.
type QueryBuilder interface {
	Select(fields ...string) QueryBuilder
	From(string) QueryBuilder
	Where(string, string, interface{}) QueryBuilder
	And(string, string, interface{}) QueryBuilder
	Or(string, string, interface{}) QueryBuilder
	WhereBrackets(QueryBuilder) QueryBuilder
	AndBrackets(QueryBuilder) QueryBuilder
	OrBrackets(QueryBuilder) QueryBuilder
	GroupBy(string) QueryBuilder
	GroupByTime(Duration) QueryBuilder
	GroupByTag(string) QueryBuilder
	Fill(interface{}) QueryBuilder
	Limit(uint) QueryBuilder
	Offset(uint) QueryBuilder
	Desc() QueryBuilder
	Asc() QueryBuilder
	Build() string
	Clean() QueryBuilder
	GetQueryStruct() CurrentQuery
}

// Tag is a single comparison: key <op> value (e.g. "host" = 'a').
type Tag struct {
	key   string
	op    string
	value interface{}
}

// Query is the default QueryBuilder implementation; each field mirrors one
// clause of the statement being built. _limit/_offset record whether
// Limit/Offset were explicitly called, since zero is a legal value.
type Query struct {
	measurement   string
	fields        []string
	where         Tag
	and           []Tag
	or            []Tag
	whereBrackets QueryBuilder
	andBrackets   []QueryBuilder
	orBrackets    []QueryBuilder
	groupBy       string
	groupByTime   string
	groupByTag    string
	order         string
	limit         uint
	_limit        bool
	offset        uint
	_offset       bool
	fill          interface{}
}

// CurrentQuery is a read-only snapshot of the builder's state, exposed
// because Query's own fields are unexported.
type CurrentQuery struct {
	Measurement   string
	Where         Tag
	And           []Tag
	Or            []Tag
	WhereBrackets QueryBuilder
	AndBrackets   []QueryBuilder
	OrBrackets    []QueryBuilder
	Fields        []string
	GroupBy       string
	GroupByTime   string
	GroupByTag    string
	Limit         uint
	Offset        uint
	Order         string
	IsLimitSet    bool
	IsOffsetSet   bool
}

// New returns an empty QueryBuilder.
func New() QueryBuilder {
	return &Query{}
}

// Clean discards all accumulated state by handing back a fresh builder.
func (q *Query) Clean() QueryBuilder {
	return New()
}
// Select appends fields to the SELECT clause. Entries of the form
// "field AS alias" are split at build time, and "*" selects everything.
func (q *Query) Select(fields ...string) QueryBuilder {
	q.fields = append(q.fields, fields...)
	return q
}

// From sets the measurement for the FROM clause.
func (q *Query) From(measurement string) QueryBuilder {
	q.measurement = measurement
	return q
}

// Where sets the primary WHERE comparison (key op value).
func (q *Query) Where(key string, op string, value interface{}) QueryBuilder {
	q.where = Tag{key, op, value}
	return q
}

// And appends an AND comparison to the WHERE clause.
func (q *Query) And(key string, op string, value interface{}) QueryBuilder {
	q.and = append(q.and, Tag{key, op, value})
	return q
}

// Or appends an OR comparison to the WHERE clause.
func (q *Query) Or(key string, op string, value interface{}) QueryBuilder {
	q.or = append(q.or, Tag{key, op, value})
	return q
}

// WhereBrackets sets a parenthesised primary condition: WHERE (...).
func (q *Query) WhereBrackets(builder QueryBuilder) QueryBuilder {
	q.whereBrackets = builder
	return q
}

// AndBrackets appends a parenthesised sub-condition: AND (...).
func (q *Query) AndBrackets(builder QueryBuilder) QueryBuilder {
	q.andBrackets = append(q.andBrackets, builder)
	return q
}

// OrBrackets appends a parenthesised sub-condition: OR (...).
func (q *Query) OrBrackets(builder QueryBuilder) QueryBuilder {
	q.orBrackets = append(q.orBrackets, builder)
	return q
}

// GroupBy sets a raw interval string, rendered as GROUP BY time(<interval>).
func (q *Query) GroupBy(time string) QueryBuilder {
	q.groupBy = time
	return q
}

// GroupByTime sets the interval from a Duration (rendered as GROUP BY time(...)).
func (q *Query) GroupByTime(duration Duration) QueryBuilder {
	q.groupByTime = duration.getDuration()
	return q
}

// GroupByTag groups results by a tag: GROUP BY <tag>.
func (q *Query) GroupByTag(tag string) QueryBuilder {
	q.groupByTag = tag
	return q
}

// Fill sets the FILL(...) value used for empty time intervals.
func (q *Query) Fill(fill interface{}) QueryBuilder {
	q.fill = fill
	return q
}

// Limit caps the number of returned points and marks the limit as set.
func (q *Query) Limit(limit uint) QueryBuilder {
	q._limit = true
	q.limit = limit
	return q
}

// Offset skips the first points and marks the offset as set.
func (q *Query) Offset(offset uint) QueryBuilder {
	q._offset = true
	q.offset = offset
	return q
}

// Desc orders results by time descending.
func (q *Query) Desc() QueryBuilder {
	q.order = "DESC"
	return q
}

// Asc orders results by time ascending.
func (q *Query) Asc() QueryBuilder {
	q.order = "ASC"
	return q
}
// GetQueryStruct exposes a read-only snapshot of the builder's current state.
func (q *Query) GetQueryStruct() CurrentQuery {
	return CurrentQuery{
		Measurement:   q.measurement,
		Where:         q.where,
		And:           q.and,
		Or:            q.or,
		WhereBrackets: q.whereBrackets,
		AndBrackets:   q.andBrackets,
		OrBrackets:    q.orBrackets,
		Fields:        q.fields,
		GroupBy:       q.groupBy,
		// Bug fix: GroupByTime and GroupByTag are declared on CurrentQuery but
		// were never copied, so callers always observed empty strings even
		// after GroupByTime()/GroupByTag() had been set.
		GroupByTime: q.groupByTime,
		GroupByTag:  q.groupByTag,
		Limit:       q.limit,
		Offset:      q.offset,
		Order:       q.order,
		IsLimitSet:  q._limit,
		IsOffsetSet: q._offset,
	}
}
// Build assembles the final query string by concatenating the individual
// clause builders in InfluxQL clause order, then trimming the trailing
// separator space each clause emits.
func (q *Query) Build() string {
	parts := []string{
		q.buildFields(),
		q.buildFrom(),
		q.buildWhere(),
		q.buildGroupBy(),
		q.buildFill(),
		q.buildOrder(),
		q.buildLimit(),
		q.buildOffset(),
	}
	return strings.TrimSpace(strings.Join(parts, ""))
}
// functionMatcher recognises fields that are function calls, e.g. mean(x),
// which must not be wrapped in double quotes.
var functionMatcher = regexp.MustCompile(`.+\(.+\)$`)

// buildFields renders the SELECT clause. Plain names are double-quoted,
// function calls are emitted verbatim, "field AS alias" quotes both halves,
// and a bare "*" short-circuits the whole clause to SELECT *.
func (q *Query) buildFields() string {
	if q.fields == nil {
		return ""
	}
	fields := make([]string, len(q.fields))
	for i := range fields {
		// NOTE(review): this splits on the literal substring "AS", so a field
		// name that merely contains "AS" (e.g. "CLASS") gets mangled, and more
		// than one occurrence makes len(splitByAs) > 2 so the alias is dropped.
		splitByAs := strings.Split(q.fields[i], "AS")
		selectField := strings.TrimSpace(splitByAs[0])
		selectAs := ""
		if selectField == "*" {
			return "SELECT * "
		}
		if len(splitByAs) == 2 {
			selectAs = strings.TrimSpace(splitByAs[1])
		}
		if functionMatcher.MatchString(selectField) {
			fields[i] = selectField
		} else {
			fields[i] = fmt.Sprintf("\"%s\"", selectField)
		}
		if selectAs != "" {
			fields[i] = fields[i] + " AS " + fmt.Sprintf("\"%s\"", selectAs)
		}
	}
	return fmt.Sprintf("SELECT %s ", strings.Join(fields, ","))
}

// buildFrom renders FROM "<measurement>", or nothing when unset.
func (q *Query) buildFrom() string {
	if q.measurement == "" {
		return ""
	}
	return fmt.Sprintf(`FROM "%s" `, q.measurement)
}
// buildWhere renders the WHERE clause. The clause is emitted only when a
// primary condition exists (Where or WhereBrackets); And/Or comparisons and
// the bracket groups are then appended in that fixed order. Nested builders
// have their own leading "WHERE " stripped before being parenthesised.
func (q *Query) buildWhere() string {
	var buffer bytes.Buffer
	var whereCriteria string
	andCriteria := make([]string, 0)
	orCriteria := make([]string, 0)
	// NOTE(review): comparing q.where against Tag{} panics at runtime if a
	// caller stored a non-comparable value (slice/map) in the tag value.
	if q.where != (Tag{}) || q.whereBrackets != nil {
		if q.where != (Tag{}) {
			// Plain primary condition: WHERE key op value.
			buffer.WriteString("WHERE ")
			whereCriteria = getCriteriaTemplate(q.where)
			buffer.WriteString(whereCriteria)
			buffer.WriteString(" ")
		} else if q.whereBrackets != nil {
			// Parenthesised primary condition built from a nested builder.
			buffer.WriteString("WHERE (")
			buffer.WriteString(strings.Replace(q.whereBrackets.Build(), "WHERE ", "", 1))
			buffer.WriteString(") ")
		}
		if q.and != nil {
			buffer.WriteString("AND ")
			for _, tag := range q.and {
				andCriteria = append(
					andCriteria,
					getCriteriaTemplate(tag),
				)
			}
			buffer.WriteString(strings.Join(andCriteria, " AND "))
			buffer.WriteString(" ")
		}
		if q.or != nil {
			buffer.WriteString("OR ")
			for _, tag := range q.or {
				orCriteria = append(
					orCriteria,
					getCriteriaTemplate(tag),
				)
			}
			buffer.WriteString(strings.Join(orCriteria, " OR "))
			buffer.WriteString(" ")
		}
		if q.andBrackets != nil {
			for _, g := range q.andBrackets {
				buffer.WriteString("AND (")
				buffer.WriteString(strings.Replace(g.Build(), "WHERE ", "", 1))
				buffer.WriteString(") ")
			}
		}
		if q.orBrackets != nil {
			for _, g := range q.orBrackets {
				buffer.WriteString("OR (")
				buffer.WriteString(strings.Replace(g.Build(), "WHERE ", "", 1))
				buffer.WriteString(") ")
			}
		}
	}
	return buffer.String()
}
// buildGroupBy renders the GROUP BY clause. Exactly one variant wins, in
// priority order: a raw interval (wrapped in time(...)), a Duration-built
// interval (already formatted), or a tag name.
func (q *Query) buildGroupBy() string {
	var clause string
	switch {
	case q.groupBy != "":
		clause = fmt.Sprintf("GROUP BY time(%s)", q.groupBy)
	case q.groupByTime != "":
		clause = fmt.Sprintf("GROUP BY %s", q.groupByTime)
	case q.groupByTag != "":
		clause = fmt.Sprintf("GROUP BY %s", q.groupByTag)
	default:
		return ""
	}
	return clause + " "
}
// buildFill renders FILL(<value>) when a fill value has been provided.
func (q *Query) buildFill() string {
	if q.fill == nil {
		return ""
	}
	return fmt.Sprintf("FILL(%s) ", fmt.Sprint(q.fill))
}

// buildOrder renders ORDER BY time <ASC|DESC> when an order was chosen.
func (q *Query) buildOrder() string {
	if q.order == "" {
		return ""
	}
	return fmt.Sprintf("ORDER BY time %s ", q.order)
}

// buildLimit renders the LIMIT clause when Limit() was explicitly called.
func (q *Query) buildLimit() string {
	if !q._limit {
		return ""
	}
	return fmt.Sprintf("LIMIT %v ", q.limit)
}

// buildOffset renders the OFFSET clause when Offset() was explicitly called.
func (q *Query) buildOffset() string {
	if !q._offset {
		return ""
	}
	return fmt.Sprintf("OFFSET %v ", q.offset)
}
// getCriteriaTemplate renders one comparison, picking the value format from
// its dynamic type: integers via %d, floats via %g, bools via %t, and
// everything else single-quoted via %s (non-string values falling into the
// default branch are rendered with their default formatting inside quotes).
func getCriteriaTemplate(tag Tag) string {
	switch tag.value.(type) {
	case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		return fmt.Sprintf(`"%s" %s %d`, tag.key, tag.op, tag.value)
	case float32, float64:
		return fmt.Sprintf(`"%s" %s %g`, tag.key, tag.op, tag.value)
	case bool:
		return fmt.Sprintf(`"%s" %s %t`, tag.key, tag.op, tag.value)
	default:
		return fmt.Sprintf(`"%s" %s '%s'`, tag.key, tag.op, tag.value)
	}
}
|
/**
 * Minimal page-navigation service: keeps a history stack and notifies
 * subscribers whenever the current page changes.
 */
export class NavigationService {
  /** Visited pages; the last entry is the current page. */
  private history: string[] = [];
  private navigationCallbacks: ((page: string) => void)[] = [];

  /** Pushes `page` onto the history and notifies all subscribers. */
  navigateTo(page: string): void {
    this.history.push(page);
    this.notifyNavigationSubscribers(page);
  }

  /**
   * Pops the current page and notifies subscribers with the previous one.
   * A no-op when there is no previous page to go back to.
   */
  navigateBack(): void {
    if (this.history.length > 1) {
      this.history.pop();
      const previousPage = this.history[this.history.length - 1];
      this.notifyNavigationSubscribers(previousPage);
    }
  }

  /**
   * Registers a navigation listener.
   *
   * @returns an unsubscribe function. Previously there was no way to remove a
   * callback, so long-lived subscribers leaked; returning a disposer is
   * backward-compatible (callers that ignored the `void` return still work).
   */
  subscribeToNavigationEvents(callback: (page: string) => void): () => void {
    this.navigationCallbacks.push(callback);
    return () => {
      const index = this.navigationCallbacks.indexOf(callback);
      if (index !== -1) {
        this.navigationCallbacks.splice(index, 1);
      }
    };
  }

  private notifyNavigationSubscribers(page: string): void {
    this.navigationCallbacks.forEach(callback => callback(page));
  }
}
|
package cloudflare
import (
"bytes"
"encoding/json"
"golang.org/x/net/context"
)
// Zones - Cloudflare Zones API Client.
type Zones struct {
	// Options - Client options; embedded so they are passed to every httpDo call.
	*Options
}
// Create - Creates a zone: POSTs {"name": domain} to /zones and decodes the
// created zone from the response envelope's Result field.
func (zones *Zones) Create(ctx context.Context, domain string) (zone *Zone, err error) {
	buffer := new(bytes.Buffer)
	err = json.NewEncoder(buffer).Encode(struct {
		Name string `json:"name"`
	}{
		Name: domain,
	})
	if err != nil {
		return
	}
	response, err := httpDo(ctx, zones.Options, "POST", apiURL("/zones"), buffer)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result, err := readResponse(response.Body)
	if err != nil {
		return
	}
	zone = new(Zone)
	err = json.Unmarshal(result.Result, &zone)
	return
}
// List - Lists all zones, transparently walking every result page.
func (zones *Zones) List(ctx context.Context) ([]*Zone, error) {
	return zones.listPages(ctx, 1)
}

// Details - Requests Zone details by ID via GET /zones/{id}.
func (zones *Zones) Details(ctx context.Context, id string) (zone *Zone, err error) {
	response, err := httpDo(ctx, zones.Options, "GET", apiURL("/zones/%s", id), nil)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result, err := readResponse(response.Body)
	if err != nil {
		return
	}
	zone = new(Zone)
	err = json.Unmarshal(result.Result, &zone)
	return
}
// Patch - Patches a zone. It has a limited set of editable fields; the
// encoded ZonePatch body is sent to /zones/{id}.
func (zones *Zones) Patch(ctx context.Context, id string, patch *ZonePatch) (err error) {
	buffer := new(bytes.Buffer)
	err = json.NewEncoder(buffer).Encode(patch)
	if err != nil {
		return
	}
	// Bug fix: the Cloudflare v4 "Edit Zone" endpoint expects the HTTP PATCH
	// verb; this method previously sent POST, which that endpoint rejects.
	response, err := httpDo(ctx, zones.Options, "PATCH", apiURL("/zones/%s", id), buffer)
	if err != nil {
		return
	}
	defer response.Body.Close()
	_, err = readResponse(response.Body)
	return
}
// Delete - Deletes zone by id via DELETE /zones/{id}.
func (zones *Zones) Delete(ctx context.Context, id string) (err error) {
	response, err := httpDo(ctx, zones.Options, "DELETE", apiURL("/zones/%s", id), nil)
	if err != nil {
		return
	}
	defer response.Body.Close()
	_, err = readResponse(response.Body)
	return
}

// listPages - Gets all pages starting from `page` (50 zones per request),
// recursing until ResultInfo reports no further pages and concatenating the
// results.
func (zones *Zones) listPages(ctx context.Context, page int) (list []*Zone, err error) {
	response, err := httpDo(ctx, zones.Options, "GET", apiURL("/zones?page=%d&per_page=50", page), nil)
	if err != nil {
		return
	}
	defer response.Body.Close()
	result, err := readResponse(response.Body)
	if err != nil {
		return
	}
	err = json.Unmarshal(result.Result, &list)
	if err != nil {
		return
	}
	// Stop when pagination info is absent or the last page has been fetched.
	if result.ResultInfo == nil || page >= result.ResultInfo.TotalPages {
		return
	}
	next, err := zones.listPages(ctx, page+1)
	if err != nil {
		return
	}
	return append(list, next...), nil
}
|
import { Component, OnInit } from '@angular/core';
import { ToastrService } from 'ngx-toastr';
import { ActivatedRoute, Router } from '@angular/router';
import { NgForm } from '@angular/forms';
import { Party } from '../../../@theme/model/party-class';
import { PartyService } from '../../../@theme/services/party.service';
import { AuthService } from '../../../@theme/services/auth.service';
import { User, UserPermission } from '../../../@theme/model/user-class';
import { Subscription } from 'rxjs';
@Component({
  selector: 'app-add-edit-party',
  templateUrl: './add-edit-party.component.html',
  styleUrls: ['./add-edit-party.component.scss']
})
// Combined add/edit screen for a Party; the mode is chosen from the ':id'
// route parameter (present = edit, absent = add).
export class AddEditPartyComponent implements OnInit {
  // Route param: party id when editing, null when adding.
  id: any;
  subBtnName = '';
  topHeader = '';
  partyModal: Party;
  currentUserId: any;
  currentUserHeadid: any;
  // NOTE(review): this subscription is never unsubscribed (no ngOnDestroy) —
  // confirm whether authService.currentUser completes on its own.
  currentUser$: Subscription;
  currentUser: User;
  currentUserPermission: UserPermission[] = [];

  constructor(private toasterService: ToastrService, private route: ActivatedRoute,
    private router: Router, private partyService: PartyService, private authService: AuthService) {
    this.partyModal = new Party();
    // Track the signed-in user; the ids are attached to create/update payloads.
    this.currentUser$ = this.authService.currentUser.subscribe(ele => {
      if (ele != null) {
        this.currentUser = ele.user;
        this.currentUserId = ele.user.user_id;
        this.currentUserHeadid = ele.user.user_head_id;
        this.currentUserPermission = ele.user_permission;
      }
    });
  }

  ngOnInit() {
    this.onPageLoad();
  }

  // Decides add vs edit mode and, in edit mode, loads the party into the form model.
  onPageLoad() {
    this.id = this.route.snapshot.paramMap.get('id');
    if (this.id != null) {
      this.subBtnName = 'Update';
      this.topHeader = 'Edit Party';
      this.partyService.getPartyById(this.id).subscribe(
        data => {
          // Responses arrive as a one-element array: [{ error, message, data }].
          if (!data[0].error) {
            this.partyModal = data[0].data[0];
          } else {
            this.toasterService.error(data[0].message);
          }
        }, error => {
          this.toasterService.error(error);
        });
    } else {
      this.subBtnName = 'Save';
      this.topHeader = 'Add Party';
    }
  }

  // Keypress filter: permits only digits and '.' (char codes 46, 48-57);
  // control characters (<= 31) pass through.
  numberOnly(event): boolean {
    const charCode = (event.which) ? event.which : event.keyCode;
    if (charCode > 31 && ((charCode < 46 || charCode > 57) || charCode == 47)) {
      return false;
    }
    return true;
  }

  // Persists the form: update when an id is present, create otherwise; on
  // success the form is reset and the user returns to the list view.
  onCustomFormSubmit(form: NgForm) {
    //for update
    if (this.id) {
      this.partyModal.updated_by = this.currentUserId;
      this.partyService.updateParty(this.partyModal).subscribe(data => {
        if (!data[0].error) {
          this.toasterService.success(data[0].message);
          form.resetForm();
          this.router.navigate(['/pages/party/view-party']);
        } else {
          this.toasterService.error(data[0].message);
        }
      }, error => {
        this.toasterService.error('Server Error');
      });
    } else {
      //for add
      this.partyModal.created_by = this.currentUserId;
      this.partyModal.user_head_id = this.currentUserHeadid;
      // NOTE(review): leftover debug logging — consider removing.
      console.log("party Modal")
      console.log(this.partyModal)
      this.partyService.addParty(this.partyModal).subscribe(data => {
        // data = data[0]
        // NOTE(review): this branch checks data.error while the update branch
        // checks data[0].error, yet both read data[0].message — confirm the
        // addParty response shape; one of the two accesses looks wrong.
        if (!data.error) {
          this.toasterService.success(data[0].message);
          form.resetForm();
          this.router.navigate(['/pages/party/view-party']);
        } else {
          this.toasterService.error(data[0].message);
        }
      }, error => {
        this.toasterService.error('Server Error');
      });
    }
  }
}
|
import { scrypt_simple, to_uint8, encrypt as chacha_encrypt, decrypt as chacha_decrypt, to_utf8, isAvailable } from 'rasm-crypt';
import * as waitUntil from 'async-wait-until';
/**
 * Indirection over the platform CSPRNG so environments without a global
 * `crypto` (or tests) can inject their own byte source.
 */
export class RandomHelper {
  /** Optional override; when unset, the global crypto.getRandomValues is used. */
  static getRandomValues?: (array: Uint32Array | Uint8Array) => void = undefined;

  /** Fills `array` in place with random values from the configured source. */
  static fillRandom(array: Uint8Array | Uint32Array) {
    if (this.getRandomValues) {
      this.getRandomValues(array);
    } else {
      crypto.getRandomValues(array);
    }
  }
}
/** Random nonce bytes generated per encryption (12 bytes here). */
export type Nonce = Uint8Array;
/** Symmetric key bytes derived via scrypt. */
export type Key = Uint8Array;
/** Auth tag (first 16 bytes) followed by the ciphertext. */
export type EncryptedData = Uint8Array;

// fixme externalize to worker in the future
/**
 * Derives a key from `plaintext` and `salt` with scrypt, waiting (up to 5s)
 * for the rasm-crypt WASM module to become available first.
 * The trailing 32 presumably selects the derived key length in bytes — TODO
 * confirm against rasm-crypt's scrypt_simple signature.
 */
export async function hash(plaintext: string, salt: string): Promise<Uint8Array> {
  await waitUntil(isAvailable, 5000, 50);
  return new Promise<Uint8Array>(((resolve, reject) => {
    let salt_uint8 = to_uint8(salt);
    let plaintext_uint8 = to_uint8(plaintext);
    const key = scrypt_simple(plaintext_uint8, salt_uint8, 32);
    resolve(key);
  }));
}

// fixme externalize to worker in the future
/**
 * Derives [key, doubleKey] where doubleKey = scrypt(scrypt(plaintext)), both
 * salted identically. NOTE(review): doubleHashSync below returns only the
 * second-pass value — confirm the asymmetry is intentional.
 */
export async function doubleHash(plaintext: string, salt: string): Promise<[Uint8Array, Uint8Array]> {
  await waitUntil(isAvailable, 5000, 50);
  return new Promise<[Uint8Array, Uint8Array]>(((resolve, reject) => {
    let salt_uint8 = to_uint8(salt);
    let plaintext_uint8 = to_uint8(plaintext);
    const key = scrypt_simple(plaintext_uint8, salt_uint8, 32);
    const double = scrypt_simple(key, salt_uint8, 32);
    resolve([key, double]);
  }));
}

/** Synchronous single-pass scrypt; assumes the WASM module is already loaded. */
export function hashSync(plaintext: string | Uint8Array, salt: string): Uint8Array {
  let salt_uint8 = to_uint8(salt);
  if (typeof plaintext === 'string') {
    plaintext = to_uint8(plaintext);
  }
  const key = scrypt_simple(plaintext, salt_uint8, 32);
  return key;
}

/** Synchronous double-pass scrypt; returns only the second-pass digest. */
export function doubleHashSync(plaintext: string, salt: string): Uint8Array {
  let salt_uint8 = to_uint8(salt);
  let plaintext_uint8 = to_uint8(plaintext);
  const key = scrypt_simple(plaintext_uint8, salt_uint8, 32);
  const double = scrypt_simple(key, salt_uint8, 32);
  return double;
}
// fixme externalize to worker in the future
// fixme externalize to worker in the future
/** Async wrapper over encryptSync that first waits for the WASM module. */
export async function encrypt(data: Uint8Array | string, key: Key): Promise<[EncryptedData, Nonce]> {
  await waitUntil(isAvailable, 5000, 50);
  return new Promise<[EncryptedData, Nonce]>((resolve, reject) => {
    resolve(encryptSync(data, key));
  });
}

/**
 * Encrypts `data` under `key` with a freshly generated 12-byte random nonce.
 * Returns [authTag || ciphertext, nonce]; decrypt()/decryptToUtf8() expect the
 * auth tag to occupy the first 16 bytes of the encrypted buffer.
 */
export function encryptSync(data: Uint8Array | string, key: Key): [EncryptedData, Nonce] {
  if (typeof data === 'string') {
    data = to_uint8(data);
  }
  const nonce = new Uint8Array(12);
  fillRandomValues(nonce);
  // Empty Uint8Array = no additional authenticated data (AAD).
  let res = chacha_encrypt(key, nonce, new Uint8Array(0), data);
  let authTag = res.get_auth_tag();
  let ciphertext = res.get_ciphertext();
  // Concatenate tag + ciphertext into a single buffer.
  let encrypted = new Uint8Array(authTag.length + ciphertext.length);
  encrypted.set(authTag, 0);
  encrypted.set(ciphertext, authTag.length);
  return [encrypted, nonce];
}
// fixme externalize to worker in the future
// fixme externalize to worker in the future
/**
 * Decrypts `data` (16-byte auth tag prefix + ciphertext, as produced by
 * encryptSync) and decodes the plaintext as UTF-8.
 */
export async function decryptToUtf8(data: EncryptedData, nonce: Nonce, key: Key): Promise<string> {
  await waitUntil(isAvailable, 5000, 50);
  return new Promise<string>((resolve, reject) => {
    let tag = data.slice(0, 16);
    let ciphertext = data.slice(16, data.length);
    const decoded = chacha_decrypt(key, nonce, new Uint8Array(0), ciphertext, tag);
    const result = to_utf8(decoded);
    resolve(result);
  });
}

// fixme externalize to worker in the future
/** Same as decryptToUtf8 but returns the raw plaintext bytes. */
export async function decrypt(data: EncryptedData, nonce: Nonce, key: Key): Promise<Uint8Array> {
  await waitUntil(isAvailable, 5000, 50);
  return new Promise<Uint8Array>((resolve, reject) => {
    let tag = data.slice(0, 16);
    let ciphertext = data.slice(16, data.length);
    const decoded = chacha_decrypt(key, nonce, new Uint8Array(0), ciphertext, tag);
    resolve(decoded);
  });
}

/** Fills `array` in place with random values via RandomHelper. */
export function fillRandomValues(array: Uint8Array | Uint32Array) {
  RandomHelper.fillRandom(array);
}
/**
 * Hex-encodes a Uint32Array. Note: each element is first narrowed to a byte
 * (value mod 256) by the Uint8Array conversion, matching the original behavior.
 */
export function toHex32(array: Uint32Array): string {
  return toHex8(Uint8Array.from(array));
}

/** Hex-encodes bytes as an uppercase string, two digits per byte. */
export function toHex8(array: Uint8Array): string {
  let hexStr = '';
  for (const byte of array) {
    // padStart replaces the manual length-1 check of the original.
    hexStr += byte.toString(16).padStart(2, '0');
  }
  return hexStr.toUpperCase();
}

/**
 * Decodes a hex string (case-insensitive) into bytes.
 * Uses slice() instead of the deprecated String.prototype.substr().
 */
export function fromHex(hex: string): Uint8Array {
  const bytes = new Uint8Array(hex.length >> 1);
  for (let i = 0; i < bytes.length; i++) {
    bytes[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16);
  }
  return bytes;
}
|
diff <(tail -n +3 data/2020-11-12.1057) <(tail -n +3 data/2020-11-12.1027)
|
# Gemfile
source 'https://rubygems.org'

gem 'rails', '~> 6.0.3'
gem 'sqlite3'

# config/routes.rb
Rails.application.routes.draw do
  # Survey form rendering and submission endpoints.
  get '/survey' => 'survey#new'
  post '/survey/create' => 'survey#create'
end

# app/controllers/survey_controller.rb
class SurveyController < ApplicationController
  # GET /survey: build an empty model for the form.
  def new
    @survey = Survey.new
  end

  # POST /survey/create: persist the submission; re-render the form on
  # validation failure.
  def create
    @survey = Survey.new(survey_params)
    if @survey.save
      flash[:success] = 'Survey created successfully.'
      redirect_to root_path
    else
      render :new
    end
  end

  private

  # Strong parameters: whitelist the survey form fields.
  def survey_params
    params.require(:survey).permit(:name, :email, :age, :hobbies)
  end
end

# app/views/survey/new.html.erb
<h1>Create New Survey</h1>
<%= form_for @survey do |f| %>
  <div>
    <%= f.label :name %>
    <%= f.text_field :name %>
  </div>
  <div>
    <%= f.label :email %>
    <%= f.text_field :email %>
  </div>
  <div>
    <%= f.label :age %>
    <%= f.number_field :age %>
  </div>
  <div>
    <%= f.label :hobbies %>
    <%= f.text_area :hobbies %>
  </div>
  <div>
    <%= f.submit %>
  </div>
<% end %>
|
/** A single binary-tree node holding a value and two child links. */
class Node {
  constructor(public data, public left, public right) {}

  /** Returns the stored value. */
  show() {
    return this.data;
  }
}

/**
 * BinarySearchTree — values smaller than a node go left, everything else
 * (including duplicates) goes right, so an in-order walk yields ascending order.
 */
class BinarySearchTree {
  public root;

  constructor() {
    this.root = null;
  }

  /** Inserts a value by walking down to the first free matching slot. */
  insert(data) {
    const fresh = new Node(data, null, null);
    if (this.root === null) {
      this.root = fresh;
      return;
    }
    let current = this.root;
    while (true) {
      if (data < current.data) {
        if (current.left === null) {
          current.left = fresh;
          return;
        }
        current = current.left;
      } else {
        if (current.right === null) {
          current.right = fresh;
          return;
        }
        current = current.right;
      }
    }
  }

  /** In-order traversal: appends values to `arr` in ascending order. */
  inOrder(node, arr = []) {
    if (node === null) {
      return;
    }
    this.inOrder(node.left, arr);
    arr.push(node.show());
    this.inOrder(node.right, arr);
  }

  /** Collects and returns the whole tree's values in sorted order. */
  getOrder() {
    const ordered = [];
    this.inOrder(this.root, ordered);
    return ordered;
  }
}

/**
 * Sorts numbers ascending by inserting them into a BST and reading it back
 * in order. (Despite the name, this is a tree sort, not a heap sort.)
 */
function heapSort(array: number[] = []): number[] {
  if (array.length < 2) {
    return array; // base case: already sorted
  }
  const tree = new BinarySearchTree();
  // The original iterated from the last element to the first; replicate that
  // insertion order without mutating the input.
  for (const value of [...array].reverse()) {
    tree.insert(value);
  }
  return tree.getOrder();
}
|
<reponame>ZacharyBabbitt/office-ui-fabric-react<filename>apps/a11y-tests/src/axe-sarif-converter/document-utils.ts
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Utility wrapper exposing document metadata; this implementation always
// reports an empty title (presumably a stub for non-browser contexts —
// confirm against callers).
export class DocumentUtils {
  /** Returns the current document title (always '' here). */
  public title(): string {
    return '';
  }
}
|
#!/bin/sh
# Launch the twitter4j ShowDirectMessage example: load JAVA_HOME/MEM_ARGS/
# CLASSPATH from setEnv.sh in the parent directory, then exec the JVM,
# forwarding any arguments given to this script.
cd ..
. ./setEnv.sh
RUN_CMD="$JAVA_HOME/bin/java $MEM_ARGS -cp $CLASSPATH twitter4j.examples.directmessage.ShowDirectMessage"
# Print the full command before replacing this shell with it.
echo $RUN_CMD ${1+"$@"}
exec $RUN_CMD ${1+"$@"}
|
<reponame>makenosound/css-in-js-generator
import * as postcss from "postcss";
const parseSelector = require("postcss-selector-parser");
import { getSelectorScope } from "./getSelectorScope";
/**
 * Scans the stylesheet's class selectors and returns every known scope that a
 * selector references, excluding the stylesheet's own `scope`.
 */
export function getRequiredScopes(
  css: string,
  scope: string,
  knownScopes: Set<string>
): Set<string> {
  const required = new Set<string>();
  const root = postcss.parse(css);
  root.walkRules((rule) => {
    const processor = parseSelector((selectors: any) => {
      selectors.walkClasses((classNode: any) => {
        const selectorScopes = getSelectorScope(classNode.toString());
        // A selector that already mentions our own scope contributes nothing.
        if (selectorScopes.includes(scope)) {
          return;
        }
        for (const candidate of selectorScopes) {
          if (knownScopes.has(candidate)) {
            required.add(candidate);
          }
        }
      });
    });
    processor.processSync(rule.selector);
  });
  return required;
}
|
# Distributed ResNet-50 training launch: 16 MPI ranks across four 4-GPU hosts
# over TCP (InfiniBand disabled), exporting the NCCL/Horovod/MXNet tuning
# environment to every rank and teeing output to a log file.
mpirun --allow-run-as-root --tag-output -np 16 -H 11.238.201.7:4,11.238.201.70:4,11.238.201.4:4,11.238.201.67:4 -bind-to none -map-by slot -mca pml ob1 -mca btl ^openib -mca plm_rsh_args "-p 54321" -mca btl_tcp_if_include bond0 -x NCCL_SOCKET_IFNAME=bond0 -x NCCL_IB_DISABLE=1 -x CUDNN_VERSION -x NSIGHT_SYSTEMS_VERSION -x LC_ALL -x LS_COLORS -x LD_LIBRARY_PATH -x NCCL_MAX_NRINGS -x OPENCV_FOR_THREADS_NUM -x MXNET_VERSION -x LESSCLOSE -x HOSTNAME -x MOFED_VERSION -x MXNET_OPTIMIZER_AGGREGATION_SIZE -x MXNET_USE_OPERATOR_TUNING -x MXNET_GPU_COPY_NTHREADS -x MXNET_UPDATE_ON_KVSTORE -x MXNET_EXEC_ENABLE_ADDTO -x PYTHONIOENCODING -x NVIDIA_VISIBLE_DEVICES -x pouch_container_id -x ENV -x CUDA_DRIVER_VERSION -x MXNET_HOROVOD_NUM_GROUPS=16 -x NCCL_VERSION -x MXNET_HOME -x STACK_HARD_LIMIT -x MXNET_CUDNN_SUPPLY_NORMCONV_CONSTANTS=1 -x ONNX_NAMESPACE -x PWD -x DALI_VERSION -x HOME -x NVIDIA_MXNET_VERSION -x TRT_VERSION -x OPENMPI_VERSION -x BASH_ENV -x LIBRARY_PATH -x NCCL_BUFFSIZE -x HOROVOD_BATCH_D2D_MEMCOPIES -x MXNET_USE_TENSORRT -x NVIDIA_BUILD_ID -x MXNET_GPU_WORKER_NTHREADS -x TERM -x NCCL_DISABLE_CHECKS -x CUDA_VERSION -x HOROVOD_CYCLE_TIME=0.1 -x NVIDIA_DRIVER_CAPABILITIES -x HOROVOD_GROUPED_ALLREDUCES -x SHLVL -x NCCL_NET_GDR_READ -x NVIDIA_REQUIRE_CUDA -x CUDA_CACHE_DISABLE -x pouch_container_image -x DALI_BUILD -x CUBLAS_VERSION -x PATH -x MEMLOCK_HARD_LIMIT -x MXNET_CUDNN_AUTOTUNE_DEFAULT -x HOROVOD_NUM_STREAMS=2 -x OMPI_MCA_btl_vader_single_copy_mechanism -x NCCL_SOCKET_IFNAME -x _CUDA_COMPAT_PATH -x LESSOPEN -x OMP_NUM_THREADS -x _ -x HOROVOD_STALL_CHECK_TIME_SECONDS -x HOROVOD_STALL_SHUTDOWN_TIME_SECONDS -x HOROVOD_NUM_NCCL_STREAMS -x HOROVOD_MLSL_BGT_AFFINITY -x HOROVOD_FUSION_THRESHOLD=67108864 -x NHWC_BATCHNORM_LAUNCH_MARGIN=32 -x MXNET_EXEC_BULK_EXEC_MAX_NODE_TRAIN_FWD=999 -x MXNET_EXEC_BULK_EXEC_MAX_NODE_TRAIN_BWD=25 python train_imagenet.py --gpus 0,1,2,3 --batch-size 208 --kv-store horovod --lr 9 --lr-step-epochs pow2 --lars-eta 0.001 --label-smoothing 
0.1 --wd 0.0002 --warmup-epochs 5 --eval-period 4 --eval-offset 3 --optimizer sgdwfastlars --network resnet-v1b-normconv-fl --num-layers 50 --num-epochs 44 --accuracy-threshold 0.759 --seed 1 --dtype float16 --use-dali --disp-batches 20 --image-shape 4,224,224 --fuse-bn-relu 1 --fuse-bn-add-relu 1 --min-random-area 0.05 --max-random-area 1.0 --conv-algo 1 --force-tensor-core 1 --input-layout NHWC --conv-layout NHWC --batchnorm-layout NHWC --pooling-layout NHWC --batchnorm-mom 0.9 --batchnorm-eps 1e-5 --data-train /data/aimatrix-source-data/imagenet-mxnet/train.rec --data-train-idx /data/aimatrix-source-data/imagenet-mxnet/train.idx --data-val /data/aimatrix-source-data/imagenet-mxnet/val.rec --data-val-idx /data/aimatrix-source-data/imagenet-mxnet/val.idx --dali-prefetch-queue 2 --dali-nvjpeg-memory-padding 256 --dali-threads 3 --dali-cache-size 0 --dali-roi-decode 1 2>&1 | tee 4nodeepoch44_lr9.log
|
package pl.allegro.tech.hermes.management.infrastructure.zookeeper;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.curator.framework.CuratorFramework;
import pl.allegro.tech.hermes.common.admin.AdminTool;
import pl.allegro.tech.hermes.common.admin.zookeeper.ZookeeperAdminTool;
import pl.allegro.tech.hermes.common.exception.InternalProcessingException;
import pl.allegro.tech.hermes.common.kafka.offset.SubscriptionOffsetChangeIndicator;
import pl.allegro.tech.hermes.common.message.undelivered.UndeliveredMessageLog;
import pl.allegro.tech.hermes.common.message.undelivered.ZookeeperUndeliveredMessageLog;
import pl.allegro.tech.hermes.domain.CredentialsRepository;
import pl.allegro.tech.hermes.domain.readiness.ReadinessRepository;
import pl.allegro.tech.hermes.domain.group.GroupRepository;
import pl.allegro.tech.hermes.domain.oauth.OAuthProviderRepository;
import pl.allegro.tech.hermes.domain.subscription.SubscriptionRepository;
import pl.allegro.tech.hermes.domain.topic.TopicRepository;
import pl.allegro.tech.hermes.domain.topic.preview.MessagePreviewRepository;
import pl.allegro.tech.hermes.domain.workload.constraints.WorkloadConstraintsRepository;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperCredentialsRepository;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperMessagePreviewRepository;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperOAuthProviderRepository;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperPaths;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperDatacenterReadinessRepository;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperSubscriptionOffsetChangeIndicator;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperSubscriptionRepository;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperTopicRepository;
import pl.allegro.tech.hermes.infrastructure.zookeeper.ZookeeperWorkloadConstraintsRepository;
import pl.allegro.tech.hermes.management.config.storage.ZookeeperGroupRepositoryFactory;
import pl.allegro.tech.hermes.management.domain.blacklist.TopicBlacklistRepository;
import pl.allegro.tech.hermes.management.domain.dc.DatacenterBoundRepositoryHolder;
import pl.allegro.tech.hermes.management.domain.dc.RepositoryManager;
import pl.allegro.tech.hermes.management.infrastructure.blacklist.ZookeeperTopicBlacklistRepository;
import pl.allegro.tech.hermes.management.infrastructure.dc.DatacenterNameProvider;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Zookeeper-backed implementation of {@link RepositoryManager}.
 *
 * Builds one instance of every Hermes repository for each datacenter known to the
 * {@link ZookeeperClientManager}, and serves them either bound to the local
 * datacenter ({@link #getLocalRepository}) or as a datacenter-ordered list
 * ({@link #getRepositories}). Repositories are created in {@link #start()}; until
 * it has run, every lookup fails because the per-datacenter maps are empty.
 */
public class ZookeeperRepositoryManager implements RepositoryManager {

    private final DatacenterNameProvider datacenterNameProvider;
    private final ObjectMapper mapper;
    private final ZookeeperPaths paths;
    private final ZookeeperClientManager clientManager;
    private ZookeeperGroupRepositoryFactory zookeeperGroupRepositoryFactory;
    // Passed straight to ZookeeperAdminTool -- presumably a reaper interval in
    // milliseconds; TODO confirm the unit against ZookeeperAdminTool.
    private Integer adminReaperInterval;

    // Maps each repository interface to its per-datacenter map below;
    // populated once in initRepositoryTypeMap(), read by getRepositoriesByType().
    private final Map<Class<?>, Object> repositoryByType = new HashMap<>();

    // One map per repository kind, keyed by datacenter name; filled in start().
    private final Map<String, GroupRepository> groupRepositoriesByDc = new HashMap<>();
    private final Map<String, CredentialsRepository> credentialsRepositoriesByDc = new HashMap<>();
    private final Map<String, TopicRepository> topicRepositoriesByDc = new HashMap<>();
    private final Map<String, SubscriptionRepository> subscriptionRepositoriesByDc = new HashMap<>();
    private final Map<String, OAuthProviderRepository> oAuthProviderRepositoriesByDc = new HashMap<>();
    private final Map<String, SubscriptionOffsetChangeIndicator> offsetChangeIndicatorsByDc = new HashMap<>();
    private final Map<String, MessagePreviewRepository> messagePreviewRepositoriesByDc = new HashMap<>();
    private final Map<String, TopicBlacklistRepository> topicBlacklistRepositoriesByDc = new HashMap<>();
    private final Map<String, WorkloadConstraintsRepository> workloadConstraintsRepositoriesByDc = new HashMap<>();
    private final Map<String, UndeliveredMessageLog> undeliveredMessageLogsByDc = new HashMap<>();
    private final Map<String, AdminTool> adminToolByDc = new HashMap<>();
    private final Map<String, ReadinessRepository> readinessRepositoriesByDc = new HashMap<>();

    public ZookeeperRepositoryManager(ZookeeperClientManager clientManager,
                                      DatacenterNameProvider datacenterNameProvider,
                                      ObjectMapper mapper,
                                      ZookeeperPaths paths,
                                      ZookeeperGroupRepositoryFactory zookeeperGroupRepositoryFactory,
                                      Integer adminReaperInterval) {
        this.datacenterNameProvider = datacenterNameProvider;
        this.mapper = mapper;
        this.paths = paths;
        this.clientManager = clientManager;
        this.zookeeperGroupRepositoryFactory = zookeeperGroupRepositoryFactory;
        this.adminReaperInterval = adminReaperInterval;
        // Only wires the type->map lookup table; actual repositories are built in start().
        initRepositoryTypeMap();
    }

    /**
     * Creates the full repository set for every configured Zookeeper client
     * (one client per datacenter) and starts that datacenter's admin tool.
     */
    public void start() {
        for (ZookeeperClient client : clientManager.getClients()) {
            String dcName = client.getDatacenterName();
            CuratorFramework zookeeper = client.getCuratorFramework();
            // Built in dependency order: topics need groups, subscriptions need
            // topics, and the offset indicator needs subscriptions.
            GroupRepository groupRepository = zookeeperGroupRepositoryFactory.create(zookeeper, mapper, paths);
            CredentialsRepository credentialsRepository = new ZookeeperCredentialsRepository(zookeeper, mapper, paths);
            TopicRepository topicRepository = new ZookeeperTopicRepository(zookeeper, mapper, paths, groupRepository);
            SubscriptionRepository subscriptionRepository = new ZookeeperSubscriptionRepository(zookeeper, mapper,
                    paths, topicRepository);
            OAuthProviderRepository oAuthProviderRepository = new ZookeeperOAuthProviderRepository(zookeeper, mapper,
                    paths);
            SubscriptionOffsetChangeIndicator offsetChangeIndicator =
                    new ZookeeperSubscriptionOffsetChangeIndicator(zookeeper, paths, subscriptionRepository);
            MessagePreviewRepository messagePreviewRepository = new ZookeeperMessagePreviewRepository(zookeeper, mapper,
                    paths);
            TopicBlacklistRepository topicBlacklistRepository = new ZookeeperTopicBlacklistRepository(zookeeper, mapper,
                    paths);
            WorkloadConstraintsRepository workloadConstraintsRepository = new ZookeeperWorkloadConstraintsRepository(
                    zookeeper, mapper, paths);
            UndeliveredMessageLog undeliveredMessageLog = new ZookeeperUndeliveredMessageLog(zookeeper, paths, mapper);
            AdminTool adminTool = new ZookeeperAdminTool(paths, client.getCuratorFramework(),
                    mapper, adminReaperInterval);
            ReadinessRepository readinessRepository = new ZookeeperDatacenterReadinessRepository(zookeeper, mapper, paths);
            // The admin tool is the only repository-like object with a lifecycle of its own.
            adminTool.start();

            groupRepositoriesByDc.put(dcName, groupRepository);
            credentialsRepositoriesByDc.put(dcName, credentialsRepository);
            topicRepositoriesByDc.put(dcName, topicRepository);
            subscriptionRepositoriesByDc.put(dcName, subscriptionRepository);
            oAuthProviderRepositoriesByDc.put(dcName, oAuthProviderRepository);
            offsetChangeIndicatorsByDc.put(dcName, offsetChangeIndicator);
            messagePreviewRepositoriesByDc.put(dcName, messagePreviewRepository);
            topicBlacklistRepositoriesByDc.put(dcName, topicBlacklistRepository);
            workloadConstraintsRepositoriesByDc.put(dcName, workloadConstraintsRepository);
            undeliveredMessageLogsByDc.put(dcName, undeliveredMessageLog);
            adminToolByDc.put(dcName, adminTool);
            readinessRepositoriesByDc.put(dcName, readinessRepository);
        }
    }

    /**
     * Returns the repository of the given type bound to the local datacenter
     * (as reported by the {@link DatacenterNameProvider}).
     *
     * @throws InternalProcessingException when no repository of that type exists
     *         for the local datacenter (unknown type, or start() not yet called)
     */
    public <T> DatacenterBoundRepositoryHolder<T> getLocalRepository(Class<T> repositoryType) {
        String dcName = datacenterNameProvider.getDatacenterName();
        T repository = getRepositoriesByType(repositoryType).get(dcName);
        if (repository == null) {
            throw new InternalProcessingException("Failed to find '" + repositoryType.getSimpleName() +
                    "' bound with DC '" + dcName + "'.");
        }
        return new DatacenterBoundRepositoryHolder<>(repository, dcName);
    }

    /**
     * Returns the repositories of the given type for every datacenter,
     * sorted by datacenter name so the order is deterministic.
     */
    public <T> List<DatacenterBoundRepositoryHolder<T>> getRepositories(Class<T> repositoryType) {
        return getRepositoriesByType(repositoryType)
                .entrySet()
                .stream()
                .sorted(Comparator.comparing(Map.Entry::getKey))
                .map(entry -> new DatacenterBoundRepositoryHolder<>(entry.getValue(), entry.getKey()))
                .collect(Collectors.toList());
    }

    // The cast is safe by construction: initRepositoryTypeMap() only ever pairs
    // a repository class with its matching Map<String, T>.
    @SuppressWarnings("unchecked")
    private <T> Map<String, T> getRepositoriesByType(Class<T> type) {
        Object repository = repositoryByType.get(type);
        if (repository == null) {
            throw new InternalProcessingException("Could not provide repository of type: " + type.getName());
        }
        return (Map<String, T>) repository;
    }

    // Registers every supported repository interface; types absent from this map
    // cannot be requested through this manager.
    private void initRepositoryTypeMap() {
        repositoryByType.put(GroupRepository.class, groupRepositoriesByDc);
        repositoryByType.put(CredentialsRepository.class, credentialsRepositoriesByDc);
        repositoryByType.put(TopicRepository.class, topicRepositoriesByDc);
        repositoryByType.put(SubscriptionRepository.class, subscriptionRepositoriesByDc);
        repositoryByType.put(OAuthProviderRepository.class, oAuthProviderRepositoriesByDc);
        repositoryByType.put(SubscriptionOffsetChangeIndicator.class, offsetChangeIndicatorsByDc);
        repositoryByType.put(MessagePreviewRepository.class, messagePreviewRepositoriesByDc);
        repositoryByType.put(TopicBlacklistRepository.class, topicBlacklistRepositoriesByDc);
        repositoryByType.put(WorkloadConstraintsRepository.class, workloadConstraintsRepositoriesByDc);
        repositoryByType.put(UndeliveredMessageLog.class, undeliveredMessageLogsByDc);
        repositoryByType.put(AdminTool.class, adminToolByDc);
        repositoryByType.put(ReadinessRepository.class, readinessRepositoriesByDc);
    }
}
|
// Doxygen-generated search index fragment; regenerated on every doc build --
// do not edit by hand.
var searchData=
[
  ['opsick_20client_20library_20_2d_20api_20documentation_0',['Opsick Client Library - API Documentation',['../index.html',1,'']]]
];
|
<reponame>carlos-menezes/hastebincli
#!/usr/bin/env node
const axios = require('axios');
const yargs = require('yargs');
const fs = require('fs');
const HASTEBIN_URL = 'https://hastebin.com/documents';
const options = yargs
.usage('Usage: <option> <data>')
.option('t', { alias: 'text', describe: 'The text to write to Hastebin', type: 'string' })
.option('f', { alias: 'file', describe: 'The file to read and write to Hastebin', type: 'string' })
.argv;
if (process.argv.length < 3) { // Change
return yargs.showHelp();
}
if (options.text) {
const data = process.argv.slice(3).join(' ');
postData(data);
return ;
} else if (options.file) {
let data;
try {
data = fs.readFileSync(process.argv[3]);
} catch (error) {
console.error('ERROR: The specified file does not exist.');
return;
}
const splitFile = options.file.split('.');
const fileExt = splitFile[splitFile.length - 1];
postData(data, fileExt);
return;
}
// POST the payload to Hastebin and print the resulting paste URL.
// `extension` (optional) is appended to the URL to enable syntax highlighting.
function postData(data, extension = false) {
    axios.post(HASTEBIN_URL, data)
        .then((res) => {
            if (!extension) {
                console.log(`PASTE CREATED | URL: https://hastebin.com/${res.data.key}`);
                console.log(`TIP: You can append any short programming language name to the url to enable syntax highlighting. e.g. https://hastebin.com/${res.data.key}.js`)
            } else {
                console.log(`PASTE CREATED | URL: https://hastebin.com/${res.data.key}.${extension}`);
            }
        })
        .catch((error) => {
            // BUGFIX: report network/API failures instead of dying with an
            // unhandled promise rejection.
            console.error(`ERROR: Failed to create paste (${error.message}).`);
        });
}
|
/// Minimal model object describing the dimensions of a 2-D matrix.
/// Scalar properties use `assign` semantics since NSInteger is not an object.
@interface Matrix : NSObject
/// Number of rows in the matrix.
@property (assign) NSInteger rows;
/// Number of columns in the matrix.
@property (assign) NSInteger columns;
@end
|
<reponame>Afrah-Abdulmajid/CS-Operating-Systems<filename>Assignment 4/semaphores.java
import java. util.Scanner;
import java.lang.System;
import java.lang.System;
import java.util.concurrent.Semaphore;
import java.util.LinkedList;
import java.util.Queue;
/**
 * Semaphore helper for the OS assignment: wraps a single-permit (binary)
 * {@link Semaphore} together with a FIFO queue of blocked processes.
 *
 * NOTE(review): the semaphore field is static, so every instance shares the
 * same permit state while each instance has its own queue -- confirm intended.
 */
class semaphores{

    // FIFO queue of processes recorded as waiting on the semaphore.
    Queue <Process> queue;
    // Last process handed to getProcess(); echoed back by it.
    Process p ;
    // Single shared permit; static, so shared across all instances.
    private static Semaphore s;

    public semaphores(){
        // One permit => behaves as a binary semaphore.
        s = new Semaphore(1) ;
        queue = new LinkedList<Process>();
    }

    public Semaphore getSemaphore(){
        return s;
    }

    // Stores the given process and returns it unchanged.
    public Process getProcess(Process h){
        p=h;
        return p;
    }

    /**
     * Blocks process {@code p} on {@code sem} and records it in the queue.
     * NOTE(review): acquire() is attempted only when availablePermits() == 0,
     * which is inverted relative to classic semaphore wait semantics (wait
     * normally always consumes a permit) -- confirm intended behavior.
     */
    public void wait(Semaphore sem,Process p){
        if(sem.availablePermits() == 0){
            try{
                sem.acquire();
                queue.add(p);
            }
            catch(Exception e){
                e.printStackTrace();
            }
        }
    }

    public Queue<Process> getQProcess(){
        return queue;
    }

    /**
     * Releases {@code sem} and removes the head of the waiting queue, if any.
     * NOTE(review): release() happens only when availablePermits() == 0;
     * confirm that signalling with a free permit should really be a no-op.
     */
    public void signal(Semaphore sem, Process p){
        if(sem.availablePermits()== 0){
            try{
                sem.release();
                if(!queue.isEmpty())
                    queue.remove();
            }
            catch(Exception e){
                e.printStackTrace();
            }
        }
    }
}
|
@app.route('/user', methods=['POST'])
def create_user():
    """Create a user from a JSON body containing 'name', 'email' and 'password'.

    Returns 400 for a missing/invalid JSON body or missing fields, so malformed
    requests no longer surface as a KeyError (HTTP 500).
    """
    data = request.get_json(silent=True)
    if not data:
        return jsonify({'message': 'Request body must be valid JSON'}), 400
    missing = [field for field in ('name', 'email', 'password') if field not in data]
    if missing:
        return jsonify({'message': 'Missing fields: ' + ', '.join(missing)}), 400
    name = data['name']
    email = data['email']
    password = data['password']
    # ... create the user ...
    return jsonify({'message': 'User successfully created'})
|
#!/bin/bash
# Travis CI deploy script: builds the Sculpin site and pushes the generated
# output to the GitHub Pages branch using an SSH deploy key.
set -e # Exit with nonzero exit code if anything fails

SOURCE_BRANCH="master"
TARGET_BRANCH="master"

# Build the static site (into output_prod/) with Sculpin's production env.
function generate {
    php bin/sculpin generate --env=prod
}

# Pull requests and commits to other branches shouldn't try to deploy, just build to verify
if [ "$TRAVIS_PULL_REQUEST" != "false" -o "$TRAVIS_BRANCH" != "$SOURCE_BRANCH" ]; then
    echo "Skipping deploy; just doing a build."
    generate
    exit 0
fi

# Save some useful information
REPO="https://github.com/dloranc/dloranc.github.io.git"
# Rewrite the HTTPS remote into its SSH form so we can push with the deploy key.
SSH_REPO=${REPO/https:\/\/github.com\//git@github.com:}
SHA=`git rev-parse --verify HEAD`

# Clone the existing master for this repo into output_prod/
# Create a new empty branch if master doesn't exist yet (should only happen on first deploy)
git clone $REPO output_prod/
cd output_prod/
git checkout $TARGET_BRANCH || git checkout --orphan $TARGET_BRANCH

# Clean out existing contents (everything at the top level except .git).
find * -maxdepth 0 -name '.git' -prune -o -exec rm -rf '{}' ';' || exit 0
cd ..

# Run our compile script
generate

# Now let's go have some fun with the cloned repo
cd output_prod
git config user.name "Travis CI"
git config user.email "$COMMIT_AUTHOR_EMAIL"

# If there are no changes to the compiled output_prod (e.g. this is a README update) then just bail.
if git diff --quiet; then
    echo "No changes to the output on this push; exiting."
    exit 0
fi

# Commit the "changes", i.e. the new version.
# The delta will show diffs between new and old versions.
git add -A .
git commit -m "Deploy to GitHub Pages: ${SHA}"

# The deploy key must be readable only by the current user or ssh refuses it.
chmod 600 ../deploy_key
eval `ssh-agent -s`
ssh-add ../deploy_key

# Now that we're all set up, we can push.
git push $SSH_REPO $TARGET_BRANCH
|
import matplotlib.pyplot as plt

# Plot each person's age as a labelled bar chart.
people = ['John', 'Jane', 'Alex']
ages = [20, 18, 24]

plt.bar(people, ages)
plt.xlabel('Name')
plt.ylabel('Age')
plt.title('Age of Persons')
plt.show()
|
"""Methods for working with data and numpy arrays"""
import warnings
import numpy as np
def arr_sample(arr, rate):
    """Return an array linearly sampled from the input array at the given rate.

    Parameters
    ----------
    arr : numpy.ndarray
        One-dimensional array to sample from.
    rate : int | float
        Step between consecutive sample positions; rate > 1 downsamples,
        rate < 1 upsamples by repeating elements.

    Examples
    --------
    * [1, 2, 3, 4] and rate 2 -> [1, 3]
    * [1, 2, 3, 4] and rate 0.5 -> [1, 1, 2, 2, 3, 3, 4, 4]
    """
    if arr.ndim != 1:
        raise ValueError("Only 1d arrays can be sampled from.")
    i = 0
    out = []
    while i < arr.shape[0]:
        # BUGFIX: `np.int` was removed in NumPy 1.24; use the builtin int.
        out.append(arr[int(np.floor(i))])
        i += rate
    return np.array(out)
def factors(x):
    """Return the factors of x.

    Parameters
    ----------
    x : int
        The number to factorize. Must be a non-zero integer

    Returns
    -------
    factors : set
        The set of factors for x

    Raises
    ------
    ValueError
        If x is zero or not an integral value.
    """
    if x == 0 or x % 1 != 0:
        raise ValueError("Factors can only be found with non-zero integers")
    if x < 0:
        x = np.abs(x)
        warnings.warn("Only positive factors will be returned, but negative numbers have a positive and negative factor for each.", UserWarning)
    result = {1, x}
    # Trial division up to sqrt(x); each hit contributes a factor pair.
    for i in range(2, int(np.sqrt(x) + 1)):
        # Exact modulo test instead of comparing float division with truncation,
        # which could misbehave for large values due to float rounding.
        if x % i == 0:
            result.add(int(i))
            result.add(int(x // i))
    return result
def flip_dict(dict, unique_items=False, force_list_values=False):
    """Swap keys and values in a dictionary.

    Parameters
    ----------
    dict: dictionary
        dictionary object to flip
    unique_items: bool
        whether to assume that all items in dict are unique, potential speedup but repeated items will be lost
    force_list_values: bool
        whether to force all items in the result to be lists or to let unique items have unwrapped values. Doesn't apply if unique_items is true.
    """
    if unique_items:
        # Fast path: a plain comprehension; later duplicates overwrite earlier ones.
        return {value: key for key, value in dict.items()}
    if force_list_values:
        # Every flipped value is collected into a list, even singletons.
        flipped = {}
        for key, value in dict.items():
            flipped.setdefault(value, []).append(key)
        return flipped
    # Mixed mode: unique values stay unwrapped, duplicates grow into lists.
    flipped = {}
    for key, value in dict.items():
        if value not in flipped:
            flipped[value] = key
        elif isinstance(flipped[value], list):
            flipped[value].append(key)
        else:
            flipped[value] = [flipped[value], key]
    return flipped
def num_digits(x):
    """Return how many digits the integer part of x has (sign excluded); zero counts as one digit."""
    if x == 0:
        return 1
    magnitude = np.log10(np.abs(x) + 1)
    return int(np.ceil(magnitude))
def prime_factors(x):
    """Return the prime factorization of x.

    Parameters
    ----------
    x : int
        The number to factorize. Must be a non-zero integer

    Returns
    -------
    prime_factors : list
        The list of prime factors. Repeated factors will occur multiple times in the list.

    Raises
    ------
    ValueError
        If x is zero or not an integral value.
    """
    if x == 0 or x % 1 != 0:
        raise ValueError("Factors can only be found with non-zero integers")
    if x < 0:
        x = np.abs(x)
        warnings.warn("Only positive factors will be returned, but negative numbers have a positive and negative factor for each.", UserWarning)
    # Repeatedly split composites until only primes remain.
    pending = [x]
    primes = []
    while pending:
        check = pending.pop()
        for i in range(2, int(np.sqrt(check) + 1)):
            # Exact modulo test instead of comparing float division with
            # truncation, which can misbehave for large values.
            if check % i == 0:
                pending.extend([i, int(check // i)])
                break
        else:
            # No divisor found up to sqrt(check) => check is prime (or 1).
            primes.append(check)
    return sorted(primes)
def prime_overlap(x, y):
    """Return the prime factors x and y have in common.

    Parameters
    ----------
    x : int
        The first number to factorize
    y: int
        The second number to factorize

    Returns
    -------
    overlap : list
        The list of common factors. Repeated factors are included for the number of common repeats.
    """
    remaining_x = prime_factors(x)
    remaining_y = prime_factors(y)
    shared = []
    # Consume factors of x one at a time, matching each against y's multiset.
    while remaining_x and remaining_y:
        candidate = remaining_x.pop()
        if candidate in remaining_y:
            shared.append(candidate)
            remaining_y.remove(candidate)
    return sorted(shared)
def rescale(data, new_min=0, new_max=1, axis=None):
    """Rescales data to have range [new_min, new_max] along axis or axes indicated.

    Constant slices (zero range) map to new_min rather than dividing by zero.
    """
    data = np.asarray(data)
    if np.issubdtype(data.dtype, np.integer):
        # BUGFIX: `np.float` was removed in NumPy 1.24; use the builtin float.
        data = data.astype(float)
    # Hoist the min so it is computed once instead of twice.
    data_min = np.min(data, axis=axis, keepdims=True)
    data_range = np.max(data, axis=axis, keepdims=True) - data_min
    x = np.divide(data - data_min, data_range, where=data_range > 0, out=np.zeros_like(data))
    new_range = new_max - new_min
    return (x * new_range) + new_min
def clip(data, output_min=0, output_max=1, input_min=0, input_max=255):
    """Clip an array to a given range, then rescale the clipped array from the input range to the output range.

    Parameters
    ----------
    data : numpy.ndarray
        The data to rescale (docstring previously named this ``image``)
    output_min : int | float | np.number
        The minimum value for the output data (the default is 0)
    output_max : int | float | np.number
        The maximum value for the output data (the default is 1)
    input_min : int | float | np.number
        The minimum value for the input data range (the default is 0)
    input_max : int | float | np.number
        The maximum value for the input data range (the default is 255)
    """
    data = np.clip(data, input_min, input_max)
    scaler = (output_max - output_min) / (input_max - input_min)
    # Simplified affine offset: equivalent to (data - input_min) * scaler + output_min.
    bias = output_min - input_min * scaler
    return data * scaler + bias
def sigmoid(x, epsilon=1e-7):
    """Return the sigmoid of the given value/array (epsilon keeps the output strictly inside (0, 1))."""
    denominator = 1.0 + np.exp(-x) + epsilon
    return (1.0 + epsilon) / denominator
def inv_sigmoid(x, epsilon=1e-7):
    """Return the inverse of the sigmoid function for the given value/array (epsilon guards the endpoints)."""
    numerator = x + epsilon
    denominator = 1 - x + epsilon
    return np.log(numerator / denominator)
def softmax(x, axis=None):
    """Return the softmax of the array

    Parameters
    ----------
    x : numpy.ndarray
        The data to apply the softmax to
    axis : int | list of ints
        The axis or axes to apply softmax across
    """
    x = np.asarray(x)
    if np.issubdtype(x.dtype, np.integer):
        # BUGFIX: `np.float` was removed in NumPy 1.24; use the builtin float.
        x = x.astype(float)
    # Subtract the max before exponentiating for numerical stability.
    s = np.max(x, axis=axis, keepdims=True)
    e_x = np.exp(x - s)
    div = np.sum(e_x, axis=axis, keepdims=True)
    return np.divide(e_x, div, where=div != 0, out=np.zeros_like(x))
def normalize(data, axis=None):
    """Return data normalized to have zero mean and unit variance along axis or axes indicated.

    Constant slices (zero standard deviation) map to zero instead of dividing by zero.
    """
    data = np.asarray(data)
    if np.issubdtype(data.dtype, np.integer):
        # BUGFIX: `np.float` was removed in NumPy 1.24; use the builtin float.
        data = data.astype(float)
    mean = np.mean(data, axis=axis, keepdims=True)
    stddev = np.std(data, axis=axis, keepdims=True)
    return np.divide(data - mean, stddev, where=stddev != 0, out=np.zeros_like(data))
def roc_curve(label, pred, as_rates=True):
    """Get the ROC curve for the data.

    Parameters
    ----------
    label : numpy.ndarray
        The ground truth values
    pred : numpy.ndarray
        The predicted values
    as_rate : bool
        Whether to return true/false positive rates or scores (the default is True)

    Returns
    -------
    fps : numpy.ndarray
        The false positive rates/scores
    tps : numpy.ndarray
        The true positive rates/scores
    thresh : numpy.ndarray
        The thresholds for each fps/tps
    """
    # Coerce both inputs to flat numpy arrays so fancy indexing below is uniform.
    if not isinstance(label, np.ndarray):
        label = np.array(label)
    if not isinstance(pred, np.ndarray):
        pred = np.array(pred)
    label = np.ravel(label)
    pred = np.ravel(pred)
    # Sort by descending score (mergesort keeps tied scores in stable order).
    desc_score_indices = np.argsort(pred, kind='mergesort')[::-1]
    y_score = pred[desc_score_indices]
    y_true = label[desc_score_indices]
    # One candidate threshold per distinct score value (positions where the
    # sorted score changes), plus the final position.
    distinct_idx = np.where(np.diff(y_score))[0]
    thresh_idx = np.concatenate([distinct_idx, [y_true.size - 1]])
    # Cumulative true positives when everything up to each position is predicted positive.
    tps = np.cumsum(y_true)
    # expected = np.sum(y_true)
    tps = tps[thresh_idx]
    # Predicted positives that are not true positives are false positives.
    fps = 1 + thresh_idx - tps
    thresh = y_score[thresh_idx]
    # Prepend the "nothing predicted positive" point so the curve starts at the origin.
    tps = np.concatenate(([0], tps))
    fps = np.concatenate(([0], fps))
    thresh = np.concatenate(([1], thresh))
    if as_rates:
        # Normalize counts by their final totals to obtain rates in [0, 1].
        fpr = fps / fps[-1]
        tpr = tps / tps[-1]
        return fpr, tpr, thresh
    else:
        return fps, tps, thresh
def mcc_curve(label, pred, optimal_only=False):
    """Get the Matthew's Correlation Coefficient for different thresholds

    Parameters
    ----------
    label : numpy.ndarray
        Expected labels for the data samples
    pred : numpy.ndarray
        Predicted labels for the data samples
    optimal_only : bool
        If true, returns only the value and threshold for the greatest MCC value
    """
    # Derive raw positive counts from the ROC machinery, then score them.
    false_pos, true_pos, thresholds = roc_curve(label, pred, as_rates=False)
    return optimal_mcc_from_roc(false_pos, true_pos, thresholds, optimal_only=optimal_only)
def optimal_mcc_from_roc(fps, tps, thresholds, optimal_only=True):
    """Get the Matthew's Correlation Coefficient for different thresholds

    Parameters
    ----------
    fps : numpy.ndarray
        False positive scores from the roc curve
    tps : numpy.ndarray
        True positive scores from the roc curve
    thresholds : numpy.ndarray
        Thresholds from the roc curve
    optimal_only : bool
        If true, returns only the value and threshold for the greatest MCC value
    """
    # Totals: last ROC point counts every positive and negative prediction.
    total = tps[-1] + fps[-1]
    S = tps[-1] / total
    P = (fps + tps) / total
    numerator = (tps / total) - (S * P)
    denominator = np.sqrt(P * S * (1 - S) * (1 - P))
    # Zero denominator (degenerate threshold) maps to an MCC of 0.
    mcc = np.divide(numerator, denominator, out=np.zeros_like(numerator), where=denominator != 0)
    if not optimal_only:
        return mcc, thresholds
    best = np.argmax(mcc)
    return mcc[best], thresholds[best]
def accuracy_curve(label, pred, return_peak=False):
    """Get the accuracy values for each possible threshold in the predictions.

    Parameters
    ----------
    label : numpy.ndarray
        The true values for each sample in the data.
    pred : numpy.ndarray
        The predicted values for each sample in the data
    return_peak : bool
        Whether to return the peak accuracy and best threshold for the data as well as the curve
    """
    # Coerce to numpy arrays so fancy indexing works uniformly.
    if not isinstance(label, np.ndarray):
        label = np.array(label)
    if not isinstance(pred, np.ndarray):
        pred = np.array(pred)
    # Sort by descending score (mergesort keeps tied scores in stable order).
    desc_score_indices = np.argsort(pred, kind='mergesort')[::-1]
    y_score = pred[desc_score_indices]
    y_true = label[desc_score_indices]
    # One candidate threshold per distinct score value, plus the final position.
    distinct_idx = np.where(np.diff(y_score))[0]
    thresh_idx = np.concatenate([distinct_idx, [y_true.size - 1]])
    thresh = y_score[thresh_idx]
    # True positives up to (and including) each threshold position.
    tps = np.cumsum(y_true)[thresh_idx]
    # Count of negative samples from each position onward (reverse cumulative sum).
    # NOTE(review): this suffix count includes the threshold sample itself --
    # confirm the intended at/above-threshold convention.
    tns = np.cumsum((1 - y_true)[::-1])[::-1][thresh_idx]
    correct = tps + tns
    acc = correct / label.size
    if return_peak:
        peak = np.argmax(acc)
        return acc, thresh, acc[peak], thresh[peak]
    return acc, thresh
def spec_at_sens(expected, predicted, sensitivities=(0.95,)):
    """Get the peak specificity for each sensitivity.

    Parameters
    ----------
    expected : numpy.ndarray
        Ground-truth labels.
    predicted : numpy.ndarray
        Predicted scores.
    sensitivities : iterable | float
        Minimum sensitivities to evaluate; a bare number is also accepted.
        The default is now an immutable tuple instead of a mutable list
        (avoids the shared-mutable-default pitfall; behavior is unchanged).
    """
    if not hasattr(sensitivities, '__iter__'):
        sensitivities = [sensitivities]
    fpr, tpr, thresholds = roc_curve(expected, predicted)
    # Best specificity (1 - FPR) among points meeting each sensitivity floor.
    specs = [np.max((1 - fpr)[tpr >= min_sens]) for min_sens in sensitivities]
    return specs
def get_confusion_stats(label, pred, threshold=0.5):
    """Get the true positive, false positive, true negative, and false negative values for the given data"""
    truth = np.squeeze(label).astype(bool)
    positive = np.squeeze(pred) >= threshold
    # Confusion cells: positives/negatives split by agreement with the truth.
    true_pos = np.logical_and(truth, positive).sum()
    true_neg = np.logical_and(~truth, ~positive).sum()
    false_pos = positive.sum() - true_pos
    false_neg = (~positive).sum() - true_neg
    return true_pos, false_pos, true_neg, false_neg
def dice_coef(label, pred, threshold=0.5):
    """Get the Sorenson Dice Coefficient for the given data"""
    tp, fp, tn, fn = get_confusion_stats(label, pred, threshold)
    denominator = 2 * tp + fp + fn
    # Empty union (no positives anywhere) is defined as 0 rather than dividing by zero.
    if denominator == 0:
        return 0
    return (2 * tp) / denominator
def jaccard_coef(label, pred, threshold=0.5):
    """Get the Jaccard Coefficient for the given data"""
    tp, fp, tn, fn = get_confusion_stats(label, pred, threshold)
    denominator = tp + fn + fp
    # Empty union (no positives anywhere) is defined as 0 rather than dividing by zero.
    if denominator == 0:
        return 0
    return tp / denominator
def value_crossing(array, threshold=0, positive_crossing=True, negative_crossing=True, return_indices=False):
    """Get the count of instances where a series crosses a value.

    Samples exactly equal to the threshold are ignored, so a sequence that
    merely touches the threshold (e.g. [1, 0, 1]) does not count as crossing
    it. (BUGFIX: previously only the ``return_indices`` path filtered these
    samples, so the count and the indices could disagree for the same input.)

    Parameters
    ----------
    array : np.ndarray
        A sequential array of values
    threshold : int | float
        The value used as a crossing point (the default is 0)
    positive_crossing : bool
        Whether to count when the sequence goes from less than to greater than the threshold value (the default is True)
    negative_crossing : bool
        Whether to count when the sequence goes from greater than to less than the threshold value (the default is True)
    return_indices : bool
        Whether to return the indices of the points immediately before the crossings
    """
    if not isinstance(array, np.ndarray):
        array = np.array(array)
    # Drop samples equal to the threshold (both paths now agree on this).
    keep = array != threshold
    idxs = np.arange(array.size)[keep]
    array = array[keep]
    pos = array > threshold
    npos = ~pos
    if positive_crossing and negative_crossing:
        crossing = (pos[:-1] & npos[1:]) | (npos[:-1] & pos[1:])
    elif negative_crossing:
        crossing = (pos[:-1] & npos[1:])
    elif positive_crossing:
        crossing = (npos[:-1] & pos[1:])
    else:
        raise ValueError('Either positive and/or negative crossings must be used')
    if return_indices:
        # Pad with False so the mask lines up with the (pre-crossing) points.
        return idxs[np.concatenate([crossing, [False]])]
    return crossing.sum()
def center_of_mass(input_arr):
    """Find the continuous index of the center of mass for the input n-dimensional array"""
    masses = np.reshape(input_arr, [-1, 1])
    total = np.sum(masses)
    if total == 0:
        raise ValueError("Cannot find the center if the total mass is 0")
    # Build one coordinate column per axis, matching the flattened mass order.
    axes = [np.arange(length) for length in input_arr.shape]
    grids = np.meshgrid(*axes, indexing='ij')
    coords = np.stack([grid.ravel() for grid in grids], axis=-1)
    # Mass-weighted mean coordinate along every axis.
    return np.sum(masses * coords, axis=0) / total
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var bbox_1 = require("@antv/g2/lib/util/bbox");
var constant_1 = require("@antv/g2/lib/constant");
Object.defineProperty(exports, "DIRECTION", { enumerable: true, get: function () { return constant_1.DIRECTION; } });
var BBox = /** @class */ (function (_super) {
tslib_1.__extends(BBox, _super);
function BBox() {
return _super !== null && _super.apply(this, arguments) || this;
}
BBox.fromBBoxObject = function (bbox) {
return new BBox(bbox.x, bbox.y, bbox.width, bbox.height);
};
return BBox;
}(bbox_1.BBox));
exports.default = BBox;
//# sourceMappingURL=bbox.js.map
|
<reponame>kdubiel/bh-events<gh_stars>1-10
// Barrel file: re-export locale resources and supported-language metadata.
export * from './yupLocales';
export * from './en';
export * from './pl';
export * from './supportedLanguages';
|
<filename>splashpy/faker/model.py
#
# This file is part of SplashSync Project.
#
# Copyright (C) 2015-2020 Splash Sync <www.splashsync.com>
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
#
from abc import abstractmethod
from peewee import *
from splashpy.models.object import BaseObject
from splashpy.core.framework import Framework
from splashpy.models.objects.parser import SimpleFields
# Shared SQLite database used by all faker objects.
fakerDb = SqliteDatabase('faker.db')


class FakeObject(BaseObject, SimpleFields):
    """Base class for fake Splash objects persisted with peewee over SQLite.

    Subclasses provide the concrete peewee model via ``getObjectClass`` and a
    list-row converter via ``toListData``.
    """

    # ====================================================================#
    # Generic Fields Definition
    simpleFields = []
    boolFields = []
    intFields = []

    def __init__(self):
        pass

    @abstractmethod
    def getObjectClass(self):
        """Get Class of Faker Object"""
        raise NotImplementedError("Not implemented yet.")

    @staticmethod
    def toListData(faker_object):
        """Convert Faker Object to List Data"""
        raise NotImplementedError("Not implemented yet.")

    # ====================================================================#
    # Object CRUD
    # ====================================================================#

    def load(self, object_id):
        """Load Faker Object with Id; returns False when it does not exist."""
        try:
            return self.getObjectClass().get_by_id(object_id)
        except DoesNotExist:
            return False
        except Exception as exception:
            # BUGFIX: Python 3 exceptions have no `.message` attribute; log the
            # exception itself, as update()/delete() already do.
            return Framework.log().error(exception)

    def update(self, needed):
        """Update Current Faker Object"""
        try:
            self.object.save()
            return self.object.id
        except Exception as exception:
            return Framework.log().error(exception)

    def delete(self, object_id):
        """Delete Faker Object with Id; deleting a missing object succeeds."""
        try:
            fake_object = self.getObjectClass().get_by_id(object_id)
            fake_object.delete_instance()
            return True
        except DoesNotExist:
            # Already gone: treat as success (idempotent delete).
            return True
        except Exception as exception:
            return Framework.log().error(exception)

    def getObjectIdentifier(self):
        return self.object.id

    def objectsList(self, filter, params):
        """Return a page of objects as a dict of list rows plus a 'meta' entry."""
        # ====================================================================#
        # Prepare Search Settings (narrow except clauses instead of bare except)
        try:
            limit = params["max"]
        except (KeyError, TypeError):
            limit = 25
        try:
            offset = params["offset"]
        except (KeyError, TypeError):
            offset = 0
        # ====================================================================#
        # Execute Search Query
        query = self.getObjectClass().select().limit(limit).offset(offset)
        # ====================================================================#
        # Init Results
        objects = {}
        # ====================================================================#
        # Walk on Results
        try:
            for faker_object in query:
                objects["short-" + str(faker_object.id)] = self.toListData(faker_object)
        except Exception as exception:
            # BUGFIX: `.message` does not exist on Python 3 exceptions.
            Framework.log().error(exception)
        # ====================================================================#
        # Add Metadata
        objects['meta'] = {
            'current': query.count(),
            'total': self.getObjectClass().select().count()
        }
        return objects
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2012:1491
#
# Security announcement date: 2012-12-04 20:28:43 UTC
# Script generation date: 2017-01-17 21:18:08 UTC
#
# Operating System: Red Hat 6
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - kernel-rt-doc.noarch:3.2.33-rt50.66.el6rt
# - kernel-rt-firmware.noarch:3.2.33-rt50.66.el6rt
# - mrg-rt-release.noarch:3.2.33-rt50.66.el6rt
# - kernel-rt.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-debug.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-debug-debuginfo.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-debug-devel.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-debuginfo.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-debuginfo-common-x86_64.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-devel.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-trace.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-trace-debuginfo.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-trace-devel.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-vanilla.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-vanilla-debuginfo.x86_64:3.2.33-rt50.66.el6rt
# - kernel-rt-vanilla-devel.x86_64:3.2.33-rt50.66.el6rt
#
# Last versions recommanded by security team:
# - kernel-rt-doc.noarch:3.2.33-rt50.66.el6rt
# - kernel-rt-firmware.noarch:3.2.33-rt50.66.el6rt
# - mrg-rt-release.noarch:3.2.33-rt50.66.el6rt
# - kernel-rt.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debug.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debug-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debug-devel.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debuginfo-common-x86_64.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-devel.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-trace.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-trace-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-trace-devel.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-vanilla.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-vanilla-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-vanilla-devel.x86_64:3.10.0-514.rt56.210.el6rt
#
# CVE List:
# - CVE-2012-0957
# - CVE-2012-2133
# - CVE-2012-3400
# - CVE-2012-3430
# - CVE-2012-3511
# - CVE-2012-3520
# - CVE-2012-4508
# - CVE-2012-4565
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Apply the RHSA-2012:1491 package set, one yum transaction per package,
# in the same order as the original script.
for pkg in \
    kernel-rt-doc.noarch-3.2.33 \
    kernel-rt-firmware.noarch-3.2.33 \
    mrg-rt-release.noarch-3.2.33 \
    kernel-rt.x86_64-3.10.0 \
    kernel-rt-debug.x86_64-3.10.0 \
    kernel-rt-debug-debuginfo.x86_64-3.10.0 \
    kernel-rt-debug-devel.x86_64-3.10.0 \
    kernel-rt-debuginfo.x86_64-3.10.0 \
    kernel-rt-debuginfo-common-x86_64.x86_64-3.10.0 \
    kernel-rt-devel.x86_64-3.10.0 \
    kernel-rt-trace.x86_64-3.10.0 \
    kernel-rt-trace-debuginfo.x86_64-3.10.0 \
    kernel-rt-trace-devel.x86_64-3.10.0 \
    kernel-rt-vanilla.x86_64-3.10.0 \
    kernel-rt-vanilla-debuginfo.x86_64-3.10.0 \
    kernel-rt-vanilla-devel.x86_64-3.10.0
do
    sudo yum install "$pkg" -y
done
|
#!/bin/bash
# Zero-shot cross-lingual image-text retrieval evaluation (IGLUE xFlickr&CO)
# using machine-translated (gmt) Turkish test captions against a ctrl_vilbert
# checkpoint trained on English Flickr30k.
TASK=8
# NOTE(review): 'LANG' shadows the shell locale variable — harmless for this
# script, but confirm nothing downstream depends on the locale.
LANG=tr
MODEL=ctrl_vilbert
MODEL_CONFIG=ctrl_vilbert_base
TASKS_CONFIG=iglue_test_tasks_boxes36.dtu
TRTASK=RetrievalFlickr30k
TETASK=RetrievalxFlickrCO${LANG}
# Inputs: translated captions (jsonl) and precomputed visual features (lmdb).
TEXT_PATH=/home/projects/ku_00062/data/xFlickrCO/annotations_machine-translate/${LANG}/test_gmt.jsonl
FEAT_PATH=/home/projects/ku_00062/data/xFlickrCO/features/xflickrco-test_boxes36.lmdb
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/zero_shot/xflickrco/${MODEL}/${TRTASK}_${MODEL_CONFIG}/pytorch_model_best.bin
OUTPUT_DIR=/home/projects/ku_00062/results/iglue/zero_shot/xflickrco/${MODEL}_gmt/${TRTASK}_${MODEL_CONFIG}/$TETASK/test
source /home/projects/ku_00062/envs/iglue/bin/activate
# Evaluation entry point lives in the sibling 'volta' checkout.
cd ../../../../volta
python eval_retrieval.py \
        --bert_model /home/projects/ku_00062/huggingface/bert-base-uncased --do_lower_case --config_file config/${MODEL_CONFIG}.json \
        --from_pretrained ${PRETRAINED} \
        --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --split test_${LANG}_gmt --batch_size 1 \
        --caps_per_image 1 --val_annotations_jsonpath ${TEXT_PATH} --val_features_lmdbpath ${FEAT_PATH} \
        --output_dir ${OUTPUT_DIR}
deactivate
|
#!/bin/bash
# Package the app, ship it to the server, unpack it, and restart under pm2.
tar czf happyR.tar.gz .babelrc .gitignore client package-lock.json package.json public script secrets.js server webpack.config.js
# Fix: the original had a stray space ('165.227.82.241 :~'), which made scp
# treat the host and ':~' as two separate local arguments, so nothing was
# ever copied to the server.
scp happyR.tar.gz 165.227.82.241:~
rm happyR.tar.gz
ssh 165.227.82.241 << 'ENDSSH'
pm2 stop all
rm -rf happyResident
mkdir happyResident
tar xf happyR.tar.gz -C happyResident
rm happyR.tar.gz
cd happyResident
npm install
pm2 start all
ENDSSH
# SELinux bits need root; done in a separate session.
ssh root@165.227.82.241 << 'ENDSSH'
setsebool -P httpd_can_network_connect on
setsebool -P httpd_enable_homedirs on
chcon -Rt httpd_sys_content_t /home/taulantvokshi/happyResident/public
exit
ENDSSH
|
"use strict";
var _RequestManager_token;
Object.defineProperty(exports, "__esModule", { value: true });
exports.RequestManager = exports.RequestMethod = void 0;
const tslib_1 = require("tslib");
const collection_1 = tslib_1.__importDefault(require("@discordjs/collection"));
const form_data_1 = tslib_1.__importDefault(require("form-data"));
const snowflake_1 = require("@sapphire/snowflake");
const events_1 = require("events");
const https_1 = require("https");
const SequentialHandler_1 = require("./handlers/SequentialHandler");
const constants_1 = require("./utils/constants");
// Shared keep-alive agent so sequential REST requests reuse TCP connections.
const agent = new https_1.Agent({ keepAlive: true });
/**
 * Possible API methods to be used when doing requests
 */
var RequestMethod;
(function (RequestMethod) {
    RequestMethod["Delete"] = "delete";
    RequestMethod["Get"] = "get";
    RequestMethod["Patch"] = "patch";
    RequestMethod["Post"] = "post";
    RequestMethod["Put"] = "put";
})(RequestMethod = exports.RequestMethod || (exports.RequestMethod = {}));
/**
* Represents the class that manages handlers for endpoints
*/
// NOTE: this file is compiled TypeScript output (see sourceMappingURL below);
// the tslib helpers emulate a `#token` private class field.
class RequestManager extends events_1.EventEmitter {
    constructor(options) {
        super();
        /**
         * A timeout promise that is set when we hit the global rate limit
         * @default null
         */
        this.globalTimeout = null;
        /**
         * API bucket hashes that are cached from provided routes
         */
        this.hashes = new collection_1.default();
        /**
         * Request handlers created from the bucket hash and the major parameters
         */
        this.handlers = new collection_1.default();
        // eslint-disable-next-line @typescript-eslint/explicit-member-accessibility
        _RequestManager_token.set(this, null);
        // Merge caller options over the defaults; clamp the clock offset so it
        // can never be negative.
        this.options = { ...constants_1.DefaultRestOptions, ...options };
        this.options.offset = Math.max(0, this.options.offset);
    }
    /**
     * Sets the authorization token that should be used for requests
     * @param token The authorization token to use
     */
    setToken(token) {
        tslib_1.__classPrivateFieldSet(this, _RequestManager_token, token, "f");
        return this;
    }
    /**
     * Queues a request to be sent
     * @param request All the information needed to make a request
     * @returns The response from the api request
     */
    async queueRequest(request) {
        // Generalize the endpoint to its route data
        const routeID = RequestManager.generateRouteData(request.fullRoute, request.method);
        // Get the bucket hash for the generic route, or point to a global route otherwise
        // (the "Global(...)" placeholder is used until the API tells us the real bucket).
        const hash = this.hashes.get(`${request.method}:${routeID.bucketRoute}`) ?? `Global(${request.method}:${routeID.bucketRoute})`;
        // Get the request handler for the obtained hash, with its major parameter
        const handler = this.handlers.get(`${hash}:${routeID.majorParameter}`) ?? this.createHandler(hash, routeID.majorParameter);
        // Resolve the request into usable fetch/node-fetch options
        const { url, fetchOptions } = this.resolveRequest(request);
        // Queue the request
        return handler.queueRequest(routeID, url, fetchOptions);
    }
    /**
     * Creates a new rate limit handler from a hash, based on the hash and the major parameter
     * @param hash The hash for the route
     * @param majorParameter The major parameter for this handler
     * @private
     */
    createHandler(hash, majorParameter) {
        // Create the async request queue to handle requests
        const queue = new SequentialHandler_1.SequentialHandler(this, hash, majorParameter);
        // Save the queue based on its ID
        this.handlers.set(queue.id, queue);
        return queue;
    }
    /**
     * Formats the request data to a usable format for fetch
     * @param request The request data
     */
    resolveRequest(request) {
        const { options } = this;
        let query = '';
        // If a query option is passed, use it
        if (request.query) {
            query = `?${request.query.toString()}`;
        }
        // Create the required headers
        const headers = {
            'User-Agent': `${constants_1.DefaultUserAgent} ${options.userAgentAppendix}`.trim(),
        };
        // If this request requires authorization (allowing non-"authorized" requests for webhooks)
        if (request.auth !== false) {
            // If we haven't received a token, throw an error
            if (!tslib_1.__classPrivateFieldGet(this, _RequestManager_token, "f")) {
                throw new Error('Expected token to be set for this request, but none was present');
            }
            headers.Authorization = `${request.authPrefix ?? 'Bot'} ${tslib_1.__classPrivateFieldGet(this, _RequestManager_token, "f")}`;
        }
        // If a reason was set, set it's appropriate header
        if (request.reason?.length) {
            headers['X-Audit-Log-Reason'] = encodeURIComponent(request.reason);
        }
        // Format the full request URL (api base, optional version, endpoint, optional querystring)
        const url = `${options.api}${request.versioned === false ? '' : `/v${options.version}`}${request.fullRoute}${query}`;
        let finalBody;
        let additionalHeaders = {};
        if (request.attachments?.length) {
            const formData = new form_data_1.default();
            // Attach all files to the request
            for (const attachment of request.attachments) {
                formData.append(attachment.fileName, attachment.rawBuffer, attachment.fileName);
            }
            // If a JSON body was added as well, attach it to the form data
            // eslint-disable-next-line no-eq-null
            if (request.body != null) {
                formData.append('payload_json', JSON.stringify(request.body));
            }
            // Set the final body to the form data
            finalBody = formData;
            // Set the additional headers to the form data ones
            additionalHeaders = formData.getHeaders();
            // eslint-disable-next-line no-eq-null
        }
        else if (request.body != null) {
            // Stringify the JSON data
            finalBody = JSON.stringify(request.body);
            // Set the additional headers to specify the content-type
            additionalHeaders = { 'Content-Type': 'application/json' };
        }
        // Note: explicit 'headers' last so User-Agent/Authorization cannot be
        // overridden by caller-supplied request.headers.
        const fetchOptions = {
            agent,
            body: finalBody,
            // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
            headers: { ...(request.headers ?? {}), ...additionalHeaders, ...headers },
            method: request.method,
        };
        return { url, fetchOptions };
    }
    /**
     * Generates route data for an endpoint:method
     * @param endpoint The raw endpoint to generalize
     * @param method The HTTP method this endpoint is called without
     * @private
     */
    static generateRouteData(endpoint, method) {
        // 16-19 digit runs are snowflake IDs; the leading channel/guild/webhook
        // ID is the "major parameter" that scopes rate limits.
        const majorIDMatch = /^\/(?:channels|guilds|webhooks)\/(\d{16,19})/.exec(endpoint);
        // Get the major ID for this route - global otherwise
        const majorID = majorIDMatch?.[1] ?? 'global';
        const baseRoute = endpoint
            // Strip out all IDs
            .replace(/\d{16,19}/g, ':id')
            // Strip out reaction as they fall under the same bucket
            .replace(/\/reactions\/(.*)/, '/reactions/:reaction');
        let exceptions = '';
        // Hard-Code Old Message Deletion Exception (2 week+ old messages are a different bucket)
        // https://github.com/discord/discord-api-docs/issues/1295
        if (method === "delete" /* Delete */ && baseRoute === '/channels/:id/messages/:id') {
            const id = /\d{16,19}$/.exec(endpoint)[0];
            const snowflake = snowflake_1.DiscordSnowflake.deconstruct(id);
            if (Date.now() - Number(snowflake.timestamp) > 1000 * 60 * 60 * 24 * 14) {
                exceptions += '/Delete Old Message';
            }
        }
        return {
            majorParameter: majorID,
            bucketRoute: baseRoute + exceptions,
            original: endpoint,
        };
    }
}
exports.RequestManager = RequestManager;
// Backing store for the downleveled '#token' private field (tslib emit).
_RequestManager_token = new WeakMap();
//# sourceMappingURL=RequestManager.js.map
|
#!/bin/sh
#
# Sync mode: SYNC_LOCAL
#
printf "\033[94m[TEST]\033[m Sync mode: SYNC_LOCAL"
printf " \033[90m(Sync: WWW2 -> WWW2, Initiator: WWW2)\033[m"
docker-compose exec www2 python3 /var/www/html/db_sync_tool -f /var/www/html/tests/scenario/sync_local/sync-local-to-local.json -y $1
# Expecting 3 results in the database
count=$(docker-compose exec db2 mysql -udb -pdb db -e 'SELECT COUNT(*) FROM person' | grep 3 | tr -d '[:space:]')
# Fix: '[[ ]]' is a bash/ksh extension; under '#!/bin/sh' it fails on POSIX
# shells such as dash. Use the portable '[' test with '=' instead.
if [ "$count" = '|3|' ]; then
  echo " \033[92m✔\033[m"
else
  echo " \033[91m✘\033[m"
  exit 1
fi
sh helper/cleanup.sh
|
<reponame>curieos/CharacterSheetGenerator<filename>spec/objects/race_feature_spec.rb
# Verifies that a freshly built RaceFeature reports "Race" as its source.
RSpec.describe 'RaceFeature' do
  let(:feature) { CharacterSheetGenerator::RaceFeature.new("Dark Vision", "") }

  it "has source that is 'Race'" do
    expect(feature.source).to eq("Race")
  end
end
|
//
// CGRect+LTFramer.h
// Pods
//
// Created by <NAME> on 15/09/16.
//
//
@import Foundation;
// Fix: CGRect/CGPoint/CGSize/CGFloat are declared in CoreGraphics; import it
// explicitly instead of relying on Foundation pulling it in transitively.
@import CoreGraphics;

/// Builds a CGRect from an origin point and a size.
FOUNDATION_EXTERN CGRect CGRectCreate(CGPoint origin, CGSize size);

/// Builds a CGRect from scalar components; presumably the implementation
/// snaps it to integral bounds (CGRectIntegral) — confirm in the .m file.
FOUNDATION_EXTERN CGRect CGRectIntegralMake(CGFloat x, CGFloat y, CGFloat width, CGFloat height);
|
#!/bin/bash
#SBATCH --job-name=ppta_ptmcmc_pe_cpl_varslope_set_all_ephem_0
#SBATCH --output=/fred/oz002/bgoncharov/correlated_noise_logs/ppta_ptmcmc_pe_cpl_varslope_set_all_ephem_0_%A_%a.out
#SBATCH --ntasks=4
#SBATCH --time=1-11
#SBATCH --mem-per-cpu=6G
#SBATCH --tmp=8G
#SBATCH --array=0
# Detect the major version of the default 'python'; for Python 2, load the
# matching numpy module before running.
pyv="$(python -c 'import sys; print(sys.version_info[0])')"
if [ "$pyv" == 2 ]
then
  echo "$pyv"
  module load numpy/1.16.3-python-2.7.14
fi
# Log the TEMPO2 environment into the job output for reproducibility.
srun echo $TEMPO2
srun echo $TEMPO2_CLOCK_DIR
# NOTE(review): the job name says 'cpl_varslope' but the parameter file is
# 'common_pl_vargam' — confirm this pairing is intentional.
srun python /home/bgonchar/correlated_noise_pta_2020/run_analysis.py --prfile "/home/bgonchar/correlated_noise_pta_2020/params/ppta_dr2_ptmcmc_pe_common_pl_vargam_set_all_ephem_0_20201014.dat" --num $SLURM_ARRAY_TASK_ID
|
#!/bin/bash
# Fix: the original put two '#SBATCH' directives on one line; Slurm honors
# only the first directive per line, so '--cpus-per-task=5' was silently
# ignored. They must each be on their own line.
#SBATCH --gres=gpu:1
#SBATCH --cpus-per-task=5
#SBATCH --job-name="BigGAN CIFAR"
#SBATCH --time=10-00:00:00
#SBATCH --account=engs-tvg
#SBATCH --reservation=tvg202110
#SBATCH --qos=tvg
#SBATCH --partition=short
source activate py36
# NOTE(review): CUDA_VISIBLE_DEVICES=0,1 requests two GPUs while --gres
# allocates only one — confirm which is intended.
CUDA_VISIBLE_DEVICES=0,1 python train.py \
--shuffle --batch_size 50 --parallel \
--num_G_accumulations 1 --num_D_accumulations 1 --num_epochs 500 \
--num_D_steps 4 --G_lr 2e-4 --D_lr 2e-4 \
--dataset C10 \
--G_ortho 0.0 \
--G_attn 0 --D_attn 0 \
--G_init N02 --D_init N02 \
--ema --use_ema --ema_start 1000 \
--test_every 5000 --save_every 2000 --num_best_copies 5 --num_save_copies 2 --seed 0 \
--ensemble_path /data/coml-ecr/shug5721/ensemble/ --lamda1 1.0 --lamda2 1.0 \
--weights_root /data/coml-ecr/shug5721/biggan_models/biggan_cifar_1.0/weights \
--samples_root /data/coml-ecr/shug5721/biggan_models/biggan_cifar_1.0/samples
|
import boto3
def list_s3_buckets(region_name):
    """Return the names of all S3 buckets visible to the caller.

    ``region_name`` selects the endpoint the client talks to. On any
    failure the error is printed and an empty list is returned.
    """
    client = boto3.client('s3', region_name=region_name)
    try:
        listing = client.list_buckets()
        return [entry['Name'] for entry in listing['Buckets']]
    except Exception as error:
        # Best-effort: report the problem and fall back to "no buckets".
        print(f"An error occurred: {error}")
        return []
# Example usage: print every bucket name reachable via the us-west-1 endpoint.
region = 'us-west-1'
buckets = list_s3_buckets(region)
print(buckets)
|
package databases
import (
"database/sql"
"log"
"github.com/DATA-DOG/go-sqlmock"
"github.com/alhamsya/boilerplate-go/lib/helpers/database"
"github.com/jmoiron/sqlx"
)
// setupMockDB opens an sqlmock-backed *sql.DB and wraps it in a
// database.Store whose Master and Slave share the same connection.
// Callers own the returned *sql.DB and should Close it when done.
func setupMockDB() (mockDB *sql.DB, mockStore *database.Store, mockSQL sqlmock.Sqlmock) {
	db, mock, err := sqlmock.New()
	if err != nil {
		log.Fatalf("an error '%s' was not expected when opening a stub database connection", err)
	}
	wrapped := sqlx.NewDb(db, "sqlmock")
	store := &database.Store{
		Master: wrapped,
		Slave:  wrapped,
	}
	return db, store, mock
}
|
#!/bin/bash
# ----------------------------------------------------------------
# Copyright 2016 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------
#
# Script for running ydk CI on docker via travis-ci.org
#
# ------------------------------------------------------------------
# ANSI color codes used by print_msg.
RED="\033[0;31m"
NOCOLOR="\033[0m"
# print_msg MESSAGE: timestamped, colored log line identifying this script.
function print_msg {
    echo -e "${RED}*** $(date) *** dependencies_osx.sh | $1${NOCOLOR}"
}
# Install the macOS build/test dependencies via Homebrew, then replace the
# Homebrew pythons with the official python.org 3.6.3 installer package.
function install_dependencies {
    print_msg "install_dependencies"
    # Fix: the original issued a second, redundant 'brew install libssh'
    # right after this batch (libssh is already installed here); only the
    # 'brew link libssh' step is kept.
    brew install curl \
                 doxygen \
                 libssh \
                 pcre \
                 wget \
                 xml2 \
                 lcov > /dev/null
    brew link libssh
    brew rm -f --ignore-dependencies python python3
    wget https://www.python.org/ftp/python/3.6.3/python-3.6.3-macosx10.6.pkg
    sudo installer -pkg python-3.6.3-macosx10.6.pkg -target /
}
# Download the ConfD Basic macOS bundle and install it into ../confd.
function install_confd {
    print_msg "install_confd"
    local pkg="confd-basic-6.2.darwin.x86_64"
    wget "https://github.com/CiscoDevNet/ydk-gen/files/562559/${pkg}.zip"
    unzip "${pkg}.zip"
    "./${pkg}.installer.bin" ../confd
}
# Install the fpm packaging tool (used for building release artifacts).
function install_fpm {
    print_msg "Installing fpm"
    # fpm relies on GNU tar on macOS.
    brew install gnu-tar > /dev/null
    # NOTE(review): --no-ri/--no-rdoc were removed in RubyGems 3; newer gems
    # need '--no-document' instead — confirm the target gem version.
    gem install --no-ri --no-rdoc fpm
}
########################## EXECUTION STARTS HERE #############################
install_dependencies
install_confd
install_fpm
# Bootstrap pip/virtualenv for the freshly installed Python.
# NOTE(review): easy_install is deprecated — confirm it still exists here.
sudo easy_install pip
sudo pip install virtualenv
|
<filename>WebMediaStoreRegister/src/main/java/ra/jademy/domain/ProductType.java
package ra.jademy.domain;
/**
 * Media types sold by the web media store.
 */
public enum ProductType {
    /** Compact disc. */
    CD,
    /** Digital video disc. */
    DVD,
    /** Printed book. */
    BOOK
}
|
# Version constant for the acts_as_translated gem.
module ActsAsTranslated
  VERSION = '1.0.4'
end
|
#!/bin/bash -l
## Grant name
#SBATCH -A plgcholdadyplomy
## Node count, 1 by default
#SBATCH -N 1
#SBATCH --cpus-per-task=1
#SBATCH --mem-per-cpu=40GB
## Job time
#SBATCH --time=26:00:00
## Partition
#SBATCH -p plgrid-gpu
#SBATCH --gres=gpu:2
# Load the cluster-provided Python/TensorFlow/CUDA toolchain.
module add plgrid/tools/python/3.8
module add plgrid/libs/tensorflow-gpu/2.3.1-python-3.8
module add plgrid/apps/cuda/10.1
# Run from the repository root (two levels above the submit directory).
cd $SLURM_SUBMIT_DIR
cd ../..
# NOTE(review): installing requirements on every run is slow — consider a
# pre-built environment. Kept as-is to preserve behavior.
pip install -r requirements.txt
python3 main.py -d "/net/archive/groups/plggpchdyplo/augmented_data/"
|
#! /bin/bash
# Bootstrap an Apache web server and publish a demo landing page.
sudo yum update -y
sudo yum install -y httpd
sudo systemctl enable httpd
sudo service httpd start
# Fix: the original heading was missing its closing </h1> tag.
echo "<h1>This is terraform cloud demo using remote state file</h1>" | sudo tee /var/www/html/index.html
|
<filename>stepDefinitions/clickAction.js
//import AngularHomePage from '../pages/angularHomePage';
//import Globals from '../support/Globals';
//import { browser } from 'protractor';
//import { Given,When, Then } from "cucumber";
// Chai
/*const globals = new Globals();
const expect = globals.expect;*/
var AngularHomePage = require('../pages/angularHomePage');
var { When,Then } = require('cucumber');
// AngularHomePage page instance
const angularHomePage = new AngularHomePage();
// NOTE(review): 'browser' is the protractor global (its import above is
// commented out) — confirm these steps run under protractor-cucumber.
When(/^I click learn dropdown$/, () => {
    angularHomePage.learn.click();
    return browser.waitForAngular();
});
Then(/^I click Tutorial option$/, () => {
    angularHomePage.tutorial.click();
    return browser.waitForAngular();
});
// The develop/guide steps return the click promise directly and do not wait
// for Angular to settle, unlike the two steps above.
When(/^I click develop dropdown$/, () => {
    return angularHomePage.develop.click();
});
Then(/^I click Developer Guide option$/, () => {
    return angularHomePage.developerGuide.click();
});
|
# Ensure the log files exist before the logger starts appending to them.
[ -f ~/logger.txt ] || touch ~/logger.txt
[ -f ~/.logger ] || touch ~/.logger
# Record the invoking user, then launch the logger in the background.
touch user.user
whoami > user.user
python3 logger.py &
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Ads
    module GoogleAds
      module V7
        module Resources
          # A dynamic search ads search term view.
          # @!attribute [r] resource_name
          #   @return [::String]
          #     Output only. The resource name of the dynamic search ads search term view.
          #     Dynamic search ads search term view resource names have the form:
          #
          #     `customers/{customer_id}/dynamicSearchAdsSearchTermViews/{ad_group_id}~{search_term_fingerprint}~{headline_fingerprint}~{landing_page_fingerprint}~{page_url_fingerprint}`
          # @!attribute [r] search_term
          #   @return [::String]
          #     Output only. Search term
          #
          #     This field is read-only.
          # @!attribute [r] headline
          #   @return [::String]
          #     Output only. The dynamically generated headline of the Dynamic Search Ad.
          #
          #     This field is read-only.
          # @!attribute [r] landing_page
          #   @return [::String]
          #     Output only. The dynamically selected landing page URL of the impression.
          #
          #     This field is read-only.
          # @!attribute [r] page_url
          #   @return [::String]
          #     Output only. The URL of page feed item served for the impression.
          #
          #     This field is read-only.
          # @!attribute [r] has_negative_keyword
          #   @return [::Boolean]
          #     Output only. True if query matches a negative keyword.
          #
          #     This field is read-only.
          # @!attribute [r] has_matching_keyword
          #   @return [::Boolean]
          #     Output only. True if query is added to targeted keywords.
          #
          #     This field is read-only.
          # @!attribute [r] has_negative_url
          #   @return [::Boolean]
          #     Output only. True if query matches a negative url.
          #
          #     This field is read-only.
          # Generated protobuf message wrapper; attributes above are exposed
          # through the protobuf MessageExts mixins.
          class DynamicSearchAdsSearchTermView
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end
      end
    end
  end
end
|
#!/bin/bash
# Re-encode every .mkv in place: audio -> AAC, video -> AV1, other streams copied.
for filename in *.mkv; do
    # Fix: the original ran 'rm "$filename"' unconditionally after ffmpeg, so a
    # failed encode deleted the source and lost the file. Only replace the
    # source when ffmpeg succeeded; otherwise drop the partial output.
    if ffmpeg -i "$filename" -map 0 -c copy -c:a aac -c:v av1 -crf 17 -strict -2 out.mkv; then
        mv out.mkv "$filename"
    else
        rm -f out.mkv
        echo "Encoding failed, source kept: $filename" >&2
    fi
done
|
// Re-export the uncontrolled List story modules under stable namespace names
// for the Storybook index.
export * as Declarative from './List.uncontrolled.declarative.stories';
export * as DataDriven from './List.uncontrolled.data-driven.stories';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.