text
stringlengths 1
1.05M
|
|---|
//+build wireinject
package main
import (
"github.com/google/wire"
"github.com/khanakia/jgo/graph"
"github.com/khanakia/jgo/pkg/app"
"github.com/khanakia/jgo/pkg/auth"
"github.com/khanakia/jgo/pkg/cli"
"github.com/khanakia/jgo/pkg/dbc"
"github.com/khanakia/jgo/pkg/gql"
"github.com/khanakia/jgo/pkg/logger"
"github.com/khanakia/jgo/pkg/server"
)
func Init() Plugins {
wire.Build(
// hello.New,
cli.New,
logger.New,
wire.Struct(new(dbc.Config), "*"),
dbc.New,
app.New,
wire.Struct(new(server.Config), "*"),
server.New,
wire.Struct(new(auth.Config), "*"),
auth.New,
wire.Struct(new(gql.Config), "*"),
wire.Struct(new(graph.Resolver), "*"),
gql.New,
wire.Struct(new(Plugins), "*"),
)
return Plugins{}
}
|
#!/usr/bin/env bash
#
# Import variables.json and all m-*.json measurement files from DIR into
# the "uasrc" MongoDB database, running the measurement imports in parallel.

if [[ $# != 1 ]]; then
    printf "usage: %s DIR\n" "$(basename "$0")"
    exit 1
fi

DIR=$1
DB="uasrc"

# Variables
VARS="$DIR/variables.json"
if [[ -f "$VARS" ]]; then
    mongoimport --jsonArray --drop --db "$DB" --collection variables "$VARS"
fi

# Measurements
# JOBS=$(mktemp)
JOBS="jobs.txt"
cat /dev/null > "$JOBS"
for FILE in "$DIR"/m-*.json; do
    # When the glob matches nothing, bash leaves the literal pattern in FILE;
    # skip non-existent entries so we never queue a bogus import job.
    [[ -e "$FILE" ]] || continue
    echo "mongoimport --db $DB --collection scrutinizer $FILE" >> "$JOBS"
done

NUM=$(wc -l "$JOBS" | awk '{print $1}')
if [[ $NUM -gt 0 ]]; then
    echo "Importing $NUM measurements"
    parallel --tmpdir /data/tmp -j 8 --halt soon,fail=1 < "$JOBS"
else
    echo "No measurements?"
fi

rm "$JOBS"
echo "Done."
|
/**
 * Returns the current four-digit year from the local system clock.
 * @returns {number} e.g. 2024
 */
function CurrentYear() {
  return new Date().getFullYear();
}
|
export const LOGO_URL =
	'https://user-images.githubusercontent.com/6964334/147834043-fc384cab-e7ca-40f8-9663-38fc25fd5f3a.png';

// Environment variables rendered as plain text inputs.
export const TextInputType = {
	GOOGLE_CLIENT_ID: 'GOOGLE_CLIENT_ID',
	GITHUB_CLIENT_ID: 'GITHUB_CLIENT_ID',
	FACEBOOK_CLIENT_ID: 'FACEBOOK_CLIENT_ID',
	JWT_ROLE_CLAIM: 'JWT_ROLE_CLAIM',
	REDIS_URL: 'REDIS_URL',
	SMTP_HOST: 'SMTP_HOST',
	SMTP_PORT: 'SMTP_PORT',
	SMTP_USERNAME: 'SMTP_USERNAME',
	SENDER_EMAIL: 'SENDER_EMAIL',
	ORGANIZATION_NAME: 'ORGANIZATION_NAME',
	ORGANIZATION_LOGO: 'ORGANIZATION_LOGO',
	DATABASE_NAME: 'DATABASE_NAME',
	DATABASE_TYPE: 'DATABASE_TYPE',
	DATABASE_URL: 'DATABASE_URL',
};

// Secret-valued variables rendered as masked (password-style) inputs.
// Every key maps to its own name; SMTP_PASSWORD previously held the
// redaction placeholder '<PASSWORD>', which broke the key-to-name contract.
export const HiddenInputType = {
	GOOGLE_CLIENT_SECRET: 'GOOGLE_CLIENT_SECRET',
	GITHUB_CLIENT_SECRET: 'GITHUB_CLIENT_SECRET',
	FACEBOOK_CLIENT_SECRET: 'FACEBOOK_CLIENT_SECRET',
	JWT_SECRET: 'JWT_SECRET',
	SMTP_PASSWORD: 'SMTP_PASSWORD',
	ADMIN_SECRET: 'ADMIN_SECRET',
	OLD_ADMIN_SECRET: 'OLD_ADMIN_SECRET',
};

// Variables whose value is a list, rendered as tag/array inputs.
export const ArrayInputType = {
	ROLES: 'ROLES',
	DEFAULT_ROLES: 'DEFAULT_ROLES',
	PROTECTED_ROLES: 'PROTECTED_ROLES',
	ALLOWED_ORIGINS: 'ALLOWED_ORIGINS',
};

// Variables rendered as a dropdown select.
export const SelectInputType = {
	JWT_TYPE: 'JWT_TYPE',
};

// Variables rendered as a multi-line textarea.
export const TextAreaInputType = {
	CUSTOM_ACCESS_TOKEN_SCRIPT: 'CUSTOM_ACCESS_TOKEN_SCRIPT',
};

// Boolean feature flags rendered as toggles.
export const SwitchInputType = {
	DISABLE_LOGIN_PAGE: 'DISABLE_LOGIN_PAGE',
	DISABLE_MAGIC_LINK_LOGIN: 'DISABLE_MAGIC_LINK_LOGIN',
	DISABLE_EMAIL_VERIFICATION: 'DISABLE_EMAIL_VERIFICATION',
	DISABLE_BASIC_AUTHENTICATION: 'DISABLE_BASIC_AUTHENTICATION',
};

// Operations supported by the array-input editor.
export const ArrayInputOperations = {
	APPEND: 'APPEND',
	REMOVE: 'REMOVE',
};
|
package com.professorvennie.bronzeage.blocks;
import com.professorvennie.bronzeage.lib.Reference;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.block.Block;
import net.minecraft.block.BlockColored;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import java.util.List;
/**
 * Created by ProfessorVennie on 11/14/2014 at 7:50 PM.
 *
 * A metadata-based ore block providing copper (meta 0) and tin (meta 1).
 */
public class BlockOre extends BlockBase {
    /** Number of ore sub-blocks (metadata values) this block provides. */
    private static final int SUB_BLOCK_COUNT = 2;

    /** Per-metadata icons; only populated on the client. */
    @SideOnly(Side.CLIENT)
    private IIcon[] icons;

    public BlockOre() {
        super(Material.rock, "ore");
    }

    @Override
    public void getSubBlocks(Item item, CreativeTabs tab, List list) {
        // Iterate the constant rather than icons.length: icons is a
        // client-only field (@SideOnly) and is null on a dedicated server,
        // where this common-side method can still be invoked.
        for (int i = 0; i < SUB_BLOCK_COUNT; i++)
            list.add(new ItemStack(item, 1, i));
    }

    @Override
    @SideOnly(Side.CLIENT)
    public void registerBlockIcons(IIconRegister iconRegister) {
        icons = new IIcon[SUB_BLOCK_COUNT];
        String[] names = new String[]{"oreCopper", "oreTin"};
        for (int i = 0; i < icons.length; i++)
            icons[i] = iconRegister.registerIcon(Reference.MOD_ID + ":" + names[i]);
    }

    @Override
    @SideOnly(Side.CLIENT)
    public IIcon getIcon(int side, int meta) {
        // Modulo clamps out-of-range metadata so bad damage values don't crash.
        return icons[meta % icons.length];
    }

    /** ItemBlock counterpart exposing one item variant per metadata value. */
    public static class ItemBlockOre extends ItemBlock {
        public ItemBlockOre(Block block) {
            super(block);
        }

        @Override
        public void getSubItems(Item item, CreativeTabs tab, List list) {
            for (int i = 0; i < SUB_BLOCK_COUNT; i++)
                list.add(new ItemStack(item, 1, i));
        }

        @Override
        public String getUnlocalizedName(ItemStack itemStack) {
            // Append the damage value so each metadata variant localizes separately.
            return super.getUnlocalizedName() + "." + itemStack.getItemDamage();
        }
    }
}
|
<filename>doc/html/search/variables_e.js<gh_stars>1-10
// Auto-generated Doxygen search index for variables beginning with "p".
// Each entry is [key, [display name, [url-fragment, tooltip, scope], ...]]
// and is consumed by Doxygen's search.js; do not edit by hand.
var searchData=
[
  ['priority_0',['priority',['../classproxen_1_1gui_1_1_q_thread_stump.html#aa89d8169afdbac9b901a2c49459d3858',1,'proxen::gui::QThreadStump']]],
  ['proxy_5fobjs_1',['PROXY_OBJS',['../namespaceproxen_1_1gui.html#a647a8dc40f81a3ca5d1a5ba84b94e430',1,'proxen::gui']]]
];
|
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.spi.device;
/**
 * Used as a clear indicator of the current status of a device assignment.
 */
public enum DeviceAssignmentStatus {

    /** Device is active and reporting normally. */
    Active,

    /** Device reported missing (assignment still held). */
    Missing,

    /** Device released from its assignment. */
    Released;
}
|
#!/bin/bash
#
# Test if you can reach host via ipv6 and ipv4.
# Run:
# ./ipv46test.sh
#
# Defaults to "trustroots.org" and test string "<!DOCTYPE html>"
#
# Or to set hostname, run:
# ./ipv46test.sh example.org
#
# Or to set hostname and test string to find, run:
# ./ipv46test.sh example.org "test string"
#

# Options
DEFAULT_HOSTNAME="trustroots.org"
DEFAULT_TEST_STRING="<!DOCTYPE html>"

# Helpers
HOSTNAME=${1:-$DEFAULT_HOSTNAME}
TEST_STRING=${2:-$DEFAULT_TEST_STRING}
# Last A / AAAA record for the host (tail -n1 skips intermediate CNAMEs).
IPV4_IP=$(dig ${HOSTNAME} A +short | tail -n1)
IPV6_IP=$(dig ${HOSTNAME} AAAA +short | tail -n1)
RED='\033[0;31m'
GREEN='\033[0;32m'
BOLD='\033[1m'
NC='\033[0m' # No Color

print_error() {
  printf "${RED}[ERROR]${NC} $TEST_NAME\n"
}

print_success() {
  printf "${GREEN}[OK]${NC} $TEST_NAME\n"
}

printf "\nTesting access to \"${BOLD}$HOSTNAME${NC}\""
printf "\nLooking for test string \"${BOLD}$TEST_STRING${NC}\"\n"

#
# IPV4
#
printf "\n...via ${BOLD}IPv4${NC}:\n"

# IPv4 host
TEST_NAME="$HOSTNAME"
TEST=$(curl -4 -gsL "$HOSTNAME" --max-time 5 | head -1 | xargs)
if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

TEST_NAME="www.$HOSTNAME"
TEST=$(curl -4 -gsL "www.$HOSTNAME" --max-time 5 | head -1 | xargs)
if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

# IPv4 ip
if [ -n "$IPV4_IP" ]; then
  TEST_NAME="https://$IPV4_IP"
  TEST=$(curl -4 -gskL "https://$IPV4_IP" --max-time 5 | head -1 | xargs)
  if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

  TEST_NAME="http://$IPV4_IP"
  TEST=$(curl -4 -gsL "http://$IPV4_IP" --max-time 5 | head -1 | xargs)
  if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

  TEST_NAME="$IPV4_IP"
  TEST=$(curl -4 -gsL "$IPV4_IP" --max-time 5 | head -1 | xargs)
  if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi
else
  TEST_NAME="Could not get IPV4 IP address for hostname."
  print_error
fi

#
# IPV6
#
printf "\n...via ${BOLD}IPv6${NC}:\n"

# IPV6 host
TEST_NAME="$HOSTNAME"
TEST=$(curl -6 -gsL "$HOSTNAME" --max-time 5 | head -1 | xargs)
if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

TEST_NAME="www.$HOSTNAME"
TEST=$(curl -6 -gsL "www.$HOSTNAME" --max-time 5 | head -1 | xargs)
if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

# IPV6 ip
# --max-time 5 added for consistency with every other probe above, so an
# unreachable IPv6 address fails fast instead of hanging on curl's default.
if [ -n "$IPV6_IP" ]; then
  TEST_NAME="http://$IPV6_IP"
  TEST=$(curl -6 -gsL "http://[$IPV6_IP]" --max-time 5 | head -1 | xargs)
  if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

  TEST_NAME="https://$IPV6_IP"
  TEST=$(curl -6 -gskL "https://[$IPV6_IP]" --max-time 5 | head -1 | xargs)
  if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi

  TEST_NAME="$IPV6_IP"
  TEST=$(curl -6 -gskL "[$IPV6_IP]" --max-time 5 | head -1 | xargs)
  if [ "$TEST" = "$TEST_STRING" ] ; then print_success ; else print_error ; fi
else
  TEST_NAME="Could not get IPV6 IP address for hostname."
  print_error
fi
|
import gzip
import logging
import json
import requests
class JsonFeedProcessor:
    """Parses JSON feed content and probes feed URLs, logging outcomes."""

    def __init__(self, name):
        """
        :param name: identifier for this feed processor
        """
        self.name = name
        self._log = logging.getLogger(__name__)
        self._log.setLevel(logging.DEBUG)
        # Only attach a handler if the module logger has none yet: adding a
        # fresh StreamHandler per instance made every log line repeat once
        # for each processor constructed.
        if not self._log.handlers:
            self._log.addHandler(logging.StreamHandler())

    def parse(self, content):
        """Parse ``content`` as JSON and return the decoded object.

        :param content: JSON text (str/bytes)
        :return: the decoded Python object, or None if parsing failed
        """
        try:
            json_data = json.loads(content)
            self._log.debug('Parsed JSON data: %s', json_data)
            return json_data
        except json.JSONDecodeError as e:
            self._log.error('Failed to parse JSON content: %s', e)
            return None

    def update_headers_for(self, url, headers):
        """Issue a HEAD request to ``url`` and merge ``headers`` into the
        response's header map, logging the result.

        NOTE(review): mutating the *response* headers has no effect on the
        server; presumably the intent is to inspect or derive request
        headers — confirm against callers.

        :param url: URL to probe with an HTTP HEAD request
        :param headers: dict of header values merged into the response headers
        """
        try:
            response = requests.head(url)
            response.headers.update(headers)
            self._log.debug('Updated headers for URL %s: %s', url, response.headers)
        except requests.RequestException as e:
            self._log.error('Failed to update headers for URL %s: %s', url, e)
|
<reponame>binarytemple/mybatis-all-syncing-test
/*
* Copyright 2009-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibatis.common.util;
import org.w3c.dom.*;
import org.xml.sax.*;
import org.apache.ibatis.parsing.*;
import javax.xml.parsers.*;
import java.io.*;
import java.lang.reflect.Method;
import java.util.*;
/**
 * Event-driven XML parser: walks a DOM tree and dispatches registered
 * {@code NodeEvent}-annotated handler methods when their XPath is reached.
 */
public class NodeEventParser {
  /** Registered handlers, keyed by the XPath they respond to. */
  private Map<String, NodeEventWrapper> nodeletMap = new HashMap<String, NodeEventWrapper>();
  private XPathParser xpathParser;
  private boolean validation;
  private EntityResolver entityResolver;
  private Properties variables;

  public NodeEventParser() {
    setValidation(false);
    setVariables(new Properties());
    setEntityResolver(null);
  }

  /**
   * Registers a nodelet for the specified XPath. Current XPaths supported
   * are:
   * <ul>
   * <li> Element Path - /rootElement/childElement/theElement
   * <li> Closing element - /rootElement/childElement/end()
   * <li> All Elements Named - //theElement
   * </ul>
   *
   * @param handler object whose {@code @NodeEvent}-annotated methods are registered
   */
  public void addNodeletHandler(Object handler) {
    Class<?> type = handler.getClass();
    Method[] methods = type.getMethods();
    for (Method m : methods) {
      NodeEvent n = m.getAnnotation(NodeEvent.class);
      if (n != null) {
        checkMethodApplicable(n, type, m);
        nodeletMap.put(n.value(), new NodeEventWrapper(handler, m));
      }
    }
  }

  /**
   * Begins parsing from the provided Reader.
   *
   * @param reader source of the XML document
   * @throws ParsingException if the document cannot be read or parsed
   */
  public void parse(Reader reader) throws ParsingException {
    try {
      Document doc = createDocument(reader);
      xpathParser = new XPathParser(doc, validation, variables, entityResolver);
      parse(doc.getLastChild());
    } catch (Exception e) {
      throw new ParsingException("Error parsing XML. Cause: " + e, e);
    }
  }

  /** Sets the property values substituted during parsing. */
  public void setVariables(Properties variables) {
    this.variables = variables;
  }

  /** Enables or disables DTD validation of parsed documents. */
  public void setValidation(boolean validation) {
    this.validation = validation;
  }

  /** Sets the entity resolver used by the underlying JAXP builder. */
  public void setEntityResolver(EntityResolver resolver) {
    this.entityResolver = resolver;
  }

  /**
   * Validates that a handler method is registrable: unique path and a
   * single XNode parameter.
   */
  private void checkMethodApplicable(NodeEvent n, Class<?> type, Method m) {
    if (nodeletMap.containsKey(n.value())) {
      throw new ParsingException("This nodelet parser already has a handler for path " + n.value());
    }
    Class<?>[] params = m.getParameterTypes();
    if (params.length != 1 || params[0] != XNode.class) {
      throw new ParsingException("The method " + m.getName() + " on " + type + " does not take a single parameter of type XNode.");
    }
  }

  /**
   * Begins parsing from the provided Node.
   */
  private void parse(Node node) {
    Path path = new Path();
    processNodelet(node, "/");
    process(node, path);
  }

  /**
   * A recursive method that walks the DOM tree, registers XPaths and
   * calls Nodelets registered under those XPaths.
   */
  private void process(Node node, Path path) {
    if (node instanceof Element) {
      // Element
      String elementName = node.getNodeName();
      path.add(elementName);
      processNodelet(node, path.toString());
      processNodelet(node, new StringBuilder("//").append(elementName).toString());
      // Children
      NodeList children = node.getChildNodes();
      for (int i = 0; i < children.getLength(); i++) {
        process(children.item(i), path);
      }
      // Fire the synthetic closing-element event, then pop both the
      // "end()" marker and the element name from the path.
      path.add("end()");
      processNodelet(node, path.toString());
      path.remove();
      path.remove();
    }
  }

  /** Invokes the handler registered for pathString, if any. */
  private void processNodelet(Node node, String pathString) {
    NodeEventWrapper nodelet = nodeletMap.get(pathString);
    if (nodelet != null) {
      try {
        nodelet.process(new XNode(xpathParser, node, variables));
      } catch (Exception e) {
        throw new ParsingException("Error parsing XPath '" + pathString + "'. Cause: " + e, e);
      }
    }
  }

  /**
   * Creates a JAXP Document from a reader.
   */
  private Document createDocument(Reader reader) throws ParserConfigurationException, FactoryConfigurationError, SAXException, IOException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setValidating(validation);
    factory.setNamespaceAware(false);
    factory.setIgnoringComments(true);
    factory.setIgnoringElementContentWhitespace(false);
    factory.setCoalescing(false);
    factory.setExpandEntityReferences(true);
    DocumentBuilder builder = factory.newDocumentBuilder();
    builder.setEntityResolver(entityResolver);
    // Escalate recoverable and fatal errors; swallow warnings.
    builder.setErrorHandler(new ErrorHandler() {
      public void error(SAXParseException exception) throws SAXException {
        throw exception;
      }

      public void fatalError(SAXParseException exception) throws SAXException {
        throw exception;
      }

      public void warning(SAXParseException exception) throws SAXException {
      }
    });
    return builder.parse(new InputSource(reader));
  }

  /**
   * Inner helper class that assists with building XPath paths.
   * <p/>
   * Note: Currently this is a bit slow and could be optimized.
   */
  private static class Path {
    private List<String> nodeList = new ArrayList<String>();

    public Path() {
    }

    public void add(String node) {
      nodeList.add(node);
    }

    public void remove() {
      nodeList.remove(nodeList.size() - 1);
    }

    public String toString() {
      StringBuilder buffer = new StringBuilder("/");
      for (int i = 0; i < nodeList.size(); i++) {
        buffer.append(nodeList.get(i));
        if (i < nodeList.size() - 1) {
          buffer.append("/");
        }
      }
      return buffer.toString();
    }
  }
}
|
#!/usr/bin/env python
# Entry-point script: launches the msikeys UI when run directly.
import msikeys
import msikeys.ui

if __name__ == '__main__':
    msikeys.ui.main()
|
<reponame>leongaban/redux-saga-exchange
import { makeReduxFormEntry } from 'shared/helpers/redux';
import * as NS from '../namespace';
// Redux-form entry for the "edit market" form: binds the form name
// 'edit-market' to its registered field names, typed against
// NS.IEditMarketForm so field names stay in sync with the model.
export const editMarketFormEntry = makeReduxFormEntry<NS.IEditMarketForm>('edit-market',
  [
    'id', 'makerFee', 'takerFee', 'baseFee', 'quoteFee', 'priceScale', 'amountScale',
    'minTradeAmount', 'minOrderValue',
  ]);
|
#!/bin/bash
# NOTE: this script requires bash — it uses arrays, [[ ]], `function`,
# `set -o pipefail` and `trap ... ERR`, none of which are POSIX sh.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # (echo corrected to "${basename}.dSYM" to match the rsync below.)
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      mkdir -p "${DWARF_DSYM_FOLDER_PATH}"
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/GlowingTextField/GlowingTextField.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/GlowingTextField/GlowingTextField.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
// Re-export Material-UI's internal createTypography factory under a local name.
exports._createTypography = require("@material-ui/core/styles/createTypography").default;
|
<reponame>quicksprout/naas-api-client-python
import json
from naas.client import Client
class Projects:
    """Static helpers wrapping the NAAS project endpoints."""

    @staticmethod
    def list(params=None):
        """
        Retrieve the list of projects
        :param params: dict
        :return: Response
        """
        if params is None:
            params = {}
        route = Client.routes().route_for(Client.rel_for('rels/projects'))
        return Client.get(route.url_for(params))

    @staticmethod
    def retrieve(_id, params=None):
        """
        Retrieve the instance of a project
        :param _id: int
        :param params: dict
        :return: Response
        """
        if params is None:
            params = {}
        args = dict(params)
        args['id'] = _id
        route = Client.routes().route_for(Client.rel_for('rels/project'))
        return Client.get(route.url_for(args=args))

    @staticmethod
    def create(params=None):
        """
        Create a new project
        :param params: dict
        :return: Response
        """
        if params is None:
            params = {}
        body = json.dumps({"project": params})
        route = Client.routes().route_for(Client.rel_for('rels/projects'))
        return Client.post(
            route.url_for(),
            headers={'Content-Type': 'application/json'},
            data=body)

    @staticmethod
    def update(_id, params=None):
        """
        Update an existing project
        :param _id: int
        :param params: dict
        :return: Response
        """
        if params is None:
            params = {}
        body = json.dumps({"project": params})
        args = dict(params)
        args['id'] = _id
        route = Client.routes().route_for(Client.rel_for('rels/project'))
        return Client.put(
            route.url_for(args=args),
            headers={'Content-Type': 'application/json'},
            data=body)
|
<filename>app/controllers/users_controller.rb
class UsersController < ApiController
  # GET /users — render every user as JSON.
  def index
    render json: User.all
  end

  # GET /users/1 — render the user with the requested id as JSON.
  def show
    render json: User.find(params[:id])
  end
end
|
// Express application: every route is delegated to the posts router.
const express = require('express');
const postRouter = require('./posts/posts-router');

const server = express();
server.use(postRouter);

module.exports = server;
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PassColor = void 0;
const colorNames = require("color-name");
const ABBR_RE = /^#([\da-f])([\da-f])([\da-f])([\da-f])?$/i;
const HEX_RE = /^#([\da-f]{2})([\da-f]{2})([\da-f]{2})([\da-f]{2})?$/i;
const PERCENT_RE = /^rgba?\(\s*([+-]?[\d.]+)%\s*,\s*([+-]?[\d.]+)%\s*,\s*([+-]?[\d.]+)%\s*(?:,\s*([+-]?[\d.]+)\s*)?\)$/i;
const RGBA_RE = /^rgba?\(\s*(1?\d{1,2}|2[0-4]\d|25[0-5])\s*,\s*(1?\d{1,2}|2[0-4]\d|25[0-5])\s*,\s*(1?\d{1,2}|2[0-4]\d|25[0-5])\s*(?:,\s*([+-]?[\d.]+)\s*)?\)$/i;
/**
 * Reports whether `num` is an integer inside the inclusive RGB
 * channel range 0–255.
 */
function is0to255(num) {
    return Number.isInteger(num) && num >= 0 && num <= 255;
}
/**
 * Converts given string into RGB array
 *
 * Accepts CSS color names, "transparent", #RGB / #RGBA shorthand, #RRGGBB /
 * #RRGGBBAA hex, rgb()/rgba() with 0-255 components, and rgb()/rgba() with
 * percentage components. Any alpha component is parsed but discarded —
 * only [r, g, b] is returned. Throws TypeError on unrecognized input.
 *
 * @param {string} colorString - color string, like 'blue', "#FFF", "rgba(200, 60, 60, 0.3)", "rgb(200, 200, 200)", "rgb(0%, 0%, 100%)"
 */
function getRgb(colorString) {
    // short paths
    const string = colorString.trim();
    if (string in colorNames)
        return colorNames[string];
    if (/transparent/i.test(string))
        return [0, 0, 0];
    // we don't need to recheck values because they are enforced by regexes
    // #RGB shorthand: each nibble is doubled (e.g. "F" -> 0xFF).
    let match = ABBR_RE.exec(string);
    if (match) {
        return match.slice(1, 4).map(c => parseInt(c + c, 16));
    }
    if ((match = HEX_RE.exec(string))) {
        return match.slice(1, 4).map(v => parseInt(v, 16));
    }
    if ((match = RGBA_RE.exec(string))) {
        return match.slice(1, 4).map(c => parseInt(c, 10));
    }
    if ((match = PERCENT_RE.exec(string))) {
        // Percentages are scaled by 2.55 and rounded; the regex does not
        // bound them, so re-validate the scaled value is a legal channel.
        return match.slice(1, 4).map(c => {
            const r = Math.round(parseFloat(c) * 2.55);
            if (is0to255(r))
                return r;
            throw new TypeError(`Invalid color value "${colorString}": value ${c}% (${r}) is not between 0 and 255`);
        });
    }
    throw new TypeError(`Invalid color value "${colorString}": unknown format - must be something like 'blue', "#FFF", "rgba(200, 60, 60, 0.3)", "rgb(200, 200, 200)", "rgb(0%, 0%, 100%)"`);
}
/**
 * returns current value as [r,g,b] array, but stringifies to JSON as string 'rgb(r, g, b)'
 */
class PassColor extends Array {
    /**
     * @param {string|number[]} [v] - optional initial color; see set().
     */
    constructor(v) {
        super();
        if (v)
            this.set(v);
    }
    /**
     * Replaces the current color with `v`: either an [r,g,b] /
     * [r,g,b,a] array (alpha ignored) or any string getRgb() accepts.
     * Throws TypeError on invalid input. Returns `this` for chaining.
     */
    set(v) {
        this.length = 0;
        if (Array.isArray(v)) {
            if (v.length < 3 || v.length > 4)
                throw new TypeError(`RGB colors array must have length 3 or 4, received ${v.length}`);
            // copying first 3 numbers to our array
            // (n tracks v[i]; the update clause advances both together)
            for (let i = 0, n = v[i]; i < 3; n = v[++i]) {
                if (!is0to255(n))
                    throw new TypeError(`RGB colors array must consist only integers between 0 and 255, received ${JSON.stringify(v)}`);
                super.push(n);
            }
        }
        else if (typeof v === 'string') {
            super.push(...getRgb(v));
        }
        return this;
    }
    /**
     * Serializes as 'rgb(r, g, b)'; an unset/invalid color serializes
     * as undefined (i.e. the field is omitted from JSON output).
     */
    toJSON() {
        if (this.length !== 3)
            return undefined;
        return `rgb(${this.join(', ')})`;
    }
}
exports.PassColor = PassColor;
//# sourceMappingURL=pass-color.js.map
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

set -e
set -u

# Prepare a fresh build directory seeded from the default config, then
# append the CI build options in a single heredoc.
BUILD_DIR=$1
mkdir -p "$BUILD_DIR"
cd "$BUILD_DIR"
cp ../cmake/config.cmake .

cat >> config.cmake <<'EOF'
set(USE_CUBLAS ON)
set(USE_CUDNN ON)
set(USE_CUDA ON)
set(USE_VULKAN ON)
set(USE_OPENGL ON)
set(USE_MICRO ON)
set(USE_MICRO_STANDALONE_RUNTIME ON)
set(USE_LLVM "/usr/bin/llvm-config-9 --link-static")
set(USE_NNPACK ON)
set(NNPACK_PATH /NNPACK/build/)
set(USE_RPC ON)
set(USE_SORT ON)
set(USE_GRAPH_EXECUTOR ON)
set(USE_STACKVM_RUNTIME ON)
set(USE_PROFILER ON)
set(USE_ANTLR ON)
set(USE_VTA_FSIM ON)
set(USE_BLAS openblas)
set(CMAKE_CXX_FLAGS -Werror)
set(USE_TENSORRT_CODEGEN ON)
set(USE_LIBBACKTRACE AUTO)
set(USE_CCACHE OFF)
set(SUMMARIZE ON)
set(HIDE_PRIVATE_SYMBOLS ON)
EOF
|
import time
from db.redis_db import Cookies
from logger import crawler
from login import get_session
from db.dao import LoginInfoOper
from .workers import app
# @app.task registers this function as a Celery task.
# ignore_result: do not store the return value in the result backend.
# This task is a consumer: workers listening on the login queue execute it.
@app.task(ignore_result=True)
def login_task(name, password):
    # Log the account in and cache its session/cookies.
    get_session(name, password)
# This task acts as a producer: it fans out one login_task per account.
# There should be login interval, if too many accounts login at the same time from the same ip, all the
# accounts can be banned by weibo's anti-cheating system
@app.task(ignore_result=True)
def execute_login_task():
    # Fetch every Weibo account that needs to log in.
    infos = LoginInfoOper.get_login_info()
    # Clear all stacked login tasks before each time for login
    Cookies.check_login_task()
    crawler.info('The login task is starting...')
    for info in infos:
        # Send the account credentials as args to the login task.
        # queue: the broker queue this message is routed through;
        # routing_key: the key the broker uses to route the message.
        app.send_task('tasks.login.login_task', args=(info.name, info.password), queue='login_queue',
                      routing_key='for_login')
        # Space logins out (see note above about anti-cheating bans).
        time.sleep(10)
|
<gh_stars>0
package cn.crabapples.common;
import lombok.Getter;
/**
 * HTTP response status codes returned by the API layer.
 *
 * @author Mr.He
 * 8/29/20 11:50 PM
 * e-mail <EMAIL>
 * qq 294046317
 * pc-name root
 */
public enum ResponseCode {
    /** Request handled successfully. */
    SUCCESS(200),
    /** Internal server error. */
    ERROR(500),
    /** Authentication failed. */
    AUTH_FAIL(401);
    // Numeric HTTP status code carried by this constant (getter via Lombok).
    @Getter
    private final int code;
    ResponseCode(int code) {
        this.code = code;
    }
}
|
<gh_stars>0
import { ComplexBase } from '@syncfusion/ej2-react-base';
import { StockChartIndexesModel } from '@syncfusion/ej2-charts';
/**
 * `StockChartSelectedDataIndexDirective` represents one selected data point in the React StockChart.
 * It must be contained in a `StockChartSelectedDataIndexesDirective` inside a StockChart component.
 * ```tsx
 * <StockChartComponent>
 * <StockChartSelectedDataIndexesDirective>
 * <StockChartSelectedDataIndexDirective></StockChartSelectedDataIndexDirective>
 * </StockChartSelectedDataIndexesDirective>
 * </StockChartComponent>
 * ```
 */
export class StockChartSelectedDataIndexDirective extends ComplexBase<StockChartIndexesModel & { children?: React.ReactNode }, StockChartIndexesModel> {
    public static moduleName: string = 'stockChartSelectedDataIndex';
}
/**
 * `StockChartSelectedDataIndexesDirective` is the collection directive that holds
 * `StockChartSelectedDataIndexDirective` children; it maps to the chart's
 * `selectedDataIndexes` property.
 */
export class StockChartSelectedDataIndexesDirective extends ComplexBase<{}, {}> {
    public static propertyName: string = 'selectedDataIndexes';
    public static moduleName: string = 'stockChartSelectedDataIndexes';
}
|
import Vue from 'vue'
import Vuex from 'vuex'
import axios from 'axios'
import { Message } from 'element-ui'
Vue.use(Vuex)
// Vuex store holding a single `cat` object; `syncCat` loads it either from a
// Cordova-accessible data.json file or over HTTP, depending on the build.
export default new Vuex.Store({
  // Forbid state mutation outside mutation handlers during development.
  strict: process.env.NODE_ENV !== 'production',
  state: {
    cat: {
      name: ''
    }
  },
  getters: {
    getCatName: state => {
      return state.cat.name
    }
  },
  mutations: {
    // Shallow-merge the payload into the existing cat object.
    UPDATE_CAT (state, payload) {
      state.cat = Object.assign({}, state.cat, payload)
    }
  },
  actions: {
    // Load data.json and commit its contents into `cat`.
    // Cordova path: resolve the external root directory, open the file, read
    // it as text; each nesting level has its own error callback.
    // Web path: plain HTTP GET relative to the app root.
    syncCat (context) {
      let dataFile = 'data.json'
      if (RUN_CORDOVA) { // eslint-disable-line
        let dataDirectory = cordova.file.externalRootDirectory // eslint-disable-line
        // NOTE(review): `self` here is assumed to be the global scope
        // (window/worker) providing resolveLocalFileSystemURL — confirm.
        self.resolveLocalFileSystemURL(dataDirectory, root => {
          root.getFile(dataFile, { create: false }, fileEntry => {
            fileEntry.file(file => {
              var reader = new FileReader()
              reader.onloadend = event => {
                context.commit('UPDATE_CAT', JSON.parse(reader.result))
              }
              reader.readAsText(file)
            }, error => {
              Message.error('read ' + dataFile + ' file fail')
              console.log(error)
            })
          }, error => {
            Message.error('can not find ' + dataFile + ' file')
            console.log(error)
          })
        }, error => {
          Message.error('can not find ' + dataDirectory + ' folder')
          console.log(error)
        })
      } else {
        axios.get('./' + dataFile)
          .then(function (response) {
            context.commit('UPDATE_CAT', response.data)
          })
          .catch(function (error) {
            console.log(error)
          })
      }
    }
  }
})
|
<gh_stars>0
// Landing-page UI behaviors: sticky header, navbar collapse, submenu toggle,
// WOW animations and an animated back-to-top scroll. All DOM-driven.
(function () {
  "use strict";
  // ======= Sticky header: toggled on every scroll event.
  window.onscroll = function () {
    const ud_header = document.querySelector(".ud-header");
    const sticky = ud_header.offsetTop;
    const logo = document.querySelector(".navbar-brand img");
    if (window.pageYOffset > sticky) {
      ud_header.classList.add("sticky");
    } else {
      ud_header.classList.remove("sticky");
    }
    // === logo change: dark logo when sticky, white otherwise.
    if (ud_header.classList.contains("sticky")) {
      logo.src = "assets/images/logo/logo_branding.svg";
    } else {
      logo.src = "assets/images/logo/logo_branding_white.svg";
    }
    // show or hide the back-to-top button past 50px of scroll
    const backToTop = document.querySelector(".back-to-top");
    if (
      document.body.scrollTop > 50 ||
      document.documentElement.scrollTop > 50
    ) {
      backToTop.style.display = "flex";
    } else {
      backToTop.style.display = "none";
    }
  };
  //===== close navbar-collapse when a menu link is clicked
  let navbarToggler = document.querySelector(".navbar-toggler");
  const navbarCollapse = document.querySelector(".navbar-collapse");
  document.querySelectorAll(".ud-menu-scroll").forEach((e) =>
    e.addEventListener("click", () => {
      navbarToggler.classList.remove("active");
      navbarCollapse.classList.remove("show");
    })
  );
  navbarToggler.addEventListener("click", function () {
    navbarToggler.classList.toggle("active");
    navbarCollapse.classList.toggle("show");
  });
  // ===== submenu: clicking a parent item toggles its dropdown
  const submenuButton = document.querySelectorAll(".nav-item-has-children");
  submenuButton.forEach((elem) => {
    elem.querySelector("a").addEventListener("click", () => {
      elem.querySelector(".ud-submenu").classList.toggle("show");
    });
  });
  // ===== wow js (scroll-reveal animations; WOW is a global from wow.js)
  new WOW().init();
  // ====== scroll top js: animate element.scrollTop toward `to` in `duration` ms
  function scrollTo(element, to = 0, duration = 500) {
    const start = element.scrollTop;
    const change = to - start;
    const increment = 20; // ms between animation steps
    let currentTime = 0;
    const animateScroll = () => {
      currentTime += increment;
      const val = Math.easeInOutQuad(currentTime, start, change, duration);
      element.scrollTop = val;
      if (currentTime < duration) {
        setTimeout(animateScroll, increment);
      }
    };
    animateScroll();
  }
  // Quadratic ease-in/out; NOTE(review): this monkey-patches the global Math
  // object — consider a local helper instead.
  Math.easeInOutQuad = function (t, b, c, d) {
    t /= d / 2;
    if (t < 1) return (c / 2) * t * t + b;
    t--;
    return (-c / 2) * (t * (t - 2) - 1) + b;
  };
  document.querySelector(".back-to-top").onclick = () => {
    scrollTo(document.documentElement);
  };
})();
|
<gh_stars>1-10
import unittest
import astropy.units as q
import numpy as np
from .. import uncertainties as un
class TestUnum(unittest.TestCase):
    """Tests for the Unum class"""
    def setUp(self):
        """Setup the tests"""
        # Symmetry
        self.sym = un.Unum(10.1, 0.2)
        self.asym = un.Unum(9.3, 0.08, 0.11)
        self.sym_u = un.Unum(12 * q.um, 0.1 * q.um)
        # Data structures
        self.u1 = un.Unum(10 * q.um, 1 * q.um, n_samples=200)
        self.u2 = un.Unum(10, 1, n_samples=20)
        self.a1 = un.UArray(np.ones(3) * q.um, np.abs(np.random.normal(size=3)) * q.um, n_samples=1000)
        self.a2 = un.UArray(np.ones(3), np.abs(np.random.normal(size=3)), n_samples=1000)
        self.i1 = 5 * q.um
        self.i2 = 5
        self.s1 = np.array([1, 2, 3]) * q.um
        self.s2 = np.array([1, 2, 3])
    def test_attrs(self):
        """Test attributes"""
        x = self.sym
        x.value
        x.quantiles
    def test_add(self):
        """Test add method"""
        # Equivalent units
        x = self.sym + self.asym
        # Not equivalent: must raise. The original try/except-pass passed
        # silently even when no TypeError was raised; assertRaises enforces it.
        with self.assertRaises(TypeError):
            x = self.sym + self.sym_u
        x = self.u1 + self.u1
        x = self.u1 + self.i1
        x = self.u2 + self.u2
        x = self.u2 + self.i2
        x = self.a1 + self.u1
        x = self.a1 + self.i1
        x = self.a1 + self.s1
        x = self.a2 + self.u2
        x = self.a2 + self.i2
        x = self.a2 + self.s2
    def test_mul(self):
        """Test mul method"""
        x = self.sym * self.asym
        x = self.u1 * self.u1
        x = self.u1 * self.i1
        x = self.u1 * self.i2
        x = self.u2 * self.u2
        x = self.u2 * self.i1
        x = self.u2 * self.i2
        x = self.a1 * self.u1
        x = self.a1 * self.u2
        x = self.a1 * self.i1
        x = self.a1 * self.i2
        x = self.a1 * self.s1
        x = self.a1 * self.s2
        x = self.a1 * self.a1
        x = self.a1 * self.a2
        x = self.a2 * self.u1
        x = self.a2 * self.u2
        x = self.a2 * self.i1
        x = self.a2 * self.i2
        x = self.a2 * self.s1
        x = self.a2 * self.s2
        x = self.a2 * self.a1
        x = self.a2 * self.a2
    def test_sub(self):
        """Test sub method"""
        # Equivalent units
        x = self.sym - self.asym
        # Not equivalent: must raise (see test_add for rationale)
        with self.assertRaises(TypeError):
            x = self.sym - self.sym_u
        x = self.u1 - self.u1
        x = self.u1 - self.i1
        x = self.u2 - self.u2
        x = self.u2 - self.i2
        x = self.a1 - self.u1
        x = self.a1 - self.i1
        x = self.a1 - self.s1
        x = self.a2 - self.u2
        x = self.a2 - self.i2
        x = self.a2 - self.s2
    def test_pow(self):
        """Test pow method"""
        x = self.sym ** 2
        x = self.u1 ** 2
        x = self.u2 ** 2
        x = self.a1 ** 2
        x = self.a2 ** 2
    def test_truediv(self):
        """Test truediv method"""
        x = self.sym / self.asym
        x = self.u1 / self.u1
        x = self.u1 / self.i1
        x = self.u1 / self.i2
        x = self.u2 / self.u2
        x = self.u2 / self.i1
        x = self.u2 / self.i2
        x = self.a1 / self.u1
        x = self.a1 / self.u2
        x = self.a1 / self.i1
        x = self.a1 / self.i2
        x = self.a1 / self.s1
        x = self.a1 / self.s2
        x = self.a1 / self.a1
        x = self.a1 / self.a2
        x = self.a2 / self.u1
        x = self.a2 / self.u2
        x = self.a2 / self.i1
        x = self.a2 / self.i2
        x = self.a2 / self.s1
        x = self.a2 / self.s2
        x = self.a2 / self.a1
        x = self.a2 / self.a2
    def test_floordiv(self):
        """Test floordiv method"""
        # Equivalent units
        x = self.sym // self.asym
        # Not equivalent: must raise (see test_add for rationale)
        with self.assertRaises(TypeError):
            x = self.sym // self.sym_u
        x = self.u1 // self.u1
        x = self.u1 // self.i1
        x = self.u2 // self.u2
        x = self.u2 // self.i2
        x = self.a1 // self.u1
        x = self.a1 // self.i1
        x = self.a1 // self.s1
        x = self.a1 // self.a1
        x = self.a2 // self.u2
        x = self.a2 // self.i2
        x = self.a2 // self.s2
        x = self.a2 // self.a2
    def test_log10(self):
        """Test log10 method"""
        x = self.u2.log10()
        x = self.a2.log10()
    def test_polyval(self):
        """Test polyval method"""
        coeffs = [1, 2, 3]
        x = self.sym.polyval(coeffs)
        x = self.u1.polyval([1, 2, 3])
        x = self.u2.polyval([1, 2, 3])
        x = self.a1.polyval([1, 2, 3])
        x = self.a2.polyval([1, 2, 3])
    def test_plot(self):
        """Test plot method"""
        x = self.sym
        x.plot()
    def test_sample_from_errors(self):
        """Test the sample_from_errors method"""
        # Test symmetric error case
        x = self.sym
        x.sample_from_errors()
        x.sample_from_errors(low_lim=0, up_lim=100)
        # Test asymmetric error case
        y = self.asym
        y.sample_from_errors()
        y.sample_from_errors(low_lim=0, up_lim=100)
|
package ru.autometry.obd.commands.adaptor.honda;
import ru.autometry.obd.commands.Answer;
import ru.autometry.obd.commands.adaptor.FormulaAdaptor;
import ru.autometry.obd.exception.OBDException;
/**
 * Adaptor converting a raw Honda TPS (throttle position sensor) reading into a
 * throttle-opening percentage via a lookup table.
 *
 * Created by jeck on 14/08/14
 */
public class TPSAdaptor extends FormulaAdaptor {
    // Lookup table: percent[i] is the throttle percentage for offset raw value i
    // (values step by ~0.392%, i.e. 100/255 per raw count).
    public static double[] percent = {
            0.00, 0.39, 0.78, 1.18, 1.57, 1.96, 2.35, 2.75, 3.14,
            3.53, 3.92, 4.31, 4.71, 5.10, 5.49, 5.88, 6.27, 6.67, 7.06, 7.45, 7.84,
            8.24, 8.63, 9.02, 9.41, 9.80, 10.20, 10.59, 10.98, 11.37, 11.76, 12.16,
            12.55, 12.94, 13.33, 13.73, 14.12, 14.51, 14.90, 15.29, 15.69, 16.08, 16.47,
            16.86, 17.25, 17.65, 18.04, 18.43, 18.82, 19.22, 19.61, 20.00, 20.39, 20.78,
            21.18, 21.57, 21.96, 22.35, 22.75, 23.14, 23.53, 23.92, 24.31, 24.71, 25.10,
            25.49, 25.88, 26.27, 26.67, 27.06, 27.45, 27.84, 28.24, 28.63, 29.02, 29.41,
            29.80, 30.20, 30.59, 30.98, 31.37, 31.76, 32.16, 32.55, 32.94, 33.33, 33.73,
            34.12, 34.51, 34.90, 35.29, 35.69, 36.08, 36.47, 36.86, 37.25, 37.65, 38.04,
            38.43, 38.82, 39.22, 39.61, 40.00, 40.39, 40.78, 41.18, 41.57, 41.96, 42.35,
            42.75, 43.14, 43.53, 43.92, 44.31, 44.71, 45.10, 45.49, 45.88, 46.27, 46.67,
            47.06, 47.45, 47.84, 48.24, 48.63, 49.02, 49.41, 49.80, 50.20, 50.59, 50.98,
            51.37, 51.76, 52.16, 52.55, 52.94, 53.33, 53.73, 54.12, 54.51, 54.90, 55.29,
            55.69, 56.08, 56.47, 56.86, 57.25, 57.65, 58.04, 58.43, 58.82, 59.22, 59.61,
            60.00, 60.39, 60.78, 61.18, 61.57, 61.96, 62.35, 62.75, 63.14, 63.53, 63.92,
            64.31, 64.71, 65.10, 65.49, 65.88, 66.27, 66.67, 67.06, 67.45, 67.84, 68.24,
            68.63, 69.02, 69.41, 69.80, 70.20, 70.59, 70.98, 71.37, 71.76, 72.16, 72.55,
            72.94, 73.33, 73.73, 74.12, 74.51, 74.90, 75.29, 75.69, 76.08, 76.47, 76.86,
            77.25, 77.65, 78.04, 78.43, 78.82, 79.22, 79.61, 80.00, 80.39, 80.78, 81.18,
            81.57, 81.96, 82.35, 82.75, 83.14, 83.53, 83.92, 84.31, 84.71, 85.10, 85.49,
            85.88, 86.27, 86.67, 87.06, 87.45, 87.84, 88.24, 88.63, 89.02, 89.41, 89.80,
            90.20, 90.59, 90.98, 91.37, 91.76, 92.16, 92.55, 92.94, 93.33, 93.73, 94.12,
            94.51, 94.90, 95.29, 95.69, 96.08, 96.47, 96.86, 97.25, 97.65, 98.04, 98.43,
            98.82, 99.22, 99.61, 100.00};
    @Override
    protected Object adapt(Integer value, Answer response) throws OBDException {
        // Offset of 25 presumably matches the Honda TPS idle raw value — TODO confirm.
        int tvalue = value - 25;
        // Raw values below the offset map to a negative percentage.
        // NOTE(review): tvalue > 254 (raw value > 279) would index out of
        // bounds; assumes the raw reading stays within one byte minus offset — verify.
        return tvalue > 0 ? percent[tvalue] : -percent[-tvalue];
    }
}
|
#!/bin/sh
# Run the YCSB load phase against the DKVF driver using the COPS client;
# stdout/stderr go to per-client result/error files under exp_results/exp_errors.
java -cp libs/*:cops-client.jar com.yahoo.ycsb.Client -load -db edu.msu.cse.dkvf.ycsbDriver.DKVFDriver -P properties.txt -p clientClassName=edu.msu.cse.cops.client.COPSClient -p clientConfigFile=client_config > exp_results/client_Client_2_original_load.result 2> exp_errors/client_Client_2_original_load.error
|
<filename>src/adapters/gateways/reports/repository.go
package reports
import (
"time"
"github.com/koki-develop/qiita-lgtm-ranking/src/adapters/gateways"
"github.com/koki-develop/qiita-lgtm-ranking/src/entities"
"github.com/pkg/errors"
)
// Repository publishes generated LGTM-ranking reports to Qiita.
type Repository struct {
	config *Config
}
// Config wires the external gateways a Repository depends on.
type Config struct {
	QiitaAPI      gateways.QiitaAPI
	ReportBuilder gateways.ReportBuilder
}
// New returns a Repository backed by the given gateway configuration.
func New(cfg *Config) *Repository {
	repo := &Repository{config: cfg}
	return repo
}
// UpdateDaily builds the daily report from items and publishes it to the
// Qiita item identified by id.
func (repo *Repository) UpdateDaily(from time.Time, id string, items entities.Items) error {
	report, err := repo.config.ReportBuilder.Daily(from, items)
	if err != nil {
		return errors.WithStack(err)
	}
	if err = repo.update(id, report); err != nil {
		return errors.WithStack(err)
	}
	return nil
}
// UpdateDailyByTag builds the tag-scoped daily report from items and
// publishes it to the Qiita item identified by id.
func (repo *Repository) UpdateDailyByTag(from time.Time, id string, items entities.Items, tag string) error {
	report, err := repo.config.ReportBuilder.DailyByTag(from, items, tag)
	if err != nil {
		return errors.WithStack(err)
	}
	if err = repo.update(id, report); err != nil {
		return errors.WithStack(err)
	}
	return nil
}
// UpdateWeekly builds the weekly report from items and publishes it to the
// Qiita item identified by id.
func (repo *Repository) UpdateWeekly(from time.Time, id string, items entities.Items) error {
	report, err := repo.config.ReportBuilder.Weekly(from, items)
	if err != nil {
		return errors.WithStack(err)
	}
	if err = repo.update(id, report); err != nil {
		return errors.WithStack(err)
	}
	return nil
}
// UpdateWeeklyByTag builds the tag-scoped weekly report from items and
// publishes it to the Qiita item identified by id.
func (repo *Repository) UpdateWeeklyByTag(from time.Time, id string, items entities.Items, tag string) error {
	report, err := repo.config.ReportBuilder.WeeklyByTag(from, items, tag)
	if err != nil {
		return errors.WithStack(err)
	}
	if err = repo.update(id, report); err != nil {
		return errors.WithStack(err)
	}
	return nil
}
// update pushes the rendered report to the Qiita item with the given id.
func (repo *Repository) update(id string, rpt *entities.Report) error {
	err := repo.config.QiitaAPI.UpdateItem(id, rpt.Title, rpt.Body, rpt.Tags)
	if err != nil {
		return errors.WithStack(err)
	}
	return nil
}
|
<reponame>amitse/fluentui-react-native
export * from './FocusTrapZoneTest';
|
def evaluate_polynomial(polynomial, x):
    """Evaluate a polynomial given as a string, e.g. "4x^3 + 7x + 2", at x.

    Terms are separated by "+" and may be of the form "ax^n", "ax", "x^n",
    "x" or a plain integer constant. The original implementation unpacked
    term.split("x^") into exactly two parts, which raised ValueError for
    linear ("7x") and constant ("2") terms — including its own sample input.
    """
    result = 0
    for term in polynomial.split("+"):
        term = term.strip()
        if not term:
            continue
        if "x" in term:
            coeff_str, _, exp_str = term.partition("x")
            # Missing coefficient (e.g. "x^2") means 1.
            coefficient = int(coeff_str) if coeff_str.strip() else 1
            # "x^n" carries an explicit exponent; bare "x" means exponent 1.
            exponent = int(exp_str[1:]) if exp_str.startswith("^") else 1
        else:
            # Constant term.
            coefficient = int(term)
            exponent = 0
        result += coefficient * (x ** exponent)
    return result
# Demo: evaluate 4x^3 + 7x + 2 at x = 6.
polynomial = "4x^3 + 7x + 2"
x = 6
print(evaluate_polynomial(polynomial, x))
|
<reponame>IzaacBaptista/ads-senac
package ExerciciosExtras;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
// Console hangman ("forca") game: a random word is picked from a fixed list
// and the player guesses letters until the word is revealed or the stick
// figure is completed after 5 wrong guesses.
public class Forca {
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        // Candidate words (Portuguese).
        List<String> list = new ArrayList<>();
        list.add("canastra");
        list.add("xadrez");
        list.add("imã");
        list.add("coentro");
        list.add("eleitor");
        list.add("janela");
        list.add("curso");
        list.add("constitucional");
        list.add("programa");
        list.add("react");
        // Pick a random index in [0, 9].
        double random = Math.random();
        int value = (int)(random * 10);
        String palavraOculta = list.get(value);
        // Current guess state: one '*' per still-hidden letter.
        String palavraTentativa = "";
        for(int i=0;i<palavraOculta.length();i++){
            palavraTentativa += "*";
        }
        boolean enforcado = false;
        int membros = 0; // number of wrong guesses / body parts drawn
        do{
            System.out.println(palavraTentativa);
            System.out.println("Digite a letra: ");
            String letra = scanner.next();
            char letraChar = letra.charAt(0);
            // Rebuild the guess state, revealing matches of the typed letter.
            String resposta = "";
            boolean existe = false;
            for(int j=0; j<palavraOculta.length();j++){
                if(letraChar == palavraOculta.charAt(j)){
                    existe = true;
                    resposta += letraChar;
                } else {
                    resposta += palavraTentativa.charAt(j);
                }
            }
            palavraTentativa = resposta;
            if(!existe){
                // Wrong guess: draw the next body part; the 5th ends the game.
                System.out.println("Você errou!");
                switch(membros){
                    case 0:
                        System.out.println("xx Cabeça xx \n");
                        break;
                    case 1:
                        System.out.println("xx Tronco xx \n");
                        break;
                    case 2:
                        System.out.println("xx Braço xx \n");
                        break;
                    case 3:
                        System.out.println("xx Perna xx \n");
                        break;
                    case 4:
                        System.out.println("xx Pé xx \n");
                        enforcado = true;
                        break;
                }
                membros++;
            }
            // Win: every letter revealed.
            if(palavraTentativa.equals(palavraOculta)){
                System.out.println(palavraTentativa);
                System.out.println("Parabéns! Você é o bichão!");
                break;
            }
        }while(!enforcado);
        if(enforcado){
            System.out.println("Perdeu! A palavra era: " + palavraOculta);
        }
        scanner.close();
    }
}
|
/* These are what my variables do:
1. container grabs my container class so I can store time blocks in it later.
2. hourArray stores times that loop over the time blocks in correct order.
3. currentDay is an empty <p> that gets stuffed with the Day, Month and Year.
4a. dataHour is a counter that corresponds with 9am-5pm
4b. for the code however, it corresponds with 9 - 17 due to military time.
5a. militaryTimeHour is a military number (0-23) supplied by dayjs for 24hrs.
5b. data-hour attributes 9-5pm reflect military time to compare time of day.
5c. if time block is before, during or after the hour, block color will change.
6. textInputArr is an array with a pretty interesting lifecycle:
- starts at [] because user hasn't added anything yet!
- "" gets pushed to the empty array for each item (hour) in the hourArray
- when saveBtn[i] is clicked, corresponding textInputArr[i] gets stored new value of corresponding textarea[i].value;
- local storage gets updated with a stringified textInputArr for each click as well.
- on page load, if local storage isn't null, parse the JSON key 'textInputArr'
- then loop over each textInputArray[i] item to store into textarea[i].value again!
- otherwise, make sure the textInputArr is still an array;
7. clearBtn is created on the HTML. It will clear localStorage, empty array + empty textarea value
*/
// Wrapper element that receives the generated time blocks.
let container = document.querySelector('.container');
// Per-hour user text; lifecycle described in the header comment above.
let textInputArr;
let hourArray = ["9am", "10am", "11am", "12pm", "1pm", "2pm", "3pm", "4pm", "5pm"];
let clearBtn = document.getElementById('clearBtn');
let currentDay = document.getElementById('currentDay');
// First data-hour value (9 == 9am in 24-hour time); incremented per block.
let dataHour = 9;
// Current hour of day in 24-hour time, supplied by day.js.
let militaryTimeHour = dayjs().get('hour');
currentDay.textContent = dayjs().format('dddd'+ ', ' + 'MMMM' + ' ' + 'DD');
// When a save button is clicked, store its corresponding textarea value into
// the matching textInputArr index, then serialize the array to localStorage.
// Also seeds textInputArr with one empty entry per hour so indexed writes
// always land inside the array.
function setLocalTextareas() {
    let button = document.querySelectorAll('.saveBtn');
    let textarea = document.querySelectorAll('textarea');
    // Guard: this runs before getLocalTextareas(), so textInputArr may still
    // be undefined here (calling .push() on it would throw).
    if (!Array.isArray(textInputArr)) {
        textInputArr = [];
    }
    // BUG FIX: the original loop condition was `i < hourArray` (an array),
    // which is never true, so the seeding loop never executed.
    while (textInputArr.length < hourArray.length) {
        textInputArr.push("");
    }
    for (let i = 0; i < button.length; i++) {
        button[i].addEventListener("click", function(event) {
            textInputArr[i] = textarea[i].value;
            localStorage.setItem("textInputArr", JSON.stringify(textInputArr));
        });
    };
}
// Restore saved inputs: deserialize textInputArr from localStorage when
// present and copy each saved entry back into its textarea; otherwise start
// with an empty array.
function getLocalTextareas() {
    const saved = localStorage.getItem('textInputArr');
    if (saved === null) {
        textInputArr = [];
        return;
    }
    textInputArr = JSON.parse(saved);
    const textarea = document.querySelectorAll('textarea');
    for (let i = 0; i < textInputArr.length; i++) {
        textarea[i].value = textInputArr[i];
    }
}
// Creates the entire set of hourly time blocks (one per entry in hourArray),
// then applies hour coloring and wires up localStorage persistence.
function renderTimeBlock() {
    for (let i = 0; i < hourArray.length; i++) {
        // One row: hour label | textarea form | save button.
        let timeBlock = document.createElement('div');
        let hourDiv = document.createElement('div');
        let hourP = document.createElement('p');
        let formDiv = document.createElement('form');
        let planLabel = document.createElement('label');
        let planTextArea = document.createElement('textarea');
        let saveDiv = document.createElement('div');
        let saveBtn = document.createElement('button');
        let saveIcon = document.createElement('i');
        timeBlock.classList.add('time-block');
        hourDiv.classList.add('hour');
        hourP.setAttribute('data-index', i);
        hourP.textContent = hourArray[i];
        formDiv.classList.add('plan-input');
        // data-hour carries the 24-hour value used by pastPresentFuture().
        planLabel.setAttribute('for', 'hour-' + dataHour);
        planTextArea.setAttribute('data-hour', dataHour);
        planTextArea.setAttribute('data-index', i);
        planTextArea.setAttribute('id', 'hour-' + dataHour);
        planTextArea.setAttribute('aria-labelledby', 'hour-' + dataHour);
        planTextArea.classList.add('text-input');
        planTextArea.setAttribute('placeholder', 'Your text here...');
        saveDiv.classList.add('save');
        saveBtn.setAttribute('data-index', i);
        saveBtn.setAttribute('aria-label', 'Save Input');
        saveBtn.classList.add('saveBtn');
        saveIcon.classList.add('far', 'fa-save', 'save-icon');
        container.append(timeBlock);
        hourDiv.append(hourP);
        timeBlock.append(hourDiv);
        formDiv.append(planLabel);
        planLabel.append(planTextArea);
        timeBlock.append(formDiv);
        saveBtn.append(saveIcon);
        saveDiv.append(saveBtn);
        timeBlock.append(saveDiv);
        // Advance to the next 24-hour value (9..17).
        dataHour++;
    };
    pastPresentFuture();
    setLocalTextareas();
    getLocalTextareas();
};
renderTimeBlock()
// Color each hour block relative to the current hour: 'past' (grey) before
// it, 'present' (red) during it, 'future' (green) after it.
function pastPresentFuture() {
    document.querySelectorAll('textarea').forEach(function(area) {
        const blockHour = parseInt(area.dataset.hour);
        let state;
        if (blockHour < militaryTimeHour) {
            state = 'past';
        } else if (blockHour === militaryTimeHour) {
            state = 'present';
        } else {
            state = 'future';
        }
        area.classList.add(state);
    });
};
// Reset the application: wipe localStorage, the in-memory array, and every
// textarea value. Placeholder text still guides the user afterwards.
clearBtn.addEventListener('click', function() {
    localStorage.clear();
    textInputArr = [];
    document.querySelectorAll('textarea').forEach(function(area) {
        area.value = '';
    });
})
|
<reponame>eusholli/image-pipeline
import traceback
import io
import numpy as np
import sys
import time
import cv2
import datetime
import jsonpickle
import argparse
import asyncio
import os
from common.imagebusutil import FrameDetails, ImagebusTopic # noqa
from common.natsclient import create, subscribe_handler # noqa
# Pre-trained OpenCV DNN face detector (ResNet-10 SSD in Caffe format); the
# model/config files are expected to sit next to this script.
modelFile = os.path.join(
    sys.path[0], 'res10_300x300_ssd_iter_140000.caffemodel')
configFile = os.path.join(sys.path[0], 'deploy.prototxt.txt')
net = cv2.dnn.readNetFromCaffe(configFile, modelFile)
# Minimum detection confidence for a face box to be kept.
threshold = 0.4
async def analyzeImages(nc, frameDetails, parent):
    """
    Run face detection on one incoming frame and publish an annotated child
    frame (boxes + confidence labels) to frameDetails.topic via NATS.
    """
    print("Start analyzing images...")
    try:
        # NOTE(review): frameReference/totalAnalysisTime look like leftovers
        # from a per-stream loop — they are reset on every call, so
        # frameReference is always 1 and the "average" equals detectionTime.
        frameReference = 0
        totalAnalysisTime = 0
        frameReference += 1
        byteStream = io.BytesIO(parent.image)
        originalTime = parent.dateTime
        # Decode the incoming JPEG/PNG bytes into an OpenCV image.
        image = np.asarray(bytearray(byteStream.read()), dtype="uint8")
        image = cv2.imdecode(image, cv2.IMREAD_UNCHANGED)
        beforeDetection = time.process_time()
        # imageai_frame, detection = detector.detectObjectsFromImage(
        #     input_image=image, input_type="array", output_type="array"
        # )
        h, w = image.shape[:2]
        # SSD input is 300x300; the tuple holds the mean-subtraction values.
        blob = cv2.dnn.blobFromImage(cv2.resize(image, (300, 300)), 1.0,
                                     (300, 300), (104.0, 117.0, 123.0))
        net.setInput(blob)
        faces = net.forward()
        detectionTime = time.process_time() - beforeDetection
        totalAnalysisTime += detectionTime
        imageTime = datetime.datetime.now()
        print("--------------------------------")
        identified_objects = []
        for i in range(faces.shape[2]):
            confidence = faces[0, 0, i, 2]
            if confidence > threshold:
                # Detector outputs normalized coordinates; scale to pixels.
                box = faces[0, 0, i, 3:7] * np.array([w, h, w, h])
                (x, y, x1, y1) = box.astype("int")
                box = [x, y, x1, y1]
                print(
                    "face",
                    " : ",
                    confidence,
                    " : ",
                    box,
                )
                identified_objects.append(
                    {
                        "name": "face",
                        "percentage_probability": round(confidence*100, 2),
                        "position": [
                            int(box[0]),
                            int(box[1]),
                            int(box[2]),
                            int(box[3]),
                        ],
                    }
                )
                # Draw the box and its confidence label onto the frame.
                cv2.rectangle(image, (x, y), (x1, y1), (0, 255, 0), 4)
                annotate = str(round(confidence*100, 2))
                cv2.putText(image, annotate, (box[0]+10, box[1]+20),
                            cv2.FONT_HERSHEY_COMPLEX, 0.5, (0, 255, 0), 2)
        print("--------------------------------\n\r")
        # Convert image to png
        ret, buffer = cv2.imencode(".jpg", image)
        frameDetails.setChildFrame(
            frameReference,
            buffer.tobytes(),
            identified_objects,
            parent,
            round(detectionTime, 2),
            round(totalAnalysisTime / frameReference, 2),
        )
        await nc.publish(frameDetails.topic, jsonpickle.encode(frameDetails).encode("utf-8"))
        await asyncio.sleep(.01)
    except Exception as e:
        # NOTE(review): catch-all only prints the traceback; the process does
        # not actually exit despite the message.
        traceback.print_exc()
        print("\nExiting.")
async def initiate(loop):
    """
    Parse CLI options, connect to NATS, and subscribe to the source-frame
    topic, running face detection on each received frame and republishing
    the annotated result.
    """
    parser = argparse.ArgumentParser(
        prog="dnnFaceProcessor",
        description="start image recognition on incoming frames",
    )
    parser.add_argument(
        "-t",
        "--topic",
        default=ImagebusTopic.IMAGEAI_FRAME.name,
        help="set the topic name for publishing the feed, defaults to "
        + ImagebusTopic.IMAGEAI_FRAME.name,
    )
    parser.add_argument(
        "-i",
        "--input",
        default=ImagebusTopic.SOURCE_FRAME.name,
        help="set the topic name for reading the incoming feed, defaults to "
        + ImagebusTopic.SOURCE_FRAME.name,
    )
    parser.add_argument(
        "-n",
        "--name",
        help='set the display name of this object detection process, defaults to "dnnFaceProcessor" if missing',
        default="dnnFaceProcessor",
    )
    args = parser.parse_args()
    nc = await create(loop)
    frameDetails = FrameDetails(name=args.name, topic=args.topic)
    async def receive_original(msg):
        # NATS callback: decode the published FrameDetails and analyze it.
        global original_frameDetails
        print("receive_original")
        data = msg.data.decode("utf-8")
        frame = jsonpickle.decode(data)
        await analyzeImages(nc, frameDetails, frame)
    # NOTE(review): subscribes to the hard-coded SOURCE_FRAME topic rather
    # than args.input — the --input option is currently unused.
    await nc.subscribe(ImagebusTopic.SOURCE_FRAME.name, cb=receive_original)
if __name__ == "__main__":
    # Set up the subscription, then keep the event loop alive indefinitely.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(initiate(loop))
    loop.run_forever()
|
<reponame>blacelle/pepper<filename>java/src/main/java/cormoran/pepper/metrics/TaskStartEvent.java<gh_stars>0
/**
* The MIT License
* Copyright (c) 2014 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package cormoran.pepper.metrics;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.LongSupplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Joiner;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import cormoran.pepper.thread.CurrentThreadStack;
/**
 * A {@link TaskStartEvent} can be used to time the duration of some task. Typically posted into an
 * {@link EventBus}.
 *
 * @author <NAME>
 *
 */
public class TaskStartEvent implements ITaskActivityEvent {
protected static final Logger LOGGER = LoggerFactory.getLogger(TaskStartEvent.class);
public static final String KEY_USERNAME = "UserName";
public static final String KEY_SOURCE_ID = "SourceId";
@Deprecated
public static final String KEY_PIVOT_ID = KEY_SOURCE_ID;
public static final String KEY_ROOT_SOURCE = "RootSource";
/**
* Could be Excel, Live, Distributed, or anything else like the name of feature executing queries
*/
public static final String KEY_CLIENT = "Client";
// Xmla is typically used by Excel
public static final String VALUE_CLIENT_XMLA = "XMLA";
// Streaming is typically used by Live
public static final String VALUE_CLIENT_STREAMING = "Streaming";
// As retrieving the stack could be expensive, this boolean has to be set to
// true manually or with SetStaticMBean
private static boolean doRememberStack = false;
// This UUID is constant through the whole application lifecycle. It can be
// used to seggregate events from different application runs
public static final String INSTANCE_UUID = UUID.randomUUID().toString();
private static final AtomicLong EVENT_INCREMENTER = new AtomicLong();
// This id is unique amongst a given INSTANCE_UUID
public final long eventId = EVENT_INCREMENTER.getAndIncrement();
/**
* Ability to retrieve all encountered source classes, and then monitor available events
*/
protected static final Set<Class<?>> SOURCE_CLASSES = Sets.newConcurrentHashSet();
public final Object source;
public final List<? extends String> names;
public static void setDoRememberStack(boolean doRememberStack) {
TaskStartEvent.doRememberStack = doRememberStack;
}
// Remember the stack could be much helpful
public final Optional<StackTraceElement[]> stack;
protected final String startThread = Thread.currentThread().getName();
public final long startTime = System.currentTimeMillis();
protected final Map<String, ?> startDetails;
protected final Map<String, Object> endDetails;
/**
* Filled on EndMetricEvent construction
*/
final AtomicReference<TaskEndEvent> endMetricEvent = new AtomicReference<>();
protected final LongSupplier progress;
private static final LongSupplier NO_PROGRESS = () -> -1L;
// By default, we have no result size
// protected long resultSize = -1;
private static Optional<StackTraceElement[]> fastCurrentStackIfRemembering() {
if (doRememberStack) {
return Optional.ofNullable(fastCurrentStack());
} else {
return Optional.empty();
}
}
public static Set<Class<?>> getEncounteredSourceClasses() {
return ImmutableSet.copyOf(SOURCE_CLASSES);
}
public TaskStartEvent(Object source, String firstName, String... otherNames) {
this(source, Collections.emptyMap(), NO_PROGRESS, Lists.asList(firstName, otherNames));
}
public TaskStartEvent(Object source,
Map<String, ?> details,
LongSupplier progress,
String firstName,
String... otherNames) {
this(source, details, progress, Lists.asList(firstName, otherNames), fastCurrentStackIfRemembering());
}
public TaskStartEvent(Object source, Map<String, ?> details, LongSupplier progress, List<? extends String> names) {
this(source, details, progress, names, fastCurrentStackIfRemembering());
}
protected TaskStartEvent(Object source,
Map<String, ?> details,
LongSupplier progress,
List<? extends String> names,
Optional<StackTraceElement[]> stack) {
this.source = Objects.requireNonNull(source);
if (names == null) {
this.names = Collections.emptyList();
} else {
this.names = names;
}
SOURCE_CLASSES.add(source.getClass());
this.startDetails = ImmutableMap.copyOf(details);
// We are allowed to add details after the construction
this.endDetails = new ConcurrentHashMap<>();
this.progress = progress;
this.stack = stack;
}
public static StackTraceElement[] fastCurrentStack() {
return CurrentThreadStack.snapshotStackTrace();
}
public static AutoCloseableTaskStartEvent post(Consumer<? super ITaskActivityEvent> eventBus,
Object source,
String firstName,
String... otherNames) {
return post(eventBus, source, Collections.emptyMap(), NO_PROGRESS, firstName, otherNames);
}
public static AutoCloseableTaskStartEvent post(Consumer<? super ITaskActivityEvent> eventBus,
Object source,
Map<String, ?> details,
LongSupplier progress,
String firstName,
String... otherNames) {
TaskStartEvent startEvent = new TaskStartEvent(source, details, progress, firstName, otherNames);
post(eventBus, startEvent);
// This is used in try-with-resources: do not return null
return new AutoCloseableTaskStartEvent(startEvent, eventBus);
}
@Override
public String toString() {
// Append the stack to the simple toString
return toStringNoStack() + stack.map(s -> '\n' + Joiner.on('\n').join(s)).orElse("");
}
public String toStringNoStack() {
String suffix = "";
if (!startDetails.isEmpty()) {
suffix += " startDetails=" + startDetails;
}
if (!endDetails.isEmpty()) {
suffix += " endDetails=" + endDetails;
}
String defaultToString = MoreObjects.toStringHelper(this).add("names", names).add("source", source).toString();
long currentProgress = progress.getAsLong();
String prefix = "Started in '" + startThread + "': ";
if (currentProgress < 0L) {
return prefix + defaultToString + suffix;
} else {
return prefix + defaultToString + " progress=" + currentProgress + suffix;
}
}
public Object getDetail(String key) {
Object result = endDetails.get(key);
if (result == null) {
result = startDetails.get(key);
}
return result;
}
public void setEndDetails(Map<String, ?> moreEndDetails) {
this.endDetails.putAll(moreEndDetails);
}
    /**
     * Atomically registers the event marking the end of this task.
     *
     * @param endMetricEvent the end event to associate with this start event
     * @return true if we successfully registered an EndMetricEvent. Typically fails if already ended
     */
    public boolean registerEndEvent(TaskEndEvent endMetricEvent) {
        return this.endMetricEvent.compareAndSet(null, endMetricEvent);
    }

    /** Returns the registered end event, or empty if the task has not ended yet. */
    public Optional<TaskEndEvent> getEndEvent() {
        return Optional.ofNullable(endMetricEvent.get());
    }

    /**
     * Returns the task's current progress, or empty when unavailable.
     * NOTE(review): only -1 maps to empty here, while toStringNoStack treats any
     * negative value as "no progress" — confirm which convention is intended.
     */
    public OptionalLong getProgress() {
        long current = progress.getAsLong();
        if (current == -1) {
            return OptionalLong.empty();
        } else {
            return OptionalLong.of(current);
        }
    }
    /** Delivers the event to the bus, or logs at INFO when no bus is available. */
    public static void post(Consumer<? super ITaskActivityEvent> eventBus, ITaskActivityEvent simpleEvent) {
        if (eventBus == null) {
            logNoEventBus(simpleEvent.getSource(), simpleEvent.getNames());
        } else {
            eventBus.accept(simpleEvent);
        }
    }

    /** Records that an event was dropped because no event bus was injected. */
    protected static void logNoEventBus(Object source, List<?> names) {
        LOGGER.info("No EventBus has been injected for {} on {}", names, source);
    }

    /** Returns an unmodifiable view of the classes seen as event sources. */
    public static Set<Class<?>> getSourceClasses() {
        return Collections.unmodifiableSet(SOURCE_CLASSES);
    }

    @Override
    public Object getSource() {
        return source;
    }

    @Override
    public List<? extends String> getNames() {
        return names;
    }
}
|
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# The Letter of Authority for the cross-connect. You must submit this letter when
# requesting cabling for the cross-connect at the FastConnect location.
#
class Core::Models::LetterOfAuthority
CIRCUIT_TYPE_ENUM = [
CIRCUIT_TYPE_SINGLE_MODE_LC = 'Single_mode_LC'.freeze,
CIRCUIT_TYPE_SINGLE_MODE_SC = 'Single_mode_SC'.freeze,
CIRCUIT_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# The name of the entity authorized by this Letter of Authority.
# @return [String]
attr_accessor :authorized_entity_name
# The type of cross-connect fiber, termination, and optical specification.
# @return [String]
attr_reader :circuit_type
# The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the cross-connect.
# @return [String]
attr_accessor :cross_connect_id
# The address of the FastConnect location.
# @return [String]
attr_accessor :facility_location
# The meet-me room port for this cross-connect.
# @return [String]
attr_accessor :port_name
# The date and time when the Letter of Authority expires, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339).
#
# @return [DateTime]
attr_accessor :time_expires
# The date and time the Letter of Authority was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339).
#
# Example: `2016-08-25T21:10:29.600Z`
#
# @return [DateTime]
attr_accessor :time_issued
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'authorized_entity_name': :'authorizedEntityName',
'circuit_type': :'circuitType',
'cross_connect_id': :'crossConnectId',
'facility_location': :'facilityLocation',
'port_name': :'portName',
'time_expires': :'timeExpires',
'time_issued': :'timeIssued'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'authorized_entity_name': :'String',
'circuit_type': :'String',
'cross_connect_id': :'String',
'facility_location': :'String',
'port_name': :'String',
'time_expires': :'DateTime',
'time_issued': :'DateTime'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :authorized_entity_name The value to assign to the {#authorized_entity_name} property
# @option attributes [String] :circuit_type The value to assign to the {#circuit_type} property
# @option attributes [String] :cross_connect_id The value to assign to the {#cross_connect_id} property
# @option attributes [String] :facility_location The value to assign to the {#facility_location} property
# @option attributes [String] :port_name The value to assign to the {#port_name} property
# @option attributes [DateTime] :time_expires The value to assign to the {#time_expires} property
# @option attributes [DateTime] :time_issued The value to assign to the {#time_issued} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.authorized_entity_name = attributes[:'authorizedEntityName'] if attributes[:'authorizedEntityName']
raise 'You cannot provide both :authorizedEntityName and :authorized_entity_name' if attributes.key?(:'authorizedEntityName') && attributes.key?(:'authorized_entity_name')
self.authorized_entity_name = attributes[:'authorized_entity_name'] if attributes[:'authorized_entity_name']
self.circuit_type = attributes[:'circuitType'] if attributes[:'circuitType']
raise 'You cannot provide both :circuitType and :circuit_type' if attributes.key?(:'circuitType') && attributes.key?(:'circuit_type')
self.circuit_type = attributes[:'circuit_type'] if attributes[:'circuit_type']
self.cross_connect_id = attributes[:'crossConnectId'] if attributes[:'crossConnectId']
raise 'You cannot provide both :crossConnectId and :cross_connect_id' if attributes.key?(:'crossConnectId') && attributes.key?(:'cross_connect_id')
self.cross_connect_id = attributes[:'cross_connect_id'] if attributes[:'cross_connect_id']
self.facility_location = attributes[:'facilityLocation'] if attributes[:'facilityLocation']
raise 'You cannot provide both :facilityLocation and :facility_location' if attributes.key?(:'facilityLocation') && attributes.key?(:'facility_location')
self.facility_location = attributes[:'facility_location'] if attributes[:'facility_location']
self.port_name = attributes[:'portName'] if attributes[:'portName']
raise 'You cannot provide both :portName and :port_name' if attributes.key?(:'portName') && attributes.key?(:'port_name')
self.port_name = attributes[:'port_name'] if attributes[:'port_name']
self.time_expires = attributes[:'timeExpires'] if attributes[:'timeExpires']
raise 'You cannot provide both :timeExpires and :time_expires' if attributes.key?(:'timeExpires') && attributes.key?(:'time_expires')
self.time_expires = attributes[:'time_expires'] if attributes[:'time_expires']
self.time_issued = attributes[:'timeIssued'] if attributes[:'timeIssued']
raise 'You cannot provide both :timeIssued and :time_issued' if attributes.key?(:'timeIssued') && attributes.key?(:'time_issued')
self.time_issued = attributes[:'time_issued'] if attributes[:'time_issued']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] circuit_type Object to be assigned
def circuit_type=(circuit_type)
# rubocop:disable Style/ConditionalAssignment
if circuit_type && !CIRCUIT_TYPE_ENUM.include?(circuit_type)
OCI.logger.debug("Unknown value for 'circuit_type' [" + circuit_type + "]. Mapping to 'CIRCUIT_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
@circuit_type = CIRCUIT_TYPE_UNKNOWN_ENUM_VALUE
else
@circuit_type = circuit_type
end
# rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
authorized_entity_name == other.authorized_entity_name &&
circuit_type == other.circuit_type &&
cross_connect_id == other.cross_connect_id &&
facility_location == other.facility_location &&
port_name == other.port_name &&
time_expires == other.time_expires &&
time_issued == other.time_issued
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[authorized_entity_name, circuit_type, cross_connect_id, facility_location, port_name, time_expires, time_issued].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
|
<reponame>Adaptivity/EvilCraft<gh_stars>0
package evilcraft.api.degradation.effects;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.world.World;
import net.minecraft.world.WorldServer;
import net.minecraft.world.biome.BiomeGenBase.SpawnListEntry;
import net.minecraftforge.event.ForgeEventFactory;
import cpw.mods.fml.common.eventhandler.Event.Result;
import evilcraft.api.Coordinate;
import evilcraft.api.Helpers;
import evilcraft.api.config.DegradationEffectConfig;
import evilcraft.api.config.ExtendedConfig;
import evilcraft.api.degradation.IDegradable;
import evilcraft.api.degradation.StochasticDegradationEffect;
/**
 * Degradation that will eventually spawn mobs in the area.
 * @author rubensworks
 *
 */
public class MobSpawnDegradation extends StochasticDegradationEffect {

    private static MobSpawnDegradation _instance = null;

    /**
     * Initialise the configurable.
     * @param eConfig The config.
     */
    public static void initInstance(ExtendedConfig<DegradationEffectConfig> eConfig) {
        if(_instance == null)
            _instance = new MobSpawnDegradation(eConfig);
        else
            eConfig.showDoubleInitError();
    }

    /**
     * Get the unique instance.
     * @return The instance.
     */
    public static MobSpawnDegradation getInstance() {
        return _instance;
    }

    // Per-tick probability of this effect running (see StochasticDegradationEffect).
    private static final double CHANCE = 0.01D;

    private MobSpawnDegradation(ExtendedConfig<DegradationEffectConfig> eConfig) {
        super(eConfig, CHANCE);
    }

    @Override
    public void runClientSide(IDegradable degradable) {

    }

    @SuppressWarnings("unchecked")
    @Override
    public void runServerSide(IDegradable degradable) {
        WorldServer world = (WorldServer) degradable.getWorld();
        // Pick a random spot within the degradable's radius as the spawn point.
        Coordinate spawn =
                Helpers.getRandomPointInSphere(degradable.getLocation(), degradable.getRadius());
        float x = spawn.x + 0.5F;
        float y = spawn.y;
        float z = spawn.z + 0.5F;
        SpawnListEntry spawnlistentry = world.spawnRandomCreature(EnumCreatureType.monster, spawn.x, spawn.y, spawn.z);
        EntityLiving entityliving;
        try {
            // Instantiate the chosen monster via its World-taking constructor.
            // NOTE(review): spawnRandomCreature may yield null here; the resulting
            // NPE is swallowed by this catch block — confirm that is intended.
            entityliving = (EntityLiving)spawnlistentry.entityClass.getConstructor(new Class[] {World.class}).newInstance(new Object[] {world});
        } catch (Exception exception) {
            exception.printStackTrace();
            return;
        }
        entityliving.setLocationAndAngles((double)x, (double)y, (double)z, world.rand.nextFloat() * 360.0F, 0.0F);
        // Let other mods veto or allow the spawn through the Forge event bus.
        Result canSpawn = ForgeEventFactory.canEntitySpawn(entityliving, world, x, y, z);
        if (canSpawn == Result.ALLOW || (canSpawn == Result.DEFAULT)) { // && entityliving.getCanSpawnHere()
            world.spawnEntityInWorld(entityliving);
            if (!ForgeEventFactory.doSpecialSpawn(entityliving, world, x, y, z)) {
                entityliving.onSpawnWithEgg(null);
            }
        }
    }

}
|
-- Return the three highest-rated products (highest rating first).
SELECT p.ProductID, p.ProductName, p.Rating
FROM Products p
ORDER BY p.Rating DESC
LIMIT 3;
|
// Copyright 2019 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package debug
import (
"context"
"fmt"
"io"
"strings"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/strslice"
dockerclient "github.com/docker/docker/client"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/spf13/cobra"
)
const (
	// defaultDockerSocket is the docker daemon endpoint used unless overridden.
	defaultDockerSocket = "unix:///var/run/docker.sock"
	// dockerContainerPrefix is the runtime prefix kubernetes uses in container IDs.
	dockerContainerPrefix = "docker://"
	// Capabilities added to the debug container so debuggers can trace the target.
	// NOTE(review): ALL_CAPS names are unconventional in Go (MixedCaps preferred),
	// but renaming would touch other code in this package.
	CAP_SYS_PTRACE = "SYS_PTRACE"
	CAP_SYS_ADMIN  = "SYS_ADMIN"
)

// IOStreams bundles the input/output/error streams used by the launcher
// and forwarded to the attached debug session.
type IOStreams struct {
	In     io.ReadCloser
	Out    io.WriteCloser
	ErrOut io.WriteCloser
}

// Launcher is responsible for launching debug container
type Launcher struct {
	IOStreams

	targetContainerID string // target container id in "docker://<id>" form
	image             string // debug container image
	dockerSocket      string // docker daemon socket to connect to
	ctx               context.Context
	privileged        bool // run the debug container with full capabilities
	client            *dockerclient.Client
}
// NewLauncher create a launcher instance with the default docker socket,
// a background context, and the given stdio streams.
func NewLauncher(streams IOStreams) *Launcher {
	return &Launcher{
		dockerSocket: defaultDockerSocket,
		ctx:          context.Background(),
		IOStreams:    streams,
	}
}
// NewLauncherCmd create the launcher command; the positional args after "--"
// become the entrypoint of the debug container.
func NewLauncherCmd(streams IOStreams) *cobra.Command {
	launcher := NewLauncher(streams)
	cmd := &cobra.Command{
		Use: "debug-launcher --target-container=CONTAINER_ID --image=IMAGE -- COMMAND",
		RunE: func(c *cobra.Command, args []string) error {
			return launcher.Run(args)
		},
	}
	// Flag defaults come from the Launcher zero/constructor values above.
	cmd.Flags().StringVar(&launcher.targetContainerID, "target-container", launcher.targetContainerID,
		"target container id")
	cmd.Flags().StringVar(&launcher.image, "image", launcher.image,
		"debug container image")
	cmd.Flags().StringVar(&launcher.dockerSocket, "docker-socket", launcher.dockerSocket,
		"docker socket to bind")
	cmd.Flags().BoolVar(&launcher.privileged, "privileged", launcher.privileged,
		"whether launch container in privileged mode (full container capabilities)")
	return cmd
}
// Run launches the debug container and attaches to it.
// We could alternatively just run docker exec in command line, but this brings shell and docker client to the
// image, which is unwanted.
func (l *Launcher) Run(args []string) error {
	client, err := dockerclient.NewClient(l.dockerSocket, "", nil, nil)
	if err != nil {
		return err
	}
	l.client = client
	if err = l.pullImage(); err != nil {
		return err
	}
	resp, err := l.createContainer(args)
	if err != nil {
		return err
	}
	id := resp.ID
	fmt.Fprintf(l.Out, "starting debug container...\n")
	if err = l.startContainer(id); err != nil {
		return err
	}
	// Force-remove the container once the attach session ends.
	defer l.cleanContainer(id)
	return l.attachToContainer(id)
}
// pullImage pulls the debug image, streaming progress to l.Out.
func (l *Launcher) pullImage() error {
	out, err := l.client.ImagePull(l.ctx, l.image, types.ImagePullOptions{})
	if err != nil {
		return err
	}
	defer out.Close()
	// Relay pull progress to the user. The JSON stream also carries pull
	// failures (e.g. image not found), which DisplayJSONMessagesStream
	// surfaces via its return value — the original silently dropped it,
	// letting a failed pull go unnoticed until container creation.
	return jsonmessage.DisplayJSONMessagesStream(out, l.Out, 1, true, nil)
}
// createContainer creates the debug container, joining the target container's
// network, user, IPC and PID namespaces, and returns the create response.
func (l *Launcher) createContainer(command []string) (*container.ContainerCreateCreatedBody, error) {
	// Kubernetes reports container IDs as "<runtime>://<id>"; only the docker
	// runtime is supported here.
	if !strings.HasPrefix(l.targetContainerID, dockerContainerPrefix) {
		// Error strings are lowercase per Go convention.
		return nil, fmt.Errorf("only docker containers are supported now")
	}
	dockerContainerID := strings.TrimPrefix(l.targetContainerID, dockerContainerPrefix)
	config := &container.Config{
		Entrypoint: strslice.StrSlice(command),
		Image:      l.image,
		Tty:        true,
		OpenStdin:  true,
		StdinOnce:  true,
	}
	hostConfig := &container.HostConfig{
		// Share the target's namespaces so tools in the debug container can
		// see the target's processes and network.
		NetworkMode: container.NetworkMode(containerMode(dockerContainerID)),
		UsernsMode:  container.UsernsMode(containerMode(dockerContainerID)),
		IpcMode:     container.IpcMode(containerMode(dockerContainerID)),
		PidMode:     container.PidMode(containerMode(dockerContainerID)),
		CapAdd:      strslice.StrSlice([]string{CAP_SYS_PTRACE, CAP_SYS_ADMIN}),
		Privileged:  l.privileged,
	}
	body, err := l.client.ContainerCreate(l.ctx, config, hostConfig, nil, "")
	if err != nil {
		return nil, err
	}
	return &body, nil
}
// startContainer starts the created debug container.
func (l *Launcher) startContainer(id string) error {
	return l.client.ContainerStart(l.ctx, id, types.ContainerStartOptions{})
}
// attachToContainer hijacks the container's stdio and streams it to/from the
// launcher's own streams until the session ends.
func (l *Launcher) attachToContainer(id string) error {
	opts := types.ContainerAttachOptions{
		Stream: true,
		Stdin:  true,
		Stdout: true,
		Stderr: true,
	}
	resp, err := l.client.ContainerAttach(l.ctx, id, opts)
	if err != nil {
		return err
	}
	// newHijackedIOStreamer (defined elsewhere in this package) copies data
	// between the hijacked connection and the local streams.
	streamer := newHijackedIOStreamer(l.IOStreams, resp)
	defer resp.Close()
	return streamer.stream(l.ctx)
}
// cleanContainer force-removes the debug container.
func (l *Launcher) cleanContainer(id string) error {
	// once attach complete, the debug container is considered to be exited, so its safe to rm --force
	return l.client.ContainerRemove(l.ctx, id, types.ContainerRemoveOptions{
		Force: true,
	})
}
// containerMode builds the docker "container:<id>" mode string used to join
// another container's namespaces.
func containerMode(id string) string {
	// Plain concatenation; fmt.Sprintf is unnecessary for a fixed prefix.
	return "container:" + id
}
|
#!/usr/bin/env bash

# Load the shared CI environment (paths, env vars) before running checks.
source src/dev/ci_setup/setup_env.sh

# Verify that the i18n message bundles are consistent with the source code.
yarn run grunt run:i18nCheck
|
<gh_stars>10-100
package io.opensphere.analysis.export.controller;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.awt.Color;
import org.junit.Test;
import io.opensphere.analysis.export.model.ColorFormat;
import io.opensphere.analysis.export.model.ExportOptionsModel;
/**
 * Unit test for {@link ColorFormatter}.
 */
public class ColorFormatterTest
{
    /**
     * Tests formatting a color to hexadecimal.
     * Hexadecimal is the format used when no format is selected on the model.
     */
    @Test
    public void testFormat()
    {
        ExportOptionsModel model = new ExportOptionsModel();
        ColorFormatter formatter = new ColorFormatter(model);

        String format = formatter.format(Color.RED);

        assertEquals(Integer.toHexString(Color.RED.getRGB()), format);
    }

    /**
     * Tests formatting a null color.
     * A null input must produce a null result rather than an exception.
     */
    @Test
    public void testFormatNull()
    {
        ExportOptionsModel model = new ExportOptionsModel();
        ColorFormatter formatter = new ColorFormatter(model);

        String format = formatter.format(null);

        assertNull(format);
    }

    /**
     * Tests formatting a color to rgb coded.
     */
    @Test
    public void testFormatRgb()
    {
        ExportOptionsModel model = new ExportOptionsModel();
        model.setSelectedColorFormat(ColorFormat.RGB_CODED);
        ColorFormatter formatter = new ColorFormatter(model);

        String format = formatter.format(Color.RED);

        assertEquals("color[r=255,g=0,b=0,a=255]", format);
    }
}
|
#!/bin/bash
# remediation = no
# Create a uniquely named test group; the epoch-seconds suffix avoids
# collisions on repeated runs.
groupadd cac_test$(date +%s)
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import manager
from rest_framework import serializers
from .models import Article, Category, Tag, Avatar
from user_info.serializers import UserInfoSerializer
class CategorySerializer(serializers.ModelSerializer):
    '''
    Article category serializer.
    '''
    class Meta:
        model = Category
        fields = [
            "url",
            "name",
            "status"
        ]
        read_only_fields = ["id", "create_time", "update_time"]
        extra_kwargs = {
            'url': {'view_name': 'category-detail'}
        }
class TagSerializer(serializers.ModelSerializer):
    '''
    Article tag serializer.
    '''
    class Meta:
        model = Tag
        fields = [
            "url",
            "text",
            "status"
        ]
        read_only_fields = ["id", "create_time"]
        extra_kwargs = {
            'url': {'view_name': 'tag-detail'}
        }

# Before creating a tag, validate first: if it already exists, do not create
# a duplicate (preserves uniqueness).
class AvatarSerializer(serializers.ModelSerializer):
    '''
    Avatar (image) serializer.
    '''
    url = serializers.HyperlinkedIdentityField(view_name="avatar-detail")

    class Meta:
        model = Avatar
        # fields = "__all__"
        fields = [
            "url",
            "content",
            "status"
        ]
        read_only_fields = ["id", "create_time"]
        # extra_kwargs = {
        #     'url': {'view_name': 'avatar-detail'}
        # }
class ArticleBasicSerializer(serializers.ModelSerializer):
    '''
    Base article serializer.

    Holds the field definitions and validation logic shared by the list and
    detail serializers below; returns a hyperlink to the article itself.
    '''
    author = serializers.CharField(required=True, label="作者", help_text="作者")
    category = serializers.CharField(required=False, max_length=50, label="文章分类", help_text="文章分类")
    tags = serializers.SlugRelatedField(required=False, queryset=Tag.objects.all(), many=True, slug_field="text",label="文章标签", help_text="文章标签")
    avatar = AvatarSerializer(required=False, read_only=True)
    avatar_id = serializers.IntegerField(required=False, write_only=True, allow_null=True)

    def to_internal_value(self, data):
        '''
        Pre-process the raw request body before field validation.

        Any tag text that does not exist yet is created up front; otherwise
        the SlugRelatedField validation would reject the unknown slug.
        '''
        tag_list = data.get('tags')
        # tags must be a list; only then do we pre-create missing entries
        if isinstance(tag_list, list):
            for text in tag_list:
                if not Tag.objects.filter(text__exact=text).exists():
                    Tag.objects.create(text=text)
        return super().to_internal_value(data)

    def validate_category(self, value):
        '''
        Field validation for ``category``: return the Category with this name,
        creating it first when it does not exist.
        '''
        try:
            return Category.objects.get(name__exact=value)
        except Category.DoesNotExist:
            return self.create_category(category_name=value)

    def validate_avatar(self, value):
        '''
        Field validation for ``avatar``: return the Avatar with this content,
        creating it first when it does not exist.
        '''
        try:
            return Avatar.objects.get(content__exact=value)
        except Avatar.DoesNotExist:
            return self.create_avatar(avatar_content=value)

    def create_category(self, category_name, *args, **kwargs):
        '''
        Create a Category object from the given name.
        '''
        return Category.objects.create(name=category_name)

    def create_avatar(self, avatar_content, *args, **kwargs):
        '''
        Create an Avatar object from the given content.
        '''
        return Avatar.objects.create(content=avatar_content)

    def validate(self, data):
        '''
        Object-level validation: ensure the ``author`` username exists in the
        User table; raise a ValidationError otherwise.
        '''
        username = data["author"]
        try:
            # Single lookup. The original queried the User table twice, and the
            # second query ran after data["author"] had already been replaced
            # by a User instance.
            author = User.objects.get(username__exact=username)
        except User.DoesNotExist:
            raise serializers.ValidationError("user {} doesn't exist in table User.".format(username))
        data["author"] = author
        data["author_id"] = author.id
        return data
class ArticleSerializer(ArticleBasicSerializer):
    '''
    Article list serializer (inherits the shared base serializer).
    '''
    class Meta:
        model = Article
        fields = [
            "url",
            "category",
            "tags",
            "title",
            "content",
            "status",
        ]
        extra_kwargs = {
            'url': {'view_name': 'article-detail'},
            'content': {'write_only': True}
        }

    def to_representation(self, instance):
        '''
        Override serialization: the Article table only stores the foreign key
        ``author_id``, so the nested author dict is built here. With
        inheritance this override must live on the concrete subclass, since
        serialization is the final step of the response.
        '''
        # Removed a stray debug print(instance.author) that leaked to stdout
        # on every serialization.
        author = instance.author
        ret = super(ArticleSerializer, self).to_representation(instance)
        ret["author"] = {
            "id": author.id,
            "username": author.username,
            "date_joined": author.date_joined,
            "last_login": author.last_login
        }
        return ret
class ArticleDetailUserSerializer(ArticleBasicSerializer):
    '''
    Article detail serializer (regular users).
    '''
    class Meta:
        model = Article
        fields = "__all__"

    def to_representation(self, instance):
        """
        Override serialization: the Article table only stores the foreign key
        ``author_id``, so the nested author dict is built here. With
        inheritance this override must live on the concrete subclass, since
        serialization is the final step of the response.
        """
        author = instance.author
        ret = super(ArticleDetailUserSerializer, self).to_representation(instance)
        ret["author"] = {
            "id": author.id,
            "username": author.username,
            "date_joined": author.date_joined,
            "last_login": author.last_login
        }
        return ret
class ArticleDetailAdminSerializer(ArticleBasicSerializer):
    '''
    Article detail serializer (admin), adding rendered markdown fields.
    '''
    content_html = serializers.SerializerMethodField()
    toc_html = serializers.SerializerMethodField()

    # SerializerMethodField automatically calls the matching get_xx method.
    def get_content_html(self, obj):
        # Rendered markdown body (first element of transform_md()).
        return obj.transform_md()[0]

    def get_toc_html(self, obj):
        # Rendered markdown table of contents (second element).
        # NOTE(review): transform_md() is invoked once per field — confirm
        # whether caching the pair is worthwhile.
        return obj.transform_md()[1]

    class Meta:
        model = Article
        fields = "__all__"

    def to_representation(self, instance):
        """
        Override serialization: the Article table only stores the foreign key
        ``author_id``, so the nested author dict is built here. With
        inheritance this override must live on the concrete subclass, since
        serialization is the final step of the response.
        """
        author = instance.author
        ret = super(ArticleDetailAdminSerializer, self).to_representation(instance)
        ret["author"] = {
            "id": author.id,
            "username": author.username,
            "date_joined": author.date_joined,
            "last_login": author.last_login
        }
        return ret
class ArticleCategoryDetailSerializer(serializers.ModelSerializer):
    '''
    Nested article representation used inside a category detail response.
    '''
    class Meta:
        model = Article
        fields = [
            "url",
            "title",
        ]
        extra_kwargs = {
            'url': {'view_name': 'article-detail'}
        }
class CategoryDetailSerializer(serializers.ModelSerializer):
    '''
    Category detail serializer, embedding the category's articles.
    '''
    articles = ArticleCategoryDetailSerializer(many=True, read_only=True)

    class Meta:
        model = Category
        fields = [
            "id",
            "name",
            "create_time",
            "articles"
        ]
|
<filename>chapter_002/src/main/java/ru/job4j/pseudo/Paint.java<gh_stars>0
package ru.job4j.pseudo;
/**
 * Paint - main class with entry point in program.
 * Paint provides drawing triangle and square.
 * @author <NAME> (<EMAIL>)
 * @version $Id$
 * @since 0.1
 */
public class Paint {
    /**
     * The method draws on a console geometrical figure received as parameter.
     * @param shape - reference on a figure object.
     */
    public void draw(Shape shape) {
        // The shape renders itself; this method only prints the result.
        System.out.println(shape.draw());
    }
}
|
import requests
def get_stock_price(symbol):
    """Return the latest market price for ``symbol`` from Yahoo Finance.

    The original implementation fetched the HTML quote page and then called
    ``.json()`` on it, which always raises (the page is not JSON). Use the
    JSON chart API endpoint instead, with a timeout and HTTP status check.

    Raises requests.HTTPError on a non-2xx response and KeyError/IndexError
    if the payload shape changes.
    """
    url = "https://query1.finance.yahoo.com/v8/finance/chart/" + symbol
    req = requests.get(url, timeout=10)
    req.raise_for_status()
    data = req.json()
    # regularMarketPrice lives in the chart result metadata.
    return data["chart"]["result"][0]["meta"]["regularMarketPrice"]
|
<gh_stars>1-10
// Builds a labelled PIXI button and returns a container holding the sprite
// and its text label.
//   text          - label drawn on top of the button
//   origin        - {x, y} top-left position of the button
//   width, height - size applied to the button sprite
//   onClick       - handler wired to both mouse click and touch tap
function Button(text, origin, width, height, onClick)
{
    this.container = new PIXI.Container();
    var padding = {x: 20, y: 20};
    var textureButton = PIXI.Texture.fromImage('static/scroller/png/UI/PNG/blue_button07.png');
    var textureButtonDown = PIXI.Texture.fromImage('static/scroller/png/UI/PNG/blue_button08.png');
    var textureButtonOver = PIXI.Texture.fromImage('static/scroller/png/UI/PNG/blue_button07.png');
    var tx = new PIXI.Text(text, {font: '30px sans-serif', fill: 0xffffff, align: 'center', wordWrap: true, wordWrapWidth: 200});
    tx.position = {x: origin.x + padding.x, y: origin.y + padding.y};
    tx.anchor.set(0, 0);
    var button = new PIXI.Sprite(textureButton);
    button.buttonMode = true;
    button.anchor.set(0);
    button.position = {x: origin.x, y: origin.y};
    button.width = width;
    button.height = height;
    // make the button interactive and wire the press/hover state handlers
    // (removed an unused `noop` handler and stale commented-out code)
    button.interactive = true;
    button
        .on('mousedown', onButtonDown)
        .on('touchstart', onButtonDown)
        .on('mouseup', onButtonUp)
        .on('touchend', onButtonUp)
        .on('mouseupoutside', onButtonUp)
        .on('touchendoutside', onButtonUp)
        .on('mouseover', onButtonOver)
        .on('mouseout', onButtonOut);
    button.tap = onClick;
    button.click = onClick;

    // Inside the handlers below, `this` is the sprite the event fired on.
    function onButtonDown()
    {
        this.isdown = true;
        this.texture = textureButtonDown;
        this.alpha = 1;
    }

    function onButtonUp()
    {
        this.isdown = false;
        if (this.isOver)
        {
            this.texture = textureButtonOver;
        }
        else
        {
            this.texture = textureButton;
        }
    }

    function onButtonOver()
    {
        this.isOver = true;
        if (this.isdown)
        {
            return;
        }
        this.texture = textureButtonOver;
    }

    function onButtonOut()
    {
        this.isOver = false;
        if (this.isdown)
        {
            return;
        }
        this.texture = textureButton;
    }

    this.container.addChild(button);
    this.container.addChild(tx);
    return this.container;
}
|
<filename>dependencies/otp/r15b03-1/lib/ic/java_src/com/ericsson/otp/ic/Ref.java
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1999-2009. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
*
* %CopyrightEnd%
*
*/
package com.ericsson.otp.ic;
/**
 Ref class maps the built-in Erlang type Ref, a message reference.
 **/
final public class Ref extends com.ericsson.otp.erlang.OtpErlangRef {

    /** Creates a new, unique reference owned by the given node. */
    public Ref(com.ericsson.otp.erlang.OtpSelf self) {
        super(self);
    }

    /** Decodes a reference from the given input stream. */
    public Ref(com.ericsson.otp.erlang.OtpInputStream buf)
        throws com.ericsson.otp.erlang.OtpErlangDecodeException {
        super(buf);
    }

    /**
     Old style Ref constructor. Constructs a Ref that corresponds to the
     old Erlang Ref type.
     **/
    public Ref(String node, int id, int creation) {
        super(node,id,creation);
    }

    /** Constructs a new-style Ref from an array of ids. */
    public Ref(String node, int[] ids, int creation) {
        super(node,ids,creation);
    }

    /**
     Comparison method for Ref.
     @return true if the input Ref value equals the value of the current object, false otherwise
     **/
    public boolean equal(Ref _ref) {
        return super.equals(_ref);
    }

}
|
<reponame>VicidominiLab/spad-ffs
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 22 10:33:10 2019
@author: SPAD-FCS
"""
class Photon:
    """A single photon detection event: arrival time plus the detector
    element that registered it."""

    def __init__(self, time, detectorElement):
        # Store the arrival timestamp and the detector element index as-is.
        self.time, self.detectorElement = time, detectorElement
|
import React from 'react';
import PropTypes from 'prop-types';
import Undersporsmal from './Undersporsmal';
import { sporsmal as sporsmalPt, soknadPt } from '../../../propTypes/index';
// Renders the answered sub-questions of a question, honouring each
// sub-question's visibility criterion against the parent answer value.
// Returns null when nothing is visible so the parent renders no wrapper.
const Undersporsmalsliste = ({ undersporsmal, soknad, parentValue }) => {
    const sporsmalsliste = undersporsmal
        .filter((underspm) => {
            // Only sub-questions that carry an answer are considered.
            return underspm.svar !== null;
        })
        .map((underspm) => {
            // Render when there is no visibility criterion, or when the
            // criterion matches the parent's current answer value.
            return (parentValue
                && underspm.visningskriterie
                && parentValue === underspm.visningskriterie) || !underspm.visningskriterie
                ? <Undersporsmal
                    sporsmal={underspm}
                    key={underspm.tag}
                    soknad={soknad} />
                : null;
        })
        .filter((underspm) => {
            return underspm !== null;
        });
    return sporsmalsliste.length > 0
        ? <React.Fragment>{sporsmalsliste}</React.Fragment>
        : null;
};
// Prop contract: the sub-question array and the parent answer value are
// optional; the soknad itself is required.
Undersporsmalsliste.propTypes = {
    undersporsmal: PropTypes.arrayOf(sporsmalPt),
    soknad: soknadPt.isRequired,
    parentValue: PropTypes.string,
};

export default Undersporsmalsliste;
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyMarkdown(PythonPackage):
    """This is a Python implementation of John Gruber's Markdown. It is
    almost completely compliant with the reference implementation, though
    there are a few very minor differences. See John's Syntax
    Documentation for the syntax rules.
    """
    homepage = "https://pythonhosted.org/Markdown/"
    url = "https://github.com/waylan/Python-Markdown/archive/2.6.7-final.tar.gz"
    # Known releases with their archive checksums.
    version('2.6.7', 'fd27044042e197ad99249b3d60215d97')
    version('2.6.6', '2b47a0ff7eb19ef34453fe198a0cccc4')
    version('2.6.5', 'e4b6b65b2d6bcac07176fb209bc55614')
    version('2.6.4', '5fb3cd9945eb534e71af597f8ee3622b')
    version('2.6.3', 'ec7a50ce9fd4a5fd0b24555d47e9d7d1')
    version('2.6.2', '6ce86913e9bf5bb34d9ee394ac71f044')
    version('2.6.1', '0ae69693c5adb27caf0160941d7dcbdf')
    version('2.6', '9acdde43d99847d0c4ef03ea56b1d2c5')
    version('2.5.2', 'ed2a662d22799186c1ef85d173d38b8a')
    # NOTE(review): this checksum looks corrupted/redacted — verify against
    # the real 2.5.1 release tarball before relying on it.
    version('2.5.1', 'be<PASSWORD>')
    version('2.5', '8393ceab9c6e33357fb8a7be063a4849')
    # Supported interpreter range for these releases.
    depends_on('python@2.7:2.8,3.2:3.4')
|
<reponame>cvxgrp/coneos
/* coneos 1.0 */
#include "coneOS.h"
#include "normalize.h"
static int _lineLen_; /* width of the printed summary table; set by printHeader */
// constants and data structures
/* column titles for the per-iteration progress table printed when VERBOSE */
static const char* HEADER[] = {
	" Iter ",
	" pri res ",
	" dua res ",
	" rel gap ",
	" pri obj ",
	" dua obj ",
	" kappa ",
	" time (s)",
};
static const int HEADER_LEN = 8;
/* forward declarations for the solver's internal helpers */
static inline void updateDualVars(Data * d, Work * w);
static inline void projectCones(Data * d,Work * w,Cone * k, int iter);
static inline void sety(Data * d, Work * w, Sol * sol);
static inline void setx(Data * d, Work * w, Sol * sol);
static inline void sets(Data * d, Work * w, Sol * sol);
static inline void setSolution(Data * d, Work * w, Sol * sol, Info * info);
static inline void getInfo(Data * d, Work * w, Sol * sol, Info * info, struct residuals * r);
static inline void printSummary(Data * d,Work * w,int i, struct residuals *r);
static inline void printHeader(Data * d, Work * w, Cone * k);
static inline void printFooter(Data * d, Info * info, Work * w);
static inline void freeWork(Work * w);
static inline void projectLinSys(Data * d,Work * w, int iter);
static inline Work * initWork(Data * d, Cone * k);
static inline int converged(Data * d, Work * w, struct residuals * r, int iter);
static inline int exactConverged(Data * d, Work * w, struct residuals * r, int iter);
#define PRINT_INTERVAL 100 /* print a progress row every this many iterations */
#define CONVERGED_INTERVAL 20 /* run the exact convergence check every this many iterations */
/* coneOS returns one of the following integers: */
/* (zero should never be returned) */
#define FAILURE -4
#define INDETERMINATE -3
#define INFEASIBLE -2 // primal infeasible, dual unbounded
#define UNBOUNDED -1 // primal unbounded, dual infeasible
#define SOLVED 1
/* Main solver entry point: runs the operator-splitting iteration
 * (affine projection, cone projection, dual update) until converged()
 * reports a status, then unpacks the solution or certificate into sol
 * and info.  Returns one of the status codes defined above (never 0). */
int coneOS(Data * d, Cone * k, Sol * sol, Info * info)
{
	if(d == NULL || k == NULL) {
		return FAILURE;
	}
	tic(); /* start the wall clock reported in info->time */
	info->stint = 0; // not yet converged
	int i;
	struct residuals r = {-1, -1, -1, -1, -1, -1, -1};
	Work * w = initWork(d,k);
	if(d->VERBOSE) {
		printHeader(d, w, k);
	} /* coneOS: */
	for (i=0; i < d->MAX_ITERS; ++i){
		/* keep the previous iterate for relaxation and the dual-residual check */
		memcpy(w->u_prev, w->u, w->l*sizeof(double));
		projectLinSys(d,w,i);  /* u_t: projection onto the affine subspace */
		projectCones(d,w,k,i); /* u:   projection onto the cone */
		updateDualVars(d,w);   /* v:   dual-variable update */
		info->stint = converged(d,w,&r,i);
		if (info->stint != 0) break;
		if (i % PRINT_INTERVAL == 0){
			if (d->VERBOSE) printSummary(d,w,i,&r);
		}
	}
	if(d->VERBOSE) printSummary(d,w,i,&r);
	setSolution(d,w,sol,info);
	if(d->NORMALIZE) unNormalize(d,w,sol); /* undo the scaling applied in initWork */
	info->iter = i;
	getInfo(d,w,sol,info,&r);
	if(d->VERBOSE) printFooter(d, info, w);
	freeWork(w);
	return info->stint;
}
/* Cheap convergence wrapper: the exact (relatively expensive) residual
 * computation only runs every CONVERGED_INTERVAL iterations; on other
 * iterations report "not converged" (0). */
static inline int converged(Data * d, Work * w, struct residuals * r, int iter){
	/* approximate convergence check:
	double tau = fabs(w->u[w->l-1]); // abs to prevent negative stopping tol
	double kap = fabs(w->v[w->l-1]);
	r->resPri = calcNormDiff(w->u, w->u_t, w->l);
	r->resDual = calcNormDiff(w->u, w->u_prev, w->l);
	r->tau = tau;
	r->kap = kap;
	if (fmin(tau,kap)/fmax(tau,kap) < 1e-6 && fmax(r->resPri, r->resDual) < d->EPS_ABS*(tau+kap)){
		return 1;
	}
	*/
	if (iter % CONVERGED_INTERVAL == 0) {
		return exactConverged(d,w,r,iter);
	}
	return 0;
}
/* Exact convergence / certificate test.  Computes (unscaled) primal and
 * dual residuals and the duality gap from the current iterate; returns
 * UNBOUNDED or INFEASIBLE when the corresponding certificate is accurate
 * enough, SOLVED when all residuals are below EPS_ABS, and 0 otherwise.
 * All four scratch buffers are released on every exit path (the original
 * leaked them on the early UNBOUNDED/INFEASIBLE returns). */
static inline int exactConverged(Data * d, Work * w, struct residuals * r, int iter){
	double * pr = coneOS_calloc(d->m,sizeof(double));
	double * dr = coneOS_calloc(d->n,sizeof(double));
	double * Axs = coneOS_calloc(d->m,sizeof(double));
	double * ATy = coneOS_calloc(d->n,sizeof(double));
	double tau = fabs(w->u[w->l-1]);
	double kap = fabs(w->v[w->l-1]);
	double * x = w->u, * y = &(w->u[d->n]);
	double * D = w->D, * E = w->E;
	double cTx, bTy, nmAxs, nmATy;
	int i;
	int status = 0; /* 0 = not converged yet */
	// does not require mult by A:
	memcpy(pr,&(w->u[d->n]),d->m * sizeof(double));
	addScaledArray(pr,&(w->u_prev[d->n]),d->m,d->ALPH-2);
	addScaledArray(pr,&(w->u_t[d->n]),d->m,1-d->ALPH);
	addScaledArray(pr,d->b, d->m, w->u_t[w->l-1] - tau) ; // pr = Ax + s - b * tau
	memcpy(Axs, pr, d->m * sizeof(double));
	addScaledArray(Axs, d->b, d->m, tau); // Axs = Ax + s
	cTx = innerProd(x,d->c,d->n);
	if (d->NORMALIZE) {
		/* undo the diagonal scaling so residuals refer to the original data */
		kap /= (w->scale * w->sc_c * w->sc_b);
		for (i = 0; i < d->m; ++i) {
			pr[i] *= D[i]/(w->sc_b * w->scale);
			Axs[i] *= D[i]/(w->sc_b * w->scale);
		}
		cTx /= (w->scale * w->sc_c * w->sc_b);
	}
	r->tau = tau;
	r->kap = kap;
	nmAxs = calcNorm(Axs,d->m);
	r->resPri = cTx < 0 ? w->nm_c * nmAxs / -cTx : NAN;
	if (r->resPri < d->EPS_ABS) {
		status = UNBOUNDED; /* certificate of dual infeasibility found */
		goto cleanup;
	}
	accumByAtrans(d,y,ATy); // ATy = A'y
	memcpy(dr, ATy, d->n * sizeof(double));
	addScaledArray(dr,d->c,d->n,tau); // dr = A'y + c * tau
	bTy = innerProd(y,d->b,d->m);
	if (d->NORMALIZE) {
		for (i = 0; i < d->n; ++i) {
			dr[i] *= E[i]/(w->sc_c * w->scale);
			ATy[i] *= E[i]/(w->sc_c * w->scale);
		}
		bTy /= (w->scale * w->sc_c * w->sc_b);
	}
	nmATy = calcNorm(ATy,d->n);
	r->resDual = bTy < 0 ? w->nm_b * nmATy / -bTy : NAN;
	if (r->resDual < d->EPS_ABS) {
		status = INFEASIBLE; /* certificate of primal infeasibility found */
		goto cleanup;
	}
	r->relGap = NAN;
	if (tau > kap) {
		/* de-homogenize by tau and test the three optimality residuals */
		double rpri = calcNorm(pr,d->m) / (1+w->nm_b) / tau;
		double rdua = calcNorm(dr,d->n) / (1+w->nm_c) / tau;
		double gap = fabs(cTx + bTy) / (tau + fabs(cTx) + fabs(bTy));
		r->resPri = rpri;
		r->resDual = rdua;
		r->relGap = gap;
		r->cTx = cTx / tau;
		r->bTy = bTy / tau;
		if (fmax(fmax(rpri,rdua),gap) < d->EPS_ABS) {
			status = SOLVED;
		}
	} else {
		r->cTx = NAN;
		r->bTy = NAN;
	}
cleanup:
	coneOS_free(dr); coneOS_free(pr); coneOS_free(Axs); coneOS_free(ATy);
	return status;
}
/* Populates info (objective values, residuals, relative gap, solve time)
 * from the final solution.  For UNBOUNDED/INFEASIBLE outcomes the
 * certificate is rescaled so its defining inner product equals -1. */
static inline void getInfo(Data * d, Work * w, Sol * sol, Info * info, struct residuals * r){
	double * x = sol->x, * y = sol->y, * s = sol->s;
	double * dr = coneOS_calloc(d->n,sizeof(double));
	double * pr = coneOS_calloc(d->m,sizeof(double));
	accumByA(d,x,pr); // pr = Ax
	addScaledArray(pr,s,d->m,1.0); // pr = Ax + s
	accumByAtrans(d,y,dr); // dr = A'y
	double cTx = innerProd(x,d->c,d->n);
	double bTy = innerProd(y,d->b,d->m);
	info->pobj = cTx;
	info->dobj = -bTy;
	if (info->stint == SOLVED){
		addScaledArray(pr,d->b,d->m,-1.0); // pr = Ax + s - b
		addScaledArray(dr,d->c,d->n,1.0); // dr = A'y + c
		info->relGap = fabs(cTx + bTy) / (1 + fabs(cTx) + fabs(bTy));
		info->resPri = calcNorm(pr,d->m) / (1 + w->nm_b);
		info->resDual = calcNorm(dr,d->n) / (1+ w->nm_c);
	} else {
		if (info->stint == UNBOUNDED) {
			/* dual-infeasibility certificate, normalized so c'x = -1 */
			info->dobj = NAN;
			info->relGap = NAN;
			info->resPri = w->nm_c * calcNorm(pr,d->m) / -cTx ;
			info->resDual = NAN;
			scaleArray(x,-1/cTx,d->n);
			scaleArray(s,-1/cTx,d->m);
			info->pobj = -1;
		}
		else {
			/* primal-infeasibility certificate, normalized so b'y = -1 */
			info->pobj = NAN;
			info->relGap = NAN;
			info->resPri = NAN;
			info->resDual = w->nm_b * calcNorm(dr,d->n) / -bTy ;
			scaleArray(y,-1/bTy,d->m);
			info->dobj = -1;
		}
	}
	info->time = tocq();
	coneOS_free(dr); coneOS_free(pr);
}
/* Allocates and initializes the solver workspace: data norms, optional
 * diagonal normalization, the iterates u/v/u_t/u_prev, the stacked
 * vector h = [c;b], the pre-solved vector g (y block negated), the
 * private linear-system state, and scratch space sized for the largest
 * semidefinite-cone block. */
static inline Work * initWork(Data *d, Cone * k) {
	Work * w = coneOS_malloc(sizeof(Work));
	w->nm_b = calcNorm(d->b, d->m);
	w->nm_c = calcNorm(d->c, d->n);
	//w->nm_b = calcNormInf(d->b, d->m);
	//w->nm_c = calcNormInf(d->c, d->n);
	//w->nm_Q = calcNormFroQ(d);
	if(d->NORMALIZE) {
		normalize(d,w,k);
	}
	else {
		/* identity scaling when normalization is disabled */
		w->D = NULL;
		w->E = NULL;
		w->sc_c = 1.0;
		w->sc_b = 1.0;
		w->scale = 1.0;
	}
	w->l = d->n+d->m+1; /* length of the homogeneous iterate [x;y;tau] */
	w->u = coneOS_calloc(w->l,sizeof(double));
	w->u[w->l-1] = sqrt(w->l);
	w->v = coneOS_calloc(w->l,sizeof(double));
	w->v[w->l-1] = sqrt(w->l);
	w->u_t = coneOS_calloc(w->l,sizeof(double));
	w->u_prev = coneOS_calloc(w->l,sizeof(double));
	w->h = coneOS_calloc((w->l-1),sizeof(double));
	memcpy(w->h,d->c,d->n*sizeof(double));
	memcpy(&(w->h[d->n]),d->b,d->m*sizeof(double));
	w->g = coneOS_calloc((w->l-1),sizeof(double));
	memcpy(w->g,w->h,(w->l-1)*sizeof(double));
	/* initialize the private data: */
	int status = privateInitWork(d, w);
	if (status < 0){
		coneOS_printf("privateInitWork failure: %i\n",status);
		exit(-1);
	}
	//else coneOS_printf("privateInitWork success: %i\n",status);
	if (k->s){
		/* eigenvector decomp workspace */
		/* sized for the largest semidefinite-cone block */
		int i, nMax = 0;
		for (i=0; i < k->ssize; ++i){
			if (k->s[i] > nMax) nMax = k->s[i];
		}
		w->Xs = coneOS_calloc(nMax*nMax,sizeof(double));
		w->Z = coneOS_calloc(nMax*nMax,sizeof(double));
		w->e = coneOS_calloc(nMax,sizeof(double));
	} else {
		w->Xs = NULL;
		w->Z = NULL;
		w->e = NULL;
	}
	/* g = linear-system solve applied to h, with the y block negated */
	solveLinSys(d,w,w->g, NULL, -1);
	scaleArray(&(w->g[d->n]),-1,d->m);
	w->gTh = innerProd(w->h, w->g, w->l-1);
	return w;
}
/* Projects u + v onto the affine subspace, writing the result into
 * w->u_t.  The homogeneous tau component is handled via the
 * pre-computed vectors h and g and the scalar gTh (see initWork). */
static inline void projectLinSys(Data * d,Work * w, int iter){
	// ut = u + v
	memcpy(w->u_t,w->u,w->l*sizeof(double));
	addScaledArray(w->u_t,w->v,w->l,1.0);
	scaleArray(w->u_t,d->RHO_X,d->n);
	addScaledArray(w->u_t,w->h,w->l-1,-w->u_t[w->l-1]);
	addScaledArray(w->u_t, w->h, w->l-1, -innerProd(w->u_t,w->g,w->l-1)/(w->gTh+1));
	scaleArray(&(w->u_t[d->n]),-1,d->m);
	solveLinSys(d, w, w->u_t, w->u, iter);
	w->u_t[w->l-1] += innerProd(w->u_t,w->h,w->l-1);
}
/* Releases all solver workspace.  Safe to call with w == NULL: the NULL
 * guard now precedes freePriv(w), which the original called before
 * checking w. */
static inline void freeWork(Work * w){
	if (w == NULL) {
		return;
	}
	freePriv(w);
	if(w->method) free(w->method); //called via malloc not mxMalloc
	if(w->Xs) coneOS_free(w->Xs);
	if(w->Z) coneOS_free(w->Z);
	if(w->e) coneOS_free(w->e);
	if(w->u) coneOS_free(w->u);
	if(w->v) coneOS_free(w->v);
	if(w->u_t) coneOS_free(w->u_t);
	if(w->u_prev) coneOS_free(w->u_prev);
	if(w->h) coneOS_free(w->h);
	if(w->g) coneOS_free(w->g);
	if(w->D) coneOS_free(w->D);
	if(w->E) coneOS_free(w->E);
	coneOS_free(w);
}
/* Debug helper: prints the solver status followed by every entry of the
 * primal (x) and dual (y) solution vectors. */
void printSol(Data * d, Sol * sol, Info * info){
	int i;
	coneOS_printf("%s\n",info->status);
	if (sol->x != NULL){
		for ( i=0;i<d->n; ++i){
			coneOS_printf("x[%i] = %4f\n",i, sol->x[i]);
		}
	}
	if (sol->y != NULL){
		for ( i=0;i<d->m; ++i){
			coneOS_printf("y[%i] = %4f\n",i, sol->y[i]);
		}
	}
}
/* Dual-variable update v += u - u_t (with ALPH relaxation when
 * ALPH != 1).  Only the y/tau components (indices >= n) are updated;
 * the x block of v is deliberately left untouched. */
static inline void updateDualVars(Data * d, Work * w){
	int i;
	/*
	for(i = 0; i < d->n; ++i) {
		w->v[i] += w->u[i] - w->u_t[i];
	}
	*/
	//for(i = 0; i < w->l; ++i) {
	if (fabs(d->ALPH - 1.0) < 1e-9) {
		// this is over-step parameter:
		//double sig = (1+sqrt(5))/2;
		double sig = 1.0;
		for(i = d->n; i < w->l; ++i) {
			w->v[i] += sig*(w->u[i] - w->u_t[i]);
		}
	}
	else {
		// this does not relax 'x' variable
		for(i = d->n; i < w->l; ++i) {
			w->v[i] += (w->u[i] - d->ALPH*w->u_t[i] - (1.0 - d->ALPH)*w->u_prev[i]);
		}
	}
}
/* Cone-projection step: the x block is updated without relaxation, the
 * (y,tau) block is ALPH-relaxed, pushed through projCone, and tau is
 * clamped to be nonnegative. */
static inline void projectCones(Data *d,Work * w,Cone * k, int iter){
	int i;
	// this does not relax 'x' variable
	for(i = 0; i < d->n; ++i) {
		w->u[i] = w->u_t[i] - w->v[i];
	}
	//for(i = 0; i < w->l; ++i){
	for(i = d->n; i < w->l; ++i){
		w->u[i] = d->ALPH*w->u_t[i] + (1-d->ALPH)*w->u_prev[i] - w->v[i];
	}
	/* u = [x;y;tau] */
	projCone(&(w->u[d->n]),k,w,iter);
	if (w->u[w->l-1]<0.0) w->u[w->l-1] = 0.0;
}
/* De-homogenizes the solution by tau and marks the problem solved. */
static inline int solved(Data * d, Sol * sol, Info * info, double tau){
	strcpy(info->status,"Solved");
	scaleArray(sol->x,1.0/tau,d->n);
	scaleArray(sol->y,1.0/tau,d->m);
	scaleArray(sol->s,1.0/tau,d->m);
	return SOLVED;
}
/* Marks the outcome indeterminate; all solution entries become NAN. */
static inline int indeterminate(Data * d, Sol * sol, Info * info){
	strcpy(info->status, "Indeterminate");
	scaleArray(sol->x,NAN,d->n);
	scaleArray(sol->y,NAN,d->m);
	scaleArray(sol->s,NAN,d->m);
	return INDETERMINATE;
}
/* Marks primal infeasibility: x and s become NAN, y is left as the certificate. */
static inline int infeasible(Data * d, Sol * sol, Info * info){
	strcpy(info->status,"Infeasible");
	//scaleArray(sol->y,-1/ip_y,d->m);
	scaleArray(sol->x,NAN,d->n);
	scaleArray(sol->s,NAN,d->m);
	return INFEASIBLE;
}
/* Marks primal unboundedness: y becomes NAN, x/s are left as the certificate. */
static inline int unbounded(Data * d, Sol * sol, Info * info){
	strcpy(info->status,"Unbounded");
	//scaleArray(sol->x,-1/ip_x,d->n);
	scaleArray(sol->y,NAN,d->m);
	return UNBOUNDED;
}
/* Copies the iterate into sol, then classifies the outcome via the
 * homogeneous-embedding variables tau and kappa: tau dominant and
 * above UNDET_TOL -> solved; a tiny iterate -> indeterminate;
 * otherwise b'y < c'x selects infeasible vs unbounded. */
static inline void setSolution(Data * d,Work * w,Sol * sol, Info * info){
	setx(d,w,sol);
	sety(d,w,sol);
	sets(d,w,sol);
	if (info->stint == 0 || info->stint == SOLVED){
		double tau = w->u[w->l-1];
		double kap = fabs(w->v[w->l-1]);
		if (tau > d->UNDET_TOL && tau > kap){
			info->stint = solved(d,sol,info,tau);
		}
		else{
			if (calcNorm(w->u,w->l)<d->UNDET_TOL*sqrt(w->l)){
				info->stint = indeterminate(d,sol,info);
			}
			else {
				double bTy = innerProd(d->b,sol->y,d->m);
				double cTx = innerProd(d->c,sol->x,d->n);
				if (bTy < cTx){
					info->stint = infeasible(d,sol,info);
				}
				else{
					info->stint = unbounded(d,sol,info);
				}
			}
		}
	} else if (info->stint == INFEASIBLE) {
		info->stint = infeasible(d,sol,info);
	} else {
		info->stint = unbounded(d,sol,info);
	}
}
/* Copies the y block of the primal iterate u into a fresh sol->y. */
static inline void sety(Data * d,Work * w, Sol * sol){
	sol->y = coneOS_malloc(sizeof(double)*d->m);
	memcpy(sol->y, &(w->u[d->n]), d->m*sizeof(double));
}
/* Copies the s block of the dual iterate v into a fresh sol->s. */
static inline void sets(Data * d,Work * w, Sol * sol){
	sol->s = coneOS_malloc(sizeof(double)*d->m);
	memcpy(sol->s, &(w->v[d->n]), d->m*sizeof(double));
}
/* Copies the x block of the primal iterate u into a fresh sol->x. */
static inline void setx(Data * d,Work * w, Sol * sol){
	sol->x = coneOS_malloc(sizeof(double)*d->n);
	memcpy(sol->x, w->u, d->n*sizeof(double));
}
/* Prints one progress row of the iteration table; the column widths are
 * derived from the HEADER strings so the rows line up with the banner.
 * The leading-space variants keep the sign column aligned for the
 * objective values. */
static inline void printSummary(Data * d,Work * w,int i, struct residuals *r){
	coneOS_printf("%*i|", (int)strlen(HEADER[0]), i);
	coneOS_printf(" %*.2e ", (int)strlen(HEADER[1])-1, r->resPri);
	coneOS_printf(" %*.2e ", (int)strlen(HEADER[2])-1, r->resDual);
	coneOS_printf(" %*.2e ", (int)strlen(HEADER[3])-1, r->relGap);
	if (r->cTx < 0) {
		coneOS_printf("%*.2e ", (int)strlen(HEADER[4])-1, r->cTx);
	} else {
		coneOS_printf(" %*.2e ", (int)strlen(HEADER[4])-1, r->cTx);
	}
	if (r->bTy >= 0) {
		coneOS_printf("%*.2e ", (int)strlen(HEADER[5])-1, -r->bTy);
	} else {
		coneOS_printf(" %*.2e ", (int)strlen(HEADER[5])-1, -r->bTy);
	}
	coneOS_printf(" %*.2e ", (int)strlen(HEADER[6])-1, r->kap);
	coneOS_printf(" %*.2e ", (int)strlen(HEADER[7])-1, tocq()/1e3);
	coneOS_printf("\n");
#ifdef MATLAB_MEX_FILE
	/* flush output so progress is visible inside the MATLAB console */
	mexEvalString("drawnow;");
#endif
}
/* Prints the startup banner: solver settings, problem dimensions, cone
 * sizes, and the column titles of the progress table.  Also computes
 * _lineLen_, the table width reused by printSummary/printFooter. */
static inline void printHeader(Data * d, Work * w, Cone * k) {
	int i;
	_lineLen_ = -1;
	for(i = 0; i < HEADER_LEN; ++i) {
		_lineLen_ += strlen(HEADER[i]) + 1;
	}
	for(i = 0; i < _lineLen_; ++i) {
		coneOS_printf("-");
	}
	coneOS_printf("\nconeOS 1.0: %s\n", w->method);
	for(i = 0; i < _lineLen_; ++i) {
		coneOS_printf("-");
	}
	coneOS_printf("\nEPS = %.2e, ALPHA = %.2f, MAX_ITERS = %i, NORMALIZE = %i\n", d->EPS_ABS, d->ALPH, d->MAX_ITERS, d->NORMALIZE);
	coneOS_printf("variables n = %i, constraints m = %i, non-zeros in A = %i\n", d->n, d->m, d->Anz);
	/* total variable counts per cone type */
	int socVars = 0;
	for (int i=0;i<k->qsize;i++){
		socVars += k->q[i];
	}
	int sdVars = 0;
	for (int i=0;i<k->ssize;i++){
		sdVars += k->s[i]*k->s[i];
	}
	coneOS_printf("cones:\tzero/free vars: %i\n\tlinear vars: %i\n\tsoc vars: %i, soc blks: %i\n\tsd vars: %i, sd blks: %i\n\texp vars: %i\n\tdual exp vars: %i\n", k->f, k->l, socVars, k->qsize, sdVars,k->ssize, k->ep*3, k->ed*3);
	for(i = 0; i < _lineLen_; ++i) {
		coneOS_printf("-");
	}
	coneOS_printf("\n");
	for(i = 0; i < HEADER_LEN - 1; ++i) {
		coneOS_printf("%s|", HEADER[i]);
	}
	coneOS_printf("%s\n", HEADER[HEADER_LEN-1]);
	for(i = 0; i < _lineLen_; ++i) {
		coneOS_printf("=");
	}
	coneOS_printf("\n");
}
/* Prints the final status, solve time, and either the infeasibility /
 * unboundedness certificate metrics or the optimality error metrics,
 * depending on info->stint. */
static inline void printFooter(Data * d, Info * info, Work * w) {
	int i;
	for(i = 0; i < _lineLen_; ++i) {
		coneOS_printf("-");
	}
	coneOS_printf("\nStatus: %s\n",info->status);
	if (info->iter == d->MAX_ITERS) {
		coneOS_printf("Hit MAX_ITERS, solution may be inaccurate\n");
	}
	coneOS_printf("Time taken: %.4f seconds\n",info->time/1e3);
	for(i = 0; i < _lineLen_; ++i) {
		coneOS_printf("-");
	}
	coneOS_printf("\n");
	if (info->stint == INFEASIBLE) {
		coneOS_printf("Certificate of primal infeasibility:\n");
		coneOS_printf("|A'y|_2 * |b|_2 = %.4e\n", info->resDual);
		coneOS_printf("dist(y, K*) = 0\n");
		coneOS_printf("b'y = %.4f\n", info->dobj);
	}
	else if (info->stint == UNBOUNDED) {
		coneOS_printf("Certificate of dual infeasibility:\n");
		coneOS_printf("|Ax + s|_2 * |c|_2 = %.4e\n", info->resPri);
		coneOS_printf("dist(s, K) = 0\n");
		coneOS_printf("c'x = %.4f\n", info->pobj);
	}
	else {
		coneOS_printf("Error metrics:\n");
		coneOS_printf("|Ax + s - b|_2 / (1 + |b|_2) = %.4e\n",info->resPri);
		coneOS_printf("|A'y + c|_2 / (1 + |c|_2) = %.4e\n",info->resDual);
		coneOS_printf("|c'x + b'y| / (1 + |c'x| + |b'y|) = %.4e\n", info->relGap);
		coneOS_printf("dist(s, K) = 0, dist(y, K*) = 0, s'y = 0\n");
		for(i = 0; i < _lineLen_; ++i) {
			coneOS_printf("-");
		}
		coneOS_printf("\n");
		coneOS_printf("c'x = %.4f, -b'y = %.4f\n",info->pobj, info->dobj);
	}
	for(i = 0; i < _lineLen_; ++i) {
		coneOS_printf("=");
	}
	coneOS_printf("\n");
}
|
#!/usr/bin/env bash
# Get the nightly builds using maven
# This is an alternative to using the ../bin/getnightly.sh shell script
#
# Abort on the first failing command; `pipefail` makes a mvn failure
# propagate through the tee pipe instead of being masked by tee's exit code.
set -euo pipefail

rm -fr ./staging
mkdir staging
mvn install | tee ./staging/RELEASE
|
#!/usr/bin/env bash
# Installs the desktop snaps used on this machine, grouped by purpose.
# flutter and slack require classic confinement.

# Development: Flutter, Postman
sudo snap install flutter --classic
sudo snap install postman

# Entertainment: Discord, Spotify
sudo snap install discord spotify

# Tools: Bitwarden, Slack, Mailspring
sudo snap install bitwarden mailspring
sudo snap install slack --classic
|
/**
 * Returns the index of the first element strictly equal to `target`,
 * or -1 if the value is not present.
 *
 * @param {Array} arr - array to scan
 * @param {*} target - value to look for
 * @returns {number} index of the first match, or -1
 */
function searchNumber(arr, target) {
  for (let i = 0; i < arr.length; i++) {
    // Strict equality avoids surprising type coercions (e.g. "7" == 7).
    if (arr[i] === target) {
      return i;
    }
  }
  return -1;
}

let result = searchNumber([1, 2, 4, 5, 6, 7, 8], 7);
console.log(result);
|
// Receiver class: knows how to carry out the actual work requested by a command.
public class FanBingBingReceiver {
    /** Performs the concrete action; invoked by the command's execute(). */
    public void wearTrousers() {
        System.out.println("Fan BingBing is wearing trousers.");
    }
}
// Command interface: common contract for all executable requests.
public interface Command {
    /** Executes the encapsulated request against its receiver. */
    void execute();
}
// Concrete command: delegates execution to its receiver.
public class TuoKuZiCommand implements Command {
    /** Receiver that performs the actual work when this command runs. */
    private final FanBingBingReceiver receiver;

    public TuoKuZiCommand(FanBingBingReceiver bingBing) {
        this.receiver = bingBing;
    }

    @Override
    public void execute() {
        receiver.wearTrousers();
    }
}
// Client class to demonstrate the command pattern
public class Client {
    public static void main(String[] args) {
        // Wire the receiver into a concrete command, then trigger it.
        final FanBingBingReceiver receiver = new FanBingBingReceiver();
        final Command command = new TuoKuZiCommand(receiver);
        command.execute();
    }
}
|
<gh_stars>10-100
package io.opensphere.core.preferences;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import org.w3c.dom.Element;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.XMLUtilities;
/**
* A preference key and JAXB object value pair.
*
* @param <T> The type of value held by this object.
*/
class JAXBElementPreference<T> extends Preference<T>
{
    /**
     * Constructor.
     *
     * @param key The preference key.
     * @param value The value.
     * @throws IllegalArgumentException If the key or value is {@code null}.
     */
    public JAXBElementPreference(String key, JAXBElement<T> value) throws IllegalArgumentException
    {
        super(key, Utilities.checkNull(value, "value").getDeclaredType(), value);
    }
    @Override
    protected Element getElement() throws JAXBException
    {
        // The stored data is either the original JAXBElement or a DOM
        // Element (see getValue); marshal only in the former case.
        if (getData() instanceof JAXBElement)
        {
            return XMLUtilities.marshalJAXBObjectToElement(getData());
        }
        return (Element)getData();
    }
    @Override
    @SuppressWarnings("unchecked")
    protected T getValue() throws JAXBException
    {
        Object data = getData();
        if (data instanceof JAXBElement)
        {
            return ((JAXBElement<T>)data).getValue();
        }
        // Data is held as a DOM Element; unmarshal it back into a
        // JAXBElement and unwrap.  NOTE(review): the lock presumably guards
        // non-thread-safe access to the DOM node — confirm against callers.
        synchronized (data)
        {
            return (T)XMLUtilities.readXMLObject((Element)data, JAXBElement.class).getValue();
        }
    }
}
|
<reponame>StuntsPT/BangleApps
const c={"x":g.getWidth()/2,"y":g.getHeight()/2}; // screen centre
let zahlpos=[]; // cached [numeral, x, y] positions for the dial (filled by setlet)
let unlock = false; // true while unlocked -> seconds hand is drawn
// Builds the 4-corner polygon for a watch hand of length `len` and
// width `dia`, rotated to angle `tim` (radians) around the centre.
function zeiger(len,dia,tim){
  const x =c.x+ Math.cos(tim)*len/2,
      y =c.y + Math.sin(tim)*len/2,
      d={"d":3,"x":dia/2*Math.cos(tim+Math.PI/2),"y":dia/2*Math.sin(tim+Math.PI/2)},
      pol=[c.x-d.x,c.y-d.y,c.x+d.x,c.y+d.y,x+d.x,y+d.y,x-d.x,y-d.y];
  return pol;
}
// Redraws the whole watch face: date, battery level, numerals and hands.
function draw(){
  const d=new Date();
  let m=d.getMinutes(), h=d.getHours(), s=d.getSeconds();
  //draw black rectangle in the middle to clear screen from scale and hands
  g.setColor(0,0,0);
  g.fillRect(10,10,2*c.x-10,2*c.x-10);
  g.setColor(1,1,1);
  if(h>12){
    h=h-12;
  }
  //calculates the position of the minute, second and hour hand
  h=2*Math.PI/12*(h+m/60)-Math.PI/2;
  //more accurate
  //m=2*Math.PI/60*(m+s/60)-Math.PI/2;
  m=2*Math.PI/60*(m)-Math.PI/2;
  s=2*Math.PI/60*s-Math.PI/2;
  g.setFontAlign(0,0);
  g.setFont("Vector",10);
  let dateStr = " "+require("locale").date(d)+" ";
  g.drawString(dateStr, c.x, c.y+20, true);
  // g.drawString(d.getDate(),1.4*c.x,c.y,true);
  // Battery percentage rounded to the nearest 5%.
  g.drawString(Math.round(E.getBattery()/5)*5+"%",c.x,c.y+40,true);
  drawlet();
  //g.setColor(1,0,0);
  const hz = zeiger(100,5,h);
  g.fillPoly(hz,true);
  // g.setColor(1,1,1);
  const minz = zeiger(150,5,m);
  g.fillPoly(minz,true);
  // Seconds hand only while the watch is unlocked (see the 'lock' handler).
  if (unlock){
    const sekz = zeiger(150,2,s);
    g.fillPoly(sekz,true);
  }
  g.fillCircle(c.x,c.y,4);
}
//draws the scale once the app is started
// Each tick is drawn as a full-length hand polygon and the centre is then
// blacked out again, leaving only the rim visible.  Minute ticks are thin
// (d=2); five-minute ticks are thicker (d=5).
function drawScale(){
  for(let i=-14;i<47;i++){
    const win=i*2*Math.PI/60;
    let d=2;
    if(i%5==0){d=5;}
    g.fillPoly(zeiger(300,d,win),true);
    g.setColor(0,0,0);
    g.fillRect(10,10,2*c.x-10,2*c.x-10);
    g.setColor(1,1,1);
  }
}
// Draws the dial numerals at the positions precomputed by setlet().
function drawlet(){
  g.setFont("Vector",20);
  zahlpos.forEach((pos) => {
    const [numeral, x, y] = pos;
    g.drawString(numeral, x, y);
  });
}
//calculates the position of the dial numbers once at startup and saves them in zahlpos
function setlet(){
  let sk=1;
  for(let i=-10;i<50;i+=5){
    let win=i*2*Math.PI/60;
    let xsk =c.x+2+Math.cos(win)*(c.x-10),
        ysk =c.y+2+Math.sin(win)*(c.x-10);
    // Nudge 3, 6, 9, 12 (and 10) inward so the glyphs stay on screen.
    if(sk==3){xsk-=10;}
    if(sk==6){ysk-=10;}
    if(sk==9){xsk+=10;}
    if(sk==12){ysk+=10;}
    if(sk==10){xsk+=3;}
    zahlpos.push([sk,xsk,ysk]);
    sk+=1;
  }
}
setlet();
// Clear the screen once, at startup
g.setBgColor(0,0,0);
g.clear();
drawScale();
draw();
// Redraw every second while the display is active.
let secondInterval= setInterval(draw, 1000);
// Stop updates when LCD is off, restart when on
Bangle.on('lcdPower',on=>{
  if (secondInterval) clearInterval(secondInterval);
  secondInterval = undefined;
  if (on) {
    secondInterval = setInterval(draw, 1000);
    draw(); // draw immediately
  }else{
  }
});
// Unlocked: update every second and show the seconds hand.
// Locked: update once a minute and hide it.
Bangle.on('lock',on=>{
  if (secondInterval) clearInterval(secondInterval);
  secondInterval = undefined;
  if (!on) {
    secondInterval = setInterval(draw, 1000);
    unlock = true;
    draw(); // draw immediately
  }else{
    secondInterval = setInterval(draw, 60000);
    unlock = false;
    draw();
  }
});
// Show launcher when middle button pressed
Bangle.setUI("clock");
|
#!/bin/sh
# Bootstraps a Nim toolchain from a source checkout.
# Usage: <script> <path-to-nim-source>
set -uex
NIM_DIR=$1
cd "${NIM_DIR}"
# Build the initial compiler from the bundled sources.
sh build.sh
# Compile the koch build tool, then use it to rebuild the compiler and tools in release mode.
bin/nim c koch
./koch boot -d:release
./koch tools -d:release
|
// Base de datos de un pais con 20 personas y sus salarios,
// declarada como un literal de arreglo en lugar de 20 push().
const colombia = [
  { name: "Yuki", salary: 500 },
  { name: "Kaito", salary: 1500 },
  { name: "Jorge", salary: 1800 },
  { name: "Mariela", salary: 1000 },
  { name: "Jaquelin", salary: 2200 },
  { name: "Karina", salary: 200 },
  { name: "<NAME>", salary: 500 },
  { name: "<NAME>", salary: 1500 },
  { name: "Angel", salary: 1300 },
  { name: "Ticao", salary: 2400 },
  { name: "Alan", salary: 3400 },
  { name: "Cata", salary: 400 },
  { name: "Angélica", salary: 400 },
  { name: "Tatiana", salary: 400 },
  { name: "Lorena", salary: 600 },
  { name: "Carolina", salary: 1600 },
  { name: "Fernanda", salary: 2600 },
  { name: "Nora", salary: 1000 },
  { name: "Gisselle", salary: 2000 },
  { name: "<NAME>", salary: 100000000 },
];

// console.log(colombia);
|
#pragma once
#include <toy_compiler/munster/ast/decl/decl.hpp>
#include <vector>
#include <string>
namespace toy_compiler::munster::ast {
// AST node describing a named declaration (variable, function or class).
class ASTDeclaration : public ASTNode {
public:
   enum class DeclarationType {
      Variable,
      Function,
      Class
      // Add more declaration types as needed
   };
   // Creates a declaration of the given kind with the given identifier.
   ASTDeclaration(DeclarationType type, const std::string& name)
      : type_{type}, name_{name} {}
   // Member functions to manipulate and retrieve information about the declarations
   // Appends a parameter name; silently ignored for non-function declarations.
   void addParameter(const std::string& param) {
      if (type_ == DeclarationType::Function) {
         parameters_.push_back(param);
      }
   }
   // Getters for declaration type and name
   DeclarationType getType() const { return type_; }
   const std::string& getName() const { return name_; }
   // Additional member functions and data members as needed for specific declaration types
private:
   DeclarationType type_; // kind of declaration
   std::string name_; // declared identifier
   std::vector<std::string> parameters_; // parameter names (functions only)
   // Additional member variables as needed for specific declaration types
};
} // namespace toy_compiler::munster::ast
|
# Write a profile.d snippet that appends the tool's bin directory to PATH.
# The here-doc delimiter is quoted so that $PATH is written literally and
# expanded at login time, instead of baking in the PATH of the environment
# that generated this file (the <%= name %> placeholders are expanded by
# the template engine before the shell runs, so they are unaffected).
cat <<'EOF' | tee /etc/profile.d/10-<%= name %>.sh
#!/bin/sh
export PATH=$PATH:/opt/<%= name %>/bin/
EOF
chmod +x /etc/profile.d/10-<%= name %>.sh
|
<reponame>patrontech/gh-action-amazon-ecs-run-task
import { ECSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECSClient";
import { DeregisterTaskDefinitionRequest, DeregisterTaskDefinitionResponse } from "../models/models_0";
import { Command as $Command } from "@aws-sdk/smithy-client";
import { Handler, MiddlewareStack, HttpHandlerOptions as __HttpHandlerOptions, MetadataBearer as __MetadataBearer } from "@aws-sdk/types";
/** Input shape for {@link DeregisterTaskDefinitionCommand}. */
export interface DeregisterTaskDefinitionCommandInput extends DeregisterTaskDefinitionRequest {
}
/** Output shape for {@link DeregisterTaskDefinitionCommand}. */
export interface DeregisterTaskDefinitionCommandOutput extends DeregisterTaskDefinitionResponse, __MetadataBearer {
}
/**
 * <p>Deregisters the specified task definition by family and revision. Upon deregistration,
 *          the task definition is marked as <code>INACTIVE</code>. Existing tasks and services that
 *          reference an <code>INACTIVE</code> task definition continue to run without disruption.
 *          Existing services that reference an <code>INACTIVE</code> task definition can still
 *          scale up or down by modifying the service's desired count.</p>
 *          <p>You cannot use an <code>INACTIVE</code> task definition to run new tasks or create new
 *          services, and you cannot update an existing service to reference an
 *             <code>INACTIVE</code> task definition. However, there may be up to a 10-minute
 *          window following deregistration where these restrictions have not yet taken
 *          effect.</p>
 *          <note>
 *             <p>At this time, <code>INACTIVE</code> task definitions remain discoverable in your
 *             account indefinitely. However, this behavior is subject to change in the future, so
 *             you should not rely on <code>INACTIVE</code> task definitions persisting beyond the
 *             lifecycle of any associated tasks and services.</p>
 *          </note>
 * @example
 * Use a bare-bones client and the command you need to make an API call.
 * ```javascript
 * import { ECSClient, DeregisterTaskDefinitionCommand } from "@aws-sdk/client-ecs"; // ES Modules import
 * // const { ECSClient, DeregisterTaskDefinitionCommand } = require("@aws-sdk/client-ecs"); // CommonJS import
 * const client = new ECSClient(config);
 * const command = new DeregisterTaskDefinitionCommand(input);
 * const response = await client.send(command);
 * ```
 *
 * @see {@link DeregisterTaskDefinitionCommandInput} for command's `input` shape.
 * @see {@link DeregisterTaskDefinitionCommandOutput} for command's `response` shape.
 * @see {@link ECSClientResolvedConfig | config} for command's `input` shape.
 *
 */
export declare class DeregisterTaskDefinitionCommand extends $Command<DeregisterTaskDefinitionCommandInput, DeregisterTaskDefinitionCommandOutput, ECSClientResolvedConfig> {
    readonly input: DeregisterTaskDefinitionCommandInput;
    constructor(input: DeregisterTaskDefinitionCommandInput);
    /**
     * @internal
     */
    resolveMiddleware(clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>, configuration: ECSClientResolvedConfig, options?: __HttpHandlerOptions): Handler<DeregisterTaskDefinitionCommandInput, DeregisterTaskDefinitionCommandOutput>;
    private serialize;
    private deserialize;
}
|
def is_palindrome(word):
    """Return True if `word` reads the same forwards and backwards.

    The comparison is case- and character-exact; an empty string is
    considered a palindrome.
    """
    # Comparing directly with the reversed string replaces the
    # explicit if/else returning True/False.
    return word == word[::-1]
|
from datetime import datetime
now = datetime.now()
print("Today's date is", now.strftime("%A, %B %dth, %Y, at %I:%M%p"))
|
# Release and channel being deployed.
version=1.2.0
platform=xinchang
# SVN working copy holding the CDN resources for this platform/version.
cdndir=/svn/$platform/$version/game_server/htdoc/resource/
svn up $cdndir
# Extract the hot-update version number from hot_update_file.json
# (first comma-separated field, value after the colon).
hot_version=$(awk -F"," '{print $1}' $cdndir/hot_update_file.json|awk -F":" '{print $2}')
hot_file=cdn/hot_version.txt
# Previously recorded hot version, for comparison.
hot_file_version_old=$(awk -F":" '/hot_version/{print $2}' cdn/hot_version.txt)
echo -e "\e[33mThe old version is $hot_file_version_old\e[0m"
if [ "$hot_file_version_old" != "$hot_version" ];then
	echo -e "\e[33mThe new version will be updated from $hot_file_version_old to $hot_version\e[0m"
	# Record the new hot version in place.
	sudo sed -i "/hot_version/s/$hot_file_version_old/$hot_version/" $hot_file
fi
cat $hot_file
hot_version_dir=$version.$hot_version
# Stage the resources locally (excluding .svn metadata), then push them to the CDN rsync module.
sudo rsync -avz $cdndir/* --exclude ".svn" cdn/$hot_version_dir
sudo rsync -aP --exclude ".svn/" cdn/$hot_version_dir 60.191.203.70::channels_xc_cdn
|
<filename>dist/src/commands/scaffold-module.command.js
"use strict";
/* TypeScript-compiler-emitted CommonJS interop helpers (auto-generated;
 * do not edit by hand). __importStar re-exports a module's own properties
 * and attaches the module itself as `default`. */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
/* Standard CommonJS module bootstrapping emitted by tsc. */
Object.defineProperty(exports, "__esModule", { value: true });
exports.ScaffoldModuleCommand = void 0;
const clipanion_1 = require("clipanion");
const t = __importStar(require("typanion"));
const directory_utils_1 = require("../utils/directory-utils");
const formatter_utils_1 = require("../utils/formatter-utils");
const spinner_util_1 = require("../utils/spinner-util");
/**
 * ScaffoldModuleCommand
 *
 * Scaffolds a Module inside an existing Domeniere project: verifies the
 * working directory is a Domeniere root, ensures the module is new, writes
 * the module files, and exposes the module through the project exports.
 */
class ScaffoldModuleCommand extends clipanion_1.Command {
    constructor() {
        super(...arguments);
        // positional argument: the name of the module to create.
        this.moduleName = clipanion_1.Option.String({ required: true, name: 'domain-name', validator: t.isString() });
    }
    /**
     * Executes the command. Returns 0 on success, 1 on any failure
     * (not a Domeniere project, module already exists, or a write error).
     */
    async execute() {
        this.context.stdout.write(formatter_utils_1.formatLogInfo(`Creating module\n`));
        // validate we are in a Domeniere project.
        try {
            if (!await directory_utils_1.isDomeniereProject(process.cwd())) {
                throw new Error('Not a Domeniere project');
            }
        }
        catch (e) {
            this.context.stdout.write(formatter_utils_1.formatLogError(`Error: ${e.message}\n`));
            return 1;
        }
        try {
            // make sure the module does not already exist.
            spinner_util_1.startSpinner(formatter_utils_1.formatLogInfo("Verifying..."));
            const moduleName = formatter_utils_1.formatClassName(this.moduleName);
            if (await directory_utils_1.moduleExists(this.moduleName, process.cwd())) {
                throw new Error('Module already exists.');
            }
            spinner_util_1.stopSpinnerWithSuccess(formatter_utils_1.formatLogInfo("Verification complete."));
            // create the module files.
            spinner_util_1.startSpinner(formatter_utils_1.formatLogInfo(`Writing module files...`));
            await directory_utils_1.createModule(moduleName, process.cwd());
            spinner_util_1.stopSpinnerWithSuccess(formatter_utils_1.formatLogInfo(`Successfully created module files.`));
            // add the module to the project index so it is exported.
            spinner_util_1.startSpinner(formatter_utils_1.formatLogInfo(`updating exports...`));
            await directory_utils_1.exposeModule(moduleName, process.cwd());
            spinner_util_1.stopSpinnerWithSuccess(formatter_utils_1.formatLogInfo('Successfully updated exports.'));
            this.context.stdout.write(formatter_utils_1.formatLogInfo(`Successfully created module ${moduleName}\n`));
            return 0;
        }
        catch (e) {
            // fixed typo in the user-facing failure message ("Somthing").
            spinner_util_1.stopSpinnerWithFailure(formatter_utils_1.formatLogError("Something went wrong."));
            this.context.stdout.write(formatter_utils_1.formatLogError(`Error: ${e.message}\n`));
            return 1;
        }
    }
}
exports.ScaffoldModuleCommand = ScaffoldModuleCommand;
// CLI path under which this command is invoked: `create module`.
ScaffoldModuleCommand.paths = [
    ['create', 'module'],
];
// This information is shown on the help command.
ScaffoldModuleCommand.usage = {
    category: 'Templates',
    description: "Creates a Module",
    details: "Creates a Module within an existing domain.",
};
|
<filename>iot-suite-server-web/src/main/java/com/tuya/iot/suite/web/config/RequestLoggerFilter.java
package com.tuya.iot.suite.web.config;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
/**
 * Servlet filter that logs the HTTP method and URI of every incoming request
 * before passing it down the filter chain.
 *
 * Ordered with the lowest precedence so other filters run first.
 *
 * @author <EMAIL>
 * @date 2021/06/11
 */
@Component
@Order(Ordered.LOWEST_PRECEDENCE)
@Slf4j
public class RequestLoggerFilter implements Filter {
    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // No initialization needed.
    }
    @Override
    @SneakyThrows
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) {
        final HttpServletRequest httpRequest = (HttpServletRequest) request;
        log.info("request =>{} {}", httpRequest.getMethod(), httpRequest.getRequestURI());
        chain.doFilter(request, response);
    }
    @Override
    public void destroy() {
        // No resources to release.
    }
}
|
# Demonstrates Array#sort: print the words in alphabetical order.
arr = %w[zebra ant cat dog]
sorted_arr = arr.sort
puts "Sorted array: #{sorted_arr}"
|
#!/bin/bash
#### percona-xtrabackup
# One-shot installer for Percona XtraBackup. Skipped when INST_LOG already
# contains the XTRABACKUP marker, making re-runs idempotent. Relies on helper
# functions (file_proc, get_file, unpack, succ_msg) and variables (INST_LOG,
# XTRABACKUP_SRC, STORE_DIR, SRC_DIR, INST_DIR, TOP_DIR, XTRABACKUP_BACKUP_DIR,
# MYSQL_ROOT_PASS) defined by the sourcing script.
if ! grep '^XTRABACKUP$' ${INST_LOG} > /dev/null 2>&1 ; then
    ## handle source packages
    file_proc ${XTRABACKUP_SRC}
    get_file
    unpack
    SYMLINK='/usr/local/percona-xtrabackup'
    mv ${STORE_DIR}/${SRC_DIR} ${INST_DIR}
    ln -sf ${INST_DIR}/${SRC_DIR} $SYMLINK
    ## for install config files
    succ_msg "Begin to install ${SRC_DIR} config files"
    ## backup script: point it at the configured backup dir and MySQL root password
    install -m 0700 ${TOP_DIR}/conf/percona-xtrabackup/xtrabackup.sh /usr/local/bin/xtrabackup.sh
    sed -i "s#^BACKUP_DIR.*#BACKUP_DIR=${XTRABACKUP_BACKUP_DIR}#" /usr/local/bin/xtrabackup.sh
    sed -i "s#password=#password=${MYSQL_ROOT_PASS}#" /usr/local/bin/xtrabackup.sh
    ## cron job: run the backup nightly at 04:00 as root
    echo '' >> /var/spool/cron/root
    echo '# MySQL Backup' >> /var/spool/cron/root
    echo '0 4 * * * /usr/local/bin/xtrabackup.sh > /dev/null 2>&1' >> /var/spool/cron/root
    chown root:root /var/spool/cron/root
    chmod 600 /var/spool/cron/root
    ## record installed tag
    echo 'XTRABACKUP' >> ${INST_LOG}
else
    succ_msg "Percona Xtrabackup already installed!"
fi
|
#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Find the JVM process(es) running the kafka.Kafka entry point and ask them
# to shut down cleanly with SIGTERM. Exits 1 when no broker is running.
PIDS=$(ps ax | grep -i 'kafka\.Kafka' | grep java | grep -v grep | awk '{print $1}')

if [ -z "$PIDS" ]; then
  echo "No kafka server to stop"
  exit 1
fi

kill -SIGTERM $PIDS
|
import React from "react"
import Footer from './footer'
import 'bootstrap/dist/css/bootstrap.min.css';
import "slick-carousel/slick/slick.css";
import "slick-carousel/slick/slick-theme.css";
const Layout = ({ children }) => {
return (
<>
{children}
<Footer />
</>
)
}
export default Layout
|
from tensorflow.keras import Model
from tensorflow.keras.layers import Dense, Input
def fcnn(num_classes=10):
    """Build a fully connected classifier for flattened 784-dim inputs.

    Architecture: 784 -> 128 -> 64 -> 32 -> num_classes, with ReLU hidden
    activations and a softmax output.

    Args:
        num_classes: number of output classes (default 10). The original
            code read an undefined module-level global ``num_classes``,
            which raised NameError unless the caller happened to define it;
            it is now an explicit parameter.

    Returns:
        An uncompiled ``tf.keras`` Model named ``'fcnn_model'``.
    """
    inputs = Input(shape=(784,), dtype='float32', name='input')
    x = Dense(128, activation='relu')(inputs)
    x = Dense(64, activation='relu')(x)
    x = Dense(32, activation='relu')(x)
    output = Dense(num_classes, activation='softmax')(x)
    return Model(inputs=inputs, outputs=output, name='fcnn_model')
|
<gh_stars>1-10
//
// IRApplicationController.h
// ExpenseManager
//
// Created by <NAME> on 30/09/14.
// Copyright (c) 2014 Shibin. All rights reserved.
//
#import <Foundation/Foundation.h>
// Application-level controller exposed as a shared singleton.
@interface IRApplicationController : NSObject
// Returns the shared controller instance.
+ (IRApplicationController *)sharedInstance;
// Shows the daily-reminder alert (presentation details live in the .m file).
- (void)showAlertForDailyReminder;
@end
|
<reponame>guoshuangyang/images-editor
/**
 * Drawing options for placing an image on the canvas.
 * @param image required source image
 * @param x x coordinate where drawing starts
 * @param y y coordinate where drawing starts
 * @param w drawing width
 * @param h drawing height
 */
interface Options {
    image: CanvasImageSource;
    x?: number;
    y?: number;
    w?: number;
    h?: number;
    shape?: string;
    eventStatus?: number;
}
/**
 * Context styling options.
 * borderColor  border color
 * borderStyle  border style
 * fillColor    fill color used when drawing
 * textColor    text color
 */
interface CtxOption {
    borderColor?: string;
    textColor?: string;
    fillColor?: string;
    borderStyle?: string;
}
/**
 * Layer data to be rendered.
 */
interface Data {
    graph: {
        leftTop: number[];
        rightBottom: number[];
        center: number[];
        w: number,
        h: number,
    }[];
}
/**
 * @interface canvasStatus
 * @description eventStatus - the event type recorded after a mouse event fires
 * @description drawShape - the shape drawn as a result of the mouse event
 */
interface canvasStatus {
    eventStatus: number;
    // shape being drawn
    drawShape: string
}
|
#! /bin/bash
# Rebuilds lib/bundles from upstream TextMate grammar sources: VS Code's
# built-in extensions, the MDX grammar, and the Kotlin bundle, then applies
# the local patch on top.
set -o errexit
ROOT=$PWD
rm -rf temp
mkdir temp
if [ ! -d lib/bundles ]; then
  mkdir lib/bundles
fi
cd temp
# vscode languages
git clone https://github.com/Microsoft/vscode
cd vscode/extensions
for f in *; do
  # Only extensions shipping a syntaxes/ directory contain grammars we want.
  if [ -d "$f/syntaxes" ]; then
    echo "Adding $f"
    cp -r "$f" "$ROOT/lib/bundles"
    # Trim test/build/resource payloads that are not needed at runtime.
    rm -rf "$ROOT/lib/bundles/$f/test"
    rm -rf "$ROOT/lib/bundles/$f/build"
    rm -rf "$ROOT/lib/bundles/$f/resources"
  fi
done
cd ../..
# mdx
git clone https://github.com/silvenon/vscode-mdx.git
cd vscode-mdx
echo "Adding mdx"
mkdir -p "$ROOT/lib/bundles/mdx"
cp -r "package.json" "$ROOT/lib/bundles/mdx/"
cp -r "license" "$ROOT/lib/bundles/mdx/"
cp -r "syntaxes" "$ROOT/lib/bundles/mdx/"
cd ..
# kotlin
git clone https://github.com/sargunv/kotlin-textmate-bundle.git
cd kotlin-textmate-bundle/Kotlin.tmbundle
# Normalize TextMate's capitalized directory names.
mv "Snippets" "snippets"
mv "Syntaxes" "syntaxes"
echo "Adding kotlin"
mkdir -p "$ROOT/lib/bundles/kotlin"
cp -r "info.plist" "$ROOT/lib/bundles/kotlin/"
cp -r "snippets" "$ROOT/lib/bundles/kotlin/"
cp -r "syntaxes" "$ROOT/lib/bundles/kotlin/"
cd $ROOT
rm -rf $ROOT/temp
echo "Applying patch"
git apply $ROOT/bundles.patch
|
#!/bin/bash
# Re-export host-provided connection strings under the names expected inside
# the Docker container. EVENT_HUB_CONNECTION1 and STORAGE_CONNECTION1 must be
# set on the host machine; they are surfaced to the container as ENV1 and ENV2.
export ENV1="${EVENT_HUB_CONNECTION1}"
export ENV2="${STORAGE_CONNECTION1}"
|
// Theme factory. Both the light and dark palettes are declared, but only
// the dark palette is returned to callers at present.
export default () => {
  const light = {
    textColor: "black",
    backgroundColor: "white",
    containerColor: "transparent",
    activeColor: "tomato",
    dividerColor: "#cccccc"
  };

  const dark = {
    textColor: "white",
    backgroundColor: "#171717",
    containerColor: "#171717",
    activeColor: "orange",
    dividerColor: "#474747",
    warningColor: "orange",
    warningBackgroundColor: "orange",
    dangerColor: '#cd3e05',
    textColorMuted: '#c0c0c0',
    // text input styling
    textInput: {
      default: {
        backgroundColor: '#1D1D1D',
        color: 'white'
      }
    },
    // navigation header styling
    headers: {
      headerStyle: {
        backgroundColor: "#181818",
        borderBottomColor: '#2C2C2C'
      },
      headerTintColor: "#fff",
      headerTitleStyle: {
        fontWeight: "bold"
      }
    },
    // bottom tab bar styling
    tabBar: {
      inactiveBackgroundColor: "#181818",
      activeBackgroundColor: "#181818",
      activeTintColor: "orange",
      inactiveTintColor: "gray",
      style: {
        borderTopColor: '#2C2C2C'
      }
    },
    // table view styling
    uiTable: {
      section: {
        sectionTintColor: "#171717",
        separatorTintColor: "#303030",
        headerTextColor: "#c0c0c0",
        footerTextColor: "white"
      },
      cell: {
        accessoryColor: "orange",
        backgroundColor: "#1D1D1D",
        leftDetailColor: "orange",
        rightDetailColor: "orange",
        subtitleColor: "#c0c0c0",
        titleTextColor: "white"
      }
    }
  };

  return dark;
};
|
<filename>spec/getter_setter_spec.rb
# RSpec coverage for a Pimple-style DI container: services may be defined via
# #set (with a block or a value: keyword) or dynamically through
# method_missing, and read back with [], #get, or method_missing.
describe 'Getter And Setter' do
  describe "setter" do
    before(:each) do
      @container= Pimple.new
    end
    it "should get a service defined with block " do
      @container.set(:db_host) {|c| "127.0.0.1" }
      @container.set(:db) {|c| "mysql://#{c[:db_host]}" }
      @container[:db_host].should == "127.0.0.1"
      @container[:db].should == "mysql://127.0.0.1"
    end
    it "should get a service defined without block" do
      @container.set(:db_host,value:"127.0.0.1")
      @container[:db_host].should == "127.0.0.1"
    end
    it "should get a service using method_missing getter and block" do
      @container.db_host {|c| "127.0.0.1" }
      @container.db {|c| "mysql://#{c[:db_host]}" }
      @container[:db_host].should == "127.0.0.1"
      @container[:db].should == "mysql://127.0.0.1"
    end
    it "should get a service using method_missing getter with no block" do
      @container.db_host "127.0.0.1"
      @container.db "mysql://#{@container[:db_host]}"
      @container[:db_host].should == "127.0.0.1"
      @container[:db].should == "mysql://127.0.0.1"
    end
  end
  describe "getter" do
    before(:each) do
      @container= Pimple.new
      @container.db_host "127.0.0.1"
      @container.db "mysql://#{@container[:db_host]}"
    end
    it "should get a service with get" do
      @container.get(:db).should == "mysql://127.0.0.1"
    end
    it "should get a service with method_missing" do
      @container.db.should == "mysql://127.0.0.1"
    end
  end
end
|
# Writable IPFS gateways (all currently disabled).
RW_GATEWAYS=(
    #"https://ipfs.works"
    #"https://ipfs.work"
)
# Read-only public IPFS gateways used as fetch fallbacks.
RO_GATEWAYS=(
    "https://ipfs.io"
    "https://cloudflare-ipfs.com"
    "https://ipfs.wak.io"
)
# can_use_ipfs: return 0 only when the ipfs CLI is installed, its daemon is
# reachable, and the CLI/daemon API versions agree. Prints a diagnostic to
# stderr and returns 1 otherwise.
can_use_ipfs() {
    # Bug fix: running bare `ipfs` exits non-zero (it prints usage), so the
    # old check reported "Missing ipfs command" even when installed.
    if ! command -v ipfs >/dev/null 2>&1; then
        echo "Missing ipfs command" >&2
        return 1
    fi
    if ! ipfs swarm peers >/dev/null 2>&1; then
        echo "Ipfs daemon is not running" >&2
        return 1
    fi
    # NOTE(review): the mismatch notice is assumed to appear on stderr, so
    # stderr is merged into the pipe for grep — confirm against the ipfs
    # version in use.
    if ipfs pin ls -t direct 2>&1 | grep -q 'api version mismatch'; then
        echo "Got ipfs version mismatch" >&2
        return 1
    fi
    return 0
}
|
import { ISubscriberTopicMap } from "@walletconnect/types";
/**
 * Tracks which subscription ids are registered under each topic.
 */
export class SubscriberTopicMap implements ISubscriberTopicMap {
  public map = new Map<string, string[]>();

  /** All topics currently tracked. */
  get topics(): string[] {
    return [...this.map.keys()];
  }

  /** Registers `id` under `topic`; does nothing when already present. */
  public set: ISubscriberTopicMap["set"] = (topic, id) => {
    if (this.exists(topic, id)) return;
    const ids = this.get(topic);
    this.map.set(topic, ids.concat(id));
  };

  /** Returns the ids registered for `topic` (empty array when unknown). */
  public get: ISubscriberTopicMap["get"] = topic => {
    return this.map.get(topic) || [];
  };

  /** True when `id` is already registered under `topic`. */
  public exists: ISubscriberTopicMap["exists"] = (topic, id) => {
    return this.get(topic).includes(id);
  };

  /** Removes one id from a topic, or the whole topic when no id is given. */
  public delete: ISubscriberTopicMap["delete"] = (topic, id) => {
    if (typeof id === "undefined") {
      this.map.delete(topic);
      return;
    }
    if (!this.map.has(topic) || !this.exists(topic, id)) return;
    const remaining = this.get(topic).filter(existing => existing !== id);
    if (remaining.length === 0) {
      // Last id removed: drop the topic entirely.
      this.map.delete(topic);
    } else {
      this.map.set(topic, remaining);
    }
  };

  /** Drops every topic. */
  public clear: ISubscriberTopicMap["clear"] = () => {
    this.map.clear();
  };
}
|
# 2x2 matrix and its inverse.
# det(A) = 3*4 - 1*2 = 10, so A^-1 = (1/10) * [[4, -1], [-2, 3]].
# (Fixed: the previous value divided by 7, which is not det(A), so
# A @ A_inverse was not the identity.)
A = [[3, 1], [2, 4]]
A_inverse = [[4/10, -1/10], [-2/10, 3/10]]
|
<filename>src/app/employee-explorer/employee-explorer.component.ts<gh_stars>0
import {
Component,
EventEmitter,
Input,
Output
} from '@angular/core';
// import { FormControl } from '@angular/forms';
import { Employee } from '../employee-loader.service';
@Component({
  selector: 'employee-explorer',
  templateUrl: './employee-explorer.component.html',
  styleUrls: ['./employee-explorer.component.css']
})
// Presentational list of employees with selection highlighting and a click
// output so parents can react to row selection.
export class EmployeeExplorerComponent {
  @Input() title = 'Employees'; // Provide a default value if the user of this component doesn't
  // Employees to render in this list.
  @Input() employees: Employee[] = [];
  // Employees that should appear selected (matched by id).
  @Input() selectedEmployees: Employee[] = [];
  // Emitted when the user clicks an employee row.
  @Output() employeeClicked = new EventEmitter<Employee>();
  // Implementing a new feature in this view component improves everywhere that it is used
  // In this case, all three lists of employees are made searchable by adding this feature
  // Make sure to update the employee-explorer.component.html as well to see it in action
  // filter = new FormControl('');
  // Returns the matching selected entry (truthy) when emp is selected, by id.
  employeeIsSelected(emp: Employee) {
    return this.selectedEmployees.find(e => e.id === emp.id);
  }
  // filteredList() {
  //   if (!this.employees) {
  //     return [];
  //   }
  //   return this.employees.filter(e =>
  //     e.first_name.toLowerCase().includes(this.filter.value.toLowerCase()) ||
  //     e.last_name.toLowerCase().includes(this.filter.value.toLowerCase()));
  // }
}
|
// when generate javascript in the template, strings are escaped.
// need to unescape them again to execute js
// NOTE(review): the second replace (/\'/g -> "'") substitutes a single quote
// for itself and is a no-op as written; it may have been intended to turn
// single quotes into double quotes so the payload parses as JSON — confirm
// against the template that produces `data`.
// The stringify + double-parse round trip unwraps a JSON-encoded string and
// then parses its contents (per the original author, reason unknown).
function normalize_data(data) {
    data = data.replace(/u\'/g, "'");
    data = data.replace(/\'/g, "'");
    data = JSON.stringify(data);
    data = JSON.parse(data);
    data = JSON.parse(data); // one more time, i don't know why but otherwise it doesn't work
    return data;
}
// Renders a Highcharts area chart of storage usage against quota into the
// element with id div_id. plot_data and limit_data are [timestamp, value]
// series; the x axis initially starts two months back.
// NOTE(review): max_x_value is applied as the Y-axis maximum despite its
// name — confirm callers pass a storage value, not a timestamp.
function plot_chart(title, plot_data, limit_data, max_x_value, div_id) {
    // Get timestamp for 2 months ago
    var d = new Date();
    d.setMonth(d.getMonth() - 2);
    d.setHours(0,0,0);
    d = d.getTime();
    $('#'+div_id).highcharts({
        chart: {
            zoomType: 'x',
            backgroundColor: null
        },
        title: { text: title },
        legend: { enabled: false },
        xAxis: {
            title: { text: 'Date' },
            type: 'datetime',
            min: d,
        },
        yAxis: {
            min: 0,
            max: max_x_value,
            title: { text: 'Storage (Tb)' },
        },
        tooltip: {
            pointFormat: '<strong>{series.name}</strong>: {point.y:,.2f} Tb',
        },
        series: [
            {
                name: 'Usage',
                data: plot_data,
                type: 'area'
            },
            {
                name: 'Quota',
                data: limit_data,
                dashStyle: 'Dash',
                color: 'red',
                type: 'line',
                marker: {
                    enabled: false
                }
            }
        ]
    });
};
// Toggle button: switch every .uppmax_plot between the last-two-months view
// and the full data range by updating each chart's x-axis minimum.
$('#show_all_data').click(function(){
    var d = null;
    if($(this).text() == 'Show all data'){
        $('#show_all_text').text('Showing all data.');
        $(this).text('Show last two months');
    } else {
        // Get timestamp for 2 months ago
        d = new Date();
        d.setMonth(d.getMonth() - 2);
        d.setHours(0,0,0);
        d = d.getTime();
        $('#show_all_text').text('Showing data from last two months.');
        $(this).text('Show all data');
    }
    $('.uppmax_plot').each(function(){
        if (d == null) {
            // "Show all": fall back to the plot's recorded earliest timestamp.
            d = $(this).attr('data-min-time');
        }
        try {
            $(this).highcharts().xAxis[0].update({min: d});
        } catch(err) {
            console.log('Setting limits for "'+$(this).attr('id')+'" didn\'t work - probably not yet loaded.');
        }
    });
});
$(document).ready(function(){
    // Highlight plots if clicked from the navigation
    $('body').on('click', '.quota-nav li a', function(){
        var target = $(this).attr('href');
        $('.highlighted').removeClass('highlighted');
        $(target).addClass('highlighted');
    });
});
|
<gh_stars>10-100
package httpx
import (
	"context"
	"fmt"
	"net/http"
	"net/url"

	"github.com/gorilla/mux"
)
// Router is an httpx.Handler router.
//
// It dispatches context-aware httpx Handlers using a gorilla/mux router
// under the hood.
type Router struct {
	// NotFoundHandler is a Handler that will be called when a route is not
	// found.
	NotFoundHandler Handler
	// This router is ultimately backed by a gorilla mux router.
	mux *mux.Router
	// A map of mux.Route to Route so we can map the matched mux.Route back to our Route.
	routes map[*mux.Route]*Route
}
// NewRouter returns a new Router instance backed by a fresh mux router.
func NewRouter() *Router {
	r := &Router{}
	r.mux = mux.NewRouter()
	r.routes = map[*mux.Route]*Route{}
	return r
}
// Handle registers a new route with a matcher for the URL path and
// associates it with the given Handler.
func (r *Router) Handle(path string, h Handler) *Route {
	return r.getOrCreateRoute(r.mux.Handle(path, r.handler(h)), path)
}

// HandleFunc registers a new route with a matcher for the URL path and
// associates it with the given handler function.
func (r *Router) HandleFunc(path string, f func(context.Context, http.ResponseWriter, *http.Request) error) *Route {
	return r.Handle(path, HandlerFunc(f))
}
// Headers adds a route that will be matched when the given header
// key/value pairs match the request.
func (r *Router) Headers(pairs ...string) *Route {
	return r.getOrCreateRoute(r.mux.Headers(pairs...), "")
}
// Match adds a route that will be matched if f returns true.
func (r *Router) Match(f func(*http.Request) bool, h Handler) {
	// Note: the matcher's r parameter shadows the Router receiver.
	matcher := func(r *http.Request, rm *mux.RouteMatch) bool {
		return f(r)
	}
	r.mux.MatcherFunc(matcher).Handler(r.handler(h))
}

// Path registers a new route with a matcher for the URL path.
func (r *Router) Path(path string) *Route {
	return r.getOrCreateRoute(r.mux.Path(path), path)
}
// getOrCreateRoute returns the cached Route for muxRoute, creating and
// caching it on first sight so we retain access to the original path
// template. A non-empty pathTpl refreshes the stored template.
func (r *Router) getOrCreateRoute(muxRoute *mux.Route, pathTpl string) *Route {
	route, ok := r.routes[muxRoute]
	if !ok {
		route = &Route{route: muxRoute, pathTpl: pathTpl}
		r.routes[muxRoute] = route
		return route
	}
	if pathTpl != "" {
		route.pathTpl = pathTpl
	}
	return route
}
// mux.Handler expects an http.Handler. We wrap the Handler in a handler,
// which satisfies the http.Handler interface. When this route is
// eventually used, it's type asserted back to a Handler.
func (r *Router) handler(h Handler) http.Handler {
	return &handler{h}
}
// Handler returns the Route, Handler and path variables that should serve
// req. When no route matches, the router's NotFoundHandler (or a default
// NotFound handler when unset) is returned instead. Most of this is pulled
// from http://goo.gl/tyxad8.
func (r *Router) Handler(req *http.Request) (route *Route, h Handler, vars map[string]string) {
	var match mux.RouteMatch
	if !r.mux.Match(req, &match) {
		if r.NotFoundHandler != nil {
			return nil, r.NotFoundHandler, nil
		}
		return nil, HandlerFunc(NotFound), nil
	}
	route = r.getOrCreateRoute(match.Route, "")
	h = match.Handler.(Handler)
	vars = match.Vars
	return route, h, vars
}
// ServeHTTPContext implements the Handler interface. It resolves the
// matched route, stores the route and its path variables on the context,
// and delegates to the resolved handler.
func (r *Router) ServeHTTPContext(ctx context.Context, w http.ResponseWriter, req *http.Request) error {
	route, h, vars := r.Handler(req)
	ctx = WithVars(ctx, vars)
	ctx = WithRoute(ctx, route)
	return h.ServeHTTPContext(ctx, w, req)
}
// Vars extracts the route vars from a context.Context, returning an empty
// map when none were stored.
func Vars(ctx context.Context) map[string]string {
	vars, ok := ctx.Value(varsKey).(map[string]string)
	if !ok {
		return map[string]string{}
	}
	return vars
}

// WithVars adds the vars to the context.Context.
func WithVars(ctx context.Context, vars map[string]string) context.Context {
	return context.WithValue(ctx, varsKey, vars)
}

// WithRoute adds the current Route to the context.Context.
func WithRoute(ctx context.Context, r *Route) context.Context {
	return context.WithValue(ctx, routeKey, r)
}
// Route wraps a mux.Route.
type Route struct {
	route *mux.Route
	// Path template for this route, if any.
	pathTpl string
}

// RouteFromContext extracts the current Route from a context.Context;
// it returns nil when no Route was stored.
func RouteFromContext(ctx context.Context) *Route {
	r, _ := ctx.Value(routeKey).(*Route)
	return r
}
// Methods adds a matcher for HTTP methods.
// It accepts a sequence of one or more methods to be matched, e.g.:
// "GET", "POST", "PUT".
func (r *Route) Methods(methods ...string) *Route {
	r.route.Methods(methods...)
	return r
}

// HandlerFunc sets the httpx.Handler for this route.
func (r *Route) HandlerFunc(f func(context.Context, http.ResponseWriter, *http.Request) error) *Route {
	return r.Handler(HandlerFunc(f))
}

// Handler sets the httpx.Handler for this route.
func (r *Route) Handler(h Handler) *Route {
	r.route.Handler(r.handler(h))
	return r
}

// Name sets the name for the route, used to build URLs.
// If the name was registered already it will be overwritten.
func (r *Route) Name(name string) *Route {
	r.route.Name(name)
	return r
}

// GetName returns the name for the route, if any.
func (r *Route) GetName() string {
	return r.route.GetName()
}

// URL builds a URL for the route. See mux.Route.URL.
func (r *Route) URL(pairs ...string) (*url.URL, error) {
	return r.route.URL(pairs...)
}

// URLPath builds the path portion of the URL for the route. See mux.Route.URLPath.
func (r *Route) URLPath(pairs ...string) (*url.URL, error) {
	return r.route.URLPath(pairs...)
}

// GetPathTemplate returns the path template for this route, if any.
func (r *Route) GetPathTemplate() string {
	return r.pathTpl
}
// mux.Handler expects an http.Handler. We wrap the Handler in a handler,
// which satisfies the http.Handler interface. When this route is
// eventually used, it's type asserted back to a Handler.
func (r *Route) handler(h Handler) http.Handler {
	return &handler{h}
}

// handler adapts a Handler to an http.Handler.
type handler struct {
	Handler
}

// ServeHTTP implements the http.Handler interface. This method is never
// actually called by this package, it's only used as a means to pass a Handler
// in and out of mux.
func (h *handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	panic(fmt.Sprintf("httpx: ServeHTTP called on %v", h))
}

// NotFound is a HandlerFunc that just delegates off to http.NotFound.
func NotFound(ctx context.Context, w http.ResponseWriter, r *http.Request) error {
	http.NotFound(w, r)
	return nil
}
|
#!/usr/bin/env bash
# Runs the flatcc benchmark suite from the repository root.
set -e
# Resolve the repo root relative to this script's own location.
cd `dirname $0`/..
test/benchmark/benchflatcc/run.sh
|
<filename>2021-05-09/营养品商城/util/http.js
const constant = require("./constant.js");
const storage = require("./storage.js");
const util = require("./util.js");
function request(config) {
wx.showLoading({
title: '加载中..'
});
wx.showToast({
title: '加载中..',
icon: 'loading',
mask: true,
duration: constant.duration * 10
});
wx.request({
url: constant.host + config.url,
method: 'POST',
header: {
'Accept': 'application/json',
'Content-Type': 'application/json',
'Token': storage.getToken(),
'Platform': 'WX',
'Version': '1.0.0'
},
data: config.data,
success: function (response) {
wx.hideToast();
if (response.data.code == 200) {
config.success(response.data.data);
} else {
util.showFailToast({
title: response.data.message
});
}
},
fail: function () {
wx.hideLoading();
util.showFailToast({
title: '网络出现错误'
});
}
});
}
module.exports = {
request: request
};
|
package com.zm.paipai.proj;
import java.util.ArrayList;
import java.util.List;
/**
 * Data model for a product item: an image URL, a display title, and a
 * like ("dianzan") count kept as a raw string.
 *
 * Created by Administrator on 2016/10/12.
 */
public class Product {
    private String urls;
    private String title;
    private String dianzan;

    /**
     * @param urls    image/resource URL for the product
     * @param title   display title
     * @param dianzan like count, stored as a raw string
     */
    public Product(String urls, String title, String dianzan) {
        this.urls = urls;
        this.title = title;
        this.dianzan = dianzan;
    }

    public String getUrls() {
        return urls;
    }

    public String getTitle() {
        return title;
    }

    public String getDianzan() {
        return dianzan;
    }

    public void setUrls(String urls) {
        this.urls = urls;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public void setDianzan(String dianzan) {
        this.dianzan = dianzan;
    }

    @Override
    public String toString() {
        // Same output format as the original hand-built concatenation.
        return String.format("Product{urls='%s', title='%s', dianzan='%s'}", urls, title, dianzan);
    }
}
|
package com.app;
/*
 * Cliente (customer) model.
 * Fields: String nome (name), String cpf (Brazilian taxpayer id, kept as a
 * String despite the original header claiming int).
 */
public class Cliente {
    protected String nome;
    protected String cpf;
    // Default constructor
    public Cliente() {
    }
    // Constructor
    public Cliente(String nome, String cpf) {
        this.nome = nome;
        this.cpf = cpf;
    }
}
|
# Takes a CSV as input and creates initial enrollment CVs.
require 'csv'
require 'bigdecimal'
# Path to the input CSV; must be filled in before running this script.
filename = ""
# Looks up a person by member SSN. Returns the matching person's full name
# (truthy) when found, false otherwise — callers use the result as a flag.
# NOTE(review): the count query casts ssn with to_s but the fetch query does
# not — confirm both behave identically for non-string input.
def does_ssn_exist?(ssn)
  if Person.where("members.ssn" => ssn.to_s).count > 0
    return ssn_person = Person.where("members.ssn" => ssn).first.name_full
  else
    return false
  end
end
# Looks up a person by first/last name and date of birth. Returns the full
# name (truthy) when found, false otherwise.
def does_name_dob_exist?(first_name, last_name, dob)
  if Person.where("members.dob" => dob, :name_first => first_name, :name_last => last_name).count > 0
    return dob_person = Person.where("members.dob" => dob, :name_first => first_name, :name_last => last_name).first.name_full
  else
    return false
  end
end
# Returns true when the person (matched by authority_member_id) is already
# listed among the policy's enrollees.
def does_enrollee_exist(person, policy)
  hbx_id = person.authority_member_id.to_s
  policy.enrollees.map(&:m_id).include?(hbx_id)
end
# Main import loop. For each CSV row: find-or-create the person (with
# address/email/phone/member records), find-or-create the policy and its
# enrollees, attach the employer and responsible party when present, then
# emit a maintenance CV XML file for the enrollment.
CSV.foreach(filename, headers: true) do |row|
  begin
    data_row = row.to_hash
    next if data_row["Market"] == nil
    new_person = Person.where(authority_member_id: data_row["HBX ID"].to_s).first
    if new_person == nil ## Let's validate that this person exists...
      ## First, create the person object.
      new_person = Person.new
      new_person.name_first = data_row["First Name"]
      new_person.name_middle = data_row["Middle Name"]
      new_person.name_last = data_row["Last Name"]
      new_person.name_full = "#{data_row["First Name"]} #{data_row["Middle Name"]} #{data_row["Last Name"]}"
      new_person.authority_member_id = data_row["HBX ID"]
      new_person.save
      ## Second, create the addresses.
      addy = Address.new
      addy.address_type = "home"
      addy.address_1 = data_row["Address 1"]
      addy.address_2 = data_row["Address 2"]
      addy.city = data_row["City"]
      addy.state = data_row["State"]
      addy.zip = data_row["Zip"]
      ## Add the address to the person.
      new_person.addresses.push(addy)
      addy.save
      new_person.save
      if data_row["Email"] != nil
        email = Email.new
        email.email_type = "home"
        email.email_address = data_row["Email"]
        new_person.emails.push(email)
        email.save
        new_person.save
      end
      if data_row["Phone"] != nil
        telephone = Phone.new
        telephone.phone_type = "home"
        ## Strip punctuation so only the digits remain.
        telephone.phone_number = data_row["Phone"].to_s.gsub("(","").gsub(")","").gsub("-","").strip
        new_person.phones.push(telephone)
        telephone.save
        new_person.save
      end
      ## Add the member object - this has demographic information.
      mmr = Member.new
      mmr.hbx_member_id = data_row["HBX ID"].strip
      mmr.dob = data_row["DOB"].to_date
      mmr.ssn = data_row["SSN"].gsub("-","")
      mmr.gender = data_row["Gender"].downcase
      ## Add the member object to the person.
      new_person.members.push(mmr)
      new_person.save
    end
    ## Create the policy object
    new_policy = Policy.where(:eg_id => data_row["Enrollment Group ID"].to_s).first
    if new_policy == nil
      new_policy = Policy.new
      new_policy.eg_id = data_row["Enrollment Group ID"].to_s
      ## Add a plan, matched on HIOS id and the benefit-begin year.
      year = data_row["Benefit Begin Date"].to_date.year.to_s
      new_plan = Plan.where(hios_plan_id: data_row["HIOS Id (auto)"], year: year).first
      new_policy.plan = new_plan
      new_policy.carrier = new_plan.carrier
      new_policy.save
    end
    ## Add an enrollee if they don't exist.
    if does_enrollee_exist(new_person,new_policy) == false
      new_policy_enrollee = Enrollee.new
      new_policy_enrollee.m_id = data_row["HBX ID"]
      new_policy_enrollee.rel_code = data_row["Relationship"].downcase
      new_policy_enrollee.coverage_start = data_row["Benefit Begin Date"].to_date
      new_policy_enrollee.pre_amt = data_row["Premium"].to_d
      new_policy.enrollees.push(new_policy_enrollee)
      new_policy.save
    end
    ## Add an Employer
    if data_row["<NAME>"] != nil
      fein = data_row["FEIN"].gsub("-","")
      employer_id = Employer.where(fein: fein).first._id
      new_policy.employer_id = employer_id
      new_policy.save
    end
    ## Calculate the premiums
    # m_ids = []
    # new_policy.enrollees.each do |en|
    #   m_ids << en.m_id
    # end
    # member_repo = Caches::MemberCache.new(m_ids)
    # calc = Premiums::PolicyCalculator.new(member_repo)
    # calc.apply_calculations(new_policy)
    new_policy.pre_amt_tot = data_row["Premium Tot (auto)"].to_d
    if new_policy.is_shop?
      new_policy.tot_emp_res_amt = data_row["Employer Contribution/AptC"].gsub("$","").strip.to_d
      new_policy.tot_res_amt = new_policy.pre_amt_tot - new_policy.tot_emp_res_amt
    end
    new_policy.save
    ## Add a Responsible Party
    ## NOTE(review): `addy` is only assigned when a brand-new person was
    ## created earlier in this iteration; for an existing person a present
    ## "Responsible Party" value raises NameError here — confirm the
    ## expected input data always pairs the two.
    if data_row["Responsible Party"] != nil
      resp_party_per = Person.new
      resp_party_per.name_full = data_row["Responsible Party"]
      name = resp_party_per.name_full
      name_array = name.split
      if name_array.count == 2
        resp_party_per.name_first = name_array.first
        resp_party_per.name_last = name_array.last
      elsif name_array.count == 3
        resp_party_per.name_first = name_array[0]
        resp_party_per.name_middle = name_array[1]
        resp_party_per.name_last = name_array[2]
      elsif name_array.count == 4
        resp_party_per.name_first = name_array[0]
        resp_party_per.name_middle = name_array[1]
        resp_party_per.name_last = "#{name_array[2]} #{name_array[3]}"
      end
      resp_party_per.save
      resp_party_per.addresses.push(addy)
      resp_party_per.save
      resp_party = ResponsibleParty.new
      resp_party.entity_identifier = "responsible party"
      resp_party_per.responsible_parties.push(resp_party)
      resp_party.save
      resp_party_per.save
      new_policy.responsible_party_id = resp_party._id
      new_policy.save
    end
    ## Generate a CV
    subscriber_id = new_policy.subscriber.m_id
    enrollee_list = new_policy.enrollees.all
    all_ids = enrollee_list.map(&:m_id) | [subscriber_id]
    subby = new_policy.subscriber
    edi_type = data_row["Operation Type"]
    edi_reason = data_row["Reason"]
    out_file = File.open(File.join("initial_enrollments_generated", "#{subby.coverage_start.month}-#{subby.coverage_start.day} Renewal - #{new_policy.market} - #{subby.person.name_full} - #{new_policy.coverage_type}.xml"), 'w')
    ie_cv = CanonicalVocabulary::MaintenanceSerializer.new(
      new_policy,
      edi_type,
      edi_reason,
      all_ids,
      all_ids
    )
    out_file.write(ie_cv.serialize)
    out_file.close
  rescue Exception=>e
    ## NOTE(review): rescuing Exception and dropping into binding.pry is a
    ## debugging aid — replace with logging before unattended runs.
    binding.pry
  end
end
|
-- Total quantity of books stocked by a single bookseller
-- (replace <booksellerID> with the actual id before running).
SELECT SUM(quantity)
FROM books
WHERE booksellerID = '<booksellerID>';
|
let facade = require('gamecloud')
let {EntityType} = facade.const
/**
 * Periodically walks every CP and queries its share ("stock") transfer
 * records over roughly the last 144 blocks, then persists a daily snapshot
 * (open/close/high/low prices plus totals) as K-line source data.
 * @param {Object} data
 */
async function handle(data) {
    this.GetMapping(EntityType.Cp).groupOf().forEach(async (cp, key) => {
        let stockRecordList = await this.service.gamegoldHelper.execute('stock.record', [0, cp.getAttr("cid"), Math.max(0, this.chain.height - 144)]);
        // Bug fix: the records live on `.list`; the original indexed the
        // response object itself (stockRecordList[0], .length, for..of over
        // the object), which produced undefined entries.
        let records = stockRecordList.list;
        for(let item of records) {
            switch(item.type) {
                case 7: {
                    let stock_open = records[0].price;
                    let stock_close = records[records.length - 1].price;
                    // Seed high/low with the opening price.
                    let stock_high = stock_open;
                    let stock_low = stock_open;
                    let total_amount = 0;
                    let total_num = 0;
                    for (let stockInfo of records) {
                        total_num = total_num + stockInfo.sum;
                        total_amount = total_amount + stockInfo.sum * stockInfo.price;
                        if (stockInfo.price > stock_high) {
                            stock_high = stockInfo.price;
                        }
                        if (stockInfo.price < stock_low) {
                            stock_low = stockInfo.price;
                        }
                    }
                    // Record the day's open/close/high/low snapshot for this CP.
                    // NOTE(review): `this.core.GetMapping` here vs `this.GetMapping`
                    // above — confirm which accessor is canonical.
                    let today = new Date();
                    await this.core.GetMapping(EntityType.CpStock).Create(
                        cp.getAttr("cid"),
                        today.getFullYear() + '-' + (today.getMonth() + 1) + '-' + today.getDate(),
                        stock_open,
                        stock_close,
                        stock_high,
                        stock_low,
                        total_num,
                        total_amount,
                    );
                    break;
                }
            }
        }
    }, this);
}
module.exports.handle = handle;
|
# CREDIT: https://gist.github.com/bmhatfield/cc21ec0a3a2df963bffa3c1f884b676b
# In order for gpg to find gpg-agent, gpg-agent must be running, and there must be an env
# variable pointing GPG to the gpg-agent socket. This little script, which must be sourced
# in your shell's init script (ie, .bash_profile, .zshrc, whatever), will either start
# gpg-agent or set up the GPG_AGENT_INFO variable if it's already running.
# Add the following to your shell init to set up gpg-agent automatically for every shell
# Set to function because this takes 0.3s for each zsh shell load
# NOTE(review): GnuPG 2.1+ auto-starts the agent and ignores GPG_AGENT_INFO —
# confirm this is still required for the gpg version in use.
gpg-agent-init(){
    # Reuse the running agent when its socket exists and a process is alive...
    if [ -f ~/.gnupg/S.gpg-agent ] && [ -n "$(pgrep gpg-agent)" ]; then
        source ~/.gnupg/.gpg-agent-info
        export GPG_AGENT_INFO
    else
        # ...otherwise launch a fresh daemon and import its environment.
        eval $(gpg-agent --daemon 2> /dev/null)
    fi
}
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { Routes, RouterModule } from '@angular/router';
import { IonicModule } from '@ionic/angular';
import { MaterialModules } from '../ionic-plugins/material-module';
import { ComponentsPage } from './components.page';
import { ComplaintsComponent } from './complaints/complaints.component';
import { ConnectionsComponent } from './connections/connections.component';
import { CustomersComponent } from './customers/customers.component';
import { EnquiriesComponent } from './enquiries/enquiries.component';
// Child routes for the components area: each path maps to its feature component.
const routes: Routes = [
  { path: 'complaints', component: ComplaintsComponent },
  { path: 'connections', component: ConnectionsComponent },
  { path: 'customers', component: CustomersComponent },
  { path: 'enquiries', component: EnquiriesComponent },
];
// Feature module for the components area: declares the area's pages and
// registers the child routes defined above via RouterModule.forChild.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    MaterialModules,
    RouterModule.forChild(routes)
  ],
  declarations: [ComponentsPage, ComplaintsComponent, ConnectionsComponent, CustomersComponent, EnquiriesComponent]
})
export class ComponentsPageModule {}
|
<reponame>BitPaw/BitFireEngine<filename>SystemResource/Source/Font/TTF/Chunks/OS2/Panose/TTFProportion.cpp<gh_stars>1-10
#include "TTFProportion.h"
// Translate a raw PANOSE "proportion" byte (OS/2 table) into the
// TTFProportion enumeration. Raw values 0-9 map, in order, to the defined
// enumerators; any other value yields TTFProportion::Invalid.
BF::TTFProportion BF::ConvertTTFProportion(unsigned char proportion)
{
    // Table index i corresponds to raw PANOSE value i.
    static const BF::TTFProportion proportionTable[] =
    {
        BF::TTFProportion::Any,
        BF::TTFProportion::NoFit,
        BF::TTFProportion::OldStyle,
        BF::TTFProportion::Modern,
        BF::TTFProportion::EvenWidth,
        BF::TTFProportion::Expanded,
        BF::TTFProportion::Condensed,
        BF::TTFProportion::VeryExpanded,
        BF::TTFProportion::VeryCondensed,
        BF::TTFProportion::Monospaced,
    };
    const unsigned int entryCount = sizeof(proportionTable) / sizeof(proportionTable[0]);

    if (proportion < entryCount)
    {
        return proportionTable[proportion];
    }

    return BF::TTFProportion::Invalid;
}
|
import collections
from inference import load_tsv
def process_data(file_path):
    """Aggregate entity counts and group entities by document from a TSV file.

    Parameters
    ----------
    file_path : str
        Path passed straight through to ``load_tsv``.

    Returns
    -------
    tuple
        ``(entity_name2count, doc_name2entities)``: the first maps each
        entity name to its occurrence count, the second maps each doc name
        to the list of entities seen in it (in input order).
    """
    # NOTE(review): assumes load_tsv yields (entity, doc) pairs — TODO confirm
    # against inference.load_tsv. The previous code iterated its result as
    # pairs while *also* indexing/mutating it as a dict keyed by doc; one
    # object cannot satisfy both, so the doc -> entities mapping is now
    # accumulated in a separate defaultdict.
    entity_name2count = collections.defaultdict(int)
    doc_name2entities = collections.defaultdict(list)
    for entity, doc in load_tsv(file_path):
        entity_name2count[entity] += 1
        doc_name2entities[doc].append(entity)
    return dict(entity_name2count), dict(doc_name2entities)
|
<reponame>shamalainen/financer
import React from 'react';
import { Container } from '../container/container';
type AccentColor = 'pink' | 'red' | 'green' | 'blue';
interface IHeroProps {
accent?: string;
accentColor?: AccentColor;
label: string;
children: React.ReactNode;
standAlone?: boolean;
className?: string;
testId?: string;
}
// Tailwind text-color class for each supported hero accent color.
const heroAccentClassMap: Record<AccentColor, string> = {
  blue: 'text-blue-500',
  green: 'text-green-500',
  pink: 'text-pink-500',
  red: 'text-red-500',
};

// Resolve an accent color to its Tailwind class; unknown values fall back
// to an empty string.
const getHeroColorClasses = (color: AccentColor): string =>
  heroAccentClassMap[color] ?? '';
export const Hero = ({
accent,
accentColor = 'pink',
label,
children,
standAlone,
className = '',
testId,
}: IHeroProps): JSX.Element => {
const heroContent = (
<div className="max-w-xl" data-test-id={testId}>
<h1
className={`text-4xl tracking-tight leading-10 font-extrabold ${
standAlone ? 'text-gray-900' : 'text-white'
} sm:leading-none sm:text-5xl`}
data-test-id="hero-title"
>
{accent && (
<>
<span
className={`text-2xl leading-none ${getHeroColorClasses(
accentColor
)}`}
>
{accent}
</span>
<br />
</>
)}
{label}
</h1>
{children}
</div>
);
if (standAlone) {
return <div className={className}>{heroContent}</div>;
}
return (
<div className={`pt-8 pb-14 sm:pt-12 sm:pb-20 ${className} bg-gray-800`}>
<Container>{heroContent}</Container>
</div>
);
};
|
# Refuse to start a second server while a PID file from a previous run exists.
if [[ -e .selenium.pid ]]; then
    echo "Selenium server seems to already be running (PID $(cat .selenium.pid))" >&2
    exit 1
fi

# Launch the server in the background and record its PID for later shutdown.
selenium-standalone start &
echo $! > .selenium.pid
|
<filename>src/utils/utils.js
import fs from 'fs';
// Dynamically mount every *.js route module found in routesPath under prefix.
// Each module is imported asynchronously and registers its default export on
// the app when its import resolves, so registration order is not guaranteed
// to match directory order.
const loadRoutes = async (routesPath, prefix, app) => {
  try {
    const routeFiles = fs.readdirSync(routesPath).filter(name => /\.js$/.test(name));
    for (const fileName of routeFiles) {
      import(`${routesPath}${fileName}`)
        .then(fileObject => {
          console.log(`Load routes at: ${fileName}`);
          app.use(prefix, fileObject.default);
        })
        .catch(error => console.error('UNABLE TO IMPORT FILES', error));
    }
  } catch (error) {
    console.error('UNABLE TO READ DIRECTORY', error);
  }
}
// Filter an array
const filterByKey = (array, key, value) => {
return array.filter(element => (element[key] == value));
}
// Sort an array
const sortByKey = (array, key, order) => {
return array.sort((a, b) => {
var x = a[key];
var y = b[key];
if (order == 'asc') {
return ((x < y) ? -1 : ((x > y) ? 1 : 0));
} else if (order == 'desc') {
return ((x < y) ? 1 : ((x > y) ? -1 : 0));
} else {
return 0;
}
});
}
// Public API of this utils module.
export {
  loadRoutes,
  filterByKey,
  sortByKey
};
|
#!/usr/bin/env bash
# This script analyzes a video in an openface docker container.
# Usage: <script> DIR NAME  — expects DIR/NAME.mp4 and writes DIR/NAME_features.csv
start=$(date +%s)
echo "Openface processing $1/$2"
# Run container
docker run -i -d --name openface algebr/openface:latest
# Remove any files from a previous run
docker exec -it openface rm video.mp4
docker exec -it openface rm -rf processed
# Copy video to container (expansions quoted so paths with spaces survive)
echo "Copying $1/$2.mp4"
docker cp "$1/$2.mp4" openface:/home/openface-build/video.mp4
# Execute analysis
echo "Analyzing $1/$2.mp4"
docker exec -it openface ./build/bin/FeatureExtraction -f video.mp4
# Copy results from container
echo "Copying results"
docker cp openface:/home/openface-build/processed/video.csv "$1/$2_features.csv"
# Record processing time
end=$(date +%s)
runtime=$((end-start))
echo "$2: $runtime" >> time.txt
#docker stop openface
|
<reponame>davidyu62/egovframe-runtime
package org.egovframe.rte.itl.integration.type;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.egovframe.rte.itl.integration.message.typed.TypedList;
import org.junit.Test;
/**
 * Unit tests for {@code ListType}: which Java classes it accepts
 * ({@code isAssignableFrom}), which concrete values it accepts
 * ({@code isAssignableValue}), and conversion into a {@code TypedList}
 * ({@code convertToTypedObject}).
 */
public class ListTypeTest
{
	/**
	 * Class-level assignability, as asserted below: a string list type
	 * accepts TypedList, List and String[] (nested arrays for the
	 * list-of-list type), but not Map; an integer list type accepts
	 * int/Integer/byte/short arrays but rejects long/float arrays.
	 */
	@Test
	public void testIsAssignableFrom() throws Exception
	{
		ListType stringListType =
			new ListType("string[]", "string[]", PrimitiveType.STRING);
		assertTrue(stringListType.isAssignableFrom(TypedList.class));
		assertTrue(stringListType.isAssignableFrom(List.class));
		assertTrue(stringListType.isAssignableFrom(String[].class));
		assertFalse(stringListType.isAssignableFrom(Map.class));
		// List type whose element type is itself a list (string[][]).
		ListType stringListType2 =
			new ListType("string[][]", "string[][]", stringListType);
		assertTrue(stringListType2.isAssignableFrom(TypedList.class));
		assertTrue(stringListType2.isAssignableFrom(List.class));
		assertTrue(stringListType2.isAssignableFrom(String[][].class));
		assertFalse(stringListType2.isAssignableFrom(Map.class));
		// A flat String[] must not satisfy the nested string[][] type.
		assertFalse(stringListType2.isAssignableFrom(String[].class));
		ListType integerListType =
			new ListType("integer[]", "integer[]", PrimitiveType.INTEGER);
		assertTrue(integerListType.isAssignableFrom(int[].class));
		assertTrue(integerListType.isAssignableFrom(Integer[].class));
		// Narrower integral element types are accepted...
		assertTrue(integerListType.isAssignableFrom(byte[].class));
		assertTrue(integerListType.isAssignableFrom(short[].class));
		// ...but wider / floating-point element types are not.
		assertFalse(integerListType.isAssignableFrom(long[].class));
		assertFalse(integerListType.isAssignableFrom(float[].class));
	}
	/**
	 * Value-level assignability: concrete lists/arrays with the right
	 * element type are accepted; a long[] is rejected for an integer list.
	 */
	@Test
	public void testIsAssigableValue() throws Exception
	{
		ListType stringListType =
			new ListType("string[]", "string[]", PrimitiveType.STRING);
//		TypedList typedStringList = new TypedList(stringListType)
//		{{
//			add("a");
//			add("b");
//			add("c");
//		}};
//		assertTrue(stringListType.isAssignableValue(typedStringList));
		List<String> stringList = new ArrayList<String>()
		{/**
		 * serialVersion UID
		 */
		private static final long serialVersionUID = 9045815271522604267L;
		{
			add("a");
			add("b");
			add("c");
		}};
		assertTrue(stringListType.isAssignableValue(stringList));
		assertTrue(stringListType.isAssignableValue(new String[] { "a", "b", "c" }));
		ListType integerListType =
			new ListType("integer[]", "integer[]", PrimitiveType.INTEGER);
		assertTrue(integerListType.isAssignableValue(new int[] { 1, 2, 3 }));
		assertTrue(integerListType.isAssignableValue(new byte[] { 1, 2, 3 }));
		assertFalse(integerListType.isAssignableValue(new long[] { 1L, 2L, 3L }));
	}
	/**
	 * Returns {@code true} iff {@code type.convertToTypedObject(value)}
	 * rejects the value by throwing an UnassignableValueException.
	 */
	private static boolean isUnassignableValue(Type type, Object value) throws Exception
	{
		try
		{
			type.convertToTypedObject(value);
		}
		catch (UnassignableValueException e)
		{
			return true;
		}
		// Conversion succeeded, so the value is assignable.
		return false;
	}
	/**
	 * Conversion: arrays become TypedList instances preserving length and
	 * element order; a value of the wrong element type is rejected.
	 */
	@Test
	public void testConvertToTypedObject() throws Exception
	{
		ListType stringListType =
			new ListType("string[]", "string[]", PrimitiveType.STRING);
		String[] stringArray = new String[] { "a", "b", "c" };
		Object object = stringListType.convertToTypedObject(stringArray);
		assertNotNull(object);
		assertTrue(object instanceof TypedList);
		TypedList typedStringList = (TypedList)object;
		assertEquals(stringArray.length, typedStringList.size());
		for (int i = 0; i < stringArray.length; i++)
		{
			assertEquals(stringArray[i], typedStringList.get(i));
		}
		// An int[] does not fit the string list type...
		int[] intArray = new int[] { 1, 2, 3 };
		assertTrue(isUnassignableValue(stringListType, intArray));
		// ...but converts cleanly under the integer list type.
		ListType integerListType =
			new ListType("integer[]", "integer[]", PrimitiveType.INTEGER);
		object = integerListType.convertToTypedObject(intArray);
		assertNotNull(object);
		assertTrue(object instanceof TypedList);
		TypedList typedIntegerList = (TypedList)object;
		assertEquals(intArray.length, typedIntegerList.size());
		for (int i = 0; i < intArray.length; i++)
		{
			assertEquals(intArray[i], typedIntegerList.get(i));
		}
	}
}
|
package com.zto.testcase.response;
import com.zto.testcase.enums.ErrorCodeEnum;
import java.io.Serializable;
import lombok.Data;
@Data
public class Result<R> implements Serializable {
    /** Business status code, e.g. the success code from ErrorCodeEnum. */
    private String code;
    /** Human-readable message accompanying the code. */
    private String msg;
    /** Optional payload carried on success. */
    private R data;

    /** Internal factory: builds a result carrying the given code and message. */
    private static <R> Result<R> of(String code, String msg) {
        Result<R> result = new Result<>();
        result.setCode(code);
        result.setMsg(msg);
        return result;
    }

    /** Success result carrying a payload. */
    public static <R> Result<R> success(R data) {
        Result<R> result = of(ErrorCodeEnum.SYSTEM_SUCCESS.getErrorCode(),
                ErrorCodeEnum.SYSTEM_SUCCESS.getErrorMsg());
        result.setData(data);
        return result;
    }

    /** Success result with no payload. */
    public static <R> Result<R> success() {
        return of(ErrorCodeEnum.SYSTEM_SUCCESS.getErrorCode(),
                ErrorCodeEnum.SYSTEM_SUCCESS.getErrorMsg());
    }

    /** Success result with a custom message and no payload. */
    public static <R> Result<R> successMsg(String msg) {
        return of(ErrorCodeEnum.SYSTEM_SUCCESS.getErrorCode(), msg);
    }

    /** Error result built from an explicit code/message pair. */
    public static <R> Result<R> error(String code, String msg) {
        return of(code, msg);
    }

    /** Error result taken from a predefined error enum entry. */
    public static <R> Result<R> error(ErrorCodeEnum errorCodeEnum) {
        return of(errorCodeEnum.getErrorCode(), errorCodeEnum.getErrorMsg());
    }

    /** Error result describing a caught throwable (class name + message). */
    public static <R> Result<R> throwable(String code, Throwable throwable) {
        return of(code, throwable.getClass().getName() + ", " + throwable.getMessage());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.