text stringlengths 1 1.05M |
|---|
import re
def categorize_text(text):
    """Classify text sentiment as "positive", "negative", or "neutral".

    Keywords are matched as whole words, case-insensitively. Categories
    are checked in order, so "positive" wins if both kinds of keywords
    appear; anything with no keyword match is "neutral".
    """
    lowered = text.lower()
    for label, pattern in (
        ("positive", r"\b(happy|glad|fun)\b"),
        ("negative", r"\b(sad|angry|fearful)\b"),
    ):
        if re.search(pattern, lowered):
            return label
    return "neutral"
<filename>src/test/java/com/chanus/yuntao/weixin/mp/api/test/DataCubeApiTest.java
/*
* Copyright (c) 2020 Chanus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.chanus.yuntao.weixin.mp.api.test;
import com.alibaba.fastjson.JSONObject;
import com.chanus.yuntao.weixin.mp.api.DataCubeApi;
import org.junit.Test;
/**
* DataCubeApi 测试
*
* @author Chanus
* @date 2020-06-08 21:10:11
* @since 1.0.0
*/
public class DataCubeApiTest extends WXConfigTest {

    /**
     * Prints the raw JSON response of a data cube API call so the result
     * can be inspected manually when the test is run against WeChat.
     */
    private static void dump(JSONObject response) {
        System.out.println(response.toJSONString());
    }

    @Test
    public void getUserSummaryTest() {
        dump(DataCubeApi.getUserSummary("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getUserCumulateTest() {
        dump(DataCubeApi.getUserCumulate("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getArticleSummaryTest() {
        dump(DataCubeApi.getArticleSummary("2020-06-07", "2020-06-07"));
    }

    @Test
    public void getArticleTotalTest() {
        dump(DataCubeApi.getArticleTotal("2020-06-07", "2020-06-07"));
    }

    @Test
    public void getUserReadTest() {
        dump(DataCubeApi.getUserRead("2020-06-05", "2020-06-07"));
    }

    @Test
    public void getUserReadHourTest() {
        dump(DataCubeApi.getUserReadHour("2020-06-07", "2020-06-07"));
    }

    @Test
    public void getUserShareTest() {
        dump(DataCubeApi.getUserShare("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getUserShareHourTest() {
        dump(DataCubeApi.getUserShareHour("2020-06-07", "2020-06-07"));
    }

    @Test
    public void getUpStreamMsgTest() {
        dump(DataCubeApi.getUpStreamMsg("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getUpStreamMsgHourTest() {
        dump(DataCubeApi.getUpStreamMsgHour("2020-06-07", "2020-06-07"));
    }

    @Test
    public void getUpStreamMsgWeekTest() {
        dump(DataCubeApi.getUpStreamMsgWeek("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getUpStreamMsgMonthTest() {
        dump(DataCubeApi.getUpStreamMsgMonth("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getUpStreamMsgDistTest() {
        dump(DataCubeApi.getUpStreamMsgDist("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getUpStreamMsgDistWeekTest() {
        dump(DataCubeApi.getUpStreamMsgDistWeek("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getUpStreamMsgDistMonthTest() {
        dump(DataCubeApi.getUpStreamMsgDistMonth("2020-06-01", "2020-06-07"));
    }

    @Test
    public void publisherAdPosGeneralTest() {
        dump(DataCubeApi.publisherAdPosGeneral(1, 20, "2020-06-01", "2020-06-07", null));
    }

    @Test
    public void publisherCpsGeneralTest() {
        dump(DataCubeApi.publisherCpsGeneral(1, 20, "2020-06-01", "2020-06-07"));
    }

    @Test
    public void publisherSettlementTest() {
        dump(DataCubeApi.publisherSettlement(1, 20, "2020-06-01", "2020-06-07"));
    }

    @Test
    public void getInterfaceSummaryTest() {
        dump(DataCubeApi.getInterfaceSummary("2020-06-01", "2020-06-07"));
    }

    @Test
    public void getInterfaceSummaryHourTest() {
        dump(DataCubeApi.getInterfaceSummaryHour("2020-06-07", "2020-06-07"));
    }
}
|
# Generated by Django 3.1.8 on 2021-06-07 17:58
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds three audit-timestamp columns to BusinessRuleModel. All are
    # nullable, so existing rows need no backfill when the columns land.

    dependencies = [
        ('django_business_rules', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='businessrulemodel',
            name='creation_timestamp',
            field=models.DateTimeField(blank=True, null=True),
        ),
        # db_index=True creates a database index on this column.
        migrations.AddField(
            model_name='businessrulemodel',
            name='deleted_timestamp',
            field=models.DateTimeField(blank=True, db_index=True, null=True),
        ),
        # db_index=True creates a database index on this column.
        migrations.AddField(
            model_name='businessrulemodel',
            name='updated_timestamp',
            field=models.DateTimeField(blank=True, db_index=True, null=True),
        ),
    ]
|
<gh_stars>1-10
// Static product fixtures for the watch listing page.
// Fields: id, image src, description (con), list price, current price
// (nowPrice), monthly sales count (monthNum), and store name.
const products = [
  {
    id: "0",
    src: "https://img.alicdn.com/imgextra/i2/912530100/O1CN0151Um611CbqDwX0kGh_!!0-saturn_solar.jpg_468x468q75.jpg_.webp",
    con: "casio卡西欧透明手表女baby g冰川冰韧系列限量运动女表BA-110CR",
    price: "1290",
    nowPrice: "531",
    monthNum: "154",
    store: "Casio晶晶海外专卖店",
  },
  {
    id: "1",
    src: "https://img.alicdn.com/imgextra/i1/120868211/O1CN01heVbQO2AWglFDQRVX_!!0-saturn_solar.jpg_468x468q75.jpg_.webp",
    con: "小才天儿童电话手表智能GPS定位电信版多功能防水4g全网通咪咪兔男女孩小学生初中高中生可插卡通话手机手环",
    price: "399",
    nowPrice: "108",
    monthNum: "3.45万",
    store: "mimitoou咪咪兔旗舰店",
  },
  {
    id: "2",
    src: "https://img.alicdn.com/imgextra/i2/26187138/O1CN01l5RwHa22bFei1kOXi_!!0-saturn_solar.jpg_468x468q75.jpg_.webp",
    con: "时尚潮流帅气情侣表男表蒂玮娜davena镂空大表盘机械表女士手表女",
    price: "1588",
    nowPrice: "1201",
    monthNum: "19",
    store: "STAR BLINGBLING",
  },
  {
    id: "3",
    src: "https://img.alicdn.com/imgextra/i3/1169140053/O1CN013WMW0a1CGJgbYgcKM_!!0-saturn_solar.jpg_468x468q75.jpg_.webp",
    con: "【章若楠同款】Lola Rose小绿表 手表女专属定制皮带钢带礼盒套装",
    price: "1430",
    nowPrice: "1430",
    monthNum: "316",
    store: "LolaRose海外旗舰店",
  },
  {
    id: "4",
    src: "https://img.alicdn.com/imgextra/i3/738790163/O1CN01HZG4df1D4hFcxmZAD_!!2-saturn_solar.png_468x468q75.jpg_.webp",
    con: "【预售】Cartier卡地亚Ballon Bleu蓝气球系列精钢手表 七夕挚礼",
    price: "35600",
    nowPrice: "35600",
    monthNum: "54",
    store: "Cartier卡地亚官方旗舰店",
  },
  {
    id: "5",
    src: "https://img.alicdn.com/imgextra/i1/108169676/O1CN01vNq4gP2LLewoTw0Tl_!!0-saturn_solar.jpg_468x468q75.jpg_.webp",
    con: "瑞士冠琴手表男士全自动机械表镂空方形轻奢潮流夜光腕表真皮男表",
    price: "2980",
    nowPrice: "968",
    monthNum: "111",
    store: "冠琴雷诚专卖店",
  },
];

export default products;
import { getWasmExport } from "../storage";
import { log } from "../utils/log";
const contractList: any[] = [];
export const getContract = async (moduleName: string, ptr: number, length: number) => {
const wasm_exports = getWasmExport(moduleName);
const buffer = wasm_exports.memory.buffer.slice(ptr, ptr + length);
log().info(buffer, ptr, length, "from get contract");
const { instance } = await WebAssembly.instantiate(buffer, {});
contractList.push(instance);
return contractList.length - 1;
};
/**
 * Runs the `main` export of a previously registered contract instance.
 *
 * @param id - registry index returned by `getContract`
 * @param ptr - pointer to the env payload (currently unused, see todo)
 * @param length - length of the env payload (currently unused, see todo)
 * @returns whatever the contract's `main` export returns
 * @throws Error if `id` does not refer to a registered contract
 */
export const runContract = (id: number, ptr: number, length: number) => {
    log().info(id, ptr, length, "begin run contract");
    const contract = contractList[id];
    log().info(contract, "the contract got when run contract");
    // Fail with a descriptive error instead of an opaque TypeError on
    // `contract.exports` when the id is unknown.
    if (!contract) {
        throw new Error(`runContract: no contract registered under id ${id}`);
    }
    // todo: add env from ptr & length
    const result = contract.exports.main();
    return result;
};
|
#!/bin/sh
# Build the ctaggart/golang-vscode image from the Dockerfile in this directory.
# Exit immediately if the build fails so callers see a non-zero status.
set -e
docker build -t ctaggart/golang-vscode .
# Utility function for golang-using packages to set up a Go toolchain.
# Host (cross) builds download a pinned upstream Go release into a cache;
# on-device builds require the distro 'golang' package to be installed.
termux_setup_golang() {
	if [ "$TERMUX_ON_DEVICE_BUILD" = "false" ]; then
		# Pinned toolchain version/platform for reproducible host builds.
		local TERMUX_GO_VERSION=go1.17.7
		local TERMUX_GO_PLATFORM=linux-amd64
		local TERMUX_BUILDGO_FOLDER
		# Offline mode uses a pre-seeded copy under the scripts tree;
		# otherwise the toolchain lives in the common cache directory.
		if [ "${TERMUX_PACKAGES_OFFLINE-false}" = "true" ]; then
			TERMUX_BUILDGO_FOLDER=${TERMUX_SCRIPTDIR}/build-tools/${TERMUX_GO_VERSION}
		else
			TERMUX_BUILDGO_FOLDER=${TERMUX_COMMON_CACHEDIR}/${TERMUX_GO_VERSION}
		fi
		# Expose the toolchain to the rest of the build.
		export GOROOT=$TERMUX_BUILDGO_FOLDER
		export PATH=${GOROOT}/bin:${PATH}
		# Already unpacked from a previous run: nothing more to do.
		if [ -d "$TERMUX_BUILDGO_FOLDER" ]; then return; fi
		local TERMUX_BUILDGO_TAR=$TERMUX_COMMON_CACHEDIR/${TERMUX_GO_VERSION}.${TERMUX_GO_PLATFORM}.tar.gz
		# Remove any half-extracted leftovers before downloading afresh.
		rm -Rf "$TERMUX_COMMON_CACHEDIR/go" "$TERMUX_BUILDGO_FOLDER"
		# termux_download is expected to verify the tarball against the
		# sha256 checksum given as its third argument.
		termux_download https://golang.org/dl/${TERMUX_GO_VERSION}.${TERMUX_GO_PLATFORM}.tar.gz \
			"$TERMUX_BUILDGO_TAR" \
			02b111284bedbfa35a7e5b74a06082d18632eff824fd144312f6063943d49259
		# Extract as 'go', rename to the versioned folder, drop the tarball.
		( cd "$TERMUX_COMMON_CACHEDIR"; tar xf "$TERMUX_BUILDGO_TAR"; mv go "$TERMUX_BUILDGO_FOLDER"; rm "$TERMUX_BUILDGO_TAR" )
	else
		# On-device: check for the 'golang' package via dpkg or pacman.
		# NOTE(review): the [[ ]] tests below are bash-only; confirm this
		# script is always sourced under bash before changing shells.
		if [[ "$TERMUX_MAIN_PACKAGE_FORMAT" = "debian" && "$(dpkg-query -W -f '${db:Status-Status}\n' golang 2>/dev/null)" != "installed" ]] ||
		   [[ "$TERMUX_MAIN_PACKAGE_FORMAT" = "pacman" && ! "$(pacman -Q golang 2>/dev/null)" ]]; then
			echo "Package 'golang' is not installed."
			echo "You can install it with"
			echo
			echo "  pkg install golang"
			echo
			echo "  pacman -S golang"
			echo
			echo "or build it from source with"
			echo
			echo "  ./build-package.sh golang"
			echo
			exit 1
		fi
		# Use the Go installation shipped with the installed package.
		export GOROOT="$TERMUX_PREFIX/lib/go"
	fi
}
|
<filename>core/src/main/java/com/linecorp/armeria/client/Clients.java
/*
* Copyright 2015 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client;
import static java.util.Objects.requireNonNull;
import java.net.URI;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.annotation.Nullable;
import com.linecorp.armeria.client.endpoint.EndpointGroup;
import com.linecorp.armeria.common.HttpHeaders;
import com.linecorp.armeria.common.HttpHeadersBuilder;
import com.linecorp.armeria.common.Request;
import com.linecorp.armeria.common.Scheme;
import com.linecorp.armeria.common.SerializationFormat;
import com.linecorp.armeria.common.SessionProtocol;
import com.linecorp.armeria.common.util.SafeCloseable;
import com.linecorp.armeria.common.util.Unwrappable;
/**
* Creates a new client that connects to a specified {@link URI}.
* If you are creating an {@link WebClient}, it is recommended to use the factory methods in
* {@link WebClient}.
*/
public final class Clients {
/**
* Creates a new client that connects to the specified {@code uri} using the default
* {@link ClientFactory}.
*
* @param uri the URI of the server endpoint
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@code uri} is invalid, or the specified
* {@code clientType} is unsupported for the {@code uri}'s scheme
*/
public static <T> T newClient(String uri, Class<T> clientType) {
return builder(uri).build(clientType);
}
/**
* Creates a new client that connects to the specified {@link URI} using the default
* {@link ClientFactory}.
*
* @param uri the {@link URI} of the server endpoint
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@link URI} is invalid, or the specified
* {@code clientType} is unsupported for the {@link URI}'s scheme
*/
public static <T> T newClient(URI uri, Class<T> clientType) {
return builder(uri).build(clientType);
}
/**
* Creates a new client that connects to the specified {@link EndpointGroup} with the specified
* {@code scheme} using the default {@link ClientFactory}.
*
* @param scheme the {@link Scheme} represented as a {@link String}
* @param endpointGroup the server {@link EndpointGroup}
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@code scheme} is invalid or
* the specified {@code clientType} is unsupported for
* the specified {@code scheme}.
*/
public static <T> T newClient(String scheme, EndpointGroup endpointGroup, Class<T> clientType) {
return builder(scheme, endpointGroup).build(clientType);
}
/**
* Creates a new client that connects to the specified {@link EndpointGroup} with the specified
* {@code scheme} and {@code path} using the default {@link ClientFactory}.
*
* @param scheme the {@link Scheme} represented as a {@link String}
* @param endpointGroup the server {@link EndpointGroup}
* @param path the path to the endpoint
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@code scheme} is invalid or
* the specified {@code clientType} is unsupported for
* the specified {@code scheme}.
*/
public static <T> T newClient(String scheme, EndpointGroup endpointGroup, String path,
Class<T> clientType) {
return builder(scheme, endpointGroup, path).build(clientType);
}
/**
* Creates a new client that connects to the specified {@link EndpointGroup} with the specified
* {@link Scheme} using the default {@link ClientFactory}.
*
* @param scheme the {@link Scheme}
* @param endpointGroup the server {@link EndpointGroup}
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@code clientType} is unsupported for
* the specified {@link Scheme}.
*/
public static <T> T newClient(Scheme scheme, EndpointGroup endpointGroup, Class<T> clientType) {
return builder(scheme, endpointGroup).build(clientType);
}
/**
* Creates a new client that connects to the specified {@link EndpointGroup} with the specified
* {@link Scheme} and {@code path} using the default {@link ClientFactory}.
*
* @param scheme the {@link Scheme}
* @param endpointGroup the server {@link EndpointGroup}
* @param path the path to the endpoint
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@code clientType} is unsupported for
* the specified {@link Scheme}.
*/
public static <T> T newClient(Scheme scheme, EndpointGroup endpointGroup, String path,
Class<T> clientType) {
return builder(scheme, endpointGroup, path).build(clientType);
}
/**
* Creates a new client that connects to the specified {@link EndpointGroup} with
* the specified {@link SessionProtocol} using the default {@link ClientFactory}.
*
* @param protocol the {@link SessionProtocol}
* @param endpointGroup the server {@link EndpointGroup}
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@code clientType} is unsupported for
* the specified {@link SessionProtocol} or
* {@link SerializationFormat} is required.
*/
public static <T> T newClient(SessionProtocol protocol, EndpointGroup endpointGroup, Class<T> clientType) {
return builder(protocol, endpointGroup).build(clientType);
}
/**
* Creates a new client that connects to the specified {@link EndpointGroup} with
* the specified {@link SessionProtocol} and {@code path} using the default {@link ClientFactory}.
*
* @param protocol the {@link SessionProtocol}
* @param endpointGroup the server {@link EndpointGroup}
* @param path the path to the endpoint
* @param clientType the type of the new client
*
* @throws IllegalArgumentException if the specified {@code clientType} is unsupported for
* the specified {@link SessionProtocol} or
* {@link SerializationFormat} is required.
*/
public static <T> T newClient(SessionProtocol protocol, EndpointGroup endpointGroup, String path,
Class<T> clientType) {
return builder(protocol, endpointGroup, path).build(clientType);
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified {@code uri}.
*
* @throws IllegalArgumentException if the specified {@code uri} is invalid, or the specified
* {@code clientType} is unsupported for the {@code uri}'s scheme
*/
public static ClientBuilder builder(String uri) {
return builder(URI.create(requireNonNull(uri, "uri")));
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified {@link URI}.
*
* @throws IllegalArgumentException if the specified {@link URI} is invalid, or the specified
* {@code clientType} is unsupported for the {@link URI}'s scheme
*/
public static ClientBuilder builder(URI uri) {
return new ClientBuilder(requireNonNull(uri, "uri"));
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified
* {@link EndpointGroup} with the specified {@code scheme}.
*
* @throws IllegalArgumentException if the {@code scheme} is invalid.
*/
public static ClientBuilder builder(String scheme, EndpointGroup endpointGroup) {
return builder(Scheme.parse(requireNonNull(scheme, "scheme")), endpointGroup);
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified
* {@link EndpointGroup} with the specified {@code scheme} and {@code path}.
*
* @throws IllegalArgumentException if the {@code scheme} is invalid.
*/
public static ClientBuilder builder(String scheme, EndpointGroup endpointGroup, String path) {
return builder(Scheme.parse(requireNonNull(scheme, "scheme")), endpointGroup, path);
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified
* {@link EndpointGroup} with the specified {@link SessionProtocol}.
*/
public static ClientBuilder builder(SessionProtocol protocol, EndpointGroup endpointGroup) {
return builder(Scheme.of(SerializationFormat.NONE, requireNonNull(protocol, "protocol")),
endpointGroup);
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified
* {@link EndpointGroup} with the specified {@link SessionProtocol} and {@code path}.
*/
public static ClientBuilder builder(SessionProtocol protocol, EndpointGroup endpointGroup,
String path) {
return builder(Scheme.of(SerializationFormat.NONE, requireNonNull(protocol, "protocol")),
endpointGroup, path);
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified
* {@link EndpointGroup} with the specified {@link Scheme}.
*/
public static ClientBuilder builder(Scheme scheme, EndpointGroup endpointGroup) {
requireNonNull(scheme, "scheme");
requireNonNull(endpointGroup, "endpointGroup");
return new ClientBuilder(scheme, endpointGroup, null);
}
/**
* Returns a new {@link ClientBuilder} that builds the client that connects to the specified
* {@link EndpointGroup} with the specified {@link Scheme} and {@code path}.
*/
public static ClientBuilder builder(Scheme scheme, EndpointGroup endpointGroup, String path) {
requireNonNull(scheme, "scheme");
requireNonNull(endpointGroup, "endpointGroup");
requireNonNull(path, "path");
return new ClientBuilder(scheme, endpointGroup, path);
}
/**
* Creates a new derived client that connects to the same {@link URI} with the specified {@code client}
* and the specified {@code additionalOptions}.
*
* @see ClientBuilder ClientBuilder, for more information about how the base options and
* additional options are merged when a derived client is created.
*/
public static <T> T newDerivedClient(T client, ClientOptionValue<?>... additionalOptions) {
final ClientBuilderParams params = builderParams(client);
final ClientBuilder builder = newDerivedBuilder(params);
builder.options(additionalOptions);
return newDerivedClient(builder, params.clientType());
}
/**
* Creates a new derived client that connects to the same {@link URI} with the specified {@code client}
* and the specified {@code additionalOptions}.
*
* @see ClientBuilder ClientBuilder, for more information about how the base options and
* additional options are merged when a derived client is created.
*/
public static <T> T newDerivedClient(T client, Iterable<ClientOptionValue<?>> additionalOptions) {
final ClientBuilderParams params = builderParams(client);
final ClientBuilder builder = newDerivedBuilder(params);
builder.options(additionalOptions);
return newDerivedClient(builder, params.clientType());
}
/**
* Creates a new derived client that connects to the same {@link URI} with the specified {@code client}
* but with different {@link ClientOption}s. For example:
*
* <pre>{@code
* WebClient derivedWebClient = Clients.newDerivedClient(webClient, options -> {
* ClientOptionsBuilder builder = options.toBuilder();
* builder.decorator(...); // Add a decorator.
* builder.addHttpHeader(...); // Add an HTTP header.
* return builder.build();
* });
* }</pre>
*
* @param configurator a {@link Function} whose input is the original {@link ClientOptions} of the client
* being derived from and whose output is the {@link ClientOptions} of the new derived
* client
*
* @see ClientBuilder ClientBuilder, for more information about how the base options and
* additional options are merged when a derived client is created.
* @see ClientOptionsBuilder
*/
public static <T> T newDerivedClient(
T client, Function<? super ClientOptions, ClientOptions> configurator) {
final ClientBuilderParams params = builderParams(client);
final ClientBuilder builder = builder(params.uri());
builder.options(configurator.apply(params.options()));
return newDerivedClient(builder, params.clientType());
}
@SuppressWarnings("unchecked")
private static <T> T newDerivedClient(ClientBuilder builder, Class<?> clientType) {
return builder.build((Class<T>) clientType);
}
private static ClientBuilder newDerivedBuilder(ClientBuilderParams params) {
final ClientBuilder builder = builder(params.scheme(), params.endpointGroup(),
params.absolutePathRef());
builder.options(params.options());
return builder;
}
private static ClientBuilderParams builderParams(Object client) {
requireNonNull(client, "client");
final ClientBuilderParams params = ClientFactory.ofDefault().clientBuilderParams(client);
if (params == null) {
throw new IllegalArgumentException("derivation not supported by: " + client.getClass().getName());
}
return params;
}
/**
* Unwraps the specified client into the object of the specified {@code type}.
* Use this method instead of an explicit downcast. For example:
* <pre>{@code
* WebClient client = WebClient.builder(...)
* .decorator(LoggingClient.newDecorator())
* .build();
*
* LoggingClient unwrapped = Clients.unwrap(client, LoggingClient.class);
*
* // If the client implements Unwrappable, you can just use the 'as()' method.
* LoggingClient unwrapped2 = client.as(LoggingClient.class);
* }</pre>
*
* @param type the type of the object to return
* @return the object of the specified {@code type} if found, or {@code null} if not found.
*
* @see Client#as(Class)
* @see ClientFactory#unwrap(Object, Class)
* @see Unwrappable
*/
@Nullable
public static <T> T unwrap(Object client, Class<T> type) {
final ClientBuilderParams params = ClientFactory.ofDefault().clientBuilderParams(client);
if (params == null) {
return null;
}
return params.options().factory().unwrap(client, type);
}
/**
* Sets the specified HTTP header in a thread-local variable so that the header is sent by the client call
* made from the current thread. Use the {@code try-with-resources} block with the returned
* {@link SafeCloseable} to unset the thread-local variable automatically:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.AUTHORIZATION;
*
* try (SafeCloseable ignored = withHttpHeader(AUTHORIZATION, myCredential)) {
* client.executeSomething(..);
* }
* }</pre>
* You can also nest the header manipulation:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.AUTHORIZATION;
* import static com.linecorp.armeria.common.HttpHeaderNames.USER_AGENT;
*
* try (SafeCloseable ignored = withHttpHeader(USER_AGENT, myAgent)) {
* for (String secret : secrets) {
* try (SafeCloseable ignored2 = withHttpHeader(AUTHORIZATION, secret)) {
* // Both USER_AGENT and AUTHORIZATION will be set.
* client.executeSomething(..);
* }
* }
* }
* }</pre>
*
* @see #withHttpHeaders(Consumer)
*/
public static SafeCloseable withHttpHeader(CharSequence name, String value) {
requireNonNull(name, "name");
requireNonNull(value, "value");
return withHttpHeaders(headersBuilder -> {
headersBuilder.set(name, value);
});
}
/**
* Sets the specified HTTP header in a thread-local variable so that the header is sent by the client call
* made from the current thread. Use the {@code try-with-resources} block with the returned
* {@link SafeCloseable} to unset the thread-local variable automatically:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.CONTENT_TYPE;
* import static com.linecorp.armeria.common.MediaType.JSON_UTF_8;
*
* try (SafeCloseable ignored = withHttpHeader(CONTENT_TYPE, JSON_UTF_8)) {
* client.executeSomething(..);
* }
* }</pre>
* You can also nest the header manipulation:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.AUTHORIZATION;
* import static com.linecorp.armeria.common.HttpHeaderNames.CONTENT_TYPE;
* import static com.linecorp.armeria.common.MediaType.JSON_UTF_8;
*
* try (SafeCloseable ignored = withHttpHeader(CONTENT_TYPE, JSON_UTF_8)) {
* for (String secret : secrets) {
* try (SafeCloseable ignored2 = withHttpHeader(AUTHORIZATION, secret)) {
* // Both CONTENT_TYPE and AUTHORIZATION will be set.
* client.executeSomething(..);
* }
* }
* }
* }</pre>
*
* @see #withHttpHeaders(Consumer)
*/
public static SafeCloseable withHttpHeader(CharSequence name, Object value) {
requireNonNull(name, "name");
requireNonNull(value, "value");
return withHttpHeaders(headersBuilder -> {
headersBuilder.setObject(name, value);
});
}
/**
* Sets the specified HTTP header manipulating function in a thread-local variable so that the manipulated
* headers are sent by the client call made from the current thread. Use the {@code try-with-resources}
* block with the returned {@link SafeCloseable} to unset the thread-local variable automatically:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.AUTHORIZATION;
* import static com.linecorp.armeria.common.HttpHeaderNames.USER_AGENT;
*
* try (SafeCloseable ignored = withHttpHeaders(headers -> {
* return headers.toBuilder()
* .set(HttpHeaders.AUTHORIZATION, myCredential)
* .set(HttpHeaders.USER_AGENT, myAgent)
* .build();
* })) {
* client.executeSomething(..);
* }
* }</pre>
* You can also nest the header manipulation:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.AUTHORIZATION;
* import static com.linecorp.armeria.common.HttpHeaderNames.USER_AGENT;
*
* try (SafeCloseable ignored = withHttpHeaders(h -> {
* return h.toBuilder()
* .set(USER_AGENT, myAgent)
* .build();
* })) {
* for (String secret : secrets) {
* try (SafeCloseable ignored2 = withHttpHeaders(h -> {
* return h.toBuilder()
* .set(AUTHORIZATION, secret)
* .build();
* })) {
* // Both USER_AGENT and AUTHORIZATION will be set.
* client.executeSomething(..);
* }
* }
* }
* }</pre>
*
* @see #withHttpHeaders(Consumer)
*
* @deprecated Use {@link #withHttpHeaders(Consumer)}.
*/
@Deprecated
public static SafeCloseable withHttpHeaders(
Function<? super HttpHeaders, ? extends HttpHeaders> headerManipulator) {
requireNonNull(headerManipulator, "headerManipulator");
return withContextCustomizer(ctx -> {
final HttpHeaders manipulatedHeaders = headerManipulator.apply(ctx.additionalRequestHeaders());
ctx.mutateAdditionalRequestHeaders(mutator -> mutator.add(manipulatedHeaders));
});
}
/**
* Sets the specified {@link Consumer}, which mutates HTTP headers, in a thread-local variable so that the
* mutated headers are sent by the client call made from the current thread.
* Use the {@code try-with-resources} block with the returned {@link SafeCloseable} to unset the
* thread-local variable automatically:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.AUTHORIZATION;
* import static com.linecorp.armeria.common.HttpHeaderNames.USER_AGENT;
*
* try (SafeCloseable ignored = withHttpHeaders(builder -> {
* builder.set(HttpHeaders.AUTHORIZATION, myCredential)
* .set(HttpHeaders.USER_AGENT, myAgent);
* })) {
* client.executeSomething(..);
* }
* }</pre>
* You can also nest the header mutation:
* <pre>{@code
* import static com.linecorp.armeria.common.HttpHeaderNames.AUTHORIZATION;
* import static com.linecorp.armeria.common.HttpHeaderNames.USER_AGENT;
*
* try (SafeCloseable ignored = withHttpHeaders(builder -> {
* builder.set(USER_AGENT, myAgent);
* })) {
* for (String secret : secrets) {
* try (SafeCloseable ignored2 = withHttpHeaders(builder -> {
* builder.set(AUTHORIZATION, secret);
* })) {
* // Both USER_AGENT and AUTHORIZATION will be set.
* client.executeSomething(..);
* }
* }
* }
* }</pre>
*
* @see #withHttpHeader(CharSequence, String)
*/
public static SafeCloseable withHttpHeaders(Consumer<HttpHeadersBuilder> headerMutator) {
requireNonNull(headerMutator, "headerMutator");
return withContextCustomizer(ctx -> {
ctx.mutateAdditionalRequestHeaders(headerMutator);
});
}
/**
* Sets the specified {@link ClientRequestContext} customization function in a thread-local variable so that
* the customized context is used when the client invokes a request from the current thread. Use the
* {@code try-with-resources} block with the returned {@link SafeCloseable} to unset the thread-local
* variable automatically:
* <pre>{@code
* try (SafeCloseable ignored = withContextCustomizer(ctx -> {
* ctx.setAttr(USER_ID, userId);
* ctx.setAttr(USER_SECRET, secret);
* })) {
* client.executeSomething(..);
* }
* }</pre>
* You can also nest the request context customization:
* <pre>{@code
* try (SafeCloseable ignored = withContextCustomizer(ctx -> ctx.setAttr(USER_ID, userId))) {
* String secret = client.getSecret();
* try (SafeCloseable ignored2 = withContextCustomizer(ctx -> ctx.setAttr(USER_SECRET, secret))) {
* // Both USER_ID and USER_SECRET will be set.
* client.executeSomething(..);
* }
* }
* }</pre>
* Note that certain properties of {@link ClientRequestContext}, such as:
* <ul>
* <li>{@link ClientRequestContext#endpoint()}</li>
* <li>{@link ClientRequestContext#localAddress()}</li>
* <li>{@link ClientRequestContext#remoteAddress()}</li>
* </ul>
* may be {@code null} while the customizer function runs, because the target host of the {@link Request}
* is not determined yet.
*
* @see #withHttpHeaders(Consumer)
*/
public static SafeCloseable withContextCustomizer(
Consumer<? super ClientRequestContext> contextCustomizer) {
requireNonNull(contextCustomizer, "contextCustomizer");
final ClientThreadLocalState customizers = ClientThreadLocalState.maybeCreate();
customizers.add(contextCustomizer);
return new SafeCloseable() {
boolean closed;
@Override
public void close() {
if (closed) {
return;
}
closed = true;
customizers.remove(contextCustomizer);
}
};
}
/**
* Prepare to capture the {@link ClientRequestContext} of the next request sent from the current thread.
* Use the {@code try-with-resources} block with the returned {@link ClientRequestContextCaptor}
* to retrieve the captured {@link ClientRequestContext} and to unset the thread-local variable
* automatically.
* <pre>{@code
* try (ClientRequestContextCaptor captor = Clients.newContextCaptor()) {
* WebClient.of().get("https://www.example.com/hello");
* ClientRequestContext ctx = captor.get();
* assert ctx.path().equals("/hello");
* }}</pre>
* Note that you can also capture more than one {@link ClientRequestContext}:
* <pre>{@code
* try (ClientRequestContextCaptor captor = Clients.newContextCaptor()) {
* WebClient.of().get("https://www.example.com/foo");
* WebClient.of().get("https://www.example.com/bar");
* List<ClientRequestContext> contexts = captor.getAll();
* assert contexts.get(0).path().equals("/foo");
* assert contexts.get(1).path().equals("/bar");
* }}</pre>
*/
public static ClientRequestContextCaptor newContextCaptor() {
    // Registers a captor on the thread-local state; callers must close the
    // captor (try-with-resources, per the javadoc above) to deregister it.
    return ClientThreadLocalState.maybeCreate().newContextCaptor();
}
/**
* Returns {@code true} if the specified {@code uri} is an undefined {@link URI}, which signifies that
* a {@link Client}, was created without a {@link URI} or {@link EndpointGroup}. For example,
* {@code isUndefinedUri(WebClient.of().uri())} will return {@code true}.
*/
public static boolean isUndefinedUri(URI uri) {
    // Identity (==) comparison is intentional: UNDEFINED_URI is a singleton
    // sentinel, not a value to compare structurally.
    return uri == WebClientBuilder.UNDEFINED_URI;
}
private Clients() {} // Static-utility holder; never instantiated.
}
|
package test.base.core;
import java.util.Properties;
import org.junit.Before;
import org.junit.Test;
import com.wpisen.trace.agent.bootstrap.TraceSessionInfo;
import com.wpisen.trace.agent.common.util.Assert;
import com.wpisen.trace.agent.core.AgentFinal;
import com.wpisen.trace.agent.core.DefaultApplication;
/**
* Created by wpisen on 16/11/2.
*/
/**
 * Smoke tests for {@link DefaultApplication}: agent bootstrap and bytecode
 * transformation of a known JDBC driver class.
 */
public class DefaultApplicationTest implements AgentFinal {
    // Fresh trace session created per test in setUp().
    private TraceSessionInfo session;

    @Before
    public void setUp() throws Exception {
        session = new TraceSessionInfo();
    }

    /** Agent initializes when tracing is switched on (OPEN is presumably an AgentFinal constant — confirm). */
    @Test
    public void testInit() throws Exception {
        DefaultApplication boot = new DefaultApplication();
        Properties pro = new Properties();
        pro.setProperty(OPEN, "true");
        boot.init(session, pro, null, null);
    }

    /**
     * Initializes with local collector class paths and expects the MySQL driver
     * class to be instrumented (non-null transformed bytes).
     * NOTE(review): the absolute F:\ paths make this test machine-specific.
     */
    @Test
    public void testDevcollectTest() throws Exception {
        DefaultApplication boot = new DefaultApplication();
        Properties pro = new Properties();
        pro.setProperty(OPEN, "true");
        String[] collects = { "F:\\git\\trace-agent\\agent-collect-servlet\\target\\classes\\",
                "F:\\git\\trace-agent\\agent-collects\\target\\classes\\" };
        boot.init(session, pro, collects, null);
        byte[] result = boot.transform(getClass().getClassLoader(), "com.mysql.jdbc.NonRegisteringDriver", null, null, null);
        Assert.notNull(result);
    }

    /** Transform without collectors must complete without throwing. */
    @Test
    public void testTransform() throws Exception {
        DefaultApplication boot = new DefaultApplication();
        Properties pro = new Properties();
        pro.setProperty(OPEN, "true");
        boot.init(session, pro, null, null);
        boot.transform(getClass().getClassLoader(), "com.mysql.jdbc.NonRegisteringDriver", null, null, null);
    }

    // NOTE(review): looks up a resource and discards the result — presumably a
    // leftover manual check; confirm whether it can be removed.
    public static void main(String[] args) {
        DefaultApplicationTest.class.getClassLoader().getResource("com/mysql/jdbc/Driver.class");
    }
}
from anonboard.jsonapi_test_case import JSONAPITestCase
from core import factories
class TopicAPITests(JSONAPITestCase):
    """Read-only endpoint tests for /api/v1/topics/."""

    def setUp(self):
        super(TopicAPITests, self).setUp()
        # Ten fixture topics; removed explicitly in tearDown().
        self.topics = factories.TopicFactory.create_batch(10)

    def tearDown(self):
        super(TopicAPITests, self).tearDown()
        for topic in self.topics:
            topic.delete()

    def test_get_topic_list(self):
        """Listing returns one resource per fixture topic."""
        response = self.client.get('/api/v1/topics/')
        result = self.result(response)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(result['data']), len(self.topics))

    def test_get_topic(self):
        """Detail view exposes name/identifier/description attributes."""
        response = self.client.get('/api/v1/topics/%i' % self.topics[0].id)
        result = self.result(response)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            result['data']['attributes']['name'],
            self.topics[0].name
        )
        self.assertEqual(
            result['data']['attributes']['identifier'],
            self.topics[0].identifier
        )
        self.assertEqual(
            result['data']['attributes']['description'],
            self.topics[0].description
        )

    def test_get_topic_thread_last_day(self):
        """threads-last-day counts the topic's just-created threads."""
        threads = factories.ThreadFactory.create_batch(
            size=10,
            topic=self.topics[0]
        )
        response = self.client.get('/api/v1/topics/%i' % self.topics[0].id)
        result = self.result(response)
        self.assertEqual(
            result['data']['attributes']['threads-last-day'],
            len(threads)
        )
class ThreadAPITests(JSONAPITestCase):
    """List, detail and create tests for /api/v1/threads/."""

    def setUp(self):
        super(ThreadAPITests, self).setUp()
        self.threads = factories.ThreadFactory.create_batch(10)

    def tearDown(self):
        super(ThreadAPITests, self).tearDown()
        for thread in self.threads:
            thread.delete()

    def test_get_thread_list(self):
        """Listing returns one resource per fixture thread."""
        response = self.client.get('/api/v1/threads/')
        result = self.result(response)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(result['data']), len(self.threads))

    def test_get_thread(self):
        """Detail view exposes title and content attributes."""
        response = self.client.get('/api/v1/threads/%i' % self.threads[0].id)
        result = self.result(response)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            result['data']['attributes']['title'],
            self.threads[0].title
        )
        self.assertEqual(
            result['data']['attributes']['content'],
            self.threads[0].content
        )

    def test_post_thread(self):
        """Anonymous POST (user relationship None) creates a thread (201)."""
        topic = factories.TopicFactory.create()
        data = {
            'data': {
                'type': 'threads',
                'attributes': {
                    'title': 'Foo',
                    'content': 'Bar'
                },
                'relationships': {
                    # Anonymous board: a thread may be posted without a user.
                    'user': {
                        'data': None
                    },
                    'topic': {
                        'data': {
                            'type': 'topics',
                            'id': topic.id
                        }
                    }
                }
            }
        }
        response = self.client.post('/api/v1/threads/', data)
        result = self.result(response)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(
            result['data']['attributes']['title'],
            data['data']['attributes']['title']
        )
        self.assertEqual(
            result['data']['attributes']['content'],
            data['data']['attributes']['content']
        )
class CommentAPITests(JSONAPITestCase):
    """List, detail and create tests for /api/v1/comments/."""

    def setUp(self):
        super(CommentAPITests, self).setUp()
        self.comments = factories.CommentFactory.create_batch(10)

    def tearDown(self):
        super(CommentAPITests, self).tearDown()
        for comment in self.comments:
            comment.delete()

    def test_get_comment_list(self):
        """Listing returns one resource per fixture comment."""
        response = self.client.get('/api/v1/comments/')
        result = self.result(response)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(result['data']), len(self.comments))

    def test_get_comment(self):
        """Detail view exposes the content attribute."""
        response = self.client.get('/api/v1/comments/%i' % self.comments[0].id)
        result = self.result(response)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            result['data']['attributes']['content'],
            self.comments[0].content
        )

    def test_comment_thread(self):
        """Anonymous POST (user relationship None) creates a comment (201)."""
        thread = factories.ThreadFactory.create()
        data = {
            'data': {
                'type': 'comments',
                'attributes': {
                    'content': 'Bar'
                },
                'relationships': {
                    # Anonymous board: a comment may be posted without a user.
                    'user': {
                        'data': None
                    },
                    'thread': {
                        'data': {
                            'type': 'threads',
                            'id': thread.id
                        }
                    }
                }
            }
        }
        response = self.client.post('/api/v1/comments/', data)
        result = self.result(response)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(
            result['data']['attributes']['content'],
            data['data']['attributes']['content']
        )
|
<gh_stars>0
# !/usr/bin/python
# -*- coding:utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import lda
import lda.datasets
from pprint import pprint
if __name__ == "__main__":
# document-term matrix
X = lda.datasets.load_reuters()
print("type(X): {}".format(type(X)))
print("shape: {}\n".format(X.shape))
print(X[:10, :10])
# the vocab
vocab = lda.datasets.load_reuters_vocab()
print("type(vocab): {}".format(type(vocab)))
print("len(vocab): {}\n".format(len(vocab)))
print(vocab[:10])
# titles for each story
titles = lda.datasets.load_reuters_titles()
print("type(titles): {}".format(type(titles)))
print("len(titles): {}\n".format(len(titles)))
pprint(titles[:10])
print('LDA start ----')
topic_num = 20
model = lda.LDA(n_topics=topic_num, n_iter=500, random_state=1)
model.fit(X)
# topic-word
topic_word = model.topic_word_
print("type(topic_word): {}".format(type(topic_word)))
print("shape: {}".format(topic_word.shape))
print(vocab[:5])
print(topic_word[:, :5])
# Print Topic distribution
n = 7
for i, topic_dist in enumerate(topic_word):
topic_words = np.array(vocab)[np.argsort(topic_dist)][:-(n + 1):-1]
print('*Topic {}\n- {}'.format(i, ' '.join(topic_words)))
# Document - topic
doc_topic = model.doc_topic_
print("type(doc_topic): {}".format(type(doc_topic)))
print("shape: {}".format(doc_topic.shape))
for i in range(10):
topic_most_pr = doc_topic[i].argmax()
print(u"文档: {} 主题: {} value: {}".format(i, topic_most_pr, doc_topic[i][topic_most_pr]))
mpl.rcParams['font.sans-serif'] = [u'SimHei']
mpl.rcParams['axes.unicode_minus'] = False
# Topic - word
plt.figure(figsize=(8, 9))
# f, ax = plt.subplots(5, 1, sharex=True)
for i, k in enumerate([0, 5, 9, 14, 19]):
ax = plt.subplot(5, 1, i + 1)
ax.plot(topic_word[k, :], 'r-')
ax.set_xlim(-50, 4350) # [0,4258]
ax.set_ylim(0, 0.08)
ax.set_ylabel(u"概率")
ax.set_title(u"主题 {}".format(k))
plt.xlabel(u"词", fontsize=14)
plt.tight_layout()
plt.suptitle(u'主题的词分布', fontsize=18)
plt.subplots_adjust(top=0.9)
plt.show()
# Document - Topic
plt.figure(figsize=(8, 9))
# f, ax= plt.subplots(5, 1, figsize=(8, 6), sharex=True)
for i, k in enumerate([1, 3, 4, 8, 9]):
ax = plt.subplot(5, 1, i + 1)
ax.stem(doc_topic[k, :], linefmt='g-', markerfmt='ro')
ax.set_xlim(-1, topic_num + 1)
ax.set_ylim(0, 1)
ax.set_ylabel(u"概率")
ax.set_title(u"文档 {}".format(k))
plt.xlabel(u"主题", fontsize=14)
plt.suptitle(u'文档的主题分布', fontsize=18)
plt.tight_layout()
plt.subplots_adjust(top=0.9)
plt.show()
|
package terminal
import (
	"image/color"
	"sort"
)
var (
// Maps 256 color terminal codes to color.RGBA
ColorMap = map[ColorCode]color.RGBA{
16: color.RGBA{0, 0, 0, 255},
17: color.RGBA{0, 0, 95, 255},
18: color.RGBA{0, 0, 135, 255},
19: color.RGBA{0, 0, 175, 255},
20: color.RGBA{0, 0, 215, 255},
21: color.RGBA{0, 0, 255, 255},
22: color.RGBA{0, 95, 0, 255},
23: color.RGBA{0, 95, 95, 255},
24: color.RGBA{0, 95, 135, 255},
25: color.RGBA{0, 95, 175, 255},
26: color.RGBA{0, 95, 215, 255},
27: color.RGBA{0, 95, 255, 255},
28: color.RGBA{0, 135, 0, 255},
29: color.RGBA{0, 135, 95, 255},
30: color.RGBA{0, 135, 135, 255},
31: color.RGBA{0, 135, 175, 255},
32: color.RGBA{0, 135, 215, 255},
33: color.RGBA{0, 135, 255, 255},
34: color.RGBA{0, 175, 0, 255},
35: color.RGBA{0, 175, 95, 255},
36: color.RGBA{0, 175, 135, 255},
37: color.RGBA{0, 175, 175, 255},
38: color.RGBA{0, 175, 215, 255},
39: color.RGBA{0, 175, 255, 255},
40: color.RGBA{0, 215, 0, 255},
41: color.RGBA{0, 215, 95, 255},
42: color.RGBA{0, 215, 135, 255},
43: color.RGBA{0, 215, 175, 255},
44: color.RGBA{0, 215, 215, 255},
45: color.RGBA{0, 215, 255, 255},
46: color.RGBA{0, 255, 0, 255},
47: color.RGBA{0, 255, 95, 255},
48: color.RGBA{0, 255, 135, 255},
49: color.RGBA{0, 255, 175, 255},
50: color.RGBA{0, 255, 215, 255},
51: color.RGBA{0, 255, 255, 255},
52: color.RGBA{95, 0, 0, 255},
53: color.RGBA{95, 0, 95, 255},
54: color.RGBA{95, 0, 135, 255},
55: color.RGBA{95, 0, 175, 255},
56: color.RGBA{95, 0, 215, 255},
57: color.RGBA{95, 0, 255, 255},
58: color.RGBA{95, 95, 0, 255},
59: color.RGBA{95, 95, 95, 255},
60: color.RGBA{95, 95, 135, 255},
61: color.RGBA{95, 95, 175, 255},
62: color.RGBA{95, 95, 215, 255},
63: color.RGBA{95, 95, 255, 255},
64: color.RGBA{95, 135, 0, 255},
65: color.RGBA{95, 135, 95, 255},
66: color.RGBA{95, 135, 135, 255},
67: color.RGBA{95, 135, 175, 255},
68: color.RGBA{95, 135, 215, 255},
69: color.RGBA{95, 135, 255, 255},
70: color.RGBA{95, 175, 0, 255},
71: color.RGBA{95, 175, 95, 255},
72: color.RGBA{95, 175, 135, 255},
73: color.RGBA{95, 175, 175, 255},
74: color.RGBA{95, 175, 215, 255},
75: color.RGBA{95, 175, 255, 255},
76: color.RGBA{95, 215, 0, 255},
77: color.RGBA{95, 215, 95, 255},
78: color.RGBA{95, 215, 135, 255},
79: color.RGBA{95, 215, 175, 255},
80: color.RGBA{95, 215, 215, 255},
81: color.RGBA{95, 215, 255, 255},
82: color.RGBA{95, 255, 0, 255},
83: color.RGBA{95, 255, 95, 255},
84: color.RGBA{95, 255, 135, 255},
85: color.RGBA{95, 255, 175, 255},
86: color.RGBA{95, 255, 215, 255},
87: color.RGBA{95, 255, 255, 255},
88: color.RGBA{135, 0, 0, 255},
89: color.RGBA{135, 0, 95, 255},
90: color.RGBA{135, 0, 135, 255},
91: color.RGBA{135, 0, 175, 255},
92: color.RGBA{135, 0, 215, 255},
93: color.RGBA{135, 0, 255, 255},
94: color.RGBA{135, 95, 0, 255},
95: color.RGBA{135, 95, 95, 255},
96: color.RGBA{135, 95, 135, 255},
97: color.RGBA{135, 95, 175, 255},
98: color.RGBA{135, 95, 215, 255},
99: color.RGBA{135, 95, 255, 255},
100: color.RGBA{135, 135, 0, 255},
101: color.RGBA{135, 135, 95, 255},
102: color.RGBA{135, 135, 135, 255},
103: color.RGBA{135, 135, 175, 255},
104: color.RGBA{135, 135, 215, 255},
105: color.RGBA{135, 135, 255, 255},
106: color.RGBA{135, 175, 0, 255},
107: color.RGBA{135, 175, 95, 255},
108: color.RGBA{135, 175, 135, 255},
109: color.RGBA{135, 175, 175, 255},
110: color.RGBA{135, 175, 215, 255},
111: color.RGBA{135, 175, 255, 255},
112: color.RGBA{135, 215, 0, 255},
113: color.RGBA{135, 215, 95, 255},
114: color.RGBA{135, 215, 135, 255},
115: color.RGBA{135, 215, 175, 255},
116: color.RGBA{135, 215, 215, 255},
117: color.RGBA{135, 215, 255, 255},
118: color.RGBA{135, 255, 0, 255},
119: color.RGBA{135, 255, 95, 255},
120: color.RGBA{135, 255, 135, 255},
121: color.RGBA{135, 255, 175, 255},
122: color.RGBA{135, 255, 215, 255},
123: color.RGBA{135, 255, 255, 255},
124: color.RGBA{175, 0, 0, 255},
125: color.RGBA{175, 0, 95, 255},
126: color.RGBA{175, 0, 135, 255},
127: color.RGBA{175, 0, 175, 255},
128: color.RGBA{175, 0, 215, 255},
129: color.RGBA{175, 0, 255, 255},
130: color.RGBA{175, 95, 0, 255},
131: color.RGBA{175, 95, 95, 255},
132: color.RGBA{175, 95, 135, 255},
133: color.RGBA{175, 95, 175, 255},
134: color.RGBA{175, 95, 215, 255},
135: color.RGBA{175, 95, 255, 255},
136: color.RGBA{175, 135, 0, 255},
137: color.RGBA{175, 135, 95, 255},
138: color.RGBA{175, 135, 135, 255},
139: color.RGBA{175, 135, 175, 255},
140: color.RGBA{175, 135, 215, 255},
141: color.RGBA{175, 135, 255, 255},
142: color.RGBA{175, 175, 0, 255},
143: color.RGBA{175, 175, 95, 255},
144: color.RGBA{175, 175, 135, 255},
145: color.RGBA{175, 175, 175, 255},
146: color.RGBA{175, 175, 215, 255},
147: color.RGBA{175, 175, 255, 255},
148: color.RGBA{175, 215, 0, 255},
149: color.RGBA{175, 215, 95, 255},
150: color.RGBA{175, 215, 135, 255},
151: color.RGBA{175, 215, 175, 255},
152: color.RGBA{175, 215, 215, 255},
153: color.RGBA{175, 215, 255, 255},
154: color.RGBA{175, 255, 0, 255},
155: color.RGBA{175, 255, 95, 255},
156: color.RGBA{175, 255, 135, 255},
157: color.RGBA{175, 255, 175, 255},
158: color.RGBA{175, 255, 215, 255},
159: color.RGBA{175, 255, 255, 255},
160: color.RGBA{215, 0, 0, 255},
161: color.RGBA{215, 0, 95, 255},
162: color.RGBA{215, 0, 135, 255},
163: color.RGBA{215, 0, 175, 255},
164: color.RGBA{215, 0, 215, 255},
165: color.RGBA{215, 0, 255, 255},
166: color.RGBA{215, 95, 0, 255},
167: color.RGBA{215, 95, 95, 255},
168: color.RGBA{215, 95, 135, 255},
169: color.RGBA{215, 95, 175, 255},
170: color.RGBA{215, 95, 215, 255},
171: color.RGBA{215, 95, 255, 255},
172: color.RGBA{215, 135, 0, 255},
173: color.RGBA{215, 135, 95, 255},
174: color.RGBA{215, 135, 135, 255},
175: color.RGBA{215, 135, 175, 255},
176: color.RGBA{215, 135, 215, 255},
177: color.RGBA{215, 135, 255, 255},
178: color.RGBA{215, 175, 0, 255},
179: color.RGBA{215, 175, 95, 255},
180: color.RGBA{215, 175, 135, 255},
181: color.RGBA{215, 175, 175, 255},
182: color.RGBA{215, 175, 215, 255},
183: color.RGBA{215, 175, 255, 255},
184: color.RGBA{215, 215, 0, 255},
185: color.RGBA{215, 215, 95, 255},
186: color.RGBA{215, 215, 135, 255},
187: color.RGBA{215, 215, 175, 255},
188: color.RGBA{215, 215, 215, 255},
189: color.RGBA{215, 215, 255, 255},
190: color.RGBA{215, 255, 0, 255},
191: color.RGBA{215, 255, 95, 255},
192: color.RGBA{215, 255, 135, 255},
193: color.RGBA{215, 255, 175, 255},
194: color.RGBA{215, 255, 215, 255},
195: color.RGBA{215, 255, 255, 255},
196: color.RGBA{255, 0, 0, 255},
197: color.RGBA{255, 0, 95, 255},
198: color.RGBA{255, 0, 135, 255},
199: color.RGBA{255, 0, 175, 255},
200: color.RGBA{255, 0, 215, 255},
201: color.RGBA{255, 0, 255, 255},
202: color.RGBA{255, 95, 0, 255},
203: color.RGBA{255, 95, 95, 255},
204: color.RGBA{255, 95, 135, 255},
205: color.RGBA{255, 95, 175, 255},
206: color.RGBA{255, 95, 215, 255},
207: color.RGBA{255, 95, 255, 255},
208: color.RGBA{255, 135, 0, 255},
209: color.RGBA{255, 135, 95, 255},
210: color.RGBA{255, 135, 135, 255},
211: color.RGBA{255, 135, 175, 255},
212: color.RGBA{255, 135, 215, 255},
213: color.RGBA{255, 135, 255, 255},
214: color.RGBA{255, 175, 0, 255},
215: color.RGBA{255, 175, 95, 255},
216: color.RGBA{255, 175, 135, 255},
217: color.RGBA{255, 175, 175, 255},
218: color.RGBA{255, 175, 215, 255},
219: color.RGBA{255, 175, 255, 255},
220: color.RGBA{255, 215, 0, 255},
221: color.RGBA{255, 215, 95, 255},
222: color.RGBA{255, 215, 135, 255},
223: color.RGBA{255, 215, 175, 255},
224: color.RGBA{255, 215, 215, 255},
225: color.RGBA{255, 215, 255, 255},
226: color.RGBA{255, 255, 0, 255},
227: color.RGBA{255, 255, 95, 255},
228: color.RGBA{255, 255, 135, 255},
229: color.RGBA{255, 255, 175, 255},
230: color.RGBA{255, 255, 215, 255},
231: color.RGBA{255, 255, 255, 255},
// Gray scale ramp
232: color.RGBA{8, 8, 8, 255},
233: color.RGBA{18, 18, 18, 255},
234: color.RGBA{28, 28, 28, 255},
235: color.RGBA{38, 38, 38, 255},
236: color.RGBA{48, 48, 48, 255},
237: color.RGBA{58, 58, 58, 255},
238: color.RGBA{68, 68, 68, 255},
239: color.RGBA{78, 78, 78, 255},
240: color.RGBA{88, 88, 88, 255},
241: color.RGBA{98, 98, 98, 255},
242: color.RGBA{108, 108, 108, 255},
243: color.RGBA{118, 118, 118, 255},
244: color.RGBA{128, 128, 128, 255},
245: color.RGBA{138, 138, 138, 255},
246: color.RGBA{148, 148, 148, 255},
247: color.RGBA{158, 158, 158, 255},
248: color.RGBA{168, 168, 168, 255},
249: color.RGBA{178, 178, 178, 255},
250: color.RGBA{188, 188, 188, 255},
251: color.RGBA{198, 198, 198, 255},
252: color.RGBA{208, 208, 208, 255},
253: color.RGBA{218, 218, 218, 255},
254: color.RGBA{228, 228, 228, 255},
255: color.RGBA{238, 238, 238, 255},
}
TermPalette, ColorLookup = createTermPalette()
)
// Creates a color.Palette for 256 color terminals and a corresponding
// ColorCode lookup slice
// createTermPalette builds a color.Palette for 256-color terminals together
// with a parallel slice mapping each palette index back to its terminal
// ColorCode.
//
// Review fix: the original iterated the map directly, so the palette/lookup
// order changed on every run (Go randomizes map iteration). The codes are now
// visited in ascending order, making both outputs deterministic.
func createTermPalette() (color.Palette, []ColorCode) {
	codes := make([]ColorCode, 0, len(ColorMap))
	for code := range ColorMap {
		codes = append(codes, code)
	}
	// ColorCode keys are numeric terminal codes (16..255), so < orders them.
	sort.Slice(codes, func(i, j int) bool { return codes[i] < codes[j] })

	palette := make(color.Palette, len(codes))
	lookup := make([]ColorCode, len(codes))
	for i, code := range codes {
		palette[i] = ColorMap[code]
		lookup[i] = code
	}
	return palette, lookup
}
|
/*
* PMMG - Polícia Militar do Estado de Minas Gerais.
* DTS - Diretoria de Tecnologia e Sistemas.
* CTS - Centro de Tecnologia em Sistemas.
*
* Copyright (c) DTS/CTS.
*
* Este é um software proprietário; não é permitida a distribuição total ou parcial deste código sem a autorização da DTS ou do CTS.
* Se você recebeu uma cópia, informe-nos através dos contatos abaixo.
*
* Site: www.policiamilitar.mg.gov.br
* DTS/CTS: Avenida Amazonas, 6455 - Gameleira, Belo Horizonte, Minas Gerais
* E-mail: <EMAIL>
* Telefones: (31) 2123-1133, (31) 2123-1108, (31) 2123-1113
*/
/**
* Simulação de envios de localização e consulta
* @module teste
* @class bdd_localizacao
**/
var chai = require('chai');
module.exports.chai = chai;
var expect = chai.expect
// NOTE(review): every case below asserts only a tautology (true !== false)
// and completes immediately — these are placeholder specs; the real
// MongoCRUD behaviour is not exercised yet.
describe('TDD_MongoCRUD - ', function () {
    it('Cria muitos', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
    it('Cria um', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
    it('Atualiza uma coleção', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
    it('Lista todos', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
    it('Filtra uma lista', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
    it('remove uma coleção', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
    it('Recupera uma coleção', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
    it('Cria um filtro dinâmico', function (fim) {
        expect(true).to.not.equal(false)
        return fim()
    });
});
|
class SourceCodeLoader:
    """Registers source texts and validates their import graphs.

    ``build_source_location_table`` walks each module's ``(import "...")``
    directives, rejecting missing modules and circular dependencies, and
    records the text length of every module that validates cleanly.
    """

    def __init__(self):
        # path -> raw source text
        self.sources = {}
        # path -> length of its text (stands in for a real location table)
        self.source_location_table = {}

    def add_source(self, path, text):
        """Register (or overwrite) the source text for ``path``."""
        self.sources[path] = text

    def process_imports(self, path, text, visited):
        """Recursively validate every import reachable from ``path``.

        ``visited`` holds the paths on the current traversal stack;
        revisiting one of them means the import graph has a cycle.
        """
        if path in visited:
            raise Exception("Circular dependency detected")
        visited.add(path)
        for source_line in text.split('\n'):
            # Only directives at the start of a line are recognized.
            if not source_line.startswith('(import "'):
                continue
            target = source_line.split('"')[1]
            if target not in self.sources:
                raise Exception(f"Error: Imported module '{target}' not found")
            self.process_imports(target, self.sources[target], visited)
        visited.remove(path)

    def build_source_location_table(self):
        """Validate every registered source and record its text length."""
        for module_path, module_text in self.sources.items():
            try:
                self.process_imports(module_path, module_text, set())
                self.source_location_table[module_path] = len(module_text)
            except Exception as error:
                print(f"Error processing {module_path}: {str(error)}")
# Usage demo.
# Review fix: the original listed these fixtures with Rust-style r#"..."#
# raw-string literals, which are a syntax error in Python; plain string
# literals are used instead. "std" also appears twice — add_source overwrites,
# so only the last "std" entry takes effect.
loader = SourceCodeLoader()
sources = [
    ("~/foo", '(import "~/bar") (import "std/b")'),
    ("~/bar", '(import "~/baz") (import "std")'),
    ("std", '(import "~/b")'),
    ("std/a", '(import "~/b")'),
    ("std/b", ''),
    ("std/c", '(hoge'),
    ("std", '(no-implicit-std)'),
]
for path, text in sources:
    loader.add_source(path, text)
loader.build_source_location_table()
<reponame>Kvadeck/basic-js
const CustomError = require("../extensions/custom-error");
module.exports = function createDreamTeam(members) {
if (!Array.isArray(members))
return false
let result = '';
for (const i of members) {
if (typeof (i) == 'string') {
result += i.split(' ').join('')[0];
}
}
return false ? result == '' : result.toUpperCase().split('').sort().join('')
}; |
#!/usr/bin/env bash
. ./hack/check-python/prepare-env.sh
# run the pydocstyle for all files that are provided in $1
# Run pydocstyle on every file listed in $1, counting passes and failures in
# the shared pass/fail counters.
# Review fix: the original tested `$?` twice — the `elif [ $? -eq 2 ]` branch
# compared the exit status of the *first* `[` test, not pydocstyle's, so
# illegal usage (rc 2) was always reported as a plain failure. The status is
# now captured once into `rc`.
function check_files() {
    for source in $1
    do
        echo "$source"
        "$PYTHON_VENV_DIR/bin/pydocstyle" --count "$source"
        rc=$?
        if [ $rc -eq 0 ]
        then
            echo "    Pass"
            let "pass++"
        elif [ $rc -eq 2 ]
        then
            echo "    Illegal usage (should not happen)"
            exit 2
        else
            echo "    Fail"
            let "fail++"
        fi
    done
}
echo "----------------------------------------------------"
echo "Checking documentation strings in all sources stored"
echo "in following directories:"
echo "$directories"
echo "----------------------------------------------------"
echo
[ "$NOVENV" == "1" ] || prepare_venv || exit 1
# checks for the whole directories
for directory in $directories
do
files=$(find "$directory" -path "$PYTHON_VENV_DIR" -prune -o -name '*.py' -print)
check_files "$files"
done
if [ $fail -eq 0 ]
then
echo "All checks passed for $pass source files"
else
let total=$pass+$fail
echo "Documentation strings should be added and/or fixed in $fail source files out of $total files"
exit 1
fi
|
<reponame>OliMoose/kermit
import asyncio, discord
try:
from _command import Command
except:
from coms._command import Command
class Com(Command):
    """`!purge` command: bulk-deletes recent messages in the channel."""

    def __init__(self):
        self.usage = "!purge [number of messages]"
        self.description = "Deletes all the messages!"
        # Trigger prefixes recognized by the dispatcher.
        self.keys = ["!purge", ".purge"]
        self.permissions = ["Developer", "Moderator", "Moderator-In-Training", ":)"]

    async def command(self, client, message, rawtext):
        # +1 so the triggering "!purge N" message itself is removed as well.
        # NOTE(review): int(rawtext) raises ValueError on non-numeric input —
        # presumably the dispatcher validates; confirm.
        deleted = await message.channel.purge(limit=int(rawtext) + 1)
        await self.send(client, message.channel, 'Deleted {} message(s)!'.format(len(deleted)))
if __name__ == "__main__":
command = Com()
print(command.help())
|
def char_count(str):
    """Return a dict mapping each character of ``str`` to its occurrence count."""
    # NOTE(review): the parameter shadows the built-in `str`; renaming it
    # would change the keyword-argument interface, so it is kept as-is.
    char_dict = {}
    for char in str:
        char_dict[char] = char_dict.get(char, 0) + 1
    return char_dict
print(char_count('Hello World!')) |
# Stop the host Apache so it does not hold the ports the containers need.
sudo service apache2 stop
# Tear down any previous containers, then bring the stack up detached.
./stopandremove.sh
docker-compose up -d
|
package ferrari;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Main {
    /**
     * Reads a driver name from stdin, builds a {@link Ferrari} for it and
     * prints the car's string representation.
     */
    public static void main(String[] args) {
        // try-with-resources closes the reader (and the wrapped stdin stream).
        try(BufferedReader bfr = new BufferedReader(new InputStreamReader(System.in))) {
            String driverName = bfr.readLine();
            Ferrari car = new Ferrari(driverName);
            System.out.println(car);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
#!/bin/bash
#*************************************************************************#
# @param
# src_frame_rate: frame rate for send data
# data_path: Video or image list path
# wait_time: When set to 0, it will automatically exit after the eos signal arrives
# loop = true: loop through video
#
# @notice: other flags see ${SAMPLES_DIR}/bin/cns_launcher --help
#*************************************************************************#
# Resolve this script's directory so the sample can run from anywhere.
CURRENT_DIR=$(cd $(dirname ${BASH_SOURCE[0]});pwd)
CNSTREAM_ROOT=${CURRENT_DIR}/../../..
SAMPLES_ROOT=${CNSTREAM_ROOT}/samples
# NOTE(review): CONFIGS_ROOT is set but unused below — confirm it can go.
CONFIGS_ROOT=${SAMPLES_ROOT}/cns_launcher/configs
# Regenerate the input file list before launching.
${SAMPLES_ROOT}/generate_file_list.sh
mkdir -p output
# src_frame_rate -1: presumably "send as fast as possible" — confirm with
# `cns_launcher --help` (see header comment above).
${SAMPLES_ROOT}/bin/cns_launcher \
    --data_path ${SAMPLES_ROOT}/files.list_video \
    --src_frame_rate -1 \
    --config_fname ${CURRENT_DIR}/config.json \
    --log_to_stderr=true
|
# Run at real time priority
chrt --rr 99 ./build/TOZ
|
<gh_stars>10-100
import * as path from 'path';
import * as vscode from 'vscode';
import { ILocalOnlyScript } from '../../models/ILocalOnlyScript';
export class OnlyLocalDirectoryItem extends vscode.TreeItem {
    // Matched by package.json "when" clauses to scope context-menu commands.
    contextValue = "onlyLocalDirectoryItem";
    iconPath = new vscode.ThemeIcon("folder-opened");

    /**
     * Tree item for a script directory that exists only in the local workspace.
     *
     * @param directory the local-only script directory to represent
     * @param collapse  when true the node starts collapsed, otherwise expanded
     */
    constructor(public directory: ILocalOnlyScript, collapse: boolean) {
        super("", collapse ? vscode.TreeItemCollapsibleState.Collapsed : vscode.TreeItemCollapsibleState.Expanded);
        // The label is left empty; the basename is shown via the description.
        this.description = `${path.basename(directory.path.fsPath)} (only local)`;
    }
}
|
package workspace_th.day06.ex1;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
public class HashMapEx01 {
    /**
     * HashMap demo: duplicate keys, entry iteration, value aggregation and
     * containsKey checks. Console output is unchanged from the original;
     * review fixes replaced raw collection types with generics and iterator
     * boilerplate with enhanced-for loops.
     */
    public static void main(String[] args) {
        // Keys are mixed (String and Integer), so Object is the key type.
        Map<Object, Object> map = new HashMap<>();
        map.put("kosa", "th2");
        map.put("bitcamp", "th129");
        map.put("Kosta", 1234);
        map.put("kosa", "th2"); // duplicate key: overwrites, size unchanged
        map.put(1, "th2");
        System.out.println(map.size());
        System.out.println(map);
        // Map keys are unique, so 4 entries are expected.
        for (Map.Entry<Object, Object> e : map.entrySet()) {
            System.out.println("key : " + e.getKey());
            System.out.println("val : " + e.getValue());
        }

        // Name -> height sample data.
        Map<String, Integer> map1 = new HashMap<>();
        map1.put("김김김", 170);
        map1.put("최초최", 171);
        map1.put("박박박", 178);
        map1.put("황황황", 174);
        map1.put("수수수", 164);
        System.out.println("-------------");
        for (Map.Entry<String, Integer> e : map1.entrySet()) {
            System.out.println("key : " + e.getKey());
            System.out.println("val : " + e.getValue());
        }
        System.out.println("__________________");
        Collection<Integer> values = map1.values();
        for (int height : values) {
            System.out.println(height);
        }
        System.out.println("가장 큰 val" + Collections.max(values));
        System.out.println("가장 작은 val" + Collections.min(values));
        if (map1.containsKey("박박박")) {
            System.out.println("1hello");
        } else {
            System.out.println("1none");
        }
        if (map1.containsKey("박박박1")) {
            System.out.println("2hello");
        } else {
            System.out.println("2none");
        }
    }
}
|
from abc import ABC, abstractmethod
class AbstractGenerator(ABC):
    """Interface for GAN generators.

    NOTE(review): ``generate`` is declared without parameters here, while the
    concrete ``InfoGANGenerator.generate`` takes ``input_noise`` — confirm the
    intended abstract signature.
    """

    @abstractmethod
    def generate(self):
        pass
class InfoGANGenerator(AbstractGenerator):
    """InfoGAN generator skeleton; the model itself is not implemented yet,
    so ``generate`` currently always returns ``None``."""

    def __init__(self, info_params):
        # info_params: presumably the InfoGAN latent-code configuration —
        # TODO confirm the expected keys.
        self.info_params = info_params
        # Initialize InfoGAN specific parameters and architecture here

    def generate(self, input_noise):
        # Implement the generation process specific to InfoGAN using input_noise and info_params
        generated_output = None # Placeholder for the generated output
        # Implement the generation process here
        return generated_output
# Example usage
# Review fix: the original called generate(input_noise) with input_noise
# undefined, raising NameError at import time. A placeholder noise value is
# supplied; the skeleton generator returns None regardless.
info_params = {'param1': 0.5, 'param2': 0.3}
info_gan_generator = InfoGANGenerator(info_params)
input_noise = None
generated_image = info_gan_generator.generate(input_noise)
import { ftType } from "../lib";
export class LessonParameters {
public samplingRate: number = 1024;
public duration: number = 5;
public stretch: number = 1;
public showWaves: boolean = false;
public type: ftType = ftType.FFT;
public absValues: boolean = false;
constructor(init?: Partial<LessonParameters>) {
init && Object.assign(this, init);
}
} |
<reponame>Datacket/Invado
import numpy as np
import tensorflow as tf
import pandas as pd
import random
import matplotlib.pyplot as plt
class DatasetSplit(object):
    """Iterates over one (x, y) split of a dataset in mini-batches.

    Review fix: the original generator seeded its offset with ``its * bs``
    (with ``its`` starting at 1), so the first ``bs`` samples were silently
    skipped and, for some sizes, the stop condition never matched. Batching
    now starts at 0 and yields every sample exactly once.
    """

    def __init__(self, x, y, bs):
        self.x = x
        self.y = y
        self.bs = bs  # mini-batch size
        # Legacy bookkeeping attributes, kept for backward compatibility.
        self.start_split = 0
        self.its = 1

    def get_next_batch(self):
        """Yield successive (x, y) batches of size ``bs`` (last may be short)."""
        start = 0
        total = len(self.x)
        while start < total:
            end = min(total, start + self.bs)
            # Mirror the legacy attributes for any external readers.
            self.start_split = start
            self.its += 1
            yield self.x[start:end], self.y[start:end]
            start = end
class Dataset(object):
    """Holds features/labels and partitions them into train/test splits.

    Review fix: the original ignored its ``split`` argument and hard-coded a
    90/10 partition; the first element of ``split`` is now honored (the
    default keeps the old behaviour). ``split`` defaults to a list literal
    but is never mutated, so the mutable-default pitfall does not apply.
    """

    x = None
    y = None

    def __init__(self, X, Y, bs, split=[0.9, 0.1]):
        self.x = X
        self.y = Y
        self.bs = bs
        train_fraction = split[0]
        cut_x = int(train_fraction * len(self.x))
        cut_y = int(train_fraction * len(self.y))
        self.train_x = self.x[:cut_x]
        self.train_y = self.y[:cut_y]
        self.test_x = self.x[cut_x:]
        self.test_y = self.y[cut_y:]

    def train(self):
        """Training split, batched by ``bs``."""
        return DatasetSplit(self.train_x, self.train_y, bs=self.bs)

    def test(self):
        """Test split as one full-size batch."""
        return DatasetSplit(self.test_x, self.test_y, bs=len(self.test_x))
class WildlifeCraziness(object):
    """Two-hidden-layer softmax classifier built on TF1 placeholders/sessions.

    NOTE(review): this uses the TensorFlow 1.x API (placeholder, Session,
    softmax_cross_entropy_with_logits_v2); it will not run under TF2 without
    tf.compat.v1.
    """

    def __init__(self, n_in, n_out, hid1=1024, hid2=512, learning_rate=0.001, n_epochs=10, batch_size=64, interval=5):
        self.n_in = n_in          # input feature dimension
        self.n_out = n_out        # number of output classes
        self.hid1 = hid1          # units in the first hidden layer
        self.hid2 = hid2          # units in the second hidden layer
        self.lr = learning_rate
        self.n_epochs = n_epochs
        self.bs = batch_size
        self.interval = interval  # epochs between (currently disabled) validation logs
        # Split = [0.9, 0.1]
        # NOTE(review): hard-coded training sample count — presumably matches
        # the CSV this was built for; confirm before reuse.
        self.total_samples = 9000

    def fit(self):
        """Build the graph, open a session and train for n_epochs."""
        self.x = tf.placeholder(tf.float32, shape=(None, self.n_in))
        self.y = tf.placeholder(tf.float32, shape=(None, self.n_out))
        # dense1
        # NOTE(review): name='matmul2' here is presumably a typo for 'matmul1'.
        self.W1 = tf.get_variable('weights1', shape=(self.n_in, self.hid1))
        self.b1 = tf.get_variable('bias1', shape=(self.hid1))
        self.dense1 = tf.nn.relu(tf.matmul(self.x, self.W1, name='matmul2') + self.b1)
        # dense2
        self.W2 = tf.get_variable('weights2', shape=(self.hid1, self.hid2))
        self.b2 = tf.get_variable('bias2', shape=(self.hid2))
        self.dense2 = tf.nn.relu(tf.matmul(self.dense1, self.W2, name='matmul2') + self.b2, name='relu2')
        # dense3 (output layer: raw logits, no activation)
        self.W3 = tf.get_variable('weights3', shape=(self.hid2, self.n_out))
        self.b3 = tf.get_variable('bias3', shape=(self.n_out))
        self.dense3 = tf.matmul(self.dense2, self.W3, name='matmul3') + self.b3
        # print(self.dense3.shape)
        self.loss = tf.nn.softmax_cross_entropy_with_logits_v2(labels=self.y, logits=self.dense3, name='softmax1')
        self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr, name='Adam1')
        self.optimizer = self.optimizer.minimize(self.loss)
        self.probs=tf.nn.softmax(self.dense3)
        self.sess = tf.Session()
        self.sess.run(tf.global_variables_initializer())
        for epoch in range(self.n_epochs):
            # Fresh batch generator per epoch; requires load_dataset() first.
            dataset_train = self.dataset.train().get_next_batch()
            for batch in range((self.total_samples // self.bs)):
                xs, ys = next(dataset_train)
                _, curr_loss = self.sess.run([self.optimizer, self.loss], feed_dict={self.x:xs, self.y:ys})
            print("Epoch #{}: {}".format(epoch, curr_loss))
        '''
        if epoch % self.interval == 0:
            loss = 'NO VALID DATASET'
            print ('Loss for Epoch ', epoch , ' is ', loss)
        '''

    def predict(self, test_x,one_hot_animals):
        """Return {animal_label: probability} sorted by descending probability.

        NOTE(review): flatten() assumes a single test sample — with multiple
        rows the class indices would interleave across samples; confirm.
        """
        # Predicting on test data
        probs = self.sess.run(self.probs, feed_dict={self.x:test_x})
        probs=probs.flatten().tolist()
        #print(probs)
        #print (np.array(probs).shape)
        d={}
        l=list(enumerate(probs))
        l=sorted(l,reverse=True,key=lambda x:x[1])
        d={one_hot_animals[i]:x for i,x in l}
        return d

    def close_session(self):
        """Release the TF session; the model is unusable afterwards."""
        self.sess.close()
        print ('Session Closed Successfully!')

    def load_dataset(self, x, y):
        """Attach the training data; must be called before fit()."""
        self.dataset = Dataset(x, y, self.bs)
# Dry run
'''
n_in = 25
n_out = 10
hid1 = 1024
hid2 = 512
learning_rate = 0.001
n_epochs = 1000
batch_size = 128
path = './data.csv'''
|
package test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.Test;
import stratego.BoardSetupMessage;
import stratego.Piece;
import stratego.Piece.PieceType;
/** Unit tests for {@code BoardSetupMessage} construction and accessors. */
public class BoardSetupMessageClassTest {
    /** Construction with a color and an empty 4x10 setup succeeds. */
    @Test
    public void constructerTest() {
        PieceType[][] pieceTypes = new PieceType[4][10];
        BoardSetupMessage boardSetupMessage = new BoardSetupMessage(Piece.RED, pieceTypes);
        assertTrue(boardSetupMessage instanceof BoardSetupMessage);
    }

    /** getColor() returns exactly the color passed to the constructor. */
    @Test
    public void getColorTest() {
        PieceType[][] pieceTypes = new PieceType[4][10];
        BoardSetupMessage boardSetupMessage = new BoardSetupMessage(Piece.RED, pieceTypes);
        assertEquals(boardSetupMessage.getColor(), Piece.RED);
        PieceType[][] pieceTypes2 = new PieceType[4][10];
        BoardSetupMessage boardSetupMessage2 = new BoardSetupMessage(Piece.BLUE, pieceTypes2);
        assertEquals(boardSetupMessage2.getColor(), Piece.BLUE);
    }

    /** getInitialSetup() returns the same array instance it was given (reference equality via assertEquals). */
    @Test
    public void getInitialSetupTest() {
        PieceType[][] pieceTypes = new PieceType[4][10];
        pieceTypes[0][0] = PieceType.FLAG;
        pieceTypes[1][1] = PieceType.GENERAL;
        pieceTypes[1][0] = PieceType.BOMB;
        pieceTypes[0][1] = PieceType.BOMB;
        BoardSetupMessage boardSetupMessage = new BoardSetupMessage(Piece.RED, pieceTypes);
        assertEquals(boardSetupMessage.getInitialSetup(), pieceTypes);
    }
}
|
#!/bin/bash
# Deploy the generated Hugo site to GitHub Pages via the /public submodule.
# Usage: ./deploy.sh ["commit message"]

# Abort on the first failing command so we never commit/push a broken build.
set -e

echo -e "\033[0;32mDeploying updates to GitHub...\033[0m"

# Stash uncommitted and untracked changes
git stash --all

# Remove the contents of the current /public folder
rm -rf ./public/*

# Generate the static site in the default /public folder.
hugo

# Add changes to the git submodule.
cd public
git checkout README.md
git add .

# Commit changes. Use the optional first argument as the message, otherwise
# a timestamped default ($(...) preferred over legacy backticks).
msg="Rebuilding site on $(date)"
if [ $# -eq 1 ]
then msg="$1"
fi
git commit -m "$msg"

# Push changes to origin.
git push origin master

# Come back up to the project's root.
cd ..

# Unstash
git stash pop
|
/**
 * Sum two values with the `+` operator (numeric addition or string
 * concatenation, exactly as JavaScript defines it).
 */
function add(a, b) {
  var sum = a + b;
  return sum;
}
/**
 * Divide a by b.
 * @throws {Error} with message '除数不能为零' when b is zero
 *         (message kept verbatim — callers/tests match on it).
 */
function divide(a, b) {
  // Loose equality retained on purpose: it also rejects '0' as the original did.
  if (b == 0) {
    // Fix: the original had an unreachable bare `return` after this throw.
    throw new Error('除数不能为零');
  }
  return a / b;
}
// Public module surface.
Object.assign(exports, { add: add, divide: divide });
package org.silentsoft.ui.component.text;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.JTextField;
import javax.swing.ListCellRenderer;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
/**
 * A {@link JTextField} with inline auto-completion: candidate strings are
 * shown in the popup of a hidden, zero-height {@link JComboBox} anchored
 * below the field and re-filtered (case-insensitive prefix match) as the
 * user types.
 */
public class AutoJTextField extends JTextField {

    private static final long serialVersionUID = -1721385295372597718L;

    /**
     * Client-property key used as a re-entrancy guard: set while this class
     * mutates the combo model or text programmatically so the combo's
     * ActionListener does not echo the change back into the field.
     */
    private static final String PROPERTY_ADJUST = "PROPERTY_ADJUST";

    /** @return true while a programmatic adjustment is in progress. */
    private boolean isAdjusting() {
        if (getClientProperty(PROPERTY_ADJUST) instanceof Boolean) {
            return (Boolean) getClientProperty(PROPERTY_ADJUST);
        }
        return false;
    }

    /** Toggles the re-entrancy guard (stored as a client property). */
    private void setAdjusting(boolean adjust) {
        putClientProperty(PROPERTY_ADJUST, adjust);
    }

    /**
     * Installs auto-completion on this field.
     *
     * @param items    full candidate list; filtered by lowercase prefix on
     *                 every document change
     * @param renderer optional renderer for the popup entries; may be null
     */
    @SuppressWarnings("unchecked")
    public void setAutoComplete(ArrayList<String> items, ListCellRenderer<? super String> renderer) {
        final DefaultComboBoxModel model = new DefaultComboBoxModel();
        // Zero preferred height: only the combo's popup is ever visible.
        final JComboBox<String> cbInput = new JComboBox<String>(model) {
            public Dimension getPreferredSize() {
                return new Dimension(super.getPreferredSize().width, 0);
            }
        };
        setAdjusting(false);
        for (String item : items) {
            model.addElement(item);
        }
        cbInput.setSelectedItem(null);
        // Copy a user-made popup selection into the text field — but not
        // when we are the ones mutating the model (guard above).
        cbInput.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                if (!isAdjusting()) {
                    if (cbInput.getSelectedItem() != null) {
                        setText(cbInput.getSelectedItem().toString());
                    }
                }
            }
        });
        if (renderer != null) {
            cbInput.setRenderer(renderer);
        }
        // Forward navigation keys to the hidden combo so its popup can be
        // driven from the text field. SPACE acts as ENTER while the popup
        // is open (accept current suggestion).
        this.addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                setAdjusting(true);
                if (e.getKeyCode() == KeyEvent.VK_SPACE) {
                    if (cbInput.isPopupVisible()) {
                        e.setKeyCode(KeyEvent.VK_ENTER);
                    }
                }
                if (e.getKeyCode() == KeyEvent.VK_ENTER
                        || e.getKeyCode() == KeyEvent.VK_UP
                        || e.getKeyCode() == KeyEvent.VK_DOWN) {
                    e.setSource(cbInput);
                    cbInput.dispatchEvent(e);
                    if (e.getKeyCode() == KeyEvent.VK_ENTER) {
                        // NOTE(review): assumes a selection exists on ENTER;
                        // getSelectedItem() could be null here — confirm.
                        setText(cbInput.getSelectedItem().toString());
                        cbInput.setPopupVisible(false);
                    }
                }
                if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
                    cbInput.setPopupVisible(false);
                }
                setAdjusting(false);
            }
        });
        // Re-filter the suggestion model on every text change and show the
        // popup only when there is at least one match.
        this.getDocument().addDocumentListener(new DocumentListener() {
            public void insertUpdate(DocumentEvent e) {
                updateList();
            }
            public void removeUpdate(DocumentEvent e) {
                updateList();
            }
            public void changedUpdate(DocumentEvent e) {
                updateList();
            }
            /** Rebuilds the model with items prefix-matching the current text. */
            private void updateList() {
                setAdjusting(true);
                model.removeAllElements();
                String input = getText();
                if (!input.isEmpty()) {
                    for (String item : items) {
                        if (item.toLowerCase().startsWith(input.toLowerCase())) {
                            model.addElement(item);
                        }
                    }
                }
                cbInput.hidePopup();
                cbInput.setPopupVisible(model.getSize() > 0);
                setAdjusting(false);
            }
        });
        this.setLayout(new BorderLayout());
        this.add(cbInput, BorderLayout.SOUTH);
    }
}
|
<reponame>tlranjan/my-apps
// AngularJS controller listing AWS/vSphere catalog requests.
//
// NOTE: each member below starts life as a loader function and is
// intentionally REPLACED by the response payload once its request resolves —
// the bound templates rely on this existing contract. Preserved as-is.
app.controller('all_requests', function($scope, $filter, $http) {
    var self = this;
    var jsonHeaders = { headers: { 'Content-Type': 'application/json' } };

    self.getallawsrequestmodel = function() {
        $http.get('/admin/user/getallawsrequestmodel', jsonHeaders)
            .then(function(res) {
                self.getallawsrequestmodel = res.data;
            });
    };

    self.getallawscatalogmodel = function() {
        $http.get('/admin/user/getallawscatalogmodel', jsonHeaders)
            .then(function(res) {
                self.getallawscatalogmodel = res.data;
            });
    };

    self.requestawscatalog = function(awsrequestmodel) {
        $http.post('/developer/user/requestawscatalog', awsrequestmodel, jsonHeaders)
            .then(function(res) {
                self.requestawscatalog = res.data;
                // Prepend the freshly created request to the loaded list.
                self.getallawsrequestmodel.unshift(self.requestawscatalog);
            });
    };

    self.getallvsphererequestmodel = function() {
        $http.get('/admin/user/getallvsphererequestmodel', jsonHeaders)
            .then(function(res) {
                self.getallvsphererequestmodel = res.data;
            });
    };

    self.getallvspherecatalogmodel = function() {
        $http.get('/admin/user/getallvspherecatalogmodel', jsonHeaders)
            .then(function(res) {
                self.getallvspherecatalogmodel = res.data;
            });
    };

    self.requestvspherecatalog = function(vsphererequestmodel) {
        $http.post('/developer/user/requestvspherecatalog', vsphererequestmodel, jsonHeaders)
            .then(function(res) {
                self.requestvspherecatalog = res.data;
                self.getallvsphererequestmodel.unshift(self.requestvspherecatalog);
            });
    };

    // Initial data loads.
    self.getallawsrequestmodel();
    self.getallawscatalogmodel();
    self.getallvsphererequestmodel();
    self.getallvspherecatalogmodel();
});
<filename>docussandra-domain/src/main/java/com/pearson/docussandra/domain/objects/QueryResponseWrapper.java
package com.pearson.docussandra.domain.objects;
import java.util.ArrayList;
import java.util.List;
/**
* Wrapper for returning queries. Contains metadata about the response in
* addition to the actual response.
*
* @author https://github.com/JeffreyDeYoung
*/
public class QueryResponseWrapper extends ArrayList<Document>
{

    /**
     * Explicit serial version id: this class is Serializable via ArrayList,
     * so pin the id rather than relying on the compiler-generated one
     * (which changes whenever the class shape changes).
     */
    private static final long serialVersionUID = 1L;

    /**
     * Number of additional results that exist. Null if there are additional
     * results, but the number is unknown.
     */
    private final Long numAdditionalResults;

    /**
     * Default constructor for JSON deserializing.
     */
    public QueryResponseWrapper()
    {
        this.numAdditionalResults = null;
    }

    /**
     * Constructor.
     *
     * @param responseData The actual response data.
     * @param numAdditionalResults Number of additional results that exist. Null
     * if there are additional results, but the number is unknown.
     */
    public QueryResponseWrapper(List<Document> responseData, Long numAdditionalResults)
    {
        super(responseData);
        this.numAdditionalResults = numAdditionalResults;
    }

    /**
     * Number of additional results that exist. Null if there are additional
     * results, but the number is unknown.
     *
     * @return the numAdditionalResults
     */
    public Long getNumAdditionalResults()
    {
        return numAdditionalResults;
    }
}
|
<reponame>mohamedkhairy/dhis2-android-sdk
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.testapp.event;
import org.hisp.dhis.android.core.arch.repositories.scope.RepositoryScope;
import org.hisp.dhis.android.core.common.BaseNameableObject;
import org.hisp.dhis.android.core.common.FeatureType;
import org.hisp.dhis.android.core.common.State;
import org.hisp.dhis.android.core.event.Event;
import org.hisp.dhis.android.core.event.EventCreateProjection;
import org.hisp.dhis.android.core.event.EventStatus;
import org.hisp.dhis.android.core.maintenance.D2Error;
import org.hisp.dhis.android.core.period.Period;
import org.hisp.dhis.android.core.utils.integration.mock.BaseMockIntegrationTestFullDispatcher;
import org.hisp.dhis.android.core.utils.runner.D2JunitRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static com.google.common.truth.Truth.assertThat;
/**
 * Mock-integration tests for the event collection repository. The fixture
 * dataset (loaded by {@code BaseMockIntegrationTestFullDispatcher}) contains
 * four events: "event1"/"event2" under enrollment "enroll1" and the
 * enrollment-less "single1"/"single2". All expected counts/orderings below
 * are relative to that fixture.
 */
@RunWith(D2JunitRunner.class)
public class EventCollectionRepositoryMockIntegrationShould extends BaseMockIntegrationTestFullDispatcher {

    // ---- Plain fetch ------------------------------------------------------

    @Test
    public void find_all() {
        List<Event> events =
                d2.eventModule().events()
                        .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    // ---- Filtering --------------------------------------------------------

    @Test
    public void filter_by_uid() {
        List<Event> events =
                d2.eventModule().events()
                        .byUid().eq("single1")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_enrollment() {
        List<Event> events =
                d2.eventModule().events()
                        .byEnrollmentUid().eq("enroll1")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_created() throws ParseException {
        List<Event> events =
                d2.eventModule().events()
                        .byCreated().eq(BaseNameableObject.DATE_FORMAT.parse("2017-08-07T15:47:25.959"))
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_last_updated() throws ParseException {
        List<Event> events =
                d2.eventModule().events()
                        .byLastUpdated().eq(BaseNameableObject.DATE_FORMAT.parse("2019-01-01T22:26:39.094"))
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_created_at_client() {
        // No fixture event has this client-side creation date: expect none.
        List<Event> events =
                d2.eventModule().events()
                        .byCreatedAtClient().eq("2018-02-28T00:00:00.000")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(0);
    }

    @Test
    public void filter_by_last_updated_at_client() {
        List<Event> events =
                d2.eventModule().events()
                        .byLastUpdatedAtClient().eq("2018-02-28T00:00:00.000")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(0);
    }

    @Test
    public void filter_by_status() {
        List<Event> events =
                d2.eventModule().events()
                        .byStatus().eq(EventStatus.ACTIVE)
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_geometry_type() {
        List<Event> events =
                d2.eventModule().events()
                        .byGeometryType().eq(FeatureType.POINT)
                        .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    @Test
    public void filter_by_geometry_coordinates() {
        List<Event> events =
                d2.eventModule().events()
                        .byGeometryCoordinates().eq("[21.0, 43.0]")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_program() {
        List<Event> events =
                d2.eventModule().events()
                        .byProgramUid().eq("lxAQ7Zs9VYR")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    @Test
    public void filter_by_program_stage() {
        List<Event> events =
                d2.eventModule().events()
                        .byProgramStageUid().eq("dBwrot7S420")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    @Test
    public void filter_by_organization_unit() {
        List<Event> events =
                d2.eventModule().events()
                        .byOrganisationUnitUid().eq("DiszpKrYNg8")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    @Test
    public void filter_by_event_date() throws ParseException {
        // Single-day period: start and end are the same instant.
        List<Period> periods = new ArrayList<>();
        periods.add(Period.builder()
                .startDate(BaseNameableObject.DATE_FORMAT.parse("2017-02-27T00:00:00.000"))
                .endDate(BaseNameableObject.DATE_FORMAT.parse("2017-02-27T00:00:00.000"))
                .build());
        List<Event> events =
                d2.eventModule().events()
                        .byEventDate().inPeriods(periods)
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_complete_date() throws ParseException {
        List<Event> events =
                d2.eventModule().events()
                        .byCompleteDate().eq(BaseNameableObject.DATE_FORMAT.parse("2016-02-27T14:34:00.000"))
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_due_date() throws ParseException {
        List<Event> events =
                d2.eventModule().events()
                        .byDueDate()
                        .afterOrEqual(BaseNameableObject.DATE_FORMAT.parse("2017-01-28T12:35:00.000"))
                        .blockingGet();
        assertThat(events.size()).isEqualTo(2);
    }

    @Test
    public void filter_by_state() {
        List<Event> events =
                d2.eventModule().events()
                        .byState().eq(State.SYNCED)
                        .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    @Test
    public void filter_by_attribute_option_combo() {
        List<Event> events =
                d2.eventModule().events()
                        .byAttributeOptionComboUid().eq("bRowv6yZOF2")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(2);
    }

    @Test
    public void filter_by_deleted() {
        List<Event> events =
                d2.eventModule().events()
                        .byDeleted().isFalse()
                        .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    @Test
    public void filter_by_tracked_entity_instance() {
        List<Event> events =
                d2.eventModule().events()
                        .byTrackedEntityInstanceUids(Collections.singletonList("nWrB0TfWlvh"))
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_data_value() {
        List<Event> events =
                d2.eventModule().events()
                        .byDataValue("hB9F8vKFmlk").lt("3843")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_follow_up() {
        List<Event> events =
                d2.eventModule().events()
                        .byFollowUp(true)
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    @Test
    public void filter_by_assigned_user() {
        List<Event> events =
                d2.eventModule().events()
                        .byAssignedUser().eq("aTwqot2S410")
                        .blockingGet();
        assertThat(events.size()).isEqualTo(1);
    }

    // ---- Counts -----------------------------------------------------------

    @Test
    public void count_tracked_entity_instances_unrestricted() {
        int count = d2.eventModule().events().countTrackedEntityInstances();
        assertThat(count).isEqualTo(2);
    }

    @Test
    public void count_tracked_entity_instances_restricted() {
        int count = d2.eventModule().events().byUid().eq("event1").countTrackedEntityInstances();
        assertThat(count).isEqualTo(1);
    }

    // ---- Eager-loaded children --------------------------------------------

    @Test
    public void include_tracked_entity_data_values_as_children() {
        Event event = d2.eventModule().events()
                .withTrackedEntityDataValues().uid("single1").blockingGet();
        assertThat(event.trackedEntityDataValues().size()).isEqualTo(6);
    }

    @Test
    public void include_notes_as_children() {
        Event event = d2.eventModule().events()
                .withNotes().uid("single1").blockingGet();
        assertThat(event.notes().size()).isEqualTo(2);
    }

    // ---- Ordering ---------------------------------------------------------

    @Test
    public void order_by_due_date() {
        List<Event> events = d2.eventModule().events()
                .orderByDueDate(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event1");
        assertThat(events.get(1).uid()).isEqualTo("event2");
        assertThat(events.get(2).uid()).isEqualTo("single1");
        assertThat(events.get(3).uid()).isEqualTo("single2");
    }

    @Test
    public void order_by_created() {
        List<Event> events = d2.eventModule().events()
                .orderByCreated(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event1");
        assertThat(events.get(1).uid()).isEqualTo("event2");
        assertThat(events.get(2).uid()).isEqualTo("single1");
        assertThat(events.get(3).uid()).isEqualTo("single2");
    }

    @Test
    public void order_by_created_at_client() {
        List<Event> events = d2.eventModule().events()
                .orderByCreatedAtClient(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event1");
        assertThat(events.get(1).uid()).isEqualTo("event2");
        assertThat(events.get(2).uid()).isEqualTo("single1");
        assertThat(events.get(3).uid()).isEqualTo("single2");
    }

    @Test
    public void order_by_last_updated() {
        List<Event> events = d2.eventModule().events()
                .orderByLastUpdated(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event1");
        assertThat(events.get(1).uid()).isEqualTo("event2");
        assertThat(events.get(2).uid()).isEqualTo("single2");
        assertThat(events.get(3).uid()).isEqualTo("single1");
    }

    @Test
    public void order_by_last_updated_at_client() {
        List<Event> events = d2.eventModule().events()
                .orderByLastUpdatedAtClient(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event1");
        assertThat(events.get(1).uid()).isEqualTo("event2");
        assertThat(events.get(2).uid()).isEqualTo("single1");
        assertThat(events.get(3).uid()).isEqualTo("single2");
    }

    @Test
    public void order_by_event_date_and_last_updated() {
        List<Event> events = d2.eventModule().events()
                .orderByEventDate(RepositoryScope.OrderByDirection.ASC)
                .orderByLastUpdated(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event2");
        assertThat(events.get(1).uid()).isEqualTo("event1");
        assertThat(events.get(2).uid()).isEqualTo("single2");
        assertThat(events.get(3).uid()).isEqualTo("single1");
    }

    @Test
    public void order_by_complete_date() {
        List<Event> events = d2.eventModule().events()
                .orderByCompleteDate(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event2");
        assertThat(events.get(1).uid()).isEqualTo("single1");
        assertThat(events.get(2).uid()).isEqualTo("single2");
        assertThat(events.get(3).uid()).isEqualTo("event1");
    }

    @Test
    public void order_by_organisation_unit_name() {
        List<Event> events = d2.eventModule().events()
                .orderByOrganisationUnitName(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.size()).isEqualTo(4);
    }

    @Test
    public void order_by_timeline() {
        List<Event> events = d2.eventModule().events()
                .orderByTimeline(RepositoryScope.OrderByDirection.ASC)
                .blockingGet();
        assertThat(events.get(0).uid()).isEqualTo("event1"); // eventDate
        assertThat(events.get(1).uid()).isEqualTo("event2"); // dueDate
        assertThat(events.get(2).uid()).isEqualTo("single2"); // eventDate
        assertThat(events.get(3).uid()).isEqualTo("single1"); // eventDate
    }

    @Test
    public void order_by_data_element() {
        List<Event> events = d2.eventModule().events()
                .byEnrollmentUid().isNull()
                .orderByDataElement(RepositoryScope.OrderByDirection.DESC, "hB9F8vKFmlk")
                .withTrackedEntityDataValues()
                .blockingGet();
        assertThat(events.size()).isEqualTo(2);
        assertThat(events.get(0).uid()).isEqualTo("single2"); // 3843
        assertThat(events.get(1).uid()).isEqualTo("single1"); // 3842
    }

    // ---- Write path -------------------------------------------------------

    @Test
    public void add_events_to_the_repository() throws D2Error {
        List<Event> events1 = d2.eventModule().events().blockingGet();
        assertThat(events1.size()).isEqualTo(4);
        String eventUid = d2.eventModule().events().blockingAdd(
                EventCreateProjection.create("enroll1", "lxAQ7Zs9VYR", "dBwrot7S420",
                        "DiszpKrYNg8", "bRowv6yZOF2"));
        List<Event> events2 = d2.eventModule().events().blockingGet();
        assertThat(events2.size()).isEqualTo(5);
        Event event = d2.eventModule().events().uid(eventUid).blockingGet();
        assertThat(event.uid()).isEqualTo(eventUid);
        // Clean up so other tests still see exactly four fixture events.
        d2.eventModule().events().uid(eventUid).blockingDelete();
    }
}
import random
import numpy as np
from deap import base
from deap import creator
from deap import tools
# Fitness with TWO maximised objectives — (student_score, teacher_score) —
# matching the 2-tuple returned by evaluateIndividual. The original single
# weight (1.0,) made DEAP raise when a 2-value fitness was assigned.
creator.create("FitnessMax", base.Fitness, weights=(1.0, 1.0))
# Individuals are plain lists carrying a FitnessMax.
creator.create("Individual", list, fitness=creator.FitnessMax)
# Toolbox holds the registered attribute generators and operators.
toolbox = base.Toolbox()
# Genes are uniform floats in [0, 1).
toolbox.register("attr_float", random.random)
# Create the structure with two classes, Student and Teacher
def createStructure(student_attributes, teacher_attributes):
    """Concatenate student then teacher attributes into one flat list.

    Accepts any iterables; returns a new list (inputs are not mutated).
    """
    return list(student_attributes) + list(teacher_attributes)
# Create the individual: 4 genes (2 student + 2 teacher attributes).
# Bug fix: initIterate expects a generator returning a whole iterable, so
# pairing it with the float-returning attr_float raised a TypeError; the
# correct initializer for repeated single-attribute draws is initRepeat.
toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, n=4)
# Set the evaluation function
def evaluateIndividual(individual):
    """Score an individual as ``(student_score, teacher_score)``.

    The first half of the gene list (rounded up for odd lengths, matching
    the original ``i < len/2`` split) encodes the student attributes, the
    remainder the teacher attributes; each score is the plain sum of its
    half.
    """
    half = (len(individual) + 1) // 2  # ceil(len/2), same split as before
    return sum(individual[:half]), sum(individual[half:])
# Create the population
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
# Bug fix: "evaluate" was never registered, so every toolbox.evaluate call
# in main() raised AttributeError.
toolbox.register("evaluate", evaluateIndividual)
# Create the genetic operators
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutGaussian, mu=0.0, sigma=1.0, indpb=0.2)
toolbox.register("select", tools.selTournament, tournsize=3)
# Apply the evolutionary algorithm
def main():
    """Run a simple generational GA for NGEN generations and print the best.

    (The original also built an unused `structure` from throw-away student
    and teacher attribute draws; that dead code has been removed.)
    """
    pop = toolbox.population(n=50)
    CXPB, MUTPB, NGEN = 0.5, 0.2, 40

    # Evaluate the initial population.
    fitnesses = list(map(toolbox.evaluate, pop))
    for ind, fit in zip(pop, fitnesses):
        ind.fitness.values = fit

    for g in range(NGEN):
        # Selection, then clone so operators mutate copies only.
        offspring = toolbox.select(pop, len(pop))
        offspring = list(map(toolbox.clone, offspring))

        # Crossover on consecutive pairs; invalidate touched fitnesses.
        for child1, child2 in zip(offspring[::2], offspring[1::2]):
            if random.random() < CXPB:
                toolbox.mate(child1, child2)
                del child1.fitness.values
                del child2.fitness.values

        # Mutation.
        for mutant in offspring:
            if random.random() < MUTPB:
                toolbox.mutate(mutant)
                del mutant.fitness.values

        # Re-evaluate only the individuals whose fitness was invalidated.
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

        # Full generational replacement.
        pop[:] = offspring

    best_ind = tools.selBest(pop, 1)[0]
    print(f"Best individual is {best_ind}, Fitness: {best_ind.fitness.values}")


if __name__ == "__main__":
    main()
<gh_stars>0
// Application bootstrap: instantiates every page module and stores each on
// `document` so other scripts can reach them globally. Runs on DOM ready.
var core = function() {
    this.init()
};
core.prototype = {
    init: function() {
        this._run()
    },
    _run: function() {
        // Minified comma-chain: one module instance per page feature.
        document.imgSvgReplacer = new imgSvgReplacer, document.intro = new intro, document.slider = new slider, document.odometerInit = new odometerInit, document.tabs = new tabs, document.responsive = new responsive, document.contactMap = new contactMap, document.header = new header, document.menu = new menu
    }
}, $(document).ready(function() {
    new core
});
// Replaces every <img class="svg"> with the inline <svg> document it points
// to, so the graphic becomes styleable with CSS. (!0/!1 are minified
// true/false throughout this file.)
var imgSvgReplacer = function() {
    this.init()
};
imgSvgReplacer.prototype = {
    init: function() {
        this._setVars() && this._replace()
    },
    _setVars: function() {
        // Collect candidate images; bail out if the lookup failed.
        return this.vectorImage = $("img.svg"), "undefined" == typeof this.vectorImage ? !1 : !0
    },
    _replace: function() {
        this.vectorImage.each(function() {
            var img = $(this),
                imgID = img.attr("id"),
                imgClass = img.attr("class"),
                imgSrc = img.attr("src");
            // Fetch the SVG as XML, carry over id/class (plus a marker
            // class), strip the xmlns:a attribute, and swap it in.
            $.get(imgSrc, function(data) {
                var svg = $(data).find("svg");
                "undefined" != typeof imgID && (svg = svg.attr("id", imgID)), "undefined" != typeof imgClass && (svg = svg.attr("class", imgClass + " svgReplaced")), svg = svg.removeAttr("xmlns:a"), img.replaceWith(svg)
            }, "xml")
        })
    }
};
// Intro section: full-width background video with scroll parallax, a
// scroll-down button and a play/pause control.
var intro = function() {
    this.init()
};
intro.prototype = {
    init: function() {
        this._setVars() && (this._setEvents(), this._initParallax())
    },
    _setVars: function() {
        // Nested ternaries = minified guard chain: each required element is
        // looked up in turn; any miss aborts initialisation (!1 = false).
        return this._container = $(".jsIntroContainer"), this._container ? (this._video = $(".jsIntroVideo"), this._video ? (this._videoContainer = $(".jsIntroVideoContainer"), this._videoContainer ? (this._scroll = $(".jsIntroScroll"), this._scroll ? (this._control = $(".jsIntroControl"), this._control ? (this._window = $(window), this._window ? (this._body = $("body"), this._body ? (this._header = $("header"), this._header ? (this.countIterationsAfterScroll = 0, this.videoStatus = 1, !0) : !1) : !1) : !1) : !1) : !1) : !1) : !1) : !1
    },
    _setEvents: function() {
        var $this = this;
        this._scroll.on("click", function(e) {
            e.preventDefault(), $this._scrollDown()
        }), this._control.on("click", function(e) {
            e.preventDefault(), $this._playToggle()
        }), $(document).on("scroll", function() {
            $this.countIterationsAfterScroll = 0, $this._videoParallax()
        })
    },
    _initParallax: function() {
        var $this = this;
        // Size the container to the video once its metadata is available.
        this._video.get(0) && (this._video.get(0).onloadedmetadata = function() {
            $this._container.height($this._video.height())
        })
    },
    _videoParallax: function() {
        // Move the video at half scroll speed; pause playback (via toggle)
        // once the intro has been scrolled out of view.
        var bodyScrollTop = parseInt(this._window.scrollTop());
        this._videoContainer.css("top", -(bodyScrollTop / 2)), 1 == this.videoStatus && bodyScrollTop > this._container.height() && this._playToggle()
    },
    _scrollDown: function() {
        var containerHeight = this._container.height();
        $("html, body").animate({
            scrollTop: containerHeight
        }, 1e3, "swing")
    },
    _playToggle: function() {
        // Flip play/pause, swap the control's CSS class, track videoStatus.
        this._video.text() && (this._video.get(0).paused ? (this._video.get(0).play(), this._control.removeClass("play"), this._control.addClass("pause"), this.videoStatus = 1) : (this._video.get(0).pause(), this._control.removeClass("pause"), this._control.addClass("play"), this.videoStatus = 0))
    }
};
// Page sliders (bxSlider): main two-up carousel, Twitter feed, vertical
// testimonials and gallery. Breakpoint tweaks at <= 750px.
var slider = function() {
    this.init()
};
slider.prototype = {
    init: function() {
        this._setVars() && this._runSliders()
    },
    _setVars: function() {
        return this._sliderMain = $(".jsSliderContainer"), this._sliderMainControlsName = ".slider .bx-controls .bx-pager .bx-pager-item", this._sliderTwitter = $(".jsSliderTwitter"), this._sliderTestimonials = $(".jsSliderTestimonials"), this._sliderTestimonialsName = ".jsSliderTestimonials", this._sliderTestimonialsWrapperName = ".jsSliderTestimonialsWrapper", this.singleSlidePrecentage = 0, this.sliderObject = null, this.sliderTestimonialsObject = null, this._sliderGallery = $(".jsSliderGallery"), !0
    },
    _runSliders: function() {
        var $this = this;
        if (this._sliderMain.length > 1) {
            // Two slides side-by-side on desktop, one on narrow screens.
            var slidesCount = 2;
            $(window).width() <= 750 && (slidesCount = 1), this.sliderObject = this._sliderMain.bxSlider({
                minSlides: slidesCount,
                maxSlides: slidesCount,
                moveSlides: 1,
                slideWidth: 610,
                onSliderLoad: function() {
                    // Spread pager items evenly across the full width.
                    var countSlider = parseInt($this._sliderMain.getSlideCount());
                    countSlider > 0 && ($this.singleSlidePrecentage = 100 / countSlider, $($this._sliderMainControlsName).width($this.singleSlidePrecentage + "%"))
                }
            })
        }
        if (this._sliderTwitter && this._sliderTwitter.bxSlider(), this._sliderTestimonials) {
            // Touch scrolling disabled on small screens for the vertical slider.
            var sliderTestimonialsTouchScroll = !0;
            $(window).width() <= 750 && (sliderTestimonialsTouchScroll = !1), this.sliderTestimonialsObject = this._sliderTestimonials.bxSlider({
                mode: "vertical",
                slideMargin: 1,
                touchEnabled: sliderTestimonialsTouchScroll,
                onSliderLoad: function() {
                    // Vertically centre the pager next to the slider.
                    var pagerHeight = parseInt($($this._sliderTestimonialsWrapperName).find(".bx-pager").innerHeight());
                    pagerHeight > 0 && $($this._sliderTestimonialsWrapperName).find(".bx-pager").css("margin-top", -(pagerHeight / 2))
                }
            })
        }
        this._sliderGallery && this._sliderGallery.bxSlider()
    }
};
// Animated number counters: each .jsOdometer starts at a power-of-ten
// placeholder and flips to its real value when scrolled into view.
// Disabled entirely on small screens.
var odometerInit = function() {
    this.init()
};
odometerInit.prototype = {
    _inited: !1,
    init: function() {
        var $this = this;
        // Poll scroll position every 200ms instead of binding to the scroll
        // event directly.
        this._setVars() && (this._initAllOdometer(), setInterval(function() {
            var scroll = $(document).scrollTop();
            $this.onScroll(scroll)
        }, 200))
    },
    _setVars: function() {
        return $(window).width() <= 750 ? !1 : (this._odometers = $(".jsOdometer"), this._odometers.length ? (this._inited = !0, !0) : !1)
    },
    _initAllOdometer: function() {
        var $this = this;
        this._odometers.each(function() {
            $this._initOdometer($(this))
        })
    },
    _initOdometer: function(obj) {
        // Remember the real value in data-def-val, then display a
        // same-digit-count placeholder (10^(digits-1), or 0 for one digit).
        var val = obj.text(),
            vl = val.length;
        obj.attr("data-def-val", val);
        var nVal = Math.pow(10, vl - 1);
        1 == nVal && (nVal = 0), obj.attr("data-null-val", nVal), obj.text(nVal)
    },
    _checkAllOdometer: function(scrollBot) {
        var $this = this;
        this._odometers.each(function() {
            $this._checkOdometer($(this), scrollBot)
        })
    },
    _checkOdometer: function(obj, scrollBot) {
        // Once the counter is above the viewport bottom, restore the real
        // value and stop further checks (_inited flag).
        var objTop = obj.offset().top;
        if (scrollBot > objTop) {
            var oldVal = obj.text(),
                newVal = obj.attr("data-def-val");
            newVal != oldVal && (obj.text(newVal), this._inited = !1)
        }
    },
    onScroll: function(scrollTop) {
        if (this._inited) {
            var scrollBot = scrollTop + $(window).height();
            this._checkAllOdometer(scrollBot)
        }
    }
};
// Horizontal scrollable tab strip: tab buttons, sliding highlight "belt",
// left/right arrows for overflowing headers, and height-synced content panes.
var tabs = function() {
    this.init()
};
tabs.prototype = {
    init: function() {
        this._setVars() && (this._setEvents(), this._initHeight())
    },
    _setVars: function() {
        // Minified guard chain: every required element must resolve or the
        // whole component stays inert.
        return this._button = $(".jsTabsButton"), this._button ? (this._content = $(".jsTabsContent"), this._content ? (this._contentParent = this._content.parent(".tabsContent"), this._contentParent ? (this._belt = $(".jsTabsBelt"), this._belt ? (this._headerContent = $(".jsTabsHeaderContent"), this._headerContent ? (this._header = this._headerContent.parent(), this._header ? (this._arrows = $(".jsTabArrows"), this._arrows ? (this._arrowLeft = this._arrows.find(".tabsArrowLeft"), this._arrowLeft ? (this._arrowRight = this._arrows.find(".tabsArrowRight"), this._arrowRight ? !0 : !1) : !1) : !1) : !1) : !1) : !1) : !1) : !1) : !1
    },
    _setEvents: function() {
        var $this = this;
        this._button.on("click", function(e) {
            e.preventDefault(), $this._tabToggle($(this))
        }), this._arrowLeft.on("click", function(e) {
            e.preventDefault(), $this._tabsMoveLeft()
        }), this._arrowRight.on("click", function(e) {
            e.preventDefault(), $this._tabsMoveRight()
        })
    },
    _initHeight: function() {
        // Size the content wrapper to the initially active pane.
        this._contentParent.height(this._contentParent.find(".active").height())
    },
    _tabToggle: function(obj) {
        // Activate the clicked tab, show the matching pane (by data-tab-id),
        // resize the wrapper, then slide the belt under the active button.
        var $this = this;
        this._button.removeClass("active"), this._content.removeClass("active"), obj.addClass("active");
        var tabId = obj.attr("data-tab-id");
        this._content.each(function() {
            $(this).attr("data-tab-id") == tabId && ($(this).addClass("active"), $this._contentParent.height($(this).height()))
        });
        var position = obj.offset(),
            parentPosition = obj.parent().offset(),
            offset = {
                top: position.top - parentPosition.top,
                left: position.left - parentPosition.left
            }, leftOffset = offset.left + parseInt(this._headerContent.css("left"));
        this._belt.animate({
            left: leftOffset
        }, 300, "swing")
    },
    regenerateBelt: function(slideLeftWidth) {
        // Re-position the belt after the header strip has been scrolled.
        // NOTE(review): `obj` is assigned without var — an implicit global.
        obj = $(".jsTabsButton.active");
        var jstc = $(".jsTabsContent"),
            jstb = $(".jsTabsBelt"),
            position = obj.offset(),
            parentPosition = obj.parent().offset(),
            offset = {
                top: position.top - parentPosition.top,
                left: position.left - parentPosition.left
            }, leftOffset = offset.left + parseInt(slideLeftWidth);
        0 > leftOffset && (leftOffset = 0), leftOffset > jstc.width() && (leftOffset = jstc.width() - 60), jstb.animate({
            left: leftOffset
        }, 300, "swing")
    },
    _tabsMoveLeft: function() {
        // Scroll the header strip one "page" left by accumulating negative
        // button widths until the current offset is passed.
        var containerLeft = parseInt(this._headerContent.css("left"));
        if (0 >= containerLeft) {
            var slideLeftWidth = 0,
                tmpContainerWidth = 0;
            this._button.each(function() {
                tmpContainerWidth += -1 * ($(this).width() + parseInt($(this).css("margin-right"))), tmpContainerWidth > containerLeft && (slideLeftWidth = tmpContainerWidth)
            }), this._headerContent.css("left", slideLeftWidth), this.regenerateBelt(slideLeftWidth)
        }
    },
    _tabsMoveRight: function() {
        // Scroll right, clamped so the strip never over-scrolls past the
        // last button.
        var containerLeft = parseInt(this._headerContent.css("left")),
            containerWidth = 0;
        if (this._button.each(function() {
            containerWidth += $(this).width() + parseInt($(this).css("margin-right"))
        }), containerLeft >= -1 * (containerWidth - this._contentParent.width())) {
            var slideLeftWidth = 0,
                tmpContainerWidth = 0;
            this._button.each(function() {
                tmpContainerWidth += -1 * ($(this).width() + parseInt($(this).css("margin-right"))), containerLeft > tmpContainerWidth && !slideLeftWidth && (slideLeftWidth = tmpContainerWidth)
            }), this._headerContent.css("left", slideLeftWidth), this.regenerateBelt(slideLeftWidth)
        }
    }
};
// Responsive behaviour controller: re-lays out sliders, tabs, the intro
// video and the sidebar background on window resize, and drives the
// mobile menu toggle. 750px is the mobile/desktop breakpoint throughout.
var responsive = function() {
this.init()
};
responsive.prototype = {
init: function() {
var $this = this;
// _setVars() always returns true; wiring happens once on startup.
// The background is regenerated after 100ms, the video resized 1s
// after the video element is ready.
this._setVars() && (this._setEvents(), this._onResize(), this._videoVisibility(), setTimeout(function() {
$this._regenerateBg()
}, 100), this._video.ready(function() {
setTimeout(function() {
$this._resizeVideo()
}, 1e3)
}))
},
// Caches every jQuery lookup used by the other methods; always returns !0.
// NOTE(review): this._tabsContentParent is assigned twice (first with
// .parent(".content"), then with .parent(".tabsContent")); the second
// assignment wins — confirm the first one is dead code.
_setVars: function() {
return this._window = $(window), this._body = $("body"), this._video = $(".jsIntroVideo"), this._videoContainer = $(".jsIntroVideoContainer"), this._videoSource = $(".jsIntroVideo source"), this._videoTmpUrl = $(".jsIntroVideo source").attr("src"), this._introContainer = $(".jsIntroContainer"), this._sliderMainControlsName = ".slider .bx-controls .bx-pager .bx-pager-item", this._slidesCount = 2, this._sliderTestimonials = $(".jsSliderTestimonials"), this._sliderTestimonialsWrapperName = ".jsSliderTestimonialsWrapper", this._tabsContentParent = $(".jsTabsContent").parent(".content"), this._grayBg = $(".rightGrayBg"), this._orangeBg = $(".rightOrangeBg"), this._violetBg = $(".rightVioletBg"), this._sidebar = $(".sidebar"), this._bgWrapper = $(".bgWrapper"), this._responsiveMenuButton = $(".jsResponsiveMenuButton"), this._responsiveMenuContainer = $(".jsResponsiveMenuContainer"), this._tabsContentParent = $(".jsTabsContent").parent(".tabsContent"), !0
},
_setEvents: function() {
var $this = this;
this._responsiveMenuButton.on("click", function(e) {
e.preventDefault(), $this._menuToggle()
})
},
// Slide the mobile menu open/closed and mirror the state on the button.
_menuToggle: function() {
this._responsiveMenuContainer.is(":visible") ? (this._responsiveMenuContainer.slideUp(), this._responsiveMenuButton.removeClass("active")) : (this._responsiveMenuContainer.slideDown(), this._responsiveMenuButton.addClass("active"))
},
_onResize: function() {
var $this = this;
this._window.on("resize", function() {
$this._runResponsive()
})
},
// Master resize handler. Relies on the globals document.tabs and
// document.slider — presumably assigned during page init elsewhere in
// this file; TODO confirm.
_runResponsive: function() {
this._regenerateSliderControls(), this._regenerateSliderTocuhScroll(), this._regenerateSliderItemsOnScreen(), this._resizeTabs(), this._resizeVideo(), this._regenerateBg(), this._regenerateHeader(), this._regenerateTabs(), document.tabs.regenerateBelt($(".jsTabsHeaderContent").css("left"))
},
// Stretch the sidebar background wrapper from the sidebar's left edge to
// the window's right edge, matching the height of whichever colored
// background element is present (.text() is used as an existence check;
// violet wins over orange wins over gray when several exist).
_regenerateBg: function() {
if ((this._grayBg.text() || this._orangeBg.text() || this._violetBg.text()) && this._sidebar.text() && this._bgWrapper.is(":visible")) {
if (this._grayBg.text()) var bgWrapper = this._grayBg;
if (this._orangeBg.text()) var bgWrapper = this._orangeBg;
if (this._violetBg.text()) var bgWrapper = this._violetBg;
var bgHeight = bgWrapper.outerHeight(),
sidebarOffset = this._sidebar.offset(),
sidebarWidth = this._window.width() - sidebarOffset.left;
this._bgWrapper.css({
left: sidebarOffset.left,
width: sidebarWidth,
height: bgHeight
})
}
},
// Let the active tab pane take its natural height, then size the wrapper
// to it.
_regenerateTabs: function() {
if (this._tabsContentParent.text()) {
var tabActive = this._tabsContentParent.find(".active");
tabActive.css("height", "auto"), this._tabsContentParent.height(tabActive.height())
}
},
// On small screens: strip the video source and autoplay so mobile
// devices do not download/play the intro video.
// NOTE(review): removeAttr() takes a single attribute name; the second
// argument here is ignored by jQuery.
_videoVisibility: function() {
this._video.text() && this._window.width() <= 750 && (this._videoSource.attr("src", ""), this._video.removeAttr("autoplay", ""))
},
_regenerateHeader: function() {
this._window.width() <= 750 ? this._body.removeClass("fixedHeader") : this._responsiveMenuContainer.show()
},
// Resize the main slider's pager items to the per-slide percentage
// computed by the slider object (deferred 50ms so bxSlider settles).
_regenerateSliderControls: function() {
var $this = this;
$($this._sliderMainControlsName).text() && setTimeout(function() {
document.slider.singleSlidePrecentage > 0 && $($this._sliderMainControlsName).width(document.slider.singleSlidePrecentage + "%")
}, 50)
},
// Reload the testimonials slider with touch scrolling disabled on small
// screens, re-centering its pager vertically once loaded.
_regenerateSliderTocuhScroll: function() {
var $this = this;
if (this._sliderTestimonials.text() && document.slider.sliderTestimonialsObject.text()) {
var sliderTestimonialsTouchScroll = !0;
$(window).width() <= 750 && (sliderTestimonialsTouchScroll = !1), document.slider.sliderTestimonialsObject.reloadSlider({
mode: "vertical",
slideMargin: 1,
touchEnabled: sliderTestimonialsTouchScroll,
onSliderLoad: function() {
var pagerHeight = parseInt($($this._sliderTestimonialsWrapperName).find(".bx-pager").innerHeight());
pagerHeight > 0 && $($this._sliderTestimonialsWrapperName).find(".bx-pager").css("margin-top", -(pagerHeight / 2))
}
})
}
},
_resizeTabs: function() {
this._tabsContentParent.text() && this._tabsContentParent.height(this._tabsContentParent.find(".active").height())
},
_resizeVideo: function() {
this._introContainer.text() && (this._window.width() > 750 ? (this._introContainer.height(this._videoContainer.height()), this._video.width(this._videoContainer.width() >= 1700 ? this._videoContainer.width() : "auto")) : (this._introContainer.height("auto"), this._video.width("auto")))
},
// Switch the main slider between 1 slide (mobile) and 2 slides (desktop)
// per screen, reloading only when the count actually changes.
_regenerateSliderItemsOnScreen: function() {
var $this = this;
if (document.slider.sliderObject) {
var oldSlidesCount = this._slidesCount;
this._slidesCount = $(window).width() <= 750 ? 1 : 2, this._slidesCount != oldSlidesCount && document.slider.sliderObject.reloadSlider({
minSlides: $this._slidesCount,
maxSlides: $this._slidesCount,
moveSlides: 1,
slideWidth: 610
})
}
}
};
// Renders a styled Google Map with a single drop-animated marker for
// each ".jsContactMap" element, reading zoom/lat/lng from data-attributes.
var contactMap = function() {
this.init()
};
contactMap.prototype = {
init: function() {
var $this = this;
// Bail out silently when the Google Maps API is not loaded.
if ("undefined" != typeof google && "undefined" != typeof google.maps) {
if (this._maps = $(".jsContactMap"), !this._maps.length) return !1;
// Custom marker image; the Point is the marker's anchor offset.
this._pin_s = new google.maps.MarkerImage("/themes/dinebydesign/images/marker_map.png", null, null, new google.maps.Point(18, 47));
// data-zoom is optional and defaults to 15.
var zoom = this._maps.data("zoom");
zoom = void 0 == zoom ? 15 : parseInt(zoom);
var latString = this._maps.data("lat"),
lngString = this._maps.data("lng"),
latlng = new google.maps.LatLng(parseFloat(latString), parseFloat(lngString));
$this._initMap(this._maps, latlng, zoom)
}
},
// Creates the map (desaturated custom style, zoom control only, no
// dragging/scrollwheel) and drops the marker at `latlng`.
_initMap: function(mapObj, latlng, zoom) {
{
var style = [{
featureType: "administrative",
elementType: "labels.text.fill",
stylers: [{
color: "#444444"
}]
}, {
featureType: "landscape",
elementType: "all",
stylers: [{
color: "#f2f2f2"
}]
}, {
featureType: "poi",
elementType: "all",
stylers: [{
visibility: "off"
}]
}, {
featureType: "road",
elementType: "all",
stylers: [{
saturation: -100
}, {
lightness: 45
}]
}, {
featureType: "road.highway",
elementType: "all",
stylers: [{
visibility: "simplified"
}]
}, {
featureType: "road.arterial",
elementType: "labels.icon",
stylers: [{
visibility: "off"
}]
}, {
featureType: "transit",
elementType: "all",
stylers: [{
visibility: "off"
}]
}, {
featureType: "water",
elementType: "all",
stylers: [{
color: "#425a68"
}, {
visibility: "on"
}]
}],
_mapOptions = {
zoom: zoom,
center: latlng,
mapTypeId: google.maps.MapTypeId.ROADMAP,
panControl: !1,
zoomControl: !0,
zoomControlOptions: {
style: google.maps.ZoomControlStyle.SMALL,
position: google.maps.ControlPosition.LEFT_TOP
},
mapTypeControl: !1,
scaleControl: !1,
streetViewControl: !1,
overviewMapControl: !1,
scrollwheel: !1,
draggable: !1,
styles: style
}, _map = new google.maps.Map(mapObj[0], _mapOptions);
new google.maps.Marker({
position: latlng,
map: _map,
icon: this._pin_s,
animation: google.maps.Animation.DROP
})
}
}
};
// Toggles a "fixedHeader" class on <body> once the page scrolls past the
// header height (69px); active on desktop widths (> 750px) only.
var header = function() {
    this.init()
};
header.prototype = {
    init: function() {
        if (this._setVars()) {
            this._setEvents();
        }
    },
    _setVars: function() {
        this._window = $(window);
        this._body = $("body");
        return true;
    },
    _setEvents: function() {
        var self = this;
        $(document).on("scroll", function() {
            self._fixedHeader();
        });
    },
    _fixedHeader: function() {
        if (this._window.width() > 750) {
            var scrolled = this._window.scrollTop();
            if (scrolled >= 69) {
                this._body.addClass("fixedHeader");
            } else {
                this._body.removeClass("fixedHeader");
            }
        }
    }
};
// Builds one menuInstance per ".jsMenuOpen" trigger found in the DOM.
var menu = function() {
    this.init()
};
menu.prototype = {
    init: function() {
        this._setVars();
    },
    _setVars: function() {
        this._tabs = $(".jsMenuOpen");
        if (!this._tabs.length) {
            return false;
        }
        var self = this,
            idx = 0;
        this._instances = [];
        this._tabs.each(function() {
            self._instances[idx] = new menuInstance($(this));
            idx++;
        });
        return true;
    },
    // Drops the old instances and rebuilds them (e.g. after DOM changes).
    refresh: function() {
        if (typeof this._instances != "undefined") {
            this._instances = null;
        }
        this.init();
    }
};
// One expandable submenu: `obj` is the clickable handler element and its
// next ".submenu" sibling is the container that slides open/closed.
var menuInstance = function(obj) {
    this.init(obj)
};
menuInstance.prototype = {
    init: function(obj) {
        if (this._setVars(obj)) {
            this._setEvents();
        }
    },
    _setVars: function(obj) {
        this._handler = $(obj);
        if (!this._handler.length) {
            return false;
        }
        this._container = this._handler.next(".submenu");
        return this._container.length ? true : false;
    },
    _setEvents: function() {
        var self = this;
        this._handler.on("click", function(e) {
            e.preventDefault();
            self._subMenuToggle();
        });
    },
    _subMenuToggle: function() {
        if (this._container.is(":hidden")) {
            this._container.slideDown();
            this._handler.addClass("active");
        } else {
            this._handler.removeClass("active");
            this._container.slideUp();
        }
    }
};
# Print the first ten perfect squares along with their 1-based positions.
for position in range(1, 11):
    print("Perfect square at position", position, "is", position * position)
<filename>back-end/hub-api/src/main/java/io/apicurio/hub/api/github/GitHubCreateReference.java
/*
* Copyright 2018 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apicurio.hub.api.github;
/**
 * Simple bean pairing a Git reference name ({@code ref}) with the commit
 * SHA ({@code sha}) it should point at.
 *
 * @author <EMAIL>
 */
public class GitHubCreateReference {

    private String ref;
    private String sha;

    /**
     * Default constructor.
     */
    public GitHubCreateReference() {
    }

    /**
     * @return the reference name
     */
    public String getRef() {
        return this.ref;
    }

    /**
     * @param ref the reference name to set
     */
    public void setRef(String ref) {
        this.ref = ref;
    }

    /**
     * @return the commit SHA
     */
    public String getSha() {
        return this.sha;
    }

    /**
     * @param sha the commit SHA to set
     */
    public void setSha(String sha) {
        this.sha = sha;
    }
}
|
<gh_stars>0
#Generation of random IP addresses in the subnet
# NOTE(review): depends on names defined earlier in this script (not
# visible here): `random`, `bst_ip_address` (broadcast-address octets) and
# `net_ip_address` (network-address octets) — presumably lists of octet
# strings; confirm against the preceding code.
while True:
    generate = input("Generate random IP address from this subnet? (y/n)")
    if generate == "y":
        generated_ip = []
        #Obtain available IP address in range, based on the difference between octets in broadcast address and network address
        for indexb, oct_bst in enumerate(bst_ip_address):
            #print(indexb, oct_bst)
            for indexn, oct_net in enumerate(net_ip_address):
                #print(indexn, oct_net)
                if indexb == indexn:
                    if oct_bst == oct_net:
                        #Add identical octets to the generated_ip list
                        generated_ip.append(oct_bst)
                    else:
                        #Generate random number(s) from within octet intervals and append to the list
                        generated_ip.append(str(random.randint(int(oct_net), int(oct_bst))))
        #IP address generated from the subnet pool
        #print(generated_ip)
        y_iaddr = ".".join(generated_ip)
        #print(y_iaddr)
        print("Random IP address is: %s" % y_iaddr)
        print("\n")
        continue
    else:
        # Any answer other than "y" exits the loop.
        print("Ok, bye!\n")
        break
// Builds the <script>/<link> markup block injected into the app's HTML
// shell: base-URL bootstrap, webcomponents polyfill loader, and the
// Polymer import when the app declares one.
module.exports = function (app) {
const has = require('./has')(app)
return `
<!-- App Scripts -->
${base(app).trim()}
${scripts(app)}
${application(app)}`;
// Polymer import markup, emitted only when the app has an import/shell.
// NOTE(review): `href` is computed but never interpolated into the
// returned markup — the link href is hard-coded; confirm whether
// ${href} was intended here.
function application(app) {
let polymers = app.polymers
if (has('import') || has('shell', polymers)) {
let href = has('import') ? app.import : polymers.shell
return `<!--Polymer-->
<link rel="import" href="bower_components/polymer/polymer-element.html">`;
}
return ''
}
// Inline bootstrap script: records the base URL, lazily imports the app
// shell once 'DomModule' fires, and registers the service worker.
function base(app) {
return `
<script>
'use strict';
// Register the base URL
var baseUrl = document.querySelector('base').href;
window.addEventListener('DomModule', function() {
window.manifest.base_url = baseUrl
// At this point we are guaranteed that all required polyfills have loaded,
// all HTML imports have loaded, and all defined custom elements have upgraded
let link = document.createElement('link')
link.rel = "import"
link.href = "${app.shell}"
document.head.appendChild(link)
});
if ('serviceWorker' in navigator) {
window.addEventListener('load', function() {
navigator.serviceWorker.register(baseUrl + 'service-worker.js');
});
}
</script>`;
}
// Webcomponents polyfill loader tag.
function scripts(app) {
return `<!-- WecComponents Polyfill -->
<script src="bower_components/webcomponentsjs/webcomponents-loader.js"></script>`
}
}
|
#!/bin/bash
# Dispatches pre/post start/stop hooks for component ComponentRosImageClient
# during launch on the target device (where the component runs). Insert
# custom commands in the matching branch, e.g. to start/stop the morse
# simulator automatically.
hook="$1"
case "$hook" in
    pre-start)
        # Commands to run before the components are started.
        echo "Triggering pre-start hooks FROM COMPONENT ComponentRosImageClient ..."
        ;;
    post-start)
        # Commands to run after all components were started.
        echo "Triggering post-start hooks FROM COMPONENT ComponentRosImageClient ..."
        ;;
    pre-stop)
        # Commands to run before stopping all components.
        echo "Triggering pre-stop hooks FROM COMPONENT ComponentRosImageClient ..."
        ;;
    post-stop)
        # Commands to run after all components were stopped.
        echo "Triggering post-stop hooks FROM COMPONENT ComponentRosImageClient ..."
        ;;
    *)
        echo "ERROR in $0: no such hook '$1'. Usage: $0 pre-start|post-start|pre-stop|post-stop"
        ;;
esac
package vectorwing.farmersdelight.common.registry;
import net.minecraft.core.particles.ParticleType;
import net.minecraft.core.particles.SimpleParticleType;
import net.minecraftforge.registries.RegistryObject;
import net.minecraftforge.registries.DeferredRegister;
import net.minecraftforge.registries.ForgeRegistries;
import vectorwing.farmersdelight.FarmersDelight;
public class ModParticleTypes
{
	// Deferred register collecting this mod's custom particle types under
	// the Farmer's Delight mod id; registered with the Forge registry.
	public static final DeferredRegister<ParticleType<?>> PARTICLE_TYPES = DeferredRegister.create(ForgeRegistries.PARTICLE_TYPES, FarmersDelight.MODID);

	// NOTE(review): the boolean passed to SimpleParticleType is its
	// constructor flag — confirm its meaning against current mappings.
	public static final RegistryObject<SimpleParticleType> STAR = PARTICLE_TYPES.register("star",
			() -> new SimpleParticleType(true));
	public static final RegistryObject<SimpleParticleType> STEAM = PARTICLE_TYPES.register("steam",
			() -> new SimpleParticleType(true));
}
|
#!/bin/bash
# Codeship deploy script: builds the target image name from CI variables,
# authenticates against GKE, patches the image into the stage's
# deployment manifest and applies the whole manifest directory.
echo "Setting up env variables"
set -e
PROJECT_ID=$GOOGLE_PROJECT_ID # set by the CI
STAGE=$CI_BRANCH
# NOTE(review): CLUSTER_NAME here is a literal string, not $CLUSTER_NAME —
# confirm whether the variable expansion was intended.
KUBERNETES_APP_NAME=CLUSTER_NAME-$STAGE
IMAGE=gcr.io/$PROJECT_ID/hapi-api:$CI_REPO_NAME.$CI_COMMIT_ID
echo "Setting up gcloud client"
codeship_google authenticate
gcloud config set compute/zone us-central1-a
gcloud container clusters get-credentials $KUBERNETES_APP_NAME
# Substitute the freshly built image into the deployment manifest in place.
sed -i "s,\$IMAGE_NAME,$IMAGE," ./deploy/infrastructure/k8s/$STAGE/api-deployment.yml
echo "Deploying to Kubernetes..."
kubectl apply -f ./deploy/infrastructure/k8s/$STAGE
echo "Image $IMAGE was deployed"
import numpy as np
class TicTacToeGame:
    """Minimal tic-tac-toe board state on a 3x3 numpy grid.

    Cells hold 1, -1 or 0 (empty); ``player_marker`` is the marker whose
    win ``is_gameover`` checks for.
    """

    def __init__(self):
        self.board = np.zeros((3, 3))
        # Display characters for each cell value (1 -> X, -1 -> O, 0 -> blank).
        self.player_markers = {1: "X", -1: "O", 0: " "}
        self.player_marker = 1

    def is_gameover(self):
        """Return True when ``player_marker`` completed a line or the board is full."""
        marker = self.player_marker
        # All eight winning lines: three rows, three columns, both diagonals.
        lines = [self.board[row, :] for row in range(3)]
        lines += [self.board[:, col] for col in range(3)]
        lines.append(np.diag(self.board))
        lines.append(np.diag(np.fliplr(self.board)))
        if any(np.all(line == marker) for line in lines):
            return True
        # A full board with no winning line is a draw, which also ends the game.
        return bool(np.all(self.board != 0))
# Play a full game of tic tac toe driven by the given agent
def play_game(agent):
    """Run one tic-tac-toe game, letting ``agent`` choose every move.

    ``agent.play(game)`` must return a (row, col) board position --
    TODO confirm against the agent implementation (not visible here).
    Note the same agent plays both sides; the marker alternates each turn.
    """
    game = TicTacToeGame()
    # Keep playing until game is over
    while not game.is_gameover():
        # Flip to the other player's marker before each move (1 <-> -1).
        game.player_marker *= -1
        action = agent.play(game)
        game.board[action[0]][action[1]] = game.player_marker
import random

# Generate and print a random bit string of length n (e.g. "0110101001").
n = 10
random_sequence = "".join(str(random.randint(0, 1)) for _ in range(n))
print(random_sequence)
#!/bin/sh
# CocoaPods-generated "Copy Pods Resources" build phase script: compiles
# storyboards/xibs/Core Data models and copies bundle resources into the
# built product.
# NOTE(review): `function`, `set -o pipefail` and the ERR trap are
# bashisms; under a strict POSIX /bin/sh they may fail — confirm the
# build host shell.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
  # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
  # resources to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain resources to hand to rsync at the end; truncated here.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Map Xcode's TARGETED_DEVICE_FAMILY to ibtool/actool --target-device flags.
case "${TARGETED_DEVICE_FAMILY:-}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  4)
    TARGET_DEVICE_ARGS="--target-device watch"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac
# Queues or compiles one pod resource, dispatching on its file extension:
# storyboards/xibs go through ibtool, Core Data models through momc/mapc,
# frameworks through rsync, xcassets are collected for a single actool
# run, and everything else is appended to the rsync manifest. Each branch
# echoes the command it is about to run (|| true so `set -e` survives).
install_resource()
{
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    # Relative paths are resolved against the Pods root.
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Asset catalogs are compiled together in one actool invocation later.
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      echo "$RESOURCE_PATH" || true
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# Install the pod resources for the active build configuration.
# (Debug and Release currently install identical resource lists.)
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MobiAdapters.plist"
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MEAdvBundle.bundle"
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MobiPubSDK.bundle"
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MRAID.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MobiAdapters.plist"
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MEAdvBundle.bundle"
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MobiPubSDK.bundle"
  install_resource "${PODS_ROOT}/../../MEAdvSDK/Assets/Resource/MRAID.bundle"
fi
# Copy the queued plain resources into the product (and, for archive
# builds, into the install directory as well).
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find -L "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # Fix: the glob must be outside the quotes. The original pattern
    # "${PODS_ROOT}*" quoted the `*`, so it only matched the literal
    # string and Pods' own asset catalogs were never filtered out,
    # causing them to be compiled twice.
    if [[ $line != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  # Compile every collected asset catalog in one actool run; pass the
  # app icon name through when the target defines one.
  if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  else
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
  fi
fi
|
//
// Created by ooooo on 2020/2/25.
//
#ifndef CPP_0337__SOLUTION3_H_
#define CPP_0337__SOLUTION3_H_
#include "TreeNode.h"
#include <unordered_map>
using namespace std;
/**
 * max money = max(rob the root + its four grandchildren, rob the two children)
 * A
 * / \
 * B B
 * / \ / \
 * C C C C
 *
 * dp: index 0 means "do not rob this node", index 1 means "rob it"
 *
 * root[0] = max(root.left[0], root.left[1]) + max(root.right[0] , root.right[1])
 * root[1] = root.val + root.left[0] + root.right[0]
 */
class Solution {
 public:
  // Post-order DP over the tree. Returns a pair {best when this node is
  // NOT robbed, best when this node IS robbed}; a null node yields {0, 0}.
  vector<int> help(TreeNode *node) {
    vector<int> ans(2, 0);
    if (!node) return ans;
    vector<int> l = help(node->left);
    vector<int> r = help(node->right);
    // Not robbing `node`: each child may independently be robbed or not.
    ans[0] = max(l[0], l[1]) + max(r[0], r[1]);
    // Robbing `node`: both children must be skipped.
    ans[1] = node->val + l[0] + r[0];
    return ans;
  }
  // Maximum total value obtainable without robbing two adjacent nodes.
  int rob(TreeNode *root) {
    vector<int> ans = help(root);
    return max(ans[0], ans[1]);
  }
};
#endif //CPP_0337__SOLUTION3_H_
|
/// <reference types="yoga-layout" />
import Yoga from 'yoga-layout-prebuilt';
// Options controlling how the Yoga layout tree is built.
interface BuildLayoutOptions {
    // Yoga configuration used when creating layout nodes.
    config: Yoga.YogaConfig;
    // Available width, in terminal columns.
    terminalWidth: number;
    // When true, static elements are left out of the layout pass.
    skipStaticElements: boolean;
}
// Builds the Yoga layout for a DOM tree rooted at `node` and returns a
// node of the same DOM type.
export declare const buildLayout: (node: import("./dom").TextNode | import("./dom").DOMElement, options: BuildLayoutOptions) => import("./dom").TextNode | import("./dom").DOMElement;
export {};
|
<gh_stars>0
"""control_spending URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from bills.views import home, list_transactions, new_transaction, update_transaction, delete_transaction
# URL routing for the control_spending project: admin site plus the
# transaction CRUD views from the `bills` app.
urlpatterns = [
path('admin/', admin.site.urls),
path('', home),  # landing page (unnamed route)
path('transactions/', list_transactions, name='list'),
path('new_transaction/', new_transaction, name='new'),
path('update_transaction/<int:pk>/', update_transaction, name='update'),
path('delete_transaction/<int:pk>/', delete_transaction, name='delete')
]
|
<reponame>levitnudi/Dala<gh_stars>0
package yali.org.view;
/**
 * Plain value object holding a notification's display fields (title,
 * message, icon URL) together with an action and its destination.
 *
 * Created by Abhi on 13 Nov 2017 013.
 */
public class NotificationVO {

    private String title;
    private String message;
    private String iconUrl;
    private String action;
    private String actionDestination;

    /** @return the notification title */
    public String getTitle() {
        return this.title;
    }

    /** @param value the notification title to set */
    public void setTitle(String value) {
        this.title = value;
    }

    /** @return the notification body text */
    public String getMessage() {
        return this.message;
    }

    /** @param value the notification body text to set */
    public void setMessage(String value) {
        this.message = value;
    }

    /** @return the icon URL */
    public String getIconUrl() {
        return this.iconUrl;
    }

    /** @param value the icon URL to set */
    public void setIconUrl(String value) {
        this.iconUrl = value;
    }

    /** @return the action identifier */
    public String getAction() {
        return this.action;
    }

    /** @param value the action identifier to set */
    public void setAction(String value) {
        this.action = value;
    }

    /** @return the action's destination */
    public String getActionDestination() {
        return this.actionDestination;
    }

    /** @param value the action destination to set */
    public void setActionDestination(String value) {
        this.actionDestination = value;
    }
}
|
#!/bin/bash
# Deploys an ARM template (template.json + parameters.json) to Azure.
# Required inputs may come from flags or interactive prompts; an optional
# location triggers creation of a new resource group.
# NOTE(review): this uses the classic `azure` xplat CLI, which has been
# deprecated in favour of `az` — confirm the target environment.
usage() { echo "Usage: $0 -t <subscriptionId> -p <resourceGroupName> -q <deploymentName> -l <resourceGroupLocation>" 1>&2; exit 1; }
# Initialize parameters specified from command line
while getopts ":t:p:q:l:" o; do
    case "${o}" in
        t)
            echo "in case t"
            subscriptionId=${OPTARG}
            ;;
        p)
            resourceGroupName=${OPTARG}
            ;;
        q)
            deploymentName=${OPTARG}
            ;;
        l)
            resourceGroupLocation=${OPTARG}
            ;;
    esac
done
shift $((OPTIND-1))
#Prompt for parameters is some required parameters are missing
if [ -z "$subscriptionId" ]; then
    echo "Subscription Id:"
    read subscriptionId
fi
if [ -z "$resourceGroupName" ]; then
    echo "ResourceGroupName:"
    read resourceGroupName
fi
if [ -z "$deploymentName" ]; then
    echo "DeploymentName:"
    read deploymentName
fi
if [ -z "$resourceGroupLocation" ]; then
    echo "Enter a location below to create a new resource group else skip this"
    echo "ResourceGroupLocation:"
    read resourceGroupLocation
fi
#templateFile Path - template file to be used
templateFilePath="template.json"
#parameter file path
parametersFilePath="parameters.json"
# Bail out (with usage) if any required value is still empty after prompting.
if [ -z "$subscriptionId" ] || [ -z "$resourceGroupName" ] || [ -z "$deploymentName" ]; then
    echo "Either one of subscriptionId, resourceGroupName, deploymentName is empty"
    usage
fi
#login to azure using your credentials
azure login
#set the default subscription id
azure account set $subscriptionId
#switch the mode to azure resource manager
azure config mode arm
#Check for existing resource group
if [ -z "$resourceGroupLocation" ] ;
then
    echo "Using existing resource group..."
else
    echo "Creating a new resource group..."
    azure group create --name $resourceGroupName --location $resourceGroupLocation
fi
#Start deployment
# NOTE(review): $resourceGroupName etc. are unquoted in the azure calls;
# values containing spaces would word-split.
echo "Starting deployment..."
azure group deployment create --name $deploymentName --resource-group $resourceGroupName --template-file $templateFilePath --parameters-file $parametersFilePath
<reponame>hofmeister/voyager<gh_stars>1-10
package collector
import (
"crypto/tls"
"encoding/csv"
"errors"
"fmt"
"io"
"net"
"net/http"
"net/url"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/log"
)
const (
	Namespace = "haproxy" // For Prometheus metrics.
	// HAProxy 1.4
	// # pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,
	// HAProxy 1.5
	// pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,comp_in,comp_out,comp_byp,comp_rsp,lastsess,
	// HAProxy 1.5.19
	// pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,comp_in,comp_out,comp_byp,comp_rsp,lastsess,last_chk,last_agt,qtime,ctime,rtime,ttime,
	// HAProxy 1.7
	// pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,comp_in,comp_out,comp_byp,comp_rsp,lastsess,last_chk,last_agt,qtime,ctime,rtime,ttime,agent_status,agent_code,agent_duration,check_desc,agent_desc,check_rise,check_fall,check_health,agent_rise,agent_fall,agent_health,addr,cookie,mode,algo,conn_rate,conn_rate_max,conn_tot,intercepted,dcon,dses
	// Zero-based positions of notable fields in the CSV rows above.
	minimumCsvFieldCount = 33
	statusField          = 17
	qtimeMsField         = 58
	ctimeMsField         = 59
	rtimeMsField         = 60
	ttimeMsField         = 61
)

// Prometheus label names for the three scrape scopes.
var (
	frontendLabelNames = []string{"frontend"}
	backendLabelNames  = []string{"backend"}
	serverLabelNames   = []string{"backend", "server"}
)
// newServerGaugeOpts builds GaugeOpts for a per-server metric, prefixing
// the metric name with "server_" under the haproxy namespace.
func newServerGaugeOpts(metricName string, docString string, constLabels prometheus.Labels) prometheus.GaugeOpts {
	return prometheus.GaugeOpts{
		Namespace:   Namespace,
		Name:        "server_" + metricName,
		Help:        docString,
		ConstLabels: constLabels,
	}
}
// Metrics maps a HAProxy CSV field index to the GaugeOpts used to export
// that field.
type Metrics map[int]prometheus.GaugeOpts

// String renders the tracked CSV field indexes as a sorted,
// comma-separated list (e.g. "2,3,4").
func (m Metrics) String() string {
	keys := make([]int, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Ints(keys)
	parts := make([]string, 0, len(keys))
	for _, k := range keys {
		parts = append(parts, strconv.Itoa(k))
	}
	return strings.Join(parts, ",")
}
// ServerMetrics is the full catalogue of exportable per-server metrics, keyed
// by HAProxy CSV field index (see the header comments above the const block).
// filterServerMetrics selects a subset of these at startup.
var (
	ServerMetrics = Metrics{
		2:  newServerGaugeOpts("current_queue", "Current number of queued requests assigned to this server.", nil),
		3:  newServerGaugeOpts("max_queue", "Maximum observed number of queued requests assigned to this server.", nil),
		4:  newServerGaugeOpts("current_sessions", "Current number of active sessions.", nil),
		5:  newServerGaugeOpts("max_sessions", "Maximum observed number of active sessions.", nil),
		6:  newServerGaugeOpts("limit_sessions", "Configured session limit.", nil),
		7:  newServerGaugeOpts("sessions_total", "Total number of sessions.", nil),
		8:  newServerGaugeOpts("bytes_in_total", "Current total of incoming bytes.", nil),
		9:  newServerGaugeOpts("bytes_out_total", "Current total of outgoing bytes.", nil),
		13: newServerGaugeOpts("connection_errors_total", "Total of connection errors.", nil),
		14: newServerGaugeOpts("response_errors_total", "Total of response errors.", nil),
		15: newServerGaugeOpts("retry_warnings_total", "Total of retry warnings.", nil),
		16: newServerGaugeOpts("redispatch_warnings_total", "Total of redispatch warnings.", nil),
		17: newServerGaugeOpts("up", "Current health status of the server (1 = UP, 0 = DOWN).", nil),
		18: newServerGaugeOpts("weight", "Current weight of the server.", nil),
		21: newServerGaugeOpts("check_failures_total", "Total number of failed health checks.", nil),
		24: newServerGaugeOpts("downtime_seconds_total", "Total downtime in seconds.", nil),
		33: newServerGaugeOpts("current_session_rate", "Current number of sessions per second over last elapsed second.", nil),
		35: newServerGaugeOpts("max_session_rate", "Maximum observed number of sessions per second.", nil),
		38: newServerGaugeOpts("check_duration_milliseconds", "Previously run health check duration, in milliseconds", nil),
		// Fields 39-44 share one metric name; the HTTP status class is carried
		// in the constant "code" label instead.
		39: newServerGaugeOpts("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "1xx"}),
		40: newServerGaugeOpts("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "2xx"}),
		41: newServerGaugeOpts("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "3xx"}),
		42: newServerGaugeOpts("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "4xx"}),
		43: newServerGaugeOpts("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "5xx"}),
		44: newServerGaugeOpts("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "other"}),
	}
)
// Exporter collects HAProxy stats from the given URI and exports them using
// the prometheus metrics package.
type Exporter struct {
	URI   string       // stats source (http(s)://, file:// or unix:// URI)
	mutex sync.RWMutex // serializes Collect so gauges are not reset mid-scrape
	fetch func() (io.ReadCloser, error) // opens a fresh CSV stats stream per scrape

	extraLabels prometheus.Labels // extra constant labels merged into every metric

	up                             prometheus.Gauge   // 1 if the last scrape succeeded
	totalScrapes, csvParseFailures prometheus.Counter // scrape count / CSV parse errors
	// CSV field index -> gauge, one map per HAProxy statistics scope.
	frontendMetrics, backendMetrics, serverMetrics map[int]*prometheus.GaugeVec
}
// NewExporter returns an initialized Exporter.
//
// uri selects the stats source: http(s):// endpoints are fetched over HTTP,
// unix:// sockets are queried with the "show stat" runtime command.
// serverMetricFields is a comma-separated list of CSV field indexes choosing
// which per-server metrics (from ServerMetrics) are exported; an empty string
// disables per-server metrics. extraLabels are merged into every metric as
// constant labels.
func NewExporter(uri string, sslVerify bool, serverMetricFields string, extraLabels prometheus.Labels, timeout time.Duration) (*Exporter, error) {
	u, err := url.Parse(uri)
	if err != nil {
		return nil, err
	}

	var fetch func() (io.ReadCloser, error)
	switch u.Scheme {
	// NOTE(review): "file" is routed through the HTTP client here; the default
	// http.Transport does not serve file:// URLs — confirm a file transport is
	// registered elsewhere in this program.
	case "http", "https", "file":
		fetch = fetchHTTP(uri, sslVerify, timeout)
	case "unix":
		fetch = fetchUnix(u, timeout)
	default:
		return nil, fmt.Errorf("unsupported scheme: %q", u.Scheme)
	}

	e := &Exporter{
		URI:         uri,
		extraLabels: extraLabels,
		fetch:       fetch,
	}
	// Exporter self-metrics.
	e.up = prometheus.NewGauge(prometheus.GaugeOpts{
		Namespace:   Namespace,
		Name:        "up",
		Help:        "Was the last scrape of haproxy successful.",
		ConstLabels: nil,
	})
	e.totalScrapes = prometheus.NewCounter(prometheus.CounterOpts{
		Namespace:   Namespace,
		Name:        "exporter_total_scrapes",
		Help:        "Current total HAProxy scrapes.",
		ConstLabels: nil,
	})
	e.csvParseFailures = prometheus.NewCounter(prometheus.CounterOpts{
		Namespace:   Namespace,
		Name:        "exporter_csv_parse_failures",
		Help:        "Number of errors while parsing CSV.",
		ConstLabels: nil,
	})
	// Frontend metrics, keyed by HAProxy CSV field index.
	e.frontendMetrics = map[int]*prometheus.GaugeVec{
		4:  e.newFrontendMetric("current_sessions", "Current number of active sessions.", nil),
		5:  e.newFrontendMetric("max_sessions", "Maximum observed number of active sessions.", nil),
		6:  e.newFrontendMetric("limit_sessions", "Configured session limit.", nil),
		7:  e.newFrontendMetric("sessions_total", "Total number of sessions.", nil),
		8:  e.newFrontendMetric("bytes_in_total", "Current total of incoming bytes.", nil),
		9:  e.newFrontendMetric("bytes_out_total", "Current total of outgoing bytes.", nil),
		10: e.newFrontendMetric("requests_denied_total", "Total of requests denied for security.", nil),
		12: e.newFrontendMetric("request_errors_total", "Total of request errors.", nil),
		33: e.newFrontendMetric("current_session_rate", "Current number of sessions per second over last elapsed second.", nil),
		34: e.newFrontendMetric("limit_session_rate", "Configured limit on new sessions per second.", nil),
		35: e.newFrontendMetric("max_session_rate", "Maximum observed number of sessions per second.", nil),
		39: e.newFrontendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "1xx"}),
		40: e.newFrontendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "2xx"}),
		41: e.newFrontendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "3xx"}),
		42: e.newFrontendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "4xx"}),
		43: e.newFrontendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "5xx"}),
		44: e.newFrontendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "other"}),
		48: e.newFrontendMetric("http_requests_total", "Total HTTP requests.", nil),
		79: e.newFrontendMetric("connections_total", "Total number of connections", nil),
	}
	// Backend metrics, keyed by HAProxy CSV field index.
	e.backendMetrics = map[int]*prometheus.GaugeVec{
		2:  e.newBackendMetric("current_queue", "Current number of queued requests not assigned to any server.", nil),
		3:  e.newBackendMetric("max_queue", "Maximum observed number of queued requests not assigned to any server.", nil),
		4:  e.newBackendMetric("current_sessions", "Current number of active sessions.", nil),
		5:  e.newBackendMetric("max_sessions", "Maximum observed number of active sessions.", nil),
		6:  e.newBackendMetric("limit_sessions", "Configured session limit.", nil),
		7:  e.newBackendMetric("sessions_total", "Total number of sessions.", nil),
		8:  e.newBackendMetric("bytes_in_total", "Current total of incoming bytes.", nil),
		9:  e.newBackendMetric("bytes_out_total", "Current total of outgoing bytes.", nil),
		13: e.newBackendMetric("connection_errors_total", "Total of connection errors.", nil),
		14: e.newBackendMetric("response_errors_total", "Total of response errors.", nil),
		15: e.newBackendMetric("retry_warnings_total", "Total of retry warnings.", nil),
		16: e.newBackendMetric("redispatch_warnings_total", "Total of redispatch warnings.", nil),
		17: e.newBackendMetric("up", "Current health status of the backend (1 = UP, 0 = DOWN).", nil),
		18: e.newBackendMetric("weight", "Total weight of the servers in the backend.", nil),
		19: e.newBackendMetric("current_server", "Current number of active servers", nil),
		33: e.newBackendMetric("current_session_rate", "Current number of sessions per second over last elapsed second.", nil),
		35: e.newBackendMetric("max_session_rate", "Maximum number of sessions per second.", nil),
		39: e.newBackendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "1xx"}),
		40: e.newBackendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "2xx"}),
		41: e.newBackendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "3xx"}),
		42: e.newBackendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "4xx"}),
		43: e.newBackendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "5xx"}),
		44: e.newBackendMetric("http_responses_total", "Total of HTTP responses.", prometheus.Labels{"code": "other"}),
		// *time fields are averages over the last 1024 requests, converted
		// from milliseconds to seconds in exportCsvFields.
		58: e.newBackendMetric("http_queue_time_average_seconds", "Avg. HTTP queue time for last 1024 successful connections.", nil),
		59: e.newBackendMetric("http_connect_time_average_seconds", "Avg. HTTP connect time for last 1024 successful connections.", nil),
		60: e.newBackendMetric("http_response_time_average_seconds", "Avg. HTTP response time for last 1024 successful connections.", nil),
		61: e.newBackendMetric("http_total_time_average_seconds", "Avg. HTTP total time for last 1024 successful connections.", nil),
	}
	// Only instantiate the per-server metrics that survive the user filter.
	selected, err := e.filterServerMetrics(serverMetricFields)
	if err != nil {
		return nil, err
	}
	e.serverMetrics = map[int]*prometheus.GaugeVec{}
	for field, opts := range ServerMetrics {
		if _, ok := selected[field]; ok {
			e.serverMetrics[field] = e.newServerMetric(opts)
		}
	}
	// err is provably nil at this point; return nil explicitly (the original
	// "return e, err" relied on the reader tracing err back through the
	// already-handled filterServerMetrics call).
	return e, nil
}
// newFrontendMetric creates a "frontend_"-prefixed GaugeVec labelled by
// frontend name, with the exporter's extra labels merged in.
func (e *Exporter) newFrontendMetric(metricName string, docString string, constLabels prometheus.Labels) *prometheus.GaugeVec {
	opts := prometheus.GaugeOpts{
		Namespace:   Namespace,
		Name:        "frontend_" + metricName,
		Help:        docString,
		ConstLabels: e.getConstLabels(constLabels),
	}
	return prometheus.NewGaugeVec(opts, frontendLabelNames)
}
// newBackendMetric creates a "backend_"-prefixed GaugeVec labelled by
// backend name, with the exporter's extra labels merged in.
func (e *Exporter) newBackendMetric(metricName string, docString string, constLabels prometheus.Labels) *prometheus.GaugeVec {
	opts := prometheus.GaugeOpts{
		Namespace:   Namespace,
		Name:        "backend_" + metricName,
		Help:        docString,
		ConstLabels: e.getConstLabels(constLabels),
	}
	return prometheus.NewGaugeVec(opts, backendLabelNames)
}
// newServerMetric instantiates a GaugeVec from pre-built server GaugeOpts
// (see ServerMetrics), labelled by backend and server name, with the
// exporter's extra labels merged into the constant labels.
func (e *Exporter) newServerMetric(opts prometheus.GaugeOpts) *prometheus.GaugeVec {
	merged := prometheus.GaugeOpts{
		Namespace:   opts.Namespace,
		Name:        opts.Name,
		Help:        opts.Help,
		ConstLabels: e.getConstLabels(opts.ConstLabels),
	}
	return prometheus.NewGaugeVec(merged, serverLabelNames)
}
// getConstLabels merges the per-metric constant labels with the exporter-wide
// extra labels (extra labels win on key collision). Returns nil rather than
// an empty map so metrics without labels stay label-free.
func (e *Exporter) getConstLabels(constLabels prometheus.Labels) prometheus.Labels {
	merged := prometheus.Labels{}
	for key, val := range constLabels {
		merged[key] = val
	}
	if e != nil && e.extraLabels != nil {
		for key, val := range e.extraLabels {
			merged[key] = val
		}
	}
	if len(merged) == 0 {
		return nil
	}
	return merged
}
// Describe describes all the metrics ever exported by the HAProxy exporter. It
// implements prometheus.Collector.
func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
	groups := []map[int]*prometheus.GaugeVec{
		e.frontendMetrics,
		e.backendMetrics,
		e.serverMetrics,
	}
	for _, group := range groups {
		for _, metric := range group {
			metric.Describe(ch)
		}
	}
	ch <- e.up.Desc()
	ch <- e.totalScrapes.Desc()
	ch <- e.csvParseFailures.Desc()
}
// Collect fetches the stats from configured HAProxy location and delivers them
// as Prometheus metrics. It implements prometheus.Collector.
func (e *Exporter) Collect(ch chan<- prometheus.Metric) {
	e.mutex.Lock() // To protect metrics from concurrent collects.
	defer e.mutex.Unlock()
	// Reset first so series for frontends/backends/servers that vanished
	// since the last scrape are dropped, then repopulate before emitting.
	e.resetMetrics()
	e.scrape()
	ch <- e.up
	ch <- e.totalScrapes
	ch <- e.csvParseFailures
	e.collectMetrics(ch)
}
// fetchHTTP returns a closure that GETs the stats URI and hands back the
// response body on any 2xx status. sslVerify=false disables TLS certificate
// verification; timeout bounds the whole request.
func fetchHTTP(uri string, sslVerify bool, timeout time.Duration) func() (io.ReadCloser, error) {
	client := http.Client{
		Timeout: timeout,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: !sslVerify},
		},
	}
	return func() (io.ReadCloser, error) {
		resp, err := client.Get(uri)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode < 200 || resp.StatusCode >= 300 {
			// Non-2xx: release the connection before reporting the failure.
			resp.Body.Close()
			return nil, fmt.Errorf("HTTP status %d", resp.StatusCode)
		}
		return resp.Body, nil
	}
}
// fetchUnix returns a closure that connects to the HAProxy stats unix socket,
// issues the "show stat" runtime command, and hands the connection back as
// the CSV stream. timeout bounds both the dial and all subsequent I/O.
func fetchUnix(u *url.URL, timeout time.Duration) func() (io.ReadCloser, error) {
	return func() (io.ReadCloser, error) {
		conn, err := net.DialTimeout("unix", u.Path, timeout)
		if err != nil {
			return nil, err
		}
		// Close the socket on every failure path after a successful dial.
		fail := func(e error) (io.ReadCloser, error) {
			conn.Close()
			return nil, e
		}
		if err := conn.SetDeadline(time.Now().Add(timeout)); err != nil {
			return fail(err)
		}
		const cmd = "show stat\n"
		n, err := io.WriteString(conn, cmd)
		if err != nil {
			return fail(err)
		}
		if n != len(cmd) {
			return fail(errors.New("write error"))
		}
		return conn, nil
	}
}
// scrape fetches one CSV stats snapshot and feeds each row into parseRow,
// maintaining the up gauge and the scrape/parse-failure counters.
func (e *Exporter) scrape() {
	e.totalScrapes.Inc()
	body, err := e.fetch()
	if err != nil {
		e.up.Set(0)
		log.Errorf("Can't scrape HAProxy: %v", err)
		return
	}
	defer body.Close()
	e.up.Set(1)
	reader := csv.NewReader(body)
	// NOTE(review): TrailingComma is a deprecated no-op in encoding/csv since
	// Go 1.5; kept byte-for-byte, but it has no effect on parsing.
	reader.TrailingComma = true
	reader.Comment = '#' // HAProxy prefixes its header line with '#'
loop:
	for {
		row, err := reader.Read()
		switch err {
		case nil:
			// Fall through and parse the row below.
		case io.EOF:
			break loop
		default:
			if _, ok := err.(*csv.ParseError); ok {
				// Malformed row: count it and keep reading the rest.
				log.Errorf("Can't read CSV: %v", err)
				e.csvParseFailures.Inc()
				continue loop
			}
			// Transport-level error: mark the scrape failed and stop.
			log.Errorf("Unexpected error while reading CSV: %v", err)
			e.up.Set(0)
			break loop
		}
		e.parseRow(row)
	}
}
// resetMetrics clears every per-label series so entities that disappeared
// from HAProxy since the last scrape are not re-exported with stale values.
func (e *Exporter) resetMetrics() {
	groups := []map[int]*prometheus.GaugeVec{
		e.frontendMetrics,
		e.backendMetrics,
		e.serverMetrics,
	}
	for _, group := range groups {
		for _, metric := range group {
			metric.Reset()
		}
	}
}
// collectMetrics forwards every frontend, backend and server gauge to the
// given Prometheus collection channel.
func (e *Exporter) collectMetrics(metrics chan<- prometheus.Metric) {
	groups := []map[int]*prometheus.GaugeVec{
		e.frontendMetrics,
		e.backendMetrics,
		e.serverMetrics,
	}
	for _, group := range groups {
		for _, metric := range group {
			metric.Collect(metrics)
		}
	}
}
// parseRow dispatches one CSV stats row to the metric set matching its type
// column (field 32): frontend, backend or server. Listener rows (type "3")
// are intentionally not exported, and too-short rows are counted as parse
// failures.
func (e *Exporter) parseRow(csvRow []string) {
	if len(csvRow) < minimumCsvFieldCount {
		log.Errorf("Parser expected at least %d CSV fields, but got: %d", minimumCsvFieldCount, len(csvRow))
		e.csvParseFailures.Inc()
		return
	}

	const (
		typeFrontend = "0"
		typeBackend  = "1"
		typeServer   = "2"
	)
	pxname, svname := csvRow[0], csvRow[1]
	switch csvRow[32] {
	case typeFrontend:
		e.exportCsvFields(e.frontendMetrics, csvRow, pxname)
	case typeBackend:
		e.exportCsvFields(e.backendMetrics, csvRow, pxname)
	case typeServer:
		e.exportCsvFields(e.serverMetrics, csvRow, pxname, svname)
	}
}
// parseStatusField maps an HAProxy status string to a numeric health value:
// 1 for up/healthy states, 0 for everything else (DOWN, NOLB, MAINT, ...).
//
// HAProxy reports transitional states as "UP n/m" where the counters depend
// on the configured rise/fall check thresholds, so we match the "UP " prefix
// instead of enumerating specific fractions (the previous hard-coded
// "UP 1/3"/"UP 2/3" list wrongly reported e.g. "UP 1/2" as down).
func parseStatusField(value string) int64 {
	switch {
	case value == "UP" || strings.HasPrefix(value, "UP "):
		return 1
	case value == "OPEN", value == "no check":
		return 1
	}
	return 0
}
// exportCsvFields sets the gauges in metrics from the corresponding CSV
// columns of csvRow, labelled with the given label values.
//
// Empty fields are skipped; the status column is translated to 0/1; the
// *time columns are converted from milliseconds to seconds; all other
// columns are parsed as integers. Unparseable values are logged and counted
// as CSV parse failures.
func (e *Exporter) exportCsvFields(metrics map[int]*prometheus.GaugeVec, csvRow []string, labels ...string) {
	for fieldIdx, metric := range metrics {
		// Map iteration order is random, so a field index beyond the end of a
		// short row must skip only this field ("continue"); the original
		// "break" abandoned an arbitrary subset of the remaining metrics,
		// making the exported set nondeterministic for older HAProxy versions
		// that emit fewer columns.
		if fieldIdx > len(csvRow)-1 {
			continue
		}
		valueStr := csvRow[fieldIdx]
		if valueStr == "" {
			continue
		}

		var (
			err   error
			value float64
		)
		switch fieldIdx {
		case statusField:
			value = float64(parseStatusField(valueStr))
		case qtimeMsField, ctimeMsField, rtimeMsField, ttimeMsField:
			value, err = strconv.ParseFloat(valueStr, 64)
			value /= 1000 // HAProxy reports milliseconds; export seconds.
		default:
			var valueInt int64
			valueInt, err = strconv.ParseInt(valueStr, 10, 64)
			value = float64(valueInt)
		}
		if err != nil {
			log.Errorf("Can't parse CSV field value %s: %v", valueStr, err)
			e.csvParseFailures.Inc()
			continue
		}
		metric.WithLabelValues(labels...).Set(value)
	}
}
// FilterServerMetrics returns the set of server metrics specified by the comma
// separated filter.
//
// An empty filter selects no metrics; field numbers not present in
// ServerMetrics are silently ignored; a non-numeric entry is an error.
func (e *Exporter) filterServerMetrics(filter string) (map[int]prometheus.GaugeOpts, error) {
	metrics := map[int]prometheus.GaugeOpts{}
	if filter == "" {
		return metrics, nil
	}

	for _, token := range strings.Split(filter, ",") {
		field, err := strconv.Atoi(token)
		if err != nil {
			return nil, fmt.Errorf("invalid server metric field number: %v", token)
		}
		// Look the field up directly instead of building an intermediate
		// "selected" set; unknown field numbers are simply skipped.
		if opts, ok := ServerMetrics[field]; ok {
			metrics[field] = opts
		}
	}
	return metrics, nil
}
|
<gh_stars>1-10
package main
import (
"fmt"
"github.com/henrymxu/gomoderator/forum"
"github.com/henrymxu/gomoderator/moderator"
"os"
)
// main wires a GitHub-backed forum into a moderator and prints whether a
// moderation action with id 0 already exists on the configured repository.
// Requires the GITHUB_ACCESS_TOKEN environment variable to be set.
func main() {
	// Configure the GitHub forum backend (token taken from the environment).
	githubBuilder := forum.NewGithubBuilder()
	githubBuilder.AccessToken = os.Getenv("GITHUB_ACCESS_TOKEN")
	githubBuilder.AccountName = "henrymxu"
	githubBuilder.RepositoryOwner = "henrymxu"
	githubBuilder.RepositoryName = "gosports"
	// Assemble the moderator on top of the forum backend.
	builder := moderator.NewModeratorBuilder()
	builder.SetForumBuilder(githubBuilder)
	builder.SetModerators("henrymxu")
	// Errors from these setters are deliberately ignored here (demo code).
	_ = builder.SetResolutions("pass", "fail")
	builder.RegisterActionHandler(actionHandler)
	builder.SetModeToCommenting()
	_ = builder.SetTitleFormat("Action required for %d")
	mod, err := builder.BuildModerator()
	if err != nil {
		panic(err)
	}
	fmt.Println(mod.DoesActionAlreadyExist(0))
	// Output: true
}
// actionHandler is the moderator callback: it logs which action id was
// resolved and with what resolution.
func actionHandler(id int64, resolution string) {
	message := fmt.Sprintf("Handling action for %d with resolution %s\n", id, resolution)
	fmt.Print(message)
}
#!/bin/bash
# Build ("Update") or run ("Launch") the fieldboundary Docker image.
#
# cd to this script's directory so relative paths (Docker/, ..) resolve.
# The original used `dirname "$(which "$0")"`, which only works when the
# script is found on $PATH; $BASH_SOURCE handles invocation by path too.
cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1

if [ "$1" = "Update" ]; then
    docker build -t fieldboundary Docker
elif [ "$1" = "Launch" ]; then
    # Mount the repository root, pass through GPUs, and run as the invoking
    # user so files created in the workspace are not root-owned.
    docker run -v "$(pwd)/..":/workspace --gpus all -u "$(id -u):$(id -g)" -it --shm-size='256m' --rm fieldboundary
fi
|
#!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# BLOG: https://linuxeye.com
#
# Notes: OneinStack for CentOS/RedHat 7+ Debian 8+ and Ubuntu 16+
#
# Project home page:
# https://oneinstack.com
# https://github.com/oneinstack/oneinstack
# Compile and install PHP 8.0 from source together with its bundled dependency
# libraries (libiconv, curl, freetype, argon2, libsodium, libzip, mhash), then
# write php.ini, optional opcache config, and php-fpm configuration.
# Relies on variables exported by the surrounding OneinStack scripts
# (oneinstack_dir, php_install_dir, run_user/run_group, Mem, THREAD, PM, ...).
Install_PHP80() {
  pushd ${oneinstack_dir}/src > /dev/null
  # Detect the Apache major version (2.4 -> 24, 2.2 -> 22) when Apache exists.
  if [ -e "${apache_install_dir}/bin/httpd" ];then
    [ "$(${apache_install_dir}/bin/httpd -v | awk -F'.' /version/'{print $2}')" == '4' ] && Apache_main_ver=24
    [ "$(${apache_install_dir}/bin/httpd -v | awk -F'.' /version/'{print $2}')" == '2' ] && Apache_main_ver=22
  fi
  # Build libiconv if not already installed.
  if [ ! -e "${libiconv_install_dir}/lib/libiconv.la" ]; then
    tar xzf libiconv-${libiconv_ver}.tar.gz
    pushd libiconv-${libiconv_ver} > /dev/null
    ./configure --prefix=${libiconv_install_dir}
    make -j ${THREAD} && make install
    popd > /dev/null
    rm -rf libiconv-${libiconv_ver}
    ln -s ${libiconv_install_dir}/lib/libiconv.so.2 /usr/lib64/libiconv.so.2
  fi
  # Build curl against the bundled OpenSSL if not already installed.
  if [ ! -e "${curl_install_dir}/lib/libcurl.la" ]; then
    tar xzf curl-${curl_ver}.tar.gz
    pushd curl-${curl_ver} > /dev/null
    # Debian 8 ships a zlib that conflicts with this build; swap it out
    # around configure/make.
    [ "${Debian_ver}" == '8' ] && apt-get -y remove zlib1g-dev
    ./configure --prefix=${curl_install_dir} --with-ssl=${openssl_install_dir}
    make -j ${THREAD} && make install
    [ "${Debian_ver}" == '8' ] && apt-get -y install libc-client2007e-dev libglib2.0-dev libpng12-dev libssl-dev libzip-dev zlib1g-dev
    popd > /dev/null
    rm -rf curl-${curl_ver}
  fi
  # Build freetype if not already installed.
  if [ ! -e "${freetype_install_dir}/lib/libfreetype.la" ]; then
    tar xzf freetype-${freetype_ver}.tar.gz
    pushd freetype-${freetype_ver} > /dev/null
    ./configure --prefix=${freetype_install_dir} --enable-freetype-config
    make -j ${THREAD} && make install
    ln -sf ${freetype_install_dir}/include/freetype2/* /usr/include/
    [ -d /usr/lib/pkgconfig ] && /bin/cp ${freetype_install_dir}/lib/pkgconfig/freetype2.pc /usr/lib/pkgconfig/
    popd > /dev/null
    rm -rf freetype-${freetype_ver}
  fi
  # Build argon2 (needed for --with-password-argon2) if not already installed.
  if [ ! -e "/usr/lib/libargon2.a" ]; then
    tar xzf phc-winner-argon2-${argon2_ver}.tar.gz
    pushd phc-winner-argon2-${argon2_ver} > /dev/null
    make -j ${THREAD} && make install
    popd > /dev/null
    rm -rf phc-winner-argon2-${argon2_ver}
  fi
  # Build libsodium (needed for --with-sodium) if not already installed.
  if [ ! -e "/usr/local/lib/libsodium.la" ]; then
    tar xzf libsodium-${libsodium_ver}.tar.gz
    pushd libsodium-${libsodium_ver} > /dev/null
    ./configure --disable-dependency-tracking --enable-minimal
    make -j ${THREAD} && make install
    popd > /dev/null
    rm -rf libsodium-${libsodium_ver}
  fi
  # Build libzip (needed for --with-zip) if not already installed.
  if [ ! -e "/usr/local/lib/libzip.la" ]; then
    tar xzf libzip-${libzip_ver}.tar.gz
    pushd libzip-${libzip_ver} > /dev/null
    ./configure
    make -j ${THREAD} && make install
    popd > /dev/null
    rm -rf libzip-${libzip_ver}
  fi
  # Build mhash (needed for --with-mhash) if no header is present.
  if [ ! -e "/usr/local/include/mhash.h" -a ! -e "/usr/include/mhash.h" ]; then
    tar xzf mhash-${mhash_ver}.tar.gz
    pushd mhash-${mhash_ver} > /dev/null
    ./configure
    make -j ${THREAD} && make install
    popd > /dev/null
    rm -rf mhash-${mhash_ver}
  fi
  # Make /usr/local/lib visible to the dynamic linker.
  [ -z "`grep /usr/local/lib /etc/ld.so.conf.d/*.conf`" ] && echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf
  ldconfig
  # RHEL-family library symlink fixups.
  if [ "${PM}" == 'yum' ]; then
    if [ "${OS_BIT}" == '64' ]; then
      [ ! -e "/lib64/libpcre.so.1" ] && ln -s /lib64/libpcre.so.0.0.1 /lib64/libpcre.so.1
      [ ! -e "/usr/lib/libc-client.so" ] && ln -s /usr/lib64/libc-client.so /usr/lib/libc-client.so
    else
      [ ! -e "/lib/libpcre.so.1" ] && ln -s /lib/libpcre.so.0.0.1 /lib/libpcre.so.1
    fi
  fi
  # Create the php-fpm run user/group if missing.
  id -g ${run_group} >/dev/null 2>&1
  [ $? -ne 0 ] && groupadd ${run_group}
  id -u ${run_user} >/dev/null 2>&1
  [ $? -ne 0 ] && useradd -g ${run_group} -M -s /sbin/nologin ${run_user}
  # Unpack, configure and build PHP itself.
  tar xzf php-${php80_ver}.tar.gz
  pushd php-${php80_ver} > /dev/null
  make clean
  export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/:$PKG_CONFIG_PATH
  [ ! -d "${php_install_dir}" ] && mkdir -p ${php_install_dir}
  [ "${phpcache_option}" == '1' ] && phpcache_arg='--enable-opcache' || phpcache_arg='--disable-opcache'
  # Apache 2.2 or mod_php mode: build the apxs module; otherwise build php-fpm.
  if [ "${Apache_main_ver}" == '22' ] || [ "${apache_mode_option}" == '2' ]; then
    ./configure --prefix=${php_install_dir} --with-config-file-path=${php_install_dir}/etc \
    --with-config-file-scan-dir=${php_install_dir}/etc/php.d \
    --with-apxs2=${apache_install_dir}/bin/apxs ${phpcache_arg} --disable-fileinfo \
    --enable-mysqlnd --with-mysqli=mysqlnd --with-pdo-mysql=mysqlnd \
    --with-iconv --with-freetype --with-jpeg --with-zlib \
    --enable-xml --disable-rpath --enable-bcmath --enable-shmop --enable-exif \
    --enable-sysvsem --with-curl=${curl_install_dir} --enable-mbregex \
    --enable-mbstring --with-password-argon2 --with-sodium=/usr/local --enable-gd --with-openssl=${openssl_install_dir} \
    --with-mhash --enable-pcntl --enable-sockets --enable-ftp --enable-intl --with-xsl \
    --with-gettext --with-zip=/usr/local --enable-soap --disable-debug ${php_modules_options}
  else
    ./configure --prefix=${php_install_dir} --with-config-file-path=${php_install_dir}/etc \
    --with-config-file-scan-dir=${php_install_dir}/etc/php.d \
    --with-fpm-user=${run_user} --with-fpm-group=${run_group} --enable-fpm ${phpcache_arg} --disable-fileinfo \
    --enable-mysqlnd --with-mysqli=mysqlnd --with-pdo-mysql=mysqlnd \
    --with-iconv --with-freetype --with-jpeg --with-zlib \
    --enable-xml --disable-rpath --enable-bcmath --enable-shmop --enable-exif \
    --enable-sysvsem --with-curl=${curl_install_dir} --enable-mbregex \
    --enable-mbstring --with-password-argon2 --with-sodium=/usr/local --enable-gd --with-openssl=${openssl_install_dir} \
    --with-mhash --enable-pcntl --enable-sockets --enable-ftp --enable-intl --with-xsl \
    --with-gettext --with-zip=/usr/local --enable-soap --disable-debug ${php_modules_options}
  fi
  make ZEND_EXTRA_LIBS="-L${libiconv_install_dir}/lib/ -liconv" -j ${THREAD}
  make install
  # Verify installation by checking for phpize; abort the whole script on failure.
  if [ -e "${php_install_dir}/bin/phpize" ]; then
    echo "${CSUCCESS}PHP installed successfully! ${CEND}"
  else
    rm -rf ${php_install_dir}
    echo "${CFAILURE}PHP install failed, Please Contact the author! ${CEND}"
    kill -9 $$
  fi
  # Put the new PHP bin directory on PATH for login shells.
  [ -z "`grep ^'export PATH=' /etc/profile`" ] && echo "export PATH=${php_install_dir}/bin:\$PATH" >> /etc/profile
  [ -n "`grep ^'export PATH=' /etc/profile`" -a -z "`grep ${php_install_dir} /etc/profile`" ] && sed -i "s@^export PATH=\(.*\)@export PATH=${php_install_dir}/bin:\1@" /etc/profile
  . /etc/profile
  # wget -c http://pear.php.net/go-pear.phar
  # ${php_install_dir}/bin/php go-pear.phar
  # Harden and tune php.ini from the shipped production template.
  [ ! -e "${php_install_dir}/etc/php.d" ] && mkdir -p ${php_install_dir}/etc/php.d
  /bin/cp php.ini-production ${php_install_dir}/etc/php.ini
  sed -i "s@^memory_limit.*@memory_limit = ${Memory_limit}M@" ${php_install_dir}/etc/php.ini
  sed -i 's@^output_buffering =@output_buffering = On\noutput_buffering =@' ${php_install_dir}/etc/php.ini
  #sed -i 's@^;cgi.fix_pathinfo.*@cgi.fix_pathinfo=0@' ${php_install_dir}/etc/php.ini
  sed -i 's@^short_open_tag = Off@short_open_tag = On@' ${php_install_dir}/etc/php.ini
  sed -i 's@^expose_php = On@expose_php = Off@' ${php_install_dir}/etc/php.ini
  sed -i 's@^request_order.*@request_order = "CGP"@' ${php_install_dir}/etc/php.ini
  sed -i "s@^;date.timezone.*@date.timezone = ${timezone}@" ${php_install_dir}/etc/php.ini
  sed -i 's@^post_max_size.*@post_max_size = 100M@' ${php_install_dir}/etc/php.ini
  sed -i 's@^upload_max_filesize.*@upload_max_filesize = 50M@' ${php_install_dir}/etc/php.ini
  sed -i 's@^max_execution_time.*@max_execution_time = 600@' ${php_install_dir}/etc/php.ini
  sed -i 's@^;realpath_cache_size.*@realpath_cache_size = 2M@' ${php_install_dir}/etc/php.ini
  sed -i 's@^disable_functions.*@disable_functions = passthru,exec,system,chroot,chgrp,chown,shell_exec,proc_open,proc_get_status,ini_alter,ini_restore,dl,readlink,symlink,popepassthru,stream_socket_server,fsocket,popen@' ${php_install_dir}/etc/php.ini
  [ -e /usr/sbin/sendmail ] && sed -i 's@^;sendmail_path.*@sendmail_path = /usr/sbin/sendmail -t -i@' ${php_install_dir}/etc/php.ini
  sed -i "s@^;curl.cainfo.*@curl.cainfo = \"${openssl_install_dir}/cert.pem\"@" ${php_install_dir}/etc/php.ini
  sed -i "s@^;openssl.cafile.*@openssl.cafile = \"${openssl_install_dir}/cert.pem\"@" ${php_install_dir}/etc/php.ini
  sed -i "s@^;openssl.capath.*@openssl.capath = \"${openssl_install_dir}/cert.pem\"@" ${php_install_dir}/etc/php.ini
  # Write the opcache config only when the opcache option was selected.
  [ "${phpcache_option}" == '1' ] && cat > ${php_install_dir}/etc/php.d/02-opcache.ini << EOF
[opcache]
zend_extension=opcache.so
opcache.enable=1
opcache.enable_cli=1
opcache.memory_consumption=${Memory_limit}
opcache.interned_strings_buffer=8
opcache.max_accelerated_files=100000
opcache.max_wasted_percentage=5
opcache.use_cwd=1
opcache.validate_timestamps=1
opcache.revalidate_freq=60
;opcache.save_comments=0
opcache.consistency_checks=0
;opcache.optimization_level=0
EOF
  # FPM build: install the init script/service and write php-fpm.conf.
  if [ ! -e "${apache_install_dir}/bin/apxs" -o "${Apache_main_ver}" == '24' ] && [ "${apache_mode_option}" != '2' ]; then
    # php-fpm Init Script
    if [ -e /bin/systemctl ]; then
      /bin/cp ${oneinstack_dir}/init.d/php-fpm.service /lib/systemd/system/
      sed -i "s@/usr/local/php@${php_install_dir}@g" /lib/systemd/system/php-fpm.service
      systemctl enable php-fpm
    else
      /bin/cp sapi/fpm/init.d.php-fpm /etc/init.d/php-fpm
      chmod +x /etc/init.d/php-fpm
      [ "${PM}" == 'yum' ] && { chkconfig --add php-fpm; chkconfig php-fpm on; }
      [ "${PM}" == 'apt-get' ] && update-rc.d php-fpm defaults
    fi
    cat > ${php_install_dir}/etc/php-fpm.conf <<EOF
;;;;;;;;;;;;;;;;;;;;;
; FPM Configuration ;
;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;
; Global Options ;
;;;;;;;;;;;;;;;;;;
[global]
pid = run/php-fpm.pid
error_log = log/php-fpm.log
log_level = warning
emergency_restart_threshold = 30
emergency_restart_interval = 60s
process_control_timeout = 5s
daemonize = yes
;;;;;;;;;;;;;;;;;;;;
; Pool Definitions ;
;;;;;;;;;;;;;;;;;;;;
[${run_user}]
listen = /dev/shm/php-cgi.sock
listen.backlog = -1
listen.allowed_clients = 127.0.0.1
listen.owner = ${run_user}
listen.group = ${run_group}
listen.mode = 0666
user = ${run_user}
group = ${run_group}
pm = dynamic
pm.max_children = 12
pm.start_servers = 8
pm.min_spare_servers = 6
pm.max_spare_servers = 12
pm.max_requests = 2048
pm.process_idle_timeout = 10s
request_terminate_timeout = 120
request_slowlog_timeout = 0
pm.status_path = /php-fpm_status
slowlog = var/log/slow.log
rlimit_files = 51200
rlimit_core = 0
catch_workers_output = yes
;env[HOSTNAME] = $HOSTNAME
env[PATH] = /usr/local/bin:/usr/bin:/bin
env[TMP] = /tmp
env[TMPDIR] = /tmp
env[TEMP] = /tmp
EOF
    # Scale the fpm worker pool to the machine's memory (Mem is in MB).
    if [ $Mem -le 3000 ]; then
      sed -i "s@^pm.max_children.*@pm.max_children = $(($Mem/3/20))@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.start_servers.*@pm.start_servers = $(($Mem/3/30))@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = $(($Mem/3/40))@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = $(($Mem/3/20))@" ${php_install_dir}/etc/php-fpm.conf
    elif [ $Mem -gt 3000 -a $Mem -le 4500 ]; then
      sed -i "s@^pm.max_children.*@pm.max_children = 50@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.start_servers.*@pm.start_servers = 30@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 20@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 50@" ${php_install_dir}/etc/php-fpm.conf
    elif [ $Mem -gt 4500 -a $Mem -le 6500 ]; then
      sed -i "s@^pm.max_children.*@pm.max_children = 60@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.start_servers.*@pm.start_servers = 40@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 30@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 60@" ${php_install_dir}/etc/php-fpm.conf
    elif [ $Mem -gt 6500 -a $Mem -le 8500 ]; then
      sed -i "s@^pm.max_children.*@pm.max_children = 70@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.start_servers.*@pm.start_servers = 50@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 40@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 70@" ${php_install_dir}/etc/php-fpm.conf
    elif [ $Mem -gt 8500 ]; then
      sed -i "s@^pm.max_children.*@pm.max_children = 80@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.start_servers.*@pm.start_servers = 60@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 50@" ${php_install_dir}/etc/php-fpm.conf
      sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 80@" ${php_install_dir}/etc/php-fpm.conf
    fi
    service php-fpm start
  elif [ "${Apache_main_ver}" == '22' ] || [ "${apache_mode_option}" == '2' ]; then
    service httpd restart
  fi
  popd > /dev/null
  # Keep the source tree only if the install failed (for debugging).
  [ -e "${php_install_dir}/bin/phpize" ] && rm -rf php-${php80_ver}
  popd > /dev/null
}
|
#!/bin/sh
# Script for installing Caffe SSD with cuDNN support on Jetson TX1 Development Kits
# Modified from JetsonHacks file and Dockerfiles:
# https://github.com/jetsonhacks/installCaffeJTX1/blob/master/installCaffeCuDNN.sh
# https://github.com/pool1892/docker/blob/master/caffe_pre/Dockerfile
# https://github.com/pool1892/docker/blob/master/ssd/Dockerfile
# Install and compile Caffe on NVIDIA Jetson TX1 Development Kit
# Prerequisites (which can be installed with JetPack 2):
# L4T 24.2 (Ubuntu 16.04)
# OpenCV4Tegra
# CUDA 8.0
# cuDNN v5.1
sudo add-apt-repository universe
sudo apt-get update -y
/bin/echo -e "\e[1;32mLoading Caffe Dependencies.\e[0m"
sudo apt-get install --no-install-recommends build-essential cmake git unzip wget -y
# General Dependencies
sudo apt-get install libprotobuf-dev libleveldb-dev libsnappy-dev \
libhdf5-serial-dev protobuf-compiler -y
sudo apt-get install --no-install-recommends libboost-all-dev -y
# BLAS
# To Do: Switch to OPENBLAS
sudo apt-get install libatlas-base-dev -y
# Remaining Dependencies
sudo apt-get install libgflags-dev libgoogle-glog-dev liblmdb-dev -y
sudo apt-get install python-dev python-numpy python-pip python-setuptools \
python-scipy python-nose python-h5py python-skimage python-matplotlib \
python-pandas python-sklearn python-sympy python-scipy -y
# Allow the current user to access the GPU/video devices.
sudo usermod -a -G video $USER
/bin/echo -e "\e[1;32mCloning Caffe-SSD into $HOME/git directory.\e[0m"
cd $HOME
# Git clone Caffe SSD
git clone https://github.com/weiliu89/caffe.git caffe-ssd
cd caffe-ssd
# Switch to SSD branch
git checkout ssd
/bin/echo -e "\e[1;32mInstalling OpenCV Libraries.\e[0m"
# Install OpenCV Libraries
cd $HOME
sudo apt-get install libopencv-dev
# NOTE(review): assumes an OpenCV4Tegra directory with ocv.sh exists under
# $HOME — confirm before running on a fresh board.
sudo ./OpenCV4Tegra/ocv.sh
/bin/echo -e "\e[1;32mOverlocking Jetson.\e[0m"
# save current settings
#sudo ./jetson_clocks.sh --store default-clocks
# load performance-optimized profile
#sudo ./jetson_clocks.sh
/bin/echo -e "\e[1;32mLoading Caffe pip Dependencies.\e[0m"
pip install --upgrade pip && \
pip --no-cache-dir install ipykernel jupyter sklearn && \
python -m ipykernel.kernelspec
# Install the python requirements listed by the caffe-ssd checkout itself.
cd caffe-ssd/python && for req in $(cat requirements.txt) pydot; do pip install $req; done && cd ..
/bin/echo -e "\e[1;32mPerforming CMake.\e[0m"
mkdir build && cd build
cmake -DCUDA_USE_STATIC_CUDA_RUNTIME=OFF ..
/bin/echo -e "\e[1;32mCompiling Caffe.\e[0m"
make -j"$(nproc)" all
# make install ???
# make symlink ???
#if [[ -z $(cat ~/.bashrc | grep "$HOME/caffe-ssd") ]] ; then
#	echo -e "\n# Adds Caffe to the PATH variable" >> ~/.bashrc
#	echo "export CAFFE_ROOT=$HOME/caffe-ssd" >> ~/.bashrc
#	echo "export PYCAFFE_ROOT=$CAFFE_ROOT/python" >> ~/.bashrc
#	echo "export PYTHONPATH=$PYCAFFE_ROOT:$PYTHONPATH" >> ~/.bashrc
#	echo "export PATH=$CAFFE_ROOT/build/tools:$PYCAFFE_ROOT:$PATH" >> ~/.bashrc
#	source ~/.bashrc
#fi
# Run the tests to make sure everything works
#/bin/echo -e "\e[1;32mRunning Caffe Tests.\e[0m"
#make -j4 runtest
# The following is a quick timing test ...
# tools/caffe time --model=models/bvlc_alexnet/deploy.prototxt --gpu=0
|
def search(query, text):
    """Return the indices of every occurrence of ``query`` in ``text``.

    Overlapping occurrences are reported (e.g. "aa" in "aaa" -> [0, 1]).

    Args:
        query: substring to look for.
        text: string to scan.

    Returns:
        List of 0-based start indices, in increasing order.
    """
    n = len(query)
    if n == 0:
        # Preserve the original behavior: an empty query "matches" at every
        # index of the text (text[i:i] == "" for 0 <= i < len(text)).
        return list(range(len(text)))
    # Improvement: only scan positions where a full-length match can start,
    # instead of iterating over the whole text and comparing short tails.
    return [i for i in range(len(text) - n + 1) if text[i:i + n] == query]
#!/bin/bash
# Recreates the nhs-virtual-visit-test database inside the docker-compose
# postgres service and runs the test migrations.
# Prefix docker-compose with sudo on Linux unless already running as root.
if [[ "$OSTYPE" == "linux-gnu"* && $EUID != 0 ]]
then
    commandPrefix="sudo"
else
    commandPrefix=
fi
# NOTE(review): dropdb fails (harmlessly) when the database does not exist yet,
# which is presumably why this script does not use `set -e`.
$commandPrefix docker-compose exec postgres dropdb nhs-virtual-visit-test -U postgres
$commandPrefix docker-compose exec postgres createdb nhs-virtual-visit-test -U postgres
npm run dbmigratetest up
|
package dev.patika.quixotic95.repository;
import dev.patika.quixotic95.model.Course;
import dev.patika.quixotic95.model.Instructor;
import java.util.List;
/**
 * Repository abstraction for looking up the courses taught by an instructor.
 */
public interface InstructorRepository {
    /** Returns the courses taught by the instructor with the given id. */
    List<Course> findInstructorCoursesById(int id);
    /** Returns the courses taught by the given instructor. */
    List<Course> findInstructorCourses(Instructor object);
}
|
def compute_std_dev(nums):
    """Compute the population standard deviation of a sequence of numbers.

    Uses the population formula (divides by len(nums), not len(nums) - 1),
    matching the original implementation.

    Args:
        nums: non-empty sequence of numbers.

    Returns:
        The population standard deviation as a float.

    Raises:
        ValueError: if ``nums`` is empty (the original raised an unhelpful
            ZeroDivisionError).
    """
    if not nums:
        raise ValueError("compute_std_dev() requires at least one value")
    mean = sum(nums) / len(nums)
    # Sum of squared deviations from the mean, divided by N (population).
    variance = sum((n - mean) ** 2 for n in nums) / len(nums)
    return variance ** 0.5
# Collect DomainBed experiment results for the currently active runs.
# The commented-out lines are earlier experiment directories, kept for reference.
python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/new03/
python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/new02/
python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_new01/
python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_sub01/
python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_sub02/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_sub03/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_sub04/
python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_sub05/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_sub06/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_sub07/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main01/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main02/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main03/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main04/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main05/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main06/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main07/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main08/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/PACS_tuning01/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/PACS_tuning02/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/PACS_tuning03/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/PACS_tuning04/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/PACS_tuning05/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main02/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final_changed/Digits_main03/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_other01/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main01/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main02/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main03/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main04/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main05/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main06/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main07/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_main08/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_sub01/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_sub02/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_sub03/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_sub04/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_sub05/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_sub06/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Digits_sub07/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Cifar10C_tuning01/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Cifar10C_tuning02/
# python -m domainbed.scripts.collect_results --input_dir=../result_domainbed/final/Cifar10C_tuning03/
|
from ade25.base.utils import register_image_scales, package_image_scales
from ade25.widgets.utils import register_content_widgets
def register_and_package(image_scales):
    """Register the supplied image scales, then register the content widgets
    built from the packaged scale definitions.

    Args:
        image_scales: scale definitions accepted by ``register_image_scales``.
    """
    register_image_scales(image_scales)
    register_content_widgets(package_image_scales())
<filename>userdoc/html/search/classes_9.js<gh_stars>1-10
// Auto-generated Doxygen search index (class names) — do not edit by hand.
var searchData=
[
['text2d',['Text2D',['../classText2D.html',1,'']]],
['triangle',['Triangle',['../classTriangle.html',1,'']]]
];
|
from unittest import mock
from lib_kafka import message_segmenter
import unittest
import uuid
import time
class TestMessageSegmenter(unittest.TestCase):
    """Unit tests for lib_kafka.message_segmenter.

    Each segment tuple is expected to be (payload, identifier, count, index),
    with count/index encoded as bytes — inferred from the assertions below;
    confirm against the segmenter implementation.
    """

    def test_segment_message(self):
        # A 1000 KiB string with the default segment size: expect two segments,
        # the first 900 KiB long, sharing one identifier and a total count of 2.
        msg = '0'*(1000*1024)
        all_results = list(message_segmenter.segment_message(msg))
        self.assertEqual(len(all_results), 2) # total segments are 2
        self.assertEqual(len(all_results[0][0]), 900*1024) # first segment is 900*1024 bytes
        self.assertEqual(all_results[0][1], all_results[1][1]) # identifier is equal for both
        self.assertEqual(all_results[0][2], all_results[1][2]) # count is equal for both
        self.assertEqual(all_results[1][2], b'2') # count is 2
        self.assertEqual(all_results[0][3], b'1') # index for first segment is 1
        self.assertEqual(all_results[1][3], b'2') # index for second segment is 2
        # An explicit segment size of 5 splits an 11-char message into 3 parts,
        # for both str and bytes input.
        msg = 'Hello World'
        all_results = list(message_segmenter.segment_message(msg, 5))
        self.assertEqual(len(all_results), 3)
        self.assertEqual(len(all_results[0][0]), 5)
        msg = b'Hello World'
        all_results = list(message_segmenter.segment_message(msg, 5))
        self.assertEqual(len(all_results), 3)
        self.assertEqual(len(all_results[0][0]), 5)
        # Non-str/bytes input must raise ValueError.
        try:
            list(message_segmenter.segment_message(5))
            self.fail('expected exception')
        except Exception as e:
            self.assertEqual(type(e), ValueError)

    def test_combine_segments(self):
        # Feed four segments sharing one identifier; combine_segments should
        # return None until the last segment arrives, then the full message.
        msgs = [b'hello', b' how ', b'are y', b'ou']
        index = 1
        identifier = str(uuid.uuid4()).encode('utf-8')
        count = b'4'
        for msg in msgs:
            result = message_segmenter.combine_segments(msg, {
                message_segmenter.ID: identifier,
                message_segmenter.COUNT: count,
                message_segmenter.INDEX: str(index).encode('utf-8')
            })
            index += 1
            if index <= 4:
                self.assertIsNone(result)
            else:
                self.assertIsNotNone(result)
                self.assertEqual(result, b'hello how are you')

    # NOTE(review): method name has a typo ("segements"); left unchanged since
    # unittest discovers tests by name.
    def test_purge_segements(self):
        # Entry last accessed 599s ago: just inside the purge window (the
        # threshold is presumably 10 minutes — confirm in message_segmenter),
        # so the first purge keeps it; after sleeping 2s it is purged.
        message_segmenter._message_store['abcd'] = {
            'bitset': None,
            'segments': None,
            'last_accessed': time.time() - (10 * 60 - 1)
        }
        message_segmenter._purge_segments()
        self.assertEqual(len(message_segmenter._message_store), 1)
        time.sleep(2)
        message_segmenter._purge_segments()
        self.assertEqual(len(message_segmenter._message_store), 0)
|
#!/bin/bash
# Container init helper: prepare mounted directories, wait until MySQL is
# reachable, then import the database.
echo "Setup mounted directories"
/swtools/init-mounts.sh
# Blocks until the MySQL server accepts connections (presumably — confirm).
/swtools/wait-mysql.sh
echo "Start importing database..."
/swtools/init-db.php
echo "Database imported."
<reponame>ch1huizong/learning
from distutils.core import setup
import sys, os, py2exe
# the key trick with our arguments and Python's sys.path
# Usage: python this_script.py <target_script.py>
# Grab the script to freeze, then overwrite argv[1] so distutils sees the
# 'py2exe' command instead of our filename argument.
name = sys.argv[1]
sys.argv[1] = 'py2exe'
# Make imports relative to the target script resolvable during the build.
sys.path.append(os.path.dirname(os.path.abspath(name)))
# name[:-3] strips the '.py' extension to form the distribution name.
setup(name=name[:-3], scripts=[name])
|
#######################################################################
# Site specific configuration. Override these settings to run on
# your system.
hostname=$(hostname -f)
# Pick corpus location and grid-engine options based on where we run.
if [[ "$hostname" == *".fit.vutbr.cz" ]]; then
    timit=/mnt/matylda2/data/TIMIT/timit
    server=matylda5
    parallel_env=sge
    parallel_opts="-l mem_free=200M,ram_free=200M,$server=1"
    parallel_opts_gpu="-l gpu=1,mem_free=1G,ram_free=1G,hostname=*face*"
elif [[ "$hostname" = *"clsp.jhu.edu" ]]; then
    timit=/export/corpora5/LDC/LDC93S1/timit/TIMIT
    parallel_env=sge
    parallel_opts="-l mem_free=200M,ram_free=200M,hostname=b*|c*"
    parallel_opts_gpu="-l gpu=1,mem_free=1G,ram_free=1G,hostname=b1[123456789]*|c*"
else
    # Fix: corrected "Unkown" typo in the error message.
    echo "Unknown location configuration. Please update the"
    echo "\"setup.sh\" file."
    exit 1
fi
#######################################################################
# Directory structure.
confdir=$(pwd)/conf
datadir=$(pwd)/data
langdir=$datadir/lang
expdir=$(pwd)/exp
#######################################################################
# Features extraction.
fea_njobs=10
fea_parallel_opts="$parallel_opts"
fea_conf=$confdir/features.yml
#######################################################################
# HMM-GMM model parameters.
hmm_conf=$confdir/hmm/hmm.yml
hmm_dir=$expdir/hmm
hmm_align_njobs=20
hmm_align_parallel_opts="$parallel_opts"
hmm_align_iters="1 2 3 4 5 6 7 8 9 10 12 14 16 18 20 23 26 29"
hmm_train_iters=30
hmm_train_lrate=0.1
hmm_train_batch_size=400
hmm_train_epochs=10
hmm_train_opts="--fast-eval --use-gpu"
hmm_train_parallel_opts="$parallel_opts_gpu"
hmm_decode_njobs=10
hmm_decode_parallel_opts="$parallel_opts"
#######################################################################
# VAE-HMM model.
vae_hmm_confdir=$confdir/vae_hmm
vae_hmm_dir=$expdir/vae_hmm
vae_hmm_hmm_conf=$vae_hmm_confdir/hmm.yml
vae_hmm_encoder_conf=$vae_hmm_confdir/encoder.yml
vae_hmm_decoder_conf=$vae_hmm_confdir/decoder.yml
vae_hmm_nflow_conf=$vae_hmm_confdir/normalizing_flow.yml
vae_hmm_nnet_width=512
vae_hmm_latent_dim=30
vae_hmm_encoder_cov_type=isotropic
vae_hmm_decoder_cov_type=diagonal
vae_hmm_align_njobs=30
vae_hmm_align_parallel_opts="$parallel_opts"
vae_hmm_align_iters=$(seq 10 30)
vae_hmm_train_iters=30
vae_hmm_train_epochs_per_iter=10
vae_hmm_train_warmup_iters=0
vae_hmm_train_lrate=5e-2
vae_hmm_train_nnet_lrate=1e-3
vae_hmm_train_batch_size=100
vae_hmm_train_opts="--fast-eval --use-gpu"
vae_hmm_train_parallel_opts="$parallel_opts_gpu"
#######################################################################
# AUD (HMM) model parameters.
aud_hmm_fea_type=fbank
aud_hmm_model_name=aud_hmm
aud_hmm_conf=$confdir/${aud_hmm_model_name}/hmm.yml
aud_hmm_dir=$expdir/$aud_hmm_model_name
aud_hmm_n_units=50
aud_hmm_lm_concentration=1.
aud_hmm_align_njobs=20
aud_hmm_align_parallel_opts="$parallel_opts"
aud_hmm_align_iters="1 2 3 4 5 6 7 8 9 10 12 14 16 18 20 23 26 29"
aud_hmm_train_iters=30
aud_hmm_train_lrate=5e-2
aud_hmm_train_batch_size=50
aud_hmm_train_epochs=10
aud_hmm_train_opts="--fast-eval --use-gpu"
aud_hmm_train_parallel_opts="$parallel_opts_gpu"
aud_hmm_decode_njobs=10
aud_hmm_decode_parallel_opts="$parallel_opts"
#######################################################################
# AUD (VAE-HMM) model parameters.
# Fix: aud_vae_hmm_fea_type was assigned twice in this section with the same
# value; the redundant second assignment has been removed.
aud_vae_hmm_fea_type=logspec
aud_vae_hmm_model=aud_dual_vae_hmm
aud_vae_hmm_confdir=$confdir/$aud_vae_hmm_model
aud_vae_hmm_dir=$expdir/$aud_vae_hmm_model
aud_vae_hmm_encoder_conf=$aud_vae_hmm_confdir/encoder.yml
aud_vae_hmm_decoder_conf=$aud_vae_hmm_confdir/decoder.yml
aud_vae_hmm_nflow_conf=$aud_vae_hmm_confdir/normalizing_flow.yml
aud_vae_hmm_hmm_conf=$aud_vae_hmm_confdir/hmm.yml
aud_vae_hmm_nnet_width=128
aud_vae_hmm_latent_dim=30
aud_vae_hmm_encoder_cov_type=isotropic
aud_vae_hmm_decoder_cov_type=diagonal
aud_vae_hmm_n_units=100
aud_vae_hmm_lm_concentration=1.
aud_vae_hmm_align_njobs=20
aud_vae_hmm_align_parallel_opts="$parallel_opts"
aud_vae_hmm_align_iters=$(seq 2 30)
aud_vae_hmm_train_warmup_iters=0
aud_vae_hmm_train_iters=30
aud_vae_hmm_train_epochs_per_iter=1
aud_vae_hmm_train_nnet_lrate=1e-3
aud_vae_hmm_train_lrate=1e-1
aud_vae_hmm_train_batch_size=50
aud_vae_hmm_train_epochs=30
aud_vae_hmm_train_opts="--fast-eval --use-gpu"
aud_vae_hmm_train_parallel_opts="$parallel_opts_gpu"
aud_vae_hmm_decode_njobs=2
aud_vae_hmm_decode_parallel_opts="$parallel_opts"
#######################################################################
# Score options.
remove_sym="" # Support multiple symbol, e.g. "sil spn nsn"
duplicate="no" # Do not allow adjacent duplicated phones. Only effective at scoring stage.
phone_48_to_39_map=$langdir/phones_48_to_39.txt
|
package com.github.nenomm.ks.ktable.stockmarket;
import org.apache.kafka.streams.kstream.KGroupedStream;
import org.apache.kafka.streams.kstream.KStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.context.annotation.Profile;
/**
 * Spring Cloud Stream listener that consumes stock ticks from the
 * {@link StockSink} input channel. Only active under the "stockMarket" profile.
 */
@Profile("stockMarket")
@EnableBinding(StockSink.class)
public class StockMarketStatistics {
    private static final Logger logger = LoggerFactory.getLogger(StockMarketStatistics.class);

    /**
     * Groups the incoming stock stream by key.
     * NOTE(review): the grouped stream is never consumed — the aggregation is
     * still commented out below, so this listener currently only logs; the raw
     * (unparameterized) KGroupedStream also loses type safety.
     */
    @StreamListener
    public void crunchNumbers(@Input(StockSink.INPUT) KStream<String, StockInfo> stocks) {
        logger.info("Inside stocks listener");
        KGroupedStream grouped = stocks.groupByKey();
        //grouped.aggregate(StockInfo::new, (key, value, aggregate) -> )
    }
}
|
#!/bin/bash
# Deploy the "feeliks" stack to Docker Swarm from the services.yml definitions.
docker stack deploy -c services.yml feeliks
/**
* @author ooooo
* @date 2021/4/9 13:08
*/
#ifndef CPP_0154__SOLUTION1_H_
#define CPP_0154__SOLUTION1_H_
#include <iostream>
#include <vector>
using namespace std;
// LeetCode 154: minimum of a rotated sorted array that may contain duplicates.
// Binary search; when duplicates make both halves ambiguous, the remaining
// window is scanned linearly. Assumes nums is non-empty (problem guarantee).
class Solution {
public:
    int findMin(vector<int> &nums) {
        int n = nums.size();
        int l = 0, r = n - 1;
        // Array not rotated (or rotation is a full cycle): first element wins.
        if (nums[l] < nums[r]) return nums[l];
        int ans = nums[l];
        while (l <= r) {
            int mid = l + (r - l) / 2;
            if (nums[l] < nums[mid]) {
                // Left half [l, mid] is sorted: its minimum is nums[l];
                // anything smaller must lie to the right of mid.
                ans = min(ans, nums[l]);
                l = mid + 1;
            } else if (nums[mid] < nums[r]) {
                // Right half [mid, r] is sorted: nums[mid] is its minimum;
                // a smaller value can only be to the left of mid.
                ans = min(ans, nums[mid]);
                r = mid - 1;
            } else {
                // nums[l] >= nums[mid] >= nums[r] with duplicates: neither half
                // can be ruled out, so fall back to a linear scan of [l, r].
                while (l <= r) {
                    ans = min(ans, nums[l++]);
                }
            }
        }
        return ans;
    }
};
#endif //CPP_0154__SOLUTION1_H_
|
/**
 * Sorts an array in place using insertion sort.
 * @param {Array} arr - array of comparable values (mutated).
 * @returns {Array} the same array, sorted in ascending order.
 */
function insertionSort(arr) {
  for (let i = 1; i < arr.length; i++) {
    const currentVal = arr[i];
    // Shift every larger element of the sorted prefix one slot to the right.
    let j = i - 1;
    while (j >= 0 && arr[j] > currentVal) {
      arr[j + 1] = arr[j];
      j--;
    }
    // Bug fix: in the original, `j` was declared inside the for-loop header,
    // so this statement after the loop threw a ReferenceError.
    arr[j + 1] = currentVal;
  }
  return arr;
}

// Example usage (the original driver referenced an undefined `arr`).
const sample = [5, 3, 4, 1, 6, 9];
console.log(insertionSort(sample)); // prints [1,3,4,5,6,9]
<filename>Quicklook/app/src/main/java/cl/uchile/ing/adi/quicklook/MainActivity.java
package cl.uchile.ing.adi.quicklook;
import android.Manifest;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.os.Environment;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentTransaction;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import org.apache.commons.io.IOUtils;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import cl.uchile.ing.adi.quicklooklib.QuicklookActivity;
/**
 * Demo activity for the Quicklook library: shows a list of bundled demo
 * assets, copies the selected one into the public Downloads directory (after
 * requesting storage permission when needed) and opens it with
 * {@link QuicklookActivity}.
 */
public class MainActivity extends AppCompatActivity implements DemoAssetFragment.OnDemoAssetFragmentListener, ActivityCompat.OnRequestPermissionsResultCallback {
    // Asset sub-directory that holds the bundled demo files.
    private static String FILES_ASSETS_DIR = "files/";
    private static final String QUICKLOOK_ERROR = "cl.uchile.ing.adi.quicklook.QUICKLOOK_ERROR";
    // Deferred action that opens the selected asset once permission is granted.
    Runnable r;
    // Request code used to match the permission result callback.
    private static int REQUEST_FILE_PERMISSIONS = 121;
    @Override protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Fragment fragment = DemoAssetFragment.newInstance();
        FragmentTransaction t = getSupportFragmentManager().beginTransaction();
        t.replace(R.id.main_activity_fragment, fragment, "MainQuickLook");
        t.commit();
    }
    // Called when the user taps a demo asset in the list fragment.
    @Override public void onAssetSelected(final String item) {
        final File downloadAsset = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS + "/" + item);
        r = new Runnable() {
            public void run() {
                String urlForAsset = "";
                String mimetype = null;
                // Reuse the copy in Downloads if present, else copy it there.
                if(downloadAsset.exists()) {
                    urlForAsset = downloadAsset.getAbsolutePath();
                }
                else{
                    if(copyAssetToDownload(FILES_ASSETS_DIR, item,downloadAsset)) {
                        urlForAsset = downloadAsset.getAbsolutePath();
                    }
                }
                // NOTE(review): this map is always empty, so mimetype stays null
                // and the lookup below is dead code; the private mime(File)
                // helper further down is also unused — possibly meant here.
                HashMap<String,String> extensions = new HashMap<>();
                String[] extsplit = urlForAsset.split("\\.");
                String ext = extsplit[extsplit.length-1];
                if (extensions.containsKey(ext)) {
                    mimetype = extensions.get(ext);
                }
                openIntent(urlForAsset, mimetype);
            }
        };
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
                != PackageManager.PERMISSION_GRANTED) {
            // Storage permission has not been granted.
            ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
                    REQUEST_FILE_PERMISSIONS);
        } else {
            // Storage permissions are already available, run the deferred open.
            Log.i("quicklook66",
                    "STORAGE permission has already been granted. Displaying files.");
            r.run();
        }
    }
    /**
     * Launches QuicklookActivity for the given local file path, attaching the
     * MIME type as an extra when one is known.
     */
    public void openIntent(String urlForAsset,String mimetype) {
        Intent i = new Intent(this, QuicklookActivity.class);
        i.putExtra("localurl", urlForAsset);
        if (mimetype!=null) {
            Bundle b = new Bundle();
            b.putString("mime-type",mimetype);
            i.putExtra("extra", b);
        }
        String s = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).getAbsolutePath();
        QuicklookActivity.setDownloadPath(s + "/hola/");
        startActivity(i);
    }
    // Opens the public Downloads directory in QuicklookActivity (layout onClick handler).
    public void openDownloads(View v) {
        Intent i = new Intent(this, QuicklookActivity.class);
        String s = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).getAbsolutePath();
        i.putExtra("localurl", s);
        QuicklookActivity.setDownloadPath(s+"/hola/");
        startActivity(i);
    }
    /**
     * Copies a bundled asset to the destination file; returns false on failure.
     * NOTE(review): the InputStream returned by getAssets().open(...) is never
     * closed — only the FileOutputStream is.
     */
    private boolean copyAssetToDownload(String assetPath, String assetName, File destinationFile){
        try {
            FileOutputStream fos = new FileOutputStream(destinationFile);
            int copied = IOUtils.copy(getAssets().open(assetPath+assetName), fos);
            fos.close();
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }
    // Maps a file extension to a MIME type, defaulting to text/plain.
    // NOTE(review): currently unused (see onAssetSelected).
    private String mime(File file){
        String[] pieces = file.getName().split("\\.");
        String ext = pieces[pieces.length-1];
        if(ext.equals("png")) return "image/png";
        if(ext.equals("pdf")) return "application/pdf";
        if(ext.equals("txt")) return "text/plain";
        if(ext.equals("zip")) return "application/zip";
        return "text/plain";
    }
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
            @NonNull int[] grantResults) {
        if (requestCode == REQUEST_FILE_PERMISSIONS) {
            // BEGIN_INCLUDE(permission_result)
            // Received permission result for the storage permission request.
            Log.i("quicklook66", "Received response for Write External permission request.");
            // Check if the only required permission has been granted
            if (grantResults.length == 1 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                // Permission granted: run the deferred asset-open action.
                Log.i("quicklook66", "WRITE_EXTERNAL permission has now been granted. Showing preview.");
                Toast.makeText(this, "Permisos concedidos :D",Toast.LENGTH_LONG).show();
                r.run();
            } else {
                Log.i("quicklook66", "WRITE_EXTERNAL permission was NOT granted.");
                Toast.makeText(this, "Permisos no concedidos :(",Toast.LENGTH_LONG).show();
            }
        } else {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
    }
}
|
# Report the directory the script finished in and append a timestamp to the run log.
echo "Done: $(pwd)"
# NOTE(review): appending to /var/rlogs.log requires write access to /var — confirm.
date >> /var/rlogs.log
|
<gh_stars>1-10
#ifndef vx_H
#define vx_H
#include <iostream>
#include <vector>
#include <cmath>
#include <fstream>
#include <string>
#include <stdio.h>
#include <stdlib.h>
#include <sstream>
#include <cstdlib>
#include <map>
// Dense row-major matrix ("vectrix") of T backed by a flat std::vector,
// providing matrix/vector/scalar arithmetic plus basic statistics.
// Template definitions live in vectrix.cpp, included at the end of this header.
template <typename T>
class vx
{
    // defining class members
private:
    std::vector<T> vectrix; // flat row-major storage (rows * cols elements)
    unsigned rows;
    unsigned cols;
public:
    vx(unsigned _rows, unsigned _cols, const T &_initial); // takes # rows, # cols, and the initial value for each cell
    vx(const vx<T> &elem); // copy constructor
    vx(const std::vector<T> &elem); // builds a matrix from a 1D vector (layout defined in vectrix.cpp — confirm)
    vx(const std::vector<std::vector<T>> &elem); // takes 2D vector and returns as matrix
    virtual ~vx(); // destructor
    // operator overloading for math operations
    // matrix math
    vx<T> &operator=(const vx<T> &elem);
    vx<T> operator+(const vx<T> &elem);
    vx<T> &operator+=(const vx<T> &elem);
    vx<T> operator*(const vx<T> &elem);
    vx<T> &operator*=(const vx<T> &elem);
    vx<T> operator-(const vx<T> &elem);
    vx<T> &operator-=(const vx<T> &elem);
    vx<T> matmul(const vx<T> &elem1, const vx<T> &elem2);
    vx<T> transpose();
    float sum();
    float mean();
    float median() const; // NOTE(review): only median() is const; sum/mean/SD/max/min look const-able too
    float SD();
    std::vector<T> flatten(); // flattens the matrix into a vector
    T max();
    T min();
    vx<T> &absolute(); // absolute value of a matrix
    std::vector<vx<T>> balance(const vx<T> &elem1, const vx<T> &elem2, const T value); // balances out two matrices such that their result is of the same order
    std::vector<T> diag_vec(); // returns the diagonal of a matrix
    // vector operations
    float dot_product(const vx<T> &elem); // dot product of two vectors
    float cross_product(const vx<T> &elem); // cross product of two vectors
    // scalar to matrix math
    vx<T> operator+(const T elem);
    vx<T> &operator+=(const T elem);
    vx<T> operator-(const T elem);
    vx<T> &operator-=(const T elem);
    vx<T> operator*(const T elem);
    vx<T> &operator*=(const T elem);
    vx<T> operator/(const T elem);
    vx<T> &operator/=(const T elem);
    vx<T> operator^(const T elem); // NOTE(review): presumably element-wise power; beware ^'s low precedence at call sites
    // utility functions
    std::vector<unsigned> dims(bool print = true) const; // for matrix
    unsigned nitems() const; // for matrix
    unsigned nrows() const; // for matrix
    unsigned ncols() const; // for matrix
    vx<T> reshape(const unsigned rows, const unsigned cols);
    bool is_ragged(); // ragged meaning rows of different lengths
    void print() const; // prints the matrix
    T &operator()(const unsigned &row, const unsigned &col = 0); // accessing specific index of matrix
    const T &operator()(const unsigned &row, const unsigned &col = 0) const; // accessing specific index of (const) matrix
    unsigned digit_count() const; // max digits of elements in matrix
};
/*
DEFINING NON CLASS FUNCTIONS AND VARIABLES:
*/
static const double pi = 3.14159265358979323846;
static const double e = 2.71828182845904523536;
// gets the index given the cols and rows
unsigned GetIndex(const unsigned &num_cols, const unsigned &_row, const unsigned &_col);
// random number generation
std::vector<double> getRandom_decimal(const unsigned n);
// vector math
template <typename T>
std::vector<T> &add(std::vector<T> &elem1, std::vector<T> &elem2); // add two vectors
template <typename T>
std::vector<T> &add(std::vector<T> &elem1, const T &elem2); // add each member of a vector with a scalar
template <typename T>
std::vector<T> &subtract(std::vector<T> &elem1, const T &elem2); // subtract each member of a vector with a scalar
template <typename T>
std::vector<T> &subtract(std::vector<T> &elem1, std::vector<T> &elem2); // subtract two vectors
template <typename T>
std::vector<T> &scale(std::vector<T> &elem1, const T &elem2); // multiplies each member by the scale factor; to shrink the vector apply the inverse of the scalar
template <typename T>
float sum(const std::vector<T> &elem);
template <typename T>
float sum(const std::vector<std::vector<T>> &elem);
template <typename T>
float mean(const std::vector<T> &elem);
template <typename T>
float mean(const std::vector<std::vector<T>> &elem);
template <typename T>
float SD(std::vector<T> &elem);
template <typename T>
float SD(std::vector<std::vector<T>> &elem);
template <typename T>
vx<T> expand(const std::vector<T> &elem); // expands a 1D vector into a vx row Matrix
template <typename T>
T max(const std::vector<T> &elem);
template <typename T>
T max(const std::vector<std::vector<T>> &elem);
template <typename T>
T min(const std::vector<T> &elem);
template <typename T>
T min(const std::vector<std::vector<T>> &elem);
template <typename T>
float angle_between(const std::vector<T> &elem1, const std::vector<T> &elem2); // angle between two vectors; in degrees
template <typename T>
float distance(const std::vector<T> &elem); // Calculates the distance between two points (endpoints of the line); AKA vector magnitude
template <typename T>
float vec_angle(const std::vector<T> &elem); // gets the angle (direction) of a vector; in degrees
template <typename T>
float slope(const std::vector<T> &elem);
template <typename T>
float rad_to_deg(T value);
template <typename T>
float deg_to_rad(T value);
template <typename T>
std::vector<unsigned> dims(const std::vector<std::vector<T>> &elem, bool print = true); // for 2D vector
template <typename T>
bool is_ragged(const std::vector<std::vector<T>> &elem); // ragged meaning rows of different lengths
template <typename T>
void print(const T &elem); // prints scalar
template <typename T>
void print(const std::vector<T> &elem); // prints vector
template <typename T>
void print(const std::vector<std::vector<T>> &elem); // prints 2D vector
template <typename T>
T &absolute(T &value);
template <typename T>
std::vector<T> &absolute(std::vector<T> &elem);
template <typename T>
std::vector<std::vector<T>> &absolute(std::vector<std::vector<T>> &elem);
template <typename T>
unsigned nitems(const std::vector<std::vector<T>> &elem);
template <typename T>
unsigned nrows(const std::vector<std::vector<T>> &elem);
template <typename T>
unsigned ncols(const std::vector<std::vector<T>> &elem);
template <typename T>
std::vector<T> range1D(const T stop, const T start, const T increment); // range vector
template <typename T>
vx<T> range2D(const T stop, const T start, const T increment, const unsigned numrows); // range matrix
template <typename T>
unsigned digit_count(const T &num);
template <typename T>
unsigned digit_count(const std::vector<T> &elem); // returns the max digit count in vector
template <typename T>
std::vector<T> &sort(std::vector<T> &elem);
template <typename T>
float median(std::vector<T> &elem);
#include "vectrix.cpp"
#endif |
#!/bin/bash
# Release helper: rebuilds from a clean dependency tree, regenerates license
# data and the production build, then pins dependencies via npm shrinkwrap.
# Aborts on the first failing command.
set -e
# Make the operator confirm the version bump before anything destructive runs.
while true; do
    read -p "Have you checked that you have updated the version number in package.json?" yn
    case $yn in
        [Yy]* ) break;;
        [Nn]* ) exit;;
        * ) echo "Please answer yes or no.";;
    esac
done
echo "Removing node_modules for ensuring dev dependencies..."
rm -rf node_modules/
# Prefer yarn when available, fall back to npm.
(which yarn > /dev/null && yarn) || npm install
echo "Cleaning 'build' directory..."
rm -fr ./build
echo "Updating dependencies licenses..."
npm run fix:licenses
git add client/components/about/dependencies.json
echo "Building..."
npm run build:prod
# Avoid shipping unused files
echo "Deleting temporary sprite files..."
rm -rf ./build/spritesmith-generated
git add -f build/
echo "Removing dev dependencies and installing production dependencies before shrinkwrap..."
rm -rf node_modules/ npm-shrinkwrap.json
npm install --production # yarn doesn't allow shrinkwrap.
npm shrinkwrap
git add npm-shrinkwrap.json
git status
echo "This is what is about to be committed. Check this and commit."
echo "Then, do:"
echo " $ npm publish"
echo " $ yarn docker:release"
echo " $ docker push bnjbvr/kresus"
|
#!/usr/bin/env bash
# Runs each matching Go test in its own docker container for isolation.
# Expects FCCD_PACKAGE_DIR, FCCD_DOCKER_IMAGE, FCCD_TESTNAME_REGEX and
# optionally EXTRAGOARGS / FCCD_DOCKER_RUN_ARGS to be set by the caller.
set -e
# test_names returns (via its stdout) a list of test names that match the provided regular expression
test_names () {
    docker run --rm \
        --workdir="/firecracker-containerd/${FCCD_PACKAGE_DIR}" \
        "${FCCD_DOCKER_IMAGE}" \
        "go test -list ." | sed '$d' | grep "${FCCD_TESTNAME_REGEX}"
    # sed '$d' drops the trailing summary line emitted by `go test -list`.
}
# For each test case with a name matching the provided regex, run each one in its own isolated docker container
for TESTNAME in $(test_names); do
    echo "TESTNAME = ${TESTNAME}"
    # The command is single-quoted on purpose: TESTNAME/EXTRAGOARGS expand
    # inside the container from the --env values passed above.
    docker run --rm \
        --workdir="/firecracker-containerd/${FCCD_PACKAGE_DIR}" \
        --env TESTNAME="${TESTNAME}" \
        --env EXTRAGOARGS="${EXTRAGOARGS}" \
        ${FCCD_DOCKER_RUN_ARGS} \
        "${FCCD_DOCKER_IMAGE}" \
        'go test ${EXTRAGOARGS} -run "^${TESTNAME}$" .'
done
|
<reponame>premss79/zignaly-webapp<filename>src/components/Forms/ConfirmDeleteAccountForm/index.js
// Re-export ConfirmDeleteAccountForm as this directory's default export.
export { default } from "./ConfirmDeleteAccountForm";
|
from rest_framework import serializers
from contact import models
class MessageSerializer(serializers.ModelSerializer):
    """DRF serializer exposing the contact-form fields of the Message model."""

    class Meta:
        model = models.Message
        # Fields accepted from / rendered to the API client.
        fields = [
            "name",
            "email",
            "phone",
            "country",
            "city",
            "subject",
            "message",
        ]
|
from django.conf.urls import url
from mainapp.views import IndexView, UploadView
# URL routes for mainapp.
# NOTE(review): django.conf.urls.url() is deprecated in modern Django in
# favour of re_path()/path() — confirm the project's Django version.
urlpatterns = [
    # Landing page.
    url(r'^$', IndexView.as_view(), name='index'),
    # Upload endpoint addressed by an alphanumeric key.
    url(r'^(?P<key>[a-zA-Z0-9]+)$', UploadView.as_view(), name='upload'),
]
|
# Evaluate the 1024+0+512 N-VB-ADJ-ADV model on the WikiText-103 raw validation
# set (batch size 1, padded first two thirds, scoring only the last element).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-N-VB-ADJ-ADV/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-N-VB-ADJ-ADV/1024+0+512-only-pad-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function pad_first_two_thirds_full --eval_function last_element_eval
<filename>src/components/MdOutput.js
import React from 'react';
import marked from 'marked';
import PropTypes from 'prop-types';
// Renders an array of markdown strings as concatenated HTML, or a placeholder
// paragraph when the array is empty.
const MdOutput = (props) => {
    // Convert each markdown string to HTML and join the fragments.
    const markAll = (values) => {
        return values.map(val => marked(val)).join('');
    }
    const mark = (values) => (values.length !== 0) ? markAll(values) : '<p> Write something!</p>'
    // NOTE(review): marked() output is injected via dangerouslySetInnerHTML
    // without sanitization — an XSS risk if props.values can contain
    // untrusted input; consider passing it through a sanitizer (e.g. DOMPurify).
    return(
        <div className="md-output" dangerouslySetInnerHTML={{__html: mark(props.values)}}>
        </div>
    )
}
// values: markdown source strings to render.
MdOutput.propTypes = {
    values: PropTypes.array.isRequired
}
export default MdOutput;
#!/bin/bash
#
# Apache HTTPD & NGINX Access log parsing made easy
# Copyright (C) 2011-2018 Niels Basjes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Run the demo Pig script in local mode with parameters from etc/AccessLogs.properties.
pig -x local -param_file etc/AccessLogs.properties pig/demo.pig
<reponame>MarcelBraghetto/AndroidNanoDegree2016
package com.lilarcor.popularmovies.testhelpers;
import android.app.Application;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.test.runner.AndroidJUnitRunner;
/**
* Created by <NAME> on 30/07/15.
*
* We need to use a custom test runner when launching the
* Espresso tests so that the main application class can
* be redefined and pointed at an Espresso specific version.
*
* For our test suite, the 'EspressoMainApp' class will be
* launched as the Android application, instead of the
* default 'MainApp' class.
*
* This lets us override methods and properties that might
* be needed specifically for testing (such as a custom
* network request provider).
*/
@SuppressWarnings("unused") // instantiated reflectively by the instrumentation framework
public class CustomEspressoRunner extends AndroidJUnitRunner {
    @Override
    public Application newApplication(@NonNull ClassLoader classLoader, String className, Context context) throws InstantiationException, IllegalAccessException, ClassNotFoundException {
        // Ignore the supplied className and always boot the Espresso-specific
        // application class instead of the production MainApp.
        return super.newApplication(classLoader, EspressoMainApp.class.getName(), context);
    }
}
<gh_stars>1-10
#!/usr/bin/env python3
import sys
import argparse
import contextlib
import collections
import binascii
import struct
import json
import hid
"""
Dualshock command-line utility
"""
PairingInfo = collections.namedtuple('PairingInfo', ['addr', 'paired_to'])
IMUCalib = collections.namedtuple('IMUCalib', ['gxbias', 'gybias', 'gzbias', 'gxp', 'gxm', 'gyp', 'gym', 'gzp', 'gzm', 'gsp', 'gsm', 'axp', 'axm', 'ayp', 'aym', 'azp', 'azm'])
class BTAddr:
    """Bluetooth MAC address kept in device (little-endian) byte order,
    with a colon-separated, most-significant-first string form."""
    def __init__(self, binary):
        self.binary = binary
        # The device reports bytes in reverse order; display MSB first.
        self.string = ':'.join('%02X' % octet for octet in reversed(binary))
    @classmethod
    def from_string(cls, value):
        """Parse 'AA:BB:CC:DD:EE:FF' back into device byte order."""
        raw = binascii.unhexlify(value.replace(':', ''))
        return BTAddr(raw[::-1])
class Controller:
    """Base class wrapping an open hidapi handle to a Sony controller.

    Subclasses supply the device-specific feature-report IDs and layouts.
    """
    def __init__(self, pid, handle):
        # pid: USB product id; handle: an already-open hid.device (caller owns it).
        self.pid = pid
        self.handle = handle
    def name(self):
        """Human-readable device name."""
        raise NotImplementedError
    def pair(self, remote_addr, key):
        """Write the host BT address and link key into the controller."""
        raise NotImplementedError
    def get_pairing_info(self):
        """Return PairingInfo(addr, paired_to) read from the device."""
        raise NotImplementedError
    def get_imu_calibration(self):
        """Return IMUCalib with gyro/accelerometer calibration values."""
        raise NotImplementedError
    def known_reports(self):
        """Feature-report IDs that extract_reports should dump."""
        raise NotImplementedError
    def show_info(self):
        """Print name, BT addresses and IMU calibration to stdout."""
        print(self.name())
        info = self.get_pairing_info()
        imu = self.get_imu_calibration()
        print(' BT address: %s' % info.addr.string)
        print(' Currently paired to: %s' % info.paired_to.string)
        print(' IMU calibration:')
        print(' Gyro')
        print(' Bias: %d - %d - %d' % (imu.gxbias, imu.gybias, imu.gzbias))
        print(' Max: %d - %d - %d' % (imu.gxp, imu.gyp, imu.gzp))
        print(' Min: %d - %d - %d' % (imu.gxm, imu.gym, imu.gzm))
        print(' Speed: %d - %d' % (imu.gsp, imu.gsm))
        print(' Accelerometer')
        print(' Max: %d - %d - %d' % (imu.axp, imu.ayp, imu.azp))
        print(' Min: %d - %d - %d' % (imu.axm, imu.aym, imu.azm))
    def print_report(self, report_id):
        """Hex-dump a single feature report (up to 64 bytes), 16 bytes per row."""
        data = bytes(self.handle.get_feature_report(report_id, 64))
        print('Report 0x%02x; length=%d' % (report_id, len(data)))
        for offset in range((len(data) + 15) // 16):
            s1 = ' '.join(['%02x' % value for value in data[offset * 16:offset * 16 + 16]])
            # NOTE(review): the printable window 32 <= value < 64 excludes all
            # letters; 32 <= value < 127 was probably intended — confirm.
            s2 = ''.join([chr(value) if 32 <= value < 64 else '.' for value in data[offset * 16:offset * 16 + 16]])
            print('%04x %-47s %s' % (offset * 16, s1, s2))
    def extract_reports(self, addr=None):
        """Dump all known feature reports as JSON (hex strings keyed by id).

        When addr is given, the controller's own address inside the pairing
        report is replaced before dumping (useful for anonymized captures).
        """
        data = { 'pid': self.pid, 'reports': {} }
        for report_id in self.known_reports():
            report = list(self.handle.get_feature_report(report_id, 64))
            if addr is not None:
                self._replace_addr(report, addr)
            data['reports']['%02x' % report_id] = binascii.hexlify(bytes(report)).decode('ascii')
        print(json.dumps(data, indent=2))
    def _replace_addr(self, report, addr):
        """Overwrite the device address bytes inside a pairing report (in place)."""
        raise NotImplementedError
class Dualshock(Controller):
    """DS4: pairing via reports 0x13/0x12, IMU calibration in report 0x02."""
    def name(self):
        return 'Dualshock (PID=0x%04x)' % self.pid
    def pair(self, remote_addr, key):
        # Report 0x13: 6-byte host address followed by the 16-byte link key.
        self.handle.send_feature_report(b'\x13' + remote_addr.binary + key)
    def get_pairing_info(self):
        data = bytes(self.handle.get_feature_report(0x12, 64))
        return PairingInfo(BTAddr(data[1:7]), BTAddr(data[10:16]))
    def get_imu_calibration(self):
        # 17 little-endian int16 values; the report's last two bytes are
        # dropped (presumably a checksum — confirm against the DS4 layout).
        data = bytes(self.handle.get_feature_report(0x02, 64))
        return IMUCalib(*struct.unpack('<%s' % ('h' * 17), data[1:-2]))
    def known_reports(self):
        return (0x02, 0xa3, 0x12, 0x81)
    def _replace_addr(self, report, addr):
        # Report 0x12 carries the controller's own address in bytes 1..6.
        if report[0] == 0x12:
            report[1:7] = addr.binary
class Dualsense(Controller):
    """DualSense: pairing via reports 0x0a/0x09, IMU calibration in 0x05."""
    def name(self):
        return 'Dualsense (PID=0x%04x)' % self.pid
    def pair(self, remote_addr, key):
        self.handle.send_feature_report(b'\x0a' + remote_addr.binary + key)
    def get_pairing_info(self):
        data = bytes(self.handle.get_feature_report(0x09, 64))
        return PairingInfo(BTAddr(data[1:7]), BTAddr(data[10:16]))
    def get_imu_calibration(self):
        # 17 little-endian int16 values in bytes 1..34 of report 0x05.
        data = bytes(self.handle.get_feature_report(0x05, 64))
        return IMUCalib(*struct.unpack('<%s' % ('h' * 17), data[1:35]))
    def known_reports(self):
        return (0x05, 0x20, 0x09)
    def _replace_addr(self, report, addr):
        # Report 0x09 carries the controller's own address in bytes 1..6.
        if report[0] == 0x09:
            report[1:7] = addr.binary
@contextlib.contextmanager
def find_controller():
    """Context manager yielding the first supported Sony controller found.

    Opens the hid handle for the lifetime of the `with` block and closes it
    afterwards. Raises RuntimeError when no supported device is attached.
    """
    # Sony vendor id 0x054c; map product ids to their wrapper classes.
    supported = {0x09cc: Dualshock, 0x0ce6: Dualsense}
    for dev in hid.enumerate():
        vid, pid = dev['vendor_id'], dev['product_id']
        if vid == 0x054c and pid in supported:
            handle = hid.device()
            handle.open(vid, pid)
            try:
                yield supported[pid](pid, handle)
            finally:
                handle.close()
            break
    else:
        # Was 'No Dualshock found' — misleading since Dualsense is also probed.
        raise RuntimeError('No compatible controller found')
def show_info(args):
    """CLI handler: print identity and calibration of the connected pad."""
    with find_controller() as controller:
        controller.show_info()
def pair_controller(args):
    """CLI handler: write the host BT address (and link key) to the controller.

    When no key is supplied, a built-in default is used and echoed back so the
    host side can be configured to match.
    """
    if args.key is None:
        # NOTE(review): b'\<KEY>' looks like a redacted placeholder ('\<' is
        # not a valid escape, so this literal keeps the backslash); the real
        # 16-byte default link key needs to be restored here.
        key = b'\<KEY>'
    else:
        key = binascii.unhexlify(args.key)
        if len(key) != 16:
            raise RuntimeError('Invalid key length')
    with find_controller() as ctrl:
        ctrl.pair(BTAddr.from_string(args.btaddr), key)
    if args.key is None:
        # Echo the key actually written, so the host can pair with it.
        print('Link key: %s' % binascii.hexlify(key).decode('ascii').upper())
def print_report(args):
    """CLI handler: fetch one feature report and hex-dump it.

    The report id is parsed as hex when prefixed with '0x', else as decimal.
    """
    raw = args.reportid
    base = 16 if raw.startswith('0x') else 10
    report_id = int(raw, base)
    with find_controller() as ctrl:
        ctrl.print_report(report_id)
def extract_reports(args):
    """CLI handler: dump known feature reports as JSON, optionally replacing
    the controller's own MAC with the one given on the command line."""
    target_addr = BTAddr.from_string(args.address) if args.address is not None else None
    with find_controller() as ctrl:
        ctrl.extract_reports(addr=target_addr)
def main(argv):
    """Parse command-line arguments and dispatch to the chosen subcommand."""
    parser = argparse.ArgumentParser()
    # required=True: previously, running with no subcommand crashed with an
    # AttributeError on args.func instead of printing a usage error.
    subparsers = parser.add_subparsers(dest='command', required=True)
    parser_info = subparsers.add_parser('info', help='Display controller information')
    parser_info.set_defaults(func=show_info)
    parser_pair = subparsers.add_parser('pair', help='Set controller pairing state')
    parser_pair.add_argument('btaddr', help='BT address of the host')
    parser_pair.add_argument('-k', '--key', help='Link key (16 bytes, hex)', default=None)
    parser_pair.set_defaults(func=pair_controller)
    parser_print = subparsers.add_parser('print', help='Print a specific feature report')
    parser_print.add_argument('reportid', help='Report ID in either hex (with 0x prefix) or decimal')
    parser_print.set_defaults(func=print_report)
    parser_extract = subparsers.add_parser('extract', help='Extract all known reports in JSON format')
    parser_extract.add_argument('-a', '--address', help='Replace controller MAC address with this one in the pairing report', default=None)
    parser_extract.set_defaults(func=extract_reports)
    args = parser.parse_args(argv)
    args.func(args)
if __name__ == '__main__':
    main(sys.argv[1:])
|
class GreetingForm extends React.Component {
constructor(props) {
super(props);
this.state = {
name: ''
};
}
handleChange = (event) => {
this.setState({name: event.target.value});
}
handleSubmit = (event) => {
alert('Hello there, ' + this.state.name);
event.preventDefault();
}
render() {
return (
<form onSubmit={this.handleSubmit}>
<label>
Name:
<input type="text" value={this.state.name} onChange={this.handleChange} />
</label>
<input type="submit" value="Submit" />
</form>
);
}
} |
/*
* Copyright © 2019 <NAME>.
*/
package apps
import (
"errors"
"github.com/hedzr/voxr-api/api/v10"
"github.com/hedzr/voxr-api/models"
"github.com/hedzr/voxr-api/util"
"github.com/hedzr/voxr-common/tool"
"github.com/hedzr/voxr-lite/misc/impl/dao"
"github.com/hedzr/voxr-lite/misc/impl/mq"
"github.com/sirupsen/logrus"
"github.com/streadway/amqp"
"plugin"
"strings"
"time"
)
type (
	// holder owns the set of loaded app plugins and the channels that drive
	// its background looper.
	holder struct {
		apps map[string]Plugin
		exited bool
		exitCh chan bool
		externalExitCh chan bool
		appAdded chan *models.App
		appRemoved chan *models.App
		appUpdated chan *models.App
	}
	// Plugin abstracts a loaded app: either a Go plugin (with a main entry
	// symbol) or another callback style; IsCared filters by global event.
	Plugin interface {
		Model() *models.TopicApp //
		IsPlugin() bool
		Plugin() *plugin.Plugin
		PluginMainEntry() plugin.Symbol
		IsCared(event v10.GlobalEvents) bool
	}
)

// holderCore is the process-wide singleton managed by Start/Stop.
var holderCore *holder

// Start creates the singleton holder and launches its loader goroutine.
func Start() {
	holderCore = newHolder()
	go holderCore.loader()
}

// Stop asks the looper to exit, unless the holder is marked exited.
// NOTE(review): newHolder initializes exited to true, so as written this
// send never fires — confirm the intended initial value of exited.
func Stop() {
	if !holderCore.exited {
		holderCore.exitCh <- true
	}
}
// newHolder builds an empty holder with its channels initialized.
func newHolder() *holder {
	// Named fields: the previous positional literal made it easy to misread
	// which bool/chan landed in which field.
	return &holder{
		apps: make(map[string]Plugin),
		// NOTE(review): exited starts true, which makes Stop() a no-op until
		// something flips it — preserved as-is, confirm intent.
		exited: true,
		exitCh: make(chan bool, 3),
		// Initialized (was nil) so a future sender/closer cannot hit a nil
		// channel; an open, never-written channel behaves the same in select.
		externalExitCh: make(chan bool),
		appAdded:   make(chan *models.App, 10),
		appRemoved: make(chan *models.App, 10),
		appUpdated: make(chan *models.App, 10),
	}
}
// loader performs startup: load persisted apps, start the background looper,
// then block handling app add/remove global events from MQ.
func (h *holder) loader() {
	// load all apps
	h.loadApps()
	// starting the run looper
	go h.run()
	// monitor apps add/remove global events
	h.monitorEvents()
}
// TODO 装入所有Topics的关联Apps到内存是不可能的事情,因此需要一个分批装载、或者不装载的方案。
//
// 总的Apps或许是有一个限度的,例如 8000K 个,此时即使仅装载 apps 到内存也是不可能的事情,只能实时检索 TopicApp 表,实时装载对应的 app 并在执行完毕之后卸载该 app。
// 由于 golang 并不支持 unload plugin 以释放相应的资源,因此实时检索、装载、执行、卸载的流程并没有能够复用有限的内存。
// 因此,需要进一步地解决问题:
// 1. 定义 vx-executor 微服务,负责 apps 组件的载入和执行。
// 2. 采用实时检索的策略,在 vx-executor 微服务中载入 apps 并执行。
// 3. 当 vx-executor 装载的 apps 超过一个阈值,例如 8K 个,则通知 管理器停止装载新的 apps,并启动 devops 伸缩机制,建立 vx-executor 的新实例(新的vm-host, 或者新的 container 资源)
// 4. 所以 vx-executor 的集群实质上实现了分批装载全部 apps 的效果。
// 5. 假设每个 vm/container (设为 8G RAM, 8Core) 的 vx-executor 能够容纳和管理 80K 个 apps 实例,那么100台 vm/containers 将能够承载 8000K 个 apps 实例,已经具备足够的可行性。
//
// 当前,仅实现了基本的装载算法,在 apps 不超过 8K 个之前,暂时不考虑实现上述的方案。
//
// 第二,对于提供RESTful回调接口的 app,则考虑不同的策略:
// 此时,3-10 个 vx-executor 实例应该初步满足需要了,这个小型集群负责发出 RESTful 调用、收集结果,实际的 apps 被部署在 IM 核心之外,其消耗的资源不再核心内被考虑。
// IM平台可以为 apps 提供宿主的功能,用户开发了 app 之后可以采用 IM 运营商所提供的服务器资源完成部署,这时已经去到商业收费的谈话场景,因此这里不再深入探讨了。
//
// loadApps pulls every topic-app binding from the DB and registers the ones
// with a usable callback (Go plugin file or RESTful endpoint).
func (h *holder) loadApps() {
	dx := dao.NewTopicAppDao()
	ret, err := dx.ListEager("1=1")
	if err != nil {
		logrus.Fatalf("[apps] CAN'T load apps from DB: %v", err)
		return
	}
	cnt := 0
	for _, r := range ret {
		if len(r.App.Name) == 0 || len(r.App.Callback) == 0 {
			continue
		}
		switch {
		case strings.HasPrefix(r.App.Callback, "file://"):
			if h.loadPlugin(r, r.App.Callback[7:]) {
				cnt++
			}
		case strings.HasPrefix(r.App.Callback, "http://"):
			// RESTful API style
			if h.loadRESTfulCB(r, r.App.Callback[7:]) {
				cnt++
			}
		case strings.HasPrefix(r.App.Callback, "https://"):
			// RESTful API style
			if h.loadRESTfulCB(r, r.App.Callback[8:]) {
				cnt++
			}
		}
	}
	logrus.Debugf("[apps] %v apps loaded.", cnt)
}
// loadRESTfulCB registers an app whose callback is an HTTP(S) endpoint.
// Not implemented yet: always returns false, so such apps are not counted.
func (h *holder) loadRESTfulCB(r *models.TopicApp, file string) (ok bool) {
	return
}

// loadPlugin opens a Go plugin file, looks up its exported 'VxPlugin' symbol
// and registers it under the app name. Returns true on success.
func (h *holder) loadPlugin(r *models.TopicApp, file string) (ok bool) {
	if tool.FileExists(file) {
		if p, err := plugin.Open(file); err != nil {
			logrus.WithFields(logrus.Fields{"Err": err}).Warnf("[apps] CAN'T load app plugin '%v' from: '%v'", r.App.Name, file)
		} else {
			if sym, err := p.Lookup("VxPlugin"); err == nil {
				logrus.Debugf("[apps] 'VxPlugin' is: %v", sym)
				h.apps[r.App.Name] = NewAppPlugin(r, p, sym)
				// Give the plugin a chance to initialize itself.
				if entry, ok := h.apps[r.App.Name].PluginMainEntry().(VxPlug); ok {
					_ = entry.OnLoad()
				}
				return true
			} else {
				// NOTE(review): the format string has two verbs but three args;
				// errors.New("x") is rendered as %!(EXTRA ...) junk. It is also
				// the only use of the "errors" import, so remove both together.
				logrus.Warnf("[apps] CANT load app plugin 'VxPlugin' symbol '%v': %v", r.App.Name, err, errors.New("x"))
			}
		}
	}
	return
}
// eventsHandlerForApps routes one MQ delivery to every loaded plugin that
// cares about the corresponding global event, invoking each asynchronously.
func (h *holder) eventsHandlerForApps(d amqp.Delivery) {
	// Strip the routing-key namespace prefix to recover the bare event name.
	key := d.RoutingKey
	if strings.HasPrefix(key, "fx.im.ev.") {
		key = key[9:]
	}
	keyInt := util.GlobalEventNameToInt(key)
	ge := v10.GlobalEvents(keyInt)
	logrus.Debugf(" [x][apps] %v (%v), ge: %v", d.RoutingKey, keyInt, ge)
	for _, v := range h.apps {
		entry, ok := v.PluginMainEntry().(VxPlug)
		if ok && v.IsCared(ge) {
			// Bug fix: pass the loop variable and entry as arguments so each
			// goroutine captures per-iteration values — the old closure
			// captured the shared range variable `v` by reference.
			go func(p Plugin, e VxPlug) {
				logrus.Debugf("run plugin: %v, entry=%v", p.Model().App.Name, e)
				if err := e.OnEvent(p, &Args{ge, d.Timestamp, d.ConsumerTag, d.Body}); err != nil {
					logrus.Warnf("[x][apps] invoke app '%v' return failed: %v", p.Model().App.Name, err)
				}
			}(v, entry)
		}
	}
}
// monitorEvents subscribes this holder to app-management events on MQ and
// dispatches them through eventsHandlerForApps (blocking call).
func (h *holder) monitorEvents() {
	mq.HandleEvent("apps.mgr", mq.DEFAULT_QUEUE_FOR_APPS, mq.DEFAULT_CAST, h.eventsHandlerForApps)
}

// run is the background looper: logs a heartbeat every 20s and drains the
// app added/removed/updated channels.
// NOTE(review): the exitCh case calls Stop() instead of returning, Stop()
// only sends when exited is false (it starts true in newHolder), and
// externalExitCh is nil at construction — as written the looper has no
// working shutdown path; confirm the intended exit mechanism.
func (h *holder) run() {
	ticker := time.NewTicker(20 * time.Second)
	defer func() {
		ticker.Stop()
		logrus.Debug("--- apps mgr run() stopped.")
	}()
	for {
		select {
		case e := <-h.exitCh:
			if e {
				Stop()
			}
		case e := <-h.externalExitCh:
			if e {
				return
			}
		case tm := <-ticker.C:
			logrus.Debugf("--- app run() looper: %v", tm)
		case c := <-h.appAdded:
			logrus.Debugf("--- app added: %v", c)
		case c := <-h.appRemoved:
			logrus.Debugf("--- app removed: %v", c)
		case c := <-h.appUpdated:
			logrus.Debugf("--- app updated: %v", c)
		}
	}
}
|
<gh_stars>1-10
/*
* Copyright 2008-2014 MOPAS(Ministry of Public Administration and Security).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.rte.fdl.excel.impl;
import java.util.Iterator;
import java.util.List;
import org.egovframe.rte.psl.dataaccess.EgovAbstractMapper;
import org.mybatis.spring.SqlSessionTemplate;
/**
* 엑셀서비스을 처리하는 Mapper 클래스.
*
* <p><b>NOTE:</b> 엑셀배치 서비스를 제공하기 위해 구현한 Mapper클래스이다.</p>
*
* @author 실행환경 개발팀 윤성종
* @since 2009.06.01
* @version 1.0
* <pre>
* 개정이력(Modification Information)
*
* 수정일 수정자 수정내용
* ----------------------------------------------
* 2014.05.07 이기하 최초 생성
* </pre>
*/
public class EgovExcelServiceMapper extends EgovAbstractMapper {

	@SuppressWarnings("unused")
	private SqlSessionTemplate sqlSessionTemplate = null;

	/**
	 * Creates the mapper and registers the session template with the parent.
	 *
	 * @param sqlSessionTemplate MyBatis session template used for the inserts
	 */
	public EgovExcelServiceMapper(SqlSessionTemplate sqlSessionTemplate) {
		this.sqlSessionTemplate = sqlSessionTemplate;
		super.setSqlSessionTemplate(sqlSessionTemplate);
	}

	/**
	 * Runs the Excel batch upload, inserting every row in the list.
	 *
	 * @param queryId mapper statement id to execute per row
	 * @param list rows to insert
	 * @return total number of affected rows
	 */
	public Integer batchInsert(String queryId, List<Object> list) {
		int count = 0;
		// Enhanced for-loop instead of an explicit Iterator (same behavior).
		for (Object row : list) {
			count += insert(queryId, row);
		}
		return count;
	}

	/**
	 * Runs the Excel batch upload starting from the given row index,
	 * so leading header rows can be skipped.
	 *
	 * @param queryId mapper statement id to execute per row
	 * @param list rows to insert
	 * @param start index of the first row to upload
	 * @return total number of affected rows
	 */
	public Integer batchInsert(final String queryId, final List<Object> list, final int start) {
		int count = 0;
		int size = list.size();
		for (int i = start; i < size; i++) {
			count += insert(queryId, list.get(i));
		}
		return count;
	}
}
|
<filename>pecado-uaa/src/main/java/me/batizhao/uaa/controller/AuthController.java
package me.batizhao.uaa.controller;
import io.swagger.annotations.Api;
import me.batizhao.common.core.util.R;
import me.batizhao.ims.api.domain.LoginDTO;
import me.batizhao.uaa.service.AuthService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.io.IOException;
import java.util.Map;
/**
 * 认证(Token) API
 *
 * @module uaa
 *
 * @author batizhao
 * @since 2021-04-22
 */
@Api(tags = "Token管理")
@RestController
@Validated
public class AuthController {

    @Autowired
    private AuthService authService;

    /**
     * Authenticate a user (with captcha) and issue an access token.
     *
     * @param loginDTO username, password, captcha code and captcha uuid
     * @return wrapped token string on success
     */
    @PostMapping("/token")
    public R<String> handleLogin(@Valid @RequestBody LoginDTO loginDTO) {
        return R.ok(authService.login(loginDTO.getUsername(), loginDTO.getPassword(),
                loginDTO.getCode(), loginDTO.getUuid()));
    }

    /**
     * Generate a captcha image for the login form.
     *
     * @return wrapped map containing the captcha payload (image + uuid)
     */
    @GetMapping("/captcha")
    public R<Map<String, String>> handleCaptcha() throws IOException {
        return R.ok(authService.getCaptchaImage());
    }
}
|
<filename>src/option.js
/*
* @Description: 处理option的方法集合
* @Author: MADAO
* @Date: 2020-11-20 09:59:10
* @LastEditors: MADAO
* @LastEditTime: 2020-11-20 12:35:13
*/
const { storagePath } = require('./db')
const log = require('./log')
const inquirer = require('inquirer')
const { read, write } = require('./db')
const { addTodo } = require('./action')
// Interactive handlers for the todo CLI: prompts via inquirer, persistence
// via db.read/write, feedback via log.
const optionHandlers = {
    // Index of the todo currently being operated on (set by edit()).
    todoIndex: 0,
    todoQuestions: [
        {
            name: "name",
            type: "input",
            message: "请输入标题:",
            validate(value) {
                if (!value) { return "请输入标题" }
                return true
            }
        },
        {
            name: "description",
            type: "input",
            message: "请输入描述:"
        }
    ],
    createTodo () {
        // Bug fix: todoQuestions lives on this object — the bare reference
        // threw a ReferenceError at runtime.
        inquirer
            .prompt(this.todoQuestions)
            .then(({ name, description }) => {
                addTodo(name, description);
            });
    },
    askForAction () {
        inquirer
            .prompt({
                type: "rawlist",
                name: "action",
                message: "请选择操作",
                choices: [
                    { name: "退出", value: "quit" },
                    { name: "已完成", value: "markAsDone" },
                    { name: "未完成", value: "markAsUndone" },
                    { name: "修改", value: "updateTask" },
                    { name: "删除", value: "removeTask" }
                ]
            })
            .then(({ action }) => {
                this.updateTodo(action)
            })
    },
    updateTodo (action) {
        const todoList = read()
        const date = new Date().toLocaleString()
        // Toggle the done flag of the current todo and persist the list.
        const changeTodoDone = (isDone) => {
            if (!todoList[this.todoIndex]) { return }
            todoList[this.todoIndex] = Object.assign({}, todoList[this.todoIndex], {
                done: isDone,
                completeDate: isDone ? date : null,
                updateDate: date
            })
            const result = write(JSON.stringify(todoList), true)
            result.succeed
                ? (isDone
                    ? log('succeed', `已完成:${todoList[this.todoIndex].name}`)
                    : log('fail', `未完成:${todoList[this.todoIndex].name}`))
                : log('fail', `操作失败:${result.message}`)
        }
        // Bug fix: handler keys now match the menu values emitted by
        // askForAction ('updateTask'/'removeTask'); the old keys
        // ('updateTodo'/'removeTodo') could never be selected.
        const handlers = {
            markAsDone: () => changeTodoDone(true),
            markAsUndone: () => changeTodoDone(false),
            updateTask: () => {
                // Bug fix: this.todoQuestions (was a bare, undefined reference).
                inquirer.prompt(this.todoQuestions)
                    .then(({ name, description }) => {
                        todoList[this.todoIndex] = Object.assign({}, todoList[this.todoIndex], {
                            name,
                            description,
                            updateDate: date
                        })
                        const result = write(JSON.stringify(todoList), true)
                        result.succeed
                            ? log('succeed', `更新完成:${todoList[this.todoIndex].name}`)
                            : log('fail', `更新失败:${result.message}`)
                    })
            },
            removeTask: () => {
                // Bug fix: capture the removed item before splicing — the old
                // code read todoList[this.todoIndex].name after removal, which
                // named the wrong todo (or crashed on the last one).
                const [removed] = todoList.splice(this.todoIndex, 1)
                const result = write(JSON.stringify(todoList), true)
                result.succeed
                    ? log('succeed', `删除成功:${removed.name}`)
                    : log('fail', `删除失败:${result.message}`)
            }
        }
        handlers[action] && handlers[action]()
    },
    path () {
        log('info', `储存路径:${storagePath}`)
    },
    edit () {
        const todoList = read()
        inquirer
            .prompt([
                {
                    type: 'list',
                    name: 'index',
                    message: "请选择你想要操作的待办事项",
                    choices: [
                        { name: "退出", value: -1 },
                        { name: "添加任务", value: -2 },
                        ...todoList.map((todo, index) => ({
                            name: `${index + 1 } - ${todo.name} - ${todo.done ? '已完成' : '未完成'}`,
                            value: index
                        }))
                    ]
                }
            ])
            .then(({ index }) => {
                if (index === -2) {
                    this.createTodo()
                    return
                }
                this.todoIndex = index
                this.askForAction()
            })
    }
}
module.exports = optionHandlers
#!/bin/bash
#
# Oracle Linux DTrace.
# Copyright (c) 2006, Oracle and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at
# http://oss.oracle.com/licenses/upl.
#
# Emit a D script that switches output to a numbered file every 10ms via
# freopen(), then re-opens the original sink with freopen("") and exits.
script()
{
$dtrace $dt_flags -wq -o $tmpfile -s /dev/stdin $tmpfile <<EOF
BEGIN
{
i = 0;
}
tick-10ms
{
freopen("%s.%d", \$\$1, i);
printf("%d\n", i)
}
tick-10ms
/++i == $iter/
{
freopen("");
printf("%d\n", i);
exit(0);
}
EOF
}
# Remove the base tmpfile and every numbered output file produced by script().
cleanup()
{
let i=0
if [ -f $tmpfile ]; then
rm $tmpfile
fi
while [ "$i" -lt "$iter" ]; do
if [ -f $tmpfile.$i ]; then
rm $tmpfile.$i
fi
let i=i+1
done
}
if [ $# != 1 ]; then
echo expected one argument: '<'dtrace-path'>'
exit 2
fi
dtrace=$1
tmpfile=$tmpdir/tst.freopen.$$
iter=20
script
status=$?
let i=0
# freopen("") must have redirected back: the file for index $iter must not exist.
if [ -f $tmpfile.$iter ]; then
echo "$0: did not expect to find file: $tmpfile.$iter"
cleanup
exit 100
fi
# The base output file holds the final index; treat it as file number $iter.
mv $tmpfile $tmpfile.$iter
let iter=iter+1
# Each numbered file must exist and contain exactly its own index.
while [ "$i" -lt "$iter" ]; do
if [ ! -f $tmpfile.$i ]; then
echo "$0: did not find expected file: $tmpfile.$i"
cleanup
exit 101
fi
j=`cat $tmpfile.$i`
if [ "$i" -ne "$j" ]; then
echo "$0: unexpected contents in $tmpfile.$i: " \
"expected $i, found $j"
cleanup
exit 102
fi
rm $tmpfile.$i
let i=i+1
done
exit $status
|
#ifndef CMD_HPP_
#define CMD_HPP_
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
// Name : Cmd
// Author : Avi
// Revision : $Revision: #84 $
//
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
// Description :
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
#include <memory>

// Forward declarations: full definitions are not needed for shared_ptr aliases.
class ClientToServerCmd;
class ServerToClientCmd;
class ServerReply;

// Shared-ownership handles used throughout client/server command passing.
typedef std::shared_ptr<ClientToServerCmd> Cmd_ptr;
typedef std::shared_ptr<ServerToClientCmd> STC_Cmd_ptr;
#endif
|
<reponame>Bobobert/RoLas<filename>rofl/functions/torch.py
from .const import *
from .functions import Tdiv, Tmean, Tcat, Tstd, multiplyIter, nn, optim, deepcopy
def getDevice(cudaTry: bool = True):
    """Return the CUDA device when available (and requested), else DEVICE_DEFT."""
    if torch.cuda.is_available() and cudaTry:
        print("Using CUDA")
        return Tdevice("cuda")
    return DEVICE_DEFT
# Custom functions
def array2Tensor(arr: ARRAY, device = DEVICE_DEFT, dtype = F_TDTYPE_DEFT, grad: bool = False, batch: bool = False):
    """Convert a numpy array to a tensor on `device` with the given dtype.

    With batch=False the array is squeezed and then given a leading batch
    dimension of size 1; with batch=True it is converted as-is.
    """
    arr = arr if batch else np.squeeze(arr)
    tensor = torch.from_numpy(arr).to(device).to(dtype).requires_grad_(grad)
    tensor = tensor if batch else tensor.unsqueeze_(0)
    return tensor
def list2Tensor(arr: list, device = DEVICE_DEFT, dtype = F_TDTYPE_DEFT, grad: bool = False):
    """Build a column tensor of shape [len(arr), 1] from a flat python list."""
    # expecting simple lists with single items (int, float, bool)
    return torch.tensor(arr, dtype = dtype, device = device).unsqueeze_(-1).requires_grad_(grad)
def copyDictState(net, grad: bool = True):
    """Deep-copy net.state_dict(); tensors of F_TDTYPE_DEFT dtype get
    requires_grad set to `grad` on the copies."""
    newSD = dict()
    sd = net.state_dict()
    for i in sd.keys():
        t = sd[i]
        # new_empty + copy_ allocates fresh storage (no aliasing with net).
        t = t.new_empty(t.shape).copy_(t)
        if t.dtype == F_TDTYPE_DEFT:
            t.requires_grad_(grad)
        newSD[i] = t
    return newSD
def newNet(net, config = {}):
    """Instantiate a fresh, untrained network of the same class, using
    net.config when no config is given (the mutable default is never mutated)."""
    netClass = net.__class__
    if config == {}:
        config = net.config
    new = netClass(config)
    return new
def cloneNet(net):
    """Create a new network of the same class with copied weights, on net.device."""
    new = newNet(net, net.config)
    new.load_state_dict(copyDictState(net), strict = True)
    return new.to(net.device)
### Meant to be used to share information between BaseNets of the same type ###
# order, shapes and dtypes of parameters are NOT in check #
# while using ray, ndarrays is the best way to share information
# between main and workers, as their serialization is done in shared memory
# from https://docs.ray.io/en/master/serialization.html
def getListNParams(net):
    '''
    Ordered list of ARRAYS for a network's parameters, in the order
    yielded by net.parameters().
    '''
    return [param.data.cpu().numpy() for param in net.parameters()]
def updateNet(net, targetLoad):
    """Load parameters into net from either a state_dict or an ordered list
    of numpy arrays (as produced by getListNParams).

    The list branch assumes targetLoad order matches net.parameters() order.
    """
    if isinstance(targetLoad, dict):
        net.load_state_dict(targetLoad)
    elif isinstance(targetLoad, list):
        for p, pt in zip(targetLoad, net.parameters()):
            pt.requires_grad_(False) # This is a must to change the values properly
            p = np.copy(p) # TODO, check this. Works better(in time) using a copy (weird!?!)
            pt.data = torch.from_numpy(p).to(pt.device)
            pt.requires_grad_(True)
    else:
        raise ValueError('Should be either a state_dict or a list of ndarrays')
def getParams(policy):
    """
    Returns lists of ARRAYS of the parameters for actor and baseline (if any
    different from actor).
    """
    pi = policy.actor
    # Bug fix: default None — a policy without a baseline attribute simply has
    # no separate baseline parameters (previously this raised AttributeError,
    # contradicting the "(if any)" contract above).
    baseline = getattr(policy, 'baseline', None)
    isAC = getattr(policy, 'actorHasCritic', False)
    piParams = getListNParams(pi)
    # When the actor doubles as critic, baseline params are already in piParams.
    blParams = [] if baseline is None or isAC else getListNParams(baseline)
    return piParams, blParams
def getDictState(net, cpu: bool = True):
    """Return net.state_dict(); with cpu=True every tensor is moved to
    DEVICE_DEFT (presumably the CPU device — confirm in .const) in place."""
    stateDict = net.state_dict()
    if cpu:
        for key in stateDict.keys():
            stateDict[key] = stateDict[key].to(DEVICE_DEFT)
    return stateDict
def getListTParams(net, device = None):
    '''
    Ordered list of TENSORS for a network's parameters.
    These are detached and, when `device` is given, moved to that device.
    '''
    params = []
    for p in net.parameters():
        p = p.detach()
        if device is not None:
            # Bug fix: Tensor.to() returns a new tensor — the previous code
            # discarded the result, so the device argument had no effect.
            p = p.to(device)
        params.append(p)
    return params
def maxGrad(net):
    # Largest absolute gradient entry across all parameters, as a Python float.
    # Assumes every parameter already has a .grad tensor — TODO confirm callers.
    return max(p.grad.detach().abs().max() for p in net.parameters()).item()
def meanGrad(net):
    # Mean of the per-parameter gradient means (NOT weighted by element count).
    return Tmean(torch.tensor([Tmean(p.grad.detach()) for p in net.parameters()])).item()
def zeroGrad(obj, isPolicy: bool = False):
    """Zero-fill gradients of a network or, with isPolicy=True, of a policy's
    actor (and of its baseline when doBaseline is set)."""
    if not isPolicy:
        zeroGradParams(obj.parameters())
    else:
        pi = obj.actor
        # Robustness: default None so policies without a baseline attribute
        # are handled instead of raising AttributeError.
        bl = getattr(obj, 'baseline', None)
        useBl = getattr(obj, 'doBaseline', False)
        zeroGradParams(pi.parameters())
        if useBl and bl is not None:
            zeroGradParams(bl.parameters())
def zeroGradParams(parameters):
    """Ensure every parameter has a .grad tensor and that it is zero-filled."""
    for param in parameters:
        grad = param.grad
        if grad is None:
            # No gradient yet: allocate a fresh zero tensor of matching shape.
            param.grad = param.new_zeros(param.shape)
        else:
            grad.fill_(0)
def noneGrad(net):
    """Drop all gradient tensors from the network (sets every .grad to None)."""
    for param in net.parameters():
        param.grad = None
def clipGrads(net, clip: float):
    """Clamp every parameter gradient element-wise to [-clip, clip] in place."""
    nn.utils.clip_grad_value_(net.parameters(), clip)
def cloneState(states, grad: bool = True, ids = None):
    """Detached clone of a state (a TENSOR or a dict of TENSORs), optionally
    selecting rows with `ids` first; clones carry requires_grad=grad."""
    if ids is not None:
        assert isinstance(ids, (ARRAY, TENSOR)), "ids must be a ndarray or a torch.Tensor"
    def cloneT(T):
        # clone → fresh storage; detach_ → cut from the autograd graph.
        return torch.clone(T).detach_().requires_grad_(grad)
    if isinstance(states, TENSOR):
        if ids is not None:
            states = states[ids]
        return cloneT(states)
    elif isinstance(states, dict):
        new = dict()
        for key in states.keys():
            sk = states[key]
            if ids is not None:
                sk = sk[ids]
            new[key] = cloneT(sk)
        return new
    else:
        raise TypeError("State type {} not supported".format(type(states)))
def convert2flat(x):
    """Concatenate a list of tensors into one 1-D tensor; also returns the
    original shapes so convertFromFlat can invert the operation."""
    shapes = []
    flat = []
    for p in x:
        shapes.append(p.shape)
        flat.append(p.flatten())
    return Tcat(flat, dim=0), shapes
def convertFromFlat(x, shapes):
    """Split the flat tensor `x` back into tensors with the given shapes
    (inverse of convert2flat)."""
    newX, iL, iS = [], 0, 0
    for s in shapes:
        iS = iL + multiplyIter(s)
        newX.append(x[iL:iS].reshape(s))
        iL = iS
    return newX
def getNGradients(net):
    '''
    Returns a list of ARRAYS for the gradients
    in the networks parameters
    '''
    return [param.grad.cpu().numpy() for param in net.parameters()]
def accumulateGrad(net, *grads):
    """Add each list of numpy gradient arrays into the network's .grad
    tensors, in parameter order and in place."""
    for gradList in grads:
        for param, arr in zip(net.parameters(), gradList):
            param.grad.add_(torch.from_numpy(arr).to(param.device))
def tryCopy(T: TENSOR):
    """Best-effort duplicate: detached tensor for TENSOR, np.copy for ARRAY,
    deepcopy for anything else.

    NOTE(review): for tensors, detach() shares storage with the original —
    it is not an independent copy; confirm that is acceptable for callers.
    """
    if isinstance(T, TENSOR):
        return T.detach()
    elif isinstance(T, ARRAY):
        return np.copy(T)
    else:
        return deepcopy(T)
def getOptimizer(config: dict, network, deftLR = OPTIMIZER_LR_DEF, key: str = 'network'):
    """
    Build an optimizer for the given network.

    Most optimizers need the network parameters and a learning rate; extra
    arguments are read from config['policy'][key]['optimizer_args'].

    parameters
    ----------
    - config: dict
    - network: nn.Module type object
    - deftLR: float
        Default learning rate used when the config does not declare one.
    - key: str
        Default 'network'. Section of config['policy'] to read, e.g.
        'baseline' to generate an optimizer with those configs.

    returns
    --------
    optimizer for network
    """
    # Dispatch table instead of an elif chain; dummyOptimizer is defined below.
    optimizers = {
        'adam': optim.Adam,
        'rmsprop': optim.RMSprop,
        'sgd': optim.SGD,
        'adagrad': optim.Adagrad,
        'dummy': dummyOptimizer,
    }
    name = config['policy'][key].get('optimizer')
    lr = config['policy'][key].get('learning_rate', deftLR)
    FOpt = optimizers.get(name)
    if FOpt is None:
        print("Warning: {} is not a valid optimizer. {} was generated instead".format(name, OPTIMIZER_DEF))
        config['policy'][key]['optimizer'] = OPTIMIZER_DEF
        config['policy'][key]['learning_rate'] = OPTIMIZER_LR_DEF
        config['policy'][key]['optimizer_args'] = {}
        # Bug fix: forward deftLR and key — the old call fell back to
        # key='network', rebuilding the wrong section's optimizer whenever a
        # non-default key (e.g. 'baseline') had an invalid optimizer name.
        return getOptimizer(config, network, deftLR, key)
    return FOpt(network.parameters(), lr = lr, **config['policy'][key].get("optimizer_args", {}))
class dummyOptimizer():
    """No-op optimizer stand-in: accepts the usual constructor arguments,
    zeroes gradients on request, and does nothing on step()."""
    egg = 'Do nothing, receive everything (?)'
    def __init__(self, parameters, lr = 0, **kwargs):
        # Materialize the generator so zero_grad can iterate more than once.
        self.parameters = [p for p in parameters]
    def __repr__(self) -> str:
        return self.egg
    def zero_grad(self):
        zeroGradParams(self.parameters)
    def step(self):
        # Intentionally a no-op: parameters are never updated.
        pass
def normMean(t: TENSOR):
    # Standardize: (t - mean) / (EPSILON_OP + std); the epsilon guards
    # against division by zero for constant tensors.
    return Tdiv(t - Tmean(t), EPSILON_OP + Tstd(t))
|
__author__ = 'LeoDong'
import socket
import sys
from util import config
from judge.SAEJudge import SAEJudge
from util.logger import log
#TODO unique id in queue, store to file and reload.
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
log.info('start listening on %s' % str(config.socket_addr_judge))
sock.bind(config.socket_addr_judge)
# Listen for incoming connections
sock.listen(10)
# Decision-tree based judge; model path and params come from project config.
judge = SAEJudge(config.path_judge_dtree,config.dtree_param)
try:
    # Serve forever: accept one connection at a time and hand it to the judge.
    while True:
        # Wait for a connection
        connection, client_address = sock.accept()
        # NOTE(review): nothing here closes `connection` or the listening
        # socket — confirm SAEJudge.process closes the connection itself.
        judge.process(connection, client_address)
finally:
    # Persist judge state even on Ctrl-C or a fatal error.
    log.info("Saving")
    judge.save()
|
<gh_stars>0
//
// JWMaskView.h
// JWBarCharts
//
// Created by wangjun on 2018/11/16.
// Copyright © 2018年 wangjun. All rights reserved.
//
#import <UIKit/UIKit.h>

// Overlay view that renders a single text message (see reload:); styling is
// controlled via the font/color/alignment properties below.
@interface JWMaskView : UIView
// Font for the mask's text.
@property (nonatomic, strong) UIFont *maskFont;
// Color for the mask's text.
@property (nonatomic, strong) UIColor *maskTextColor;
// Horizontal alignment for the mask's text.
@property (nonatomic, assign) NSTextAlignment maskTextAlignment;
// Replace the displayed text with `text`.
- (void)reload:(NSString *)text;
@end
|
<reponame>jamestiotio/esc
import java.util.Calendar;
import java.util.Date;
public class exercise4 {
    /**
     * Demo driver: shows that CalendarSubclass.after() reports an instance as
     * "after" itself (the last two printouts), i.e. the override breaks the
     * method's contract. The expected outputs are noted inline.
     */
    public static void main(String[] args) throws InterruptedException {
        Calendar cal1 = new CalendarSubclass();
        cal1.setTime(new Date());
        // Ensure cal2 refers to a strictly later instant than cal1.
        Thread.sleep(1000);
        Calendar cal2 = new CalendarSubclass();
        cal2.setTime(new Date());
        System.out.println(cal2.after(cal1)); // True
        System.out.println(cal1.after(cal2)); // False
        System.out.println(cal1.after(cal1)); // True
        System.out.println(cal2.after(cal2)); // True
    }
}
/**
 * Deliberately flawed Calendar subclass used by exercise4: after() returns
 * true when super.compareTo(when) == 0, so an instant counts as "after"
 * itself, and compareTo() compares getFirstDayOfWeek() instead of time —
 * an exercise in inconsistent overriding of related methods.
 */
class CalendarSubclass extends Calendar {
    /**
     *
     */
    private static final long serialVersionUID = 1L;
    // Captures "now" at construction time; used as the fallback in after().
    Calendar calendar = Calendar.getInstance();
    public int getDayOfYear() {
        return calendar.get(Calendar.DAY_OF_YEAR);
    }
    @Override
    public boolean after(Object when) {
        // Equal instants are (incorrectly) treated as "after" — this is the
        // behavior the exercise demonstrates.
        if (when instanceof Calendar && super.compareTo((Calendar) when) == 0) {
            // if (when instanceof Calendar && ((Calendar) when).toString().equals(this.toString())) {
            // if (when instanceof Calendar && equals((Calendar) when)) {
            // System.out.println("lala");
            return true;
        }
        // Otherwise delegate to the construction-time calendar, not `this`.
        return calendar.after(when);
    }
    @Override
    public int compareTo(Calendar anotherCalendar) {
        // Compares first-day-of-week settings, not points in time.
        return compareDays(this.getFirstDayOfWeek(), anotherCalendar.getFirstDayOfWeek());
    }
    private int compareDays(int currentFirstDayOfWeek, int anotherFirstDayOfWeek) {
        return (currentFirstDayOfWeek > anotherFirstDayOfWeek) ? 1
                : (currentFirstDayOfWeek == anotherFirstDayOfWeek) ? 0 : -1;
    }
    // Implementation of other Calendar abstract methods skipped
    @Override
    public void add(int field, int amount) {
        // TODO Auto-generated method stub
    }
    @Override
    protected void computeFields() {
        // TODO Auto-generated method stub
    }
    @Override
    protected void computeTime() {
        // TODO Auto-generated method stub
    }
    @Override
    public int getGreatestMinimum(int field) {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public int getLeastMaximum(int field) {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public int getMaximum(int field) {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public int getMinimum(int field) {
        // TODO Auto-generated method stub
        return 0;
    }
    @Override
    public void roll(int field, boolean up) {
        // TODO Auto-generated method stub
    }
}
|
#!/bin/bash
# Figures out what the current version is, echoes that back,
# and also writes a `version.json` file into the package.
set -e
MAJMIN_VERSION="1.5"
# Resolve the script's own directory and derive the package name from it.
pushd $(dirname $0) >/dev/null
working_dir=$PWD
name=$(basename $PWD)
popd >/dev/null
package=$(echo $name | sed 's/-/_/g')
version_json="${working_dir}/${package}/version.json"
version_suffix=""
# official version
# (CI build of the canonical job: patch = commits since vMAJ.MIN.0 tag.)
if [[ "$JOB_NAME" = "${name}" ]] && [[ -n "$BUILD_TAG" ]]; then
patch_version=0
if [[ -n "$(git tag --list v$MAJMIN_VERSION.0)" ]]; then
# number of commits since vMAJOR.MINOR.0
patch_version=$(git rev-list --count $(git describe --tags --match "v${MAJMIN_VERSION}.0" | cut -f 1 -d -)...HEAD)
fi
# add post version if built before (i.e., already tagged)
post_version=$(git tag --contain | wc -l | awk '{print $1}')
test "$post_version" -gt 0 && version_suffix=".post${post_version}"
# development version
# (Derive from the most recent v* tag, or count all commits if none exists.)
else
if [[ -n "$(git tag --list 'v[0-9]*')" ]]; then
recent_tag=$(git describe --tags --match 'v[0-9]*' | cut -f 1 -d -)
majmin_version=$(echo $recent_tag | tr "v.-" " " | awk '{print $1"."$2}')
patch_version=$(echo $recent_tag | tr "v.-" " " | awk '{print $3}')
dev_version=$(git rev-list --count ${recent_tag}...HEAD)
else # start of dev, nothing tagged
majmin_version="0.0"
patch_version="0"
dev_version=$(git rev-list --count HEAD)
fi
version_suffix=".$(date '+%s').dev${dev_version}+${USER}"
fi
version="${majmin_version:-$MAJMIN_VERSION}.${patch_version}${version_suffix}"
json='"version": "'$version'", "commit": "'$(git rev-parse HEAD)'", "build": "'${BUILD_TAG:-"dev"}'"'
# write-out version.json
echo "{${json}}" > $version_json
# Print the JSON blob when asked for "json", else just the version string.
style="$1"
if [[ "$style" = "json" ]]; then
cat $version_json
else
echo $version
fi
|
'use strict';

/**
 * A relationship between two elements, identified by a stable `id`
 * and carrying a human-readable `displayName`.
 */
class Relationship {
    constructor(id, displayName) {
        this.id = id;
        this.displayName = displayName;
    }

    /** Stable identifier of this relationship. */
    getId() {
        return this.id;
    }

    /** Human-readable label of this relationship. */
    getDisplayName() {
        return this.displayName;
    }

    /** True for the enhancing pair (Enhance / EnhancedBy). */
    isPositive() {
        return ['Enhance', 'EnhancedBy'].indexOf(this.id) !== -1;
    }

    /** True for the suppressing pair (Suppress / SuppressedBy). */
    isNegative() {
        return ['Suppress', 'SuppressedBy'].indexOf(this.id) !== -1;
    }

    /** True only for the "Same" relationship. */
    isNeutral() {
        return this.id === 'Same';
    }
}

// Canonical, immutable singletons — one per relationship kind.
Relationship.Same = Object.freeze(new Relationship('Same', "同"));
Relationship.Enhance = Object.freeze(new Relationship('Enhance', "生"));
Relationship.EnhancedBy = Object.freeze(new Relationship('EnhancedBy', "被生"));
Relationship.Suppress = Object.freeze(new Relationship('Suppress', "剋"));
Relationship.SuppressedBy = Object.freeze(new Relationship('SuppressedBy', "被剋"));

module.exports = Relationship;
/* Page building */

// Property bag holding the page config, element builders and helpers.
// FIX: was `const page = []` — an array abused as a plain object.
const page = {};

// Configuration
page.config = {
    "titleSuffix": " | JSONdb"
};

// Page elements
page.Header = class { // Header
    constructor(parent) {
        const elements = [];
        // Title
        const title = document.createElement('h1');
        title.setAttribute('class', 'header');
        title.innerText = 'JSONdb';
        elements.push(title);
        // Appending the elements to the page
        page.appendElements(parent, elements);
    }
};

// Auxiliary code
// Appends every element held in `objs` (array or array-like) to `parent`.
page.appendElements = function(parent, objs) {
    // FIX: declare the loop variable — the original `for (obj in objs)`
    // leaked a global `obj` (and throws under strict mode).
    for (const obj in objs) {
        parent.appendChild(objs[obj]);
    }
};

// Build the page
page.build = function() {
    // Add header
    new page.Header(document.getElementsByTagName('header')[0]);
    // Set title suffix
    document.getElementsByTagName('title')[0].innerText += page.config.titleSuffix;
};

window.onload = page.build;
<reponame>ivonildo-lopes/PedidoVenda<gh_stars>1-10
package com.algaworks.pedidovenda.controller;
import java.io.Serializable;
import javax.enterprise.context.RequestScoped;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import javax.inject.Named;
import com.algaworks.pedidovenda.model.FormaPagamento;
import com.algaworks.pedidovenda.model.Pedido;
import com.algaworks.pedidovenda.service.EmissaoPedidoService;
import com.algaworks.pedidovenda.service.PedidoService;
import com.algaworks.pedidovenda.util.jsf.FacesUtil;
import com.algaworks.pedidovenda.validation.PedidoAlteradoEvent;
import com.algaworks.pedidovenda.validation.PedidoEdicao;
@Named
@RequestScoped
public class EmissaoPedidoBean implements Serializable {

    private static final long serialVersionUID = 1L;

    // Order currently being edited, provided by the @PedidoEdicao producer.
    @Inject
    @PedidoEdicao
    private Pedido pedido;

    @Inject
    private EmissaoPedidoService emissaoPedidoService;

    // CDI event fired after the order is successfully issued.
    @Inject
    private Event<PedidoAlteradoEvent> pedidoAlteradoEvent;

    @Inject
    private PedidoService pedidoService;

    /**
     * Issues the order being edited. Orders paid by credit card are rejected
     * with a faces error message while the customer still has pending debt;
     * otherwise the empty placeholder line-item is removed, the order is
     * issued, observers are notified via a CDI event, and the placeholder
     * row is restored for the editing UI even if issuing fails.
     */
    public void emitirPedido() {
        boolean cartaoComPendencia = pedido.getFormaPagamento() == FormaPagamento.CARTAO_CREDITO
                && pedidoService.verificaDividaPendentePorCliente(pedido) > 0;
        if (cartaoComPendencia) {
            FacesUtil.ErrorMessage("Não é possivel emitir o pedido, cliente esta pendente");
            return;
        }

        this.pedido.removerItemVazio();
        try {
            this.pedido = this.emissaoPedidoService.emitir(this.pedido);
            // Fire a CDI event so interested observers see the issued order.
            this.pedidoAlteradoEvent.fire(new PedidoAlteradoEvent(this.pedido));
        } finally {
            // Always restore the empty row used by the order-editing screen.
            this.pedido.adicionaItemVazio();
        }
    }
}
|
/* Database schema required by the iweb online-classroom project. */
/* SQL keywords are case-insensitive; convention here: keywords in UPPERCASE, identifiers in lowercase. */
-- Drop the iweb database if it already exists (full rebuild of the schema).
DROP DATABASE IF EXISTS iweb;
-- Recreate the iweb database.
-- NOTE(review): MySQL "UTF8" is the 3-byte utf8mb3 charset; utf8mb4 would be
-- required to store 4-byte characters such as emoji — confirm requirements.
CREATE DATABASE iweb CHARSET=UTF8;
-- Select the database for all statements below.
USE iweb;
-- Campus table: one row per physical teaching center.
CREATE TABLE iw_school(
sid INT PRIMARY KEY AUTO_INCREMENT, -- campus id
sname VARCHAR(32), -- campus name
pic VARCHAR(128), -- picture path
address VARCHAR(64), -- contact address
phone VARCHAR(32), -- contact phone number
postcode CHAR(6), -- postal code
longitude DECIMAL(9,6), -- longitude (degrees)
latitude DECIMAL(8,6) -- latitude (degrees)
);
-- Seed data for iw_school. NULL lets AUTO_INCREMENT assign the sid.
INSERT INTO iw_school VALUES (NULL, '北京中关村中心', 'img.school/bjzgc.jpg', '北京市海淀区北三环西路甲18号(大钟寺附近)中鼎大厦B座7层', '010-62130963', '100098', 116.347527, 39.972529);
INSERT INTO iw_school VALUES (NULL, '北京北京清华园中心', 'img.school/bjbjqhyzx.jpg', '北京市海淀区花园路小关街120号万盛商务会馆A区三层', '010-82676916', '100088', 116.386508, 39.984261);
INSERT INTO iw_school VALUES (NULL, '北京万寿路中心', 'img.school/bjwsl.jpg', '北京市海淀区万寿路西街2号文博大厦二层达内科技', '010-82761288', '100081', 116.300901, 39.916085);
INSERT INTO iw_school VALUES (NULL, '北京天坛中心', 'img.school/bjttzx.jpg', '北京市崇文区珠市口东大街6号珍贝大厦西侧3层达内科技', '010-67028668', '100050', 116.418203, 39.898655);
INSERT INTO iw_school VALUES (NULL, '北京广渠门中心', 'img.school/bjgqm.jpg', '北京市东城区广渠门东广渠家园25号楼启达大厦一层', '010-82764488', '100088', 116.462916, 39.89666);
INSERT INTO iw_school VALUES (NULL, '上海中山公园', 'img.school/shzsgy.jpg', '上海中山公园', '010-82761888', '100678', 121.427073, 31.228765);
INSERT INTO iw_school VALUES (NULL, '上海静安雕塑公园', 'img.school/shjadsgy.jpg', '上海静安雕塑公园', '010-82761234', '100666', 121.470623, 31.241486);
INSERT INTO iw_school VALUES (NULL, '上海环球港', 'img.school/shhqg.jpg', '上海环球港', '010-82767777', '100999', 121.418305, 31.240128);
INSERT INTO iw_school VALUES (NULL, '上海科技京城', 'img.school/shjkjjc.jpg', '上海科技京城', '010-12345634', '108990', 121.184133, 31.245624);
INSERT INTO iw_school VALUES (NULL, '上海国金会议中心', 'img.school/shgjhyzx.jpg', '上海国金会议中心', '010-12345634', '108990', 121.503482, 31.245193);
INSERT INTO iw_school VALUES (NULL, '上海滨江公园', 'img.school/shbjgy.jpg', '上海滨江公园', '010-12345634', '108990', 121.503266, 31.241966);
INSERT INTO iw_school VALUES (NULL, '无锡崇安寺步行街', 'img.school/wxcasxj.jpg', '无锡崇安寺步行街', '010-12345634', '108990', 120.307766, 31.583314);
-- Instructor table: one row per teacher profile.
CREATE TABLE iw_teacher(
tid INT PRIMARY KEY AUTO_INCREMENT, -- teacher id
tname VARCHAR(64), -- teacher name
pic VARCHAR(128), -- photo path
skills VARCHAR(32), -- area of expertise
experience VARCHAR(1024), -- work experience (free text)
style VARCHAR(1024) -- teaching style (free text)
);
-- Seed data for iw_teacher. NULL lets AUTO_INCREMENT assign the tid.
INSERT INTO iw_teacher VALUES(NULL,'成亮','img.teacher/cl.jpg','WEB全栈开发','10年互联网开发经验, 精通HTML5,CSS3,js,vue,angualr 等网页前端技术及 php后端 开发;先后就职于多家 IT 互联网公司,曾任某网络公司技术主管,服务机构有首都师范大学、中国联通、中国石油、国家图书馆、中国摄影协会等。','讲课诙谐幽默,深处浅出,深得学生喜爱。');
INSERT INTO iw_teacher VALUES(NULL,'李然','img.teacher/lr.jpg','高级网页设计','从事前端开发多年,参与 及主导研发过多个省级政府项目及海 外项目,参与 NEC,福特等大型企业 CRM 系统的研发。项目涉及金融,汽 车,电子等多个领域。于文思海辉专 职负责前端产品线培训时,积累了大 量的教学经验。','教学风格风趣幽默,细致严谨。善于以项目流程反推知识点讲解,深受学员好评。');
INSERT INTO iw_teacher VALUES(NULL,'张东','img.teacher/zd.jpg','JS高级编程','美国PMP认证高级项目经理,微软认证解决方案开发专家。先后在中软国际、中国搜索、太极计算机担任高级开发工程师,架构师,项目经理。曾担任中央外汇管理中心、中国石油、二炮工程研究院、首都国际机场技术咨询顾问。','感情充沛,富有激情,容易引起学生们的共鸣。授课时能旁征博引,使学生从诸多的材料中得出结论,并且启迪学生的想象能力。');
INSERT INTO iw_teacher VALUES(NULL,'程涛','img.teacher/ct.jpg','移动APP开发','曾就职于东软集团。参与开发家乐福,津津会员卡等多款上线app。拥有外包和产品两种开发经历。已指导并帮助学员上架多个项目到AppStore。授课风趣幽默,亲和力强。教学耐心,循序渐进,课堂气氛活跃。善于结合生活中的实例,把艰涩难懂的知识点传授给学员。','授课风趣幽默,亲和力强。教学耐心,循序渐进,课堂气氛活跃。善于结合生活中的实例,把艰涩难懂的知识点传授给学员。');
INSERT INTO iw_teacher VALUES(NULL,'纪盈鑫','img.teacher/jyx.jpg','前端框架开发','曾就职于北京塞博维尔信息咨询公司,具备多年的互联网应用经验,精通PHP框架技术。在Web3.0前沿技术方面有着深入的研究,曾参与Shop EX核心模块开发。','感情充沛,富有激情,容易引起学生们的共鸣。授课时能旁征博引,使学生从诸多的材料中得出结论,并且启迪学生的想象能力。');
INSERT INTO iw_teacher VALUES(NULL,'张淑敏','img.teacher/zsm.jpg','嵌入式讲师','五年高校教学经验(华北理工大学信息工程学院),八年知名培训机构教学及教学管理经验,参与24小时心电监护系统(Holter)、脑电图(BEAM)等多个项目的研发,并发表多篇学术论文。','授课循序渐进、反复思考、推演、总结。善于将知识和生活结合,并应用于实际的产品中。');
INSERT INTO iw_teacher VALUES(NULL,'顾婷婷','img.teacher/gtt.jpg','产品经理讲师','5年互联网从业经验,曾就职于知名互联网企业和教育培训机构,参与多个团队实战项目;精通产品管理与设计及开发运营的关系和全流程。','善于沟通,懂得技巧;一起享受设计旅程的美好;风趣幽默,思路清晰,善于总结业内经验和规律,表达能力较强,善于激发学员积极性,富有亲和力。');
INSERT INTO iw_teacher VALUES(NULL,'曾一','img.teacher/zy.jpg','Unity3D讲师','8 年以上 3D 游戏项目经验和教学经验,熟知游戏行业的流程,具有丰富的制作、研发和教学经验和深厚的美术专业功底。对游戏模型贴图制作有独到方式和见解。','曾参与过《梦幻红楼》、《武侠世界》、《女神的斗士》、《热血江湖》等多款网络游戏以及移动端游戏,涉及风格多样,经验丰富。');
INSERT INTO iw_teacher VALUES(NULL,'段敏','img.teacher/dm.jpg','产品经理讲师','7年一线产品实战及管理经验,曾任职中兴及知名游戏公司第九城市产品总监,参与过手游,页游,智能TV等产品策划与设计,亲自领导发起多款软件&硬件的调研,研发,测试,上线运营整个产品的闭环。','授课循序渐进、反复思考、推演、总结。善于将知识和生活结合,并应用于实际的产品中。');
INSERT INTO iw_teacher VALUES(NULL,'潘福利','img.teacher/pfl.jpg','高级电商讲师','达内视频编导,国内编导,八年视频行业策划运营经验;曾就职于长春影视、北广传媒。','授课思路清晰、严谨务实,精辟透彻;生动幽默、绘声绘色,富有极强的亲和力。');
-- Course-category lookup table.
CREATE TABLE iw_course_type(
tid INT PRIMARY KEY AUTO_INCREMENT, -- category id
tname VARCHAR(16) -- category name
);
-- Seed data: the three fixed course categories (basic / core / advanced).
INSERT INTO iw_course_type VALUES (NULL,'基础课程');
INSERT INTO iw_course_type VALUES (NULL,'核心课程');
INSERT INTO iw_course_type VALUES (NULL,'进阶课程');
-- Course table.
-- TODO: schema not designed yet. MySQL rejects CREATE TABLE with an empty
-- column list (syntax error), which aborted the whole script, so the
-- statement is kept commented out until the columns are defined.
-- CREATE TABLE iw_course(
-- );

-- User table.
-- TODO: schema not designed yet (an empty column list is a MySQL syntax
-- error), so this statement is also commented out for now.
-- CREATE TABLE iw_user();

-- TODO: create the shopping-cart table.
-- TODO: create the orders table.
-- TODO: create the user-favorites table.
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.