text stringlengths 1 1.05M |
|---|
<gh_stars>0
// Icon descriptor for the Carbon-style "warning--filled" glyph (24x24).
// Consumed by an SVG-building helper that turns { elem, attrs, content }
// trees into DOM/virtual-DOM nodes.
export default {
elem: 'svg',
attrs: {
xmlns: 'http://www.w3.org/2000/svg',
viewBox: '0 0 24 24',
width: 24,
height: 24,
},
content: [
// Outer filled circle with the exclamation-mark cutout.
{
elem: 'path',
attrs: {
d:
'M12 1C5.9 1 1 5.9 1 12s4.9 11 11 11 11-4.9 11-11S18.1 1 12 1zm-.9 5h1.8v8h-1.8V6zm.9 13.2c-.7 0-1.2-.6-1.2-1.2s.6-1.2 1.2-1.2 1.2.6 1.2 1.2-.5 1.2-1.2 1.2z',
},
},
// Inner exclamation-mark path; opacity 0 by default, themable via
// the 'data-icon-path' hook (inner-path convention).
{
elem: 'path',
attrs: {
d:
'M13.2 18c0 .7-.6 1.2-1.2 1.2s-1.2-.6-1.2-1.2.6-1.2 1.2-1.2 1.2.5 1.2 1.2zm-.3-12h-1.8v8h1.8V6z',
'data-icon-path': 'inner-path',
opacity: '0',
},
},
],
name: 'warning--filled',
size: 24,
};
|
#!/bin/bash
# Pack all PNG images in a directory into a single TexturePacker sheet
# (packed_textures.png + packed_textures.plist written into that directory).
#
# Usage: <script> <input_directory>

# Add the TexturePacker binary path to PATH if not already present.
if [[ ":$PATH:" != *":/c/Program Files/CodeAndWeb/TexturePacker/bin:"* ]]; then
    export PATH="$PATH:/c/Program Files/CodeAndWeb/TexturePacker/bin"
fi

# Get the input directory path from the command line argument.
input_directory="$1"

# Fail early with a usage message instead of letting TexturePacker error out.
if [[ -z "$input_directory" || ! -d "$input_directory" ]]; then
    echo "Usage: $(basename "$0") <input_directory>" >&2
    exit 1
fi

# NOTE: the original script additionally rewrote spaces as '\ ' while ALSO
# double-quoting every expansion, which injected literal backslashes into the
# path. Double quotes alone handle spaces correctly, so no rewriting is done.
TexturePacker --sheet "${input_directory}/packed_textures.png" \
              --data "${input_directory}/packed_textures.plist" \
              "${input_directory}"/*.png
#!/bin/bash
#
# Start script for certified-copies.orders.api.ch.gov.uk
# Two modes:
#   - local/dev (MESOS_SLAVE_PID unset): environment sourced from ~/.chs_env.
#   - Mesos     (MESOS_SLAVE_PID set):   env files downloaded from a config
#     service using the port/config URL/environment/app name passed as $1..$4.
# Directory containing this script, resolved regardless of invocation cwd.
APP_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
if [[ -z "${MESOS_SLAVE_PID}" ]]; then
source ~/.chs_env/private_env
source ~/.chs_env/global_env
source ~/.chs_env/certified-copies.orders.api.ch.gov.uk/env
# Default port if the env files did not set one.
PORT="${CERTIFIED_COPIES_ORDERS_API_CH_GOV_UK_PORT:=18568}"
else
# Positional arguments supplied by the Mesos launcher.
PORT="$1"
CONFIG_URL="$2"
ENVIRONMENT="$3"
APP_NAME="$4"
source /etc/profile
echo "Downloading environment from: ${CONFIG_URL}/${ENVIRONMENT}/${APP_NAME}"
# NOTE(review): wget failures are not checked; a failed download leaves an
# empty file that is then sourced silently — confirm this is acceptable.
wget -O "${APP_DIR}/private_env" "${CONFIG_URL}/${ENVIRONMENT}/private_env"
wget -O "${APP_DIR}/global_env" "${CONFIG_URL}/${ENVIRONMENT}/global_env"
wget -O "${APP_DIR}/app_env" "${CONFIG_URL}/${ENVIRONMENT}/${APP_NAME}/env"
source "${APP_DIR}/private_env"
source "${APP_DIR}/global_env"
source "${APP_DIR}/app_env"
fi
# Replace the shell with the JVM so signals reach the application directly.
exec java ${JAVA_MEM_ARGS} -jar -Dserver.port="${PORT}" "${APP_DIR}/certified-copies.orders.api.ch.gov.uk.jar"
|
<reponame>raghav-deepsource/realm-java
/*
* Copyright 2020 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.internal.objectstore;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonNull;
import org.bson.BsonObjectId;
import org.bson.BsonValue;
import org.bson.Document;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import io.realm.internal.NativeObject;
import io.realm.internal.Util;
import io.realm.internal.events.NetworkEventStream;
import io.realm.internal.jni.JniBsonProtocol;
import io.realm.internal.jni.OsJNIResultCallback;
import io.realm.internal.network.ResultHandler;
import io.realm.internal.network.StreamNetworkTransport;
import io.realm.internal.objectserver.EventStream;
import io.realm.mongodb.App;
import io.realm.mongodb.AppException;
import io.realm.mongodb.mongo.MongoNamespace;
import io.realm.mongodb.mongo.iterable.AggregateIterable;
import io.realm.mongodb.mongo.iterable.FindIterable;
import io.realm.mongodb.mongo.options.CountOptions;
import io.realm.mongodb.mongo.options.FindOneAndModifyOptions;
import io.realm.mongodb.mongo.options.FindOptions;
import io.realm.mongodb.mongo.options.InsertManyResult;
import io.realm.mongodb.mongo.options.UpdateOptions;
import io.realm.mongodb.mongo.result.DeleteResult;
import io.realm.mongodb.mongo.result.InsertOneResult;
import io.realm.mongodb.mongo.result.UpdateResult;
/**
 * Wrapper around the Object Store's remote MongoDB collection. All operations
 * are forwarded to native code through the JNI methods declared at the bottom
 * of this class; BSON payloads cross the JNI boundary as JSON strings encoded
 * via {@link JniBsonProtocol}.
 *
 * <p>Results are delivered asynchronously into {@link AtomicReference}s by an
 * {@link OsJNIResultCallback} and unwrapped synchronously by
 * {@link ResultHandler#handleResult}.
 */
public class OsMongoCollection<DocumentT> implements NativeObject {

    // Operation discriminators shared with the native layer. Values must stay
    // in sync with the native implementation.
    private static final int DELETE_ONE = 1;
    private static final int DELETE_MANY = 2;
    private static final int UPDATE_ONE = 3;
    private static final int UPDATE_ONE_WITH_OPTIONS = 4;
    private static final int UPDATE_MANY = 5;
    private static final int UPDATE_MANY_WITH_OPTIONS = 6;
    private static final int FIND_ONE_AND_UPDATE = 7;
    private static final int FIND_ONE_AND_UPDATE_WITH_OPTIONS = 8;
    private static final int FIND_ONE_AND_REPLACE = 9;
    private static final int FIND_ONE_AND_REPLACE_WITH_OPTIONS = 10;
    private static final int FIND_ONE_AND_DELETE = 11;
    private static final int FIND_ONE_AND_DELETE_WITH_OPTIONS = 12;
    private static final int FIND_ONE = 13;
    private static final int FIND_ONE_WITH_OPTIONS = 14;
    private static final int WATCH = 15;
    private static final int WATCH_IDS = 16;
    private static final int WATCH_WITH_FILTER = 17;

    private static final long nativeFinalizerPtr = nativeGetFinalizerMethodPtr();

    private final long nativePtr;
    private final Class<DocumentT> documentClass;
    private final CodecRegistry codecRegistry;
    // Pre-encoded "{}" used as the default projection/sort payload.
    private final String encodedEmptyDocument;
    private final ThreadPoolExecutor threadPoolExecutor = App.NETWORK_POOL_EXECUTOR;
    private final String serviceName;
    private final MongoNamespace namespace;
    private final StreamNetworkTransport streamNetworkTransport;

    OsMongoCollection(final long nativeCollectionPtr,
                      final MongoNamespace namespace,
                      final String serviceName,
                      final Class<DocumentT> documentClass,
                      final CodecRegistry codecRegistry,
                      final StreamNetworkTransport streamNetworkTransport) {
        this.nativePtr = nativeCollectionPtr;
        this.namespace = namespace;
        this.serviceName = serviceName;
        this.documentClass = documentClass;
        this.codecRegistry = codecRegistry;
        this.encodedEmptyDocument = JniBsonProtocol.encode(new Document(), codecRegistry);
        this.streamNetworkTransport = streamNetworkTransport;
    }

    @Override
    public long getNativePtr() {
        return nativePtr;
    }

    @Override
    public long getNativeFinalizerPtr() {
        return nativeFinalizerPtr;
    }

    public Class<DocumentT> getDocumentClass() {
        return documentClass;
    }

    public CodecRegistry getCodecRegistry() {
        return codecRegistry;
    }

    /** Returns a view of this collection decoding documents into {@code clazz}. */
    public <NewDocumentT> OsMongoCollection<NewDocumentT> withDocumentClass(
            final Class<NewDocumentT> clazz) {
        return new OsMongoCollection<>(nativePtr, namespace, serviceName, clazz, codecRegistry, streamNetworkTransport);
    }

    /** Returns a view of this collection using the given codec registry. */
    public OsMongoCollection<DocumentT> withCodecRegistry(final CodecRegistry codecRegistry) {
        return new OsMongoCollection<>(nativePtr, namespace, serviceName, documentClass, codecRegistry, streamNetworkTransport);
    }

    public Long count() {
        return countInternal(new Document(), null);
    }

    public Long count(final Bson filter) {
        return countInternal(filter, null);
    }

    public Long count(final Bson filter, final CountOptions options) {
        return countInternal(filter, options);
    }

    /** Counts documents matching {@code filter}; a null {@code options} means no limit. */
    private Long countInternal(final Bson filter, @Nullable final CountOptions options) {
        AtomicReference<Long> success = new AtomicReference<>(null);
        AtomicReference<AppException> error = new AtomicReference<>(null);
        OsJNIResultCallback<Long> callback = new OsJNIResultCallback<Long>(success, error) {
            @Override
            protected Long mapSuccess(Object result) {
                return (Long) result;
            }
        };
        final String filterString = JniBsonProtocol.encode(filter, codecRegistry);
        final int limit = (options == null) ? 0 : options.getLimit();
        nativeCount(nativePtr, filterString, limit, callback);
        return ResultHandler.handleResult(success, error);
    }

    public FindIterable<DocumentT> find() {
        return findInternal(new Document(), documentClass, null);
    }

    public FindIterable<DocumentT> find(final FindOptions options) {
        return findInternal(new Document(), documentClass, options);
    }

    public <ResultT> FindIterable<ResultT> find(final Class<ResultT> resultClass) {
        return findInternal(new Document(), resultClass, null);
    }

    public <ResultT> FindIterable<ResultT> find(final Class<ResultT> resultClass, final FindOptions options) {
        return findInternal(new Document(), resultClass, options);
    }

    public FindIterable<DocumentT> find(final Bson filter) {
        return findInternal(filter, documentClass, null);
    }

    public FindIterable<DocumentT> find(final Bson filter, final FindOptions options) {
        return findInternal(filter, documentClass, options);
    }

    public <ResultT> FindIterable<ResultT> find(final Bson filter,
                                                final Class<ResultT> resultClass) {
        return findInternal(filter, resultClass, null);
    }

    public <ResultT> FindIterable<ResultT> find(final Bson filter,
                                                final Class<ResultT> resultClass,
                                                final FindOptions options) {
        return findInternal(filter, resultClass, options);
    }

    /** Builds a lazy iterable; no network call happens until it is iterated. */
    private <ResultT> FindIterable<ResultT> findInternal(final Bson filter,
                                                         final Class<ResultT> resultClass,
                                                         @Nullable final FindOptions options) {
        FindIterable<ResultT> findIterable =
                new FindIterable<>(threadPoolExecutor, this, codecRegistry, resultClass);
        findIterable.filter(filter);
        if (options != null) {
            findIterable.limit(options.getLimit());
            findIterable.projection(options.getProjection());
        }
        return findIterable;
    }

    public AggregateIterable<DocumentT> aggregate(final List<? extends Bson> pipeline) {
        return aggregate(pipeline, documentClass);
    }

    public <ResultT> AggregateIterable<ResultT> aggregate(final List<? extends Bson> pipeline,
                                                          final Class<ResultT> resultClass) {
        return new AggregateIterable<>(threadPoolExecutor, this, codecRegistry, resultClass, pipeline);
    }

    public DocumentT findOne() {
        return findOneInternal(FIND_ONE, new Document(), null, documentClass);
    }

    public <ResultT> ResultT findOne(final Class<ResultT> resultClass) {
        return findOneInternal(FIND_ONE, new Document(), null, resultClass);
    }

    public DocumentT findOne(final Bson filter) {
        return findOneInternal(FIND_ONE, filter, null, documentClass);
    }

    public <ResultT> ResultT findOne(final Bson filter, final Class<ResultT> resultClass) {
        return findOneInternal(FIND_ONE, filter, null, resultClass);
    }

    public DocumentT findOne(final Bson filter, final FindOptions options) {
        return findOneInternal(FIND_ONE_WITH_OPTIONS, filter, options, documentClass);
    }

    public <ResultT> ResultT findOne(final Bson filter,
                                     final FindOptions options,
                                     final Class<ResultT> resultClass) {
        return findOneInternal(FIND_ONE_WITH_OPTIONS, filter, options, resultClass);
    }

    /**
     * Executes a findOne call. {@code options} is required (and checked) only
     * for {@code FIND_ONE_WITH_OPTIONS}; otherwise empty projection/sort and a
     * limit of 0 are sent.
     */
    private <ResultT> ResultT findOneInternal(final int type,
                                              final Bson filter,
                                              @Nullable final FindOptions options,
                                              final Class<ResultT> resultClass) {
        AtomicReference<ResultT> success = new AtomicReference<>(null);
        AtomicReference<AppException> error = new AtomicReference<>(null);
        OsJNIResultCallback<ResultT> callback = new OsJNIResultCallback<ResultT>(success, error) {
            @Override
            protected ResultT mapSuccess(Object result) {
                return findSuccessMapper(result, resultClass);
            }
        };
        final String encodedFilter = JniBsonProtocol.encode(filter, codecRegistry);
        // Default to empty documents; overwritten below when options are given.
        String projectionString = encodedEmptyDocument;
        String sortString = encodedEmptyDocument;
        switch (type) {
            case FIND_ONE:
                nativeFindOne(FIND_ONE, nativePtr, encodedFilter, projectionString, sortString, 0, callback);
                break;
            case FIND_ONE_WITH_OPTIONS:
                Util.checkNull(options, "options");
                projectionString = JniBsonProtocol.encode(options.getProjection(), codecRegistry);
                sortString = JniBsonProtocol.encode(options.getSort(), codecRegistry);
                nativeFindOne(FIND_ONE_WITH_OPTIONS, nativePtr, encodedFilter, projectionString, sortString, options.getLimit(), callback);
                break;
            default:
                // Fixed typo in the original message ("fineOne").
                throw new IllegalArgumentException("Invalid findOne type: " + type);
        }
        return ResultHandler.handleResult(success, error);
    }

    /** Inserts one document and returns its generated/declared id. */
    public InsertOneResult insertOne(final DocumentT document) {
        AtomicReference<InsertOneResult> success = new AtomicReference<>(null);
        AtomicReference<AppException> error = new AtomicReference<>(null);
        OsJNIResultCallback<InsertOneResult> callback = new OsJNIResultCallback<InsertOneResult>(success, error) {
            @Override
            protected InsertOneResult mapSuccess(Object result) {
                BsonValue id = JniBsonProtocol.decode((String) result, BsonValue.class, codecRegistry);
                return new InsertOneResult(id);
            }
        };
        final String encodedDocument = JniBsonProtocol.encode(document, codecRegistry);
        nativeInsertOne(nativePtr, encodedDocument, callback);
        return ResultHandler.handleResult(success, error);
    }

    /** Inserts many documents; the result maps insertion index to the inserted id. */
    public InsertManyResult insertMany(final List<? extends DocumentT> documents) {
        AtomicReference<InsertManyResult> success = new AtomicReference<>(null);
        AtomicReference<AppException> error = new AtomicReference<>(null);
        OsJNIResultCallback<InsertManyResult> callback = new OsJNIResultCallback<InsertManyResult>(success, error) {
            @Override
            protected InsertManyResult mapSuccess(Object result) {
                Object[] objects = (Object[]) result;
                Map<Long, BsonValue> insertedIdsMap = new HashMap<>();
                for (int i = 0; i < objects.length; i++) {
                    BsonValue id = JniBsonProtocol.decode((String) objects[i], BsonValue.class, codecRegistry);
                    insertedIdsMap.put((long) i, id);
                }
                return new InsertManyResult(insertedIdsMap);
            }
        };
        final String encodedDocumentArray = JniBsonProtocol.encode(documents, codecRegistry);
        nativeInsertMany(nativePtr, encodedDocumentArray, callback);
        return ResultHandler.handleResult(success, error);
    }

    public DeleteResult deleteOne(final Bson filter) {
        return deleteInternal(DELETE_ONE, filter);
    }

    public DeleteResult deleteMany(final Bson filter) {
        return deleteInternal(DELETE_MANY, filter);
    }

    /** Deletes matching documents; the native result is the deleted count. */
    private DeleteResult deleteInternal(final int type, final Bson filter) {
        AtomicReference<DeleteResult> success = new AtomicReference<>(null);
        AtomicReference<AppException> error = new AtomicReference<>(null);
        OsJNIResultCallback<DeleteResult> callback = new OsJNIResultCallback<DeleteResult>(success, error) {
            @Override
            protected DeleteResult mapSuccess(Object result) {
                return new DeleteResult((Long) result);
            }
        };
        final String jsonDocument = JniBsonProtocol.encode(filter, codecRegistry);
        switch (type) {
            case DELETE_ONE:
                nativeDelete(DELETE_ONE, nativePtr, jsonDocument, callback);
                break;
            case DELETE_MANY:
                nativeDelete(DELETE_MANY, nativePtr, jsonDocument, callback);
                break;
            default:
                throw new IllegalArgumentException("Invalid delete type: " + type);
        }
        return ResultHandler.handleResult(success, error);
    }

    public UpdateResult updateOne(final Bson filter, final Bson update) {
        return updateInternal(UPDATE_ONE, filter, update, null);
    }

    public UpdateResult updateOne(final Bson filter,
                                  final Bson update,
                                  final UpdateOptions options) {
        return updateInternal(UPDATE_ONE_WITH_OPTIONS, filter, update, options);
    }

    public UpdateResult updateMany(final Bson filter, final Bson update) {
        return updateInternal(UPDATE_MANY, filter, update, null);
    }

    public UpdateResult updateMany(final Bson filter,
                                   final Bson update,
                                   final UpdateOptions options) {
        return updateInternal(UPDATE_MANY_WITH_OPTIONS, filter, update, options);
    }

    /**
     * Executes an update. The native result is a 3-element BSON array:
     * [matchedCount, modifiedCount, upsertedId-or-BsonNull].
     */
    private UpdateResult updateInternal(final int type,
                                        final Bson filter,
                                        final Bson update,
                                        @Nullable final UpdateOptions options) {
        AtomicReference<UpdateResult> success = new AtomicReference<>(null);
        AtomicReference<AppException> error = new AtomicReference<>(null);
        OsJNIResultCallback<UpdateResult> callback = new OsJNIResultCallback<UpdateResult>(success, error) {
            @Override
            protected UpdateResult mapSuccess(Object result) {
                BsonArray array = JniBsonProtocol.decode((String) result, BsonArray.class, codecRegistry);
                long matchedCount = array.get(0).asInt32().getValue();
                long modifiedCount = array.get(1).asInt32().getValue();
                BsonValue upsertedId = array.get(2);
                if (upsertedId instanceof BsonNull) {
                    upsertedId = null;
                }
                return new UpdateResult(matchedCount, modifiedCount, upsertedId);
            }
        };
        final String jsonFilter = JniBsonProtocol.encode(filter, codecRegistry);
        final String jsonUpdate = JniBsonProtocol.encode(update, codecRegistry);
        switch (type) {
            case UPDATE_ONE:
            case UPDATE_MANY:
                nativeUpdate(type, nativePtr, jsonFilter, jsonUpdate, false, callback);
                break;
            case UPDATE_ONE_WITH_OPTIONS:
            case UPDATE_MANY_WITH_OPTIONS:
                Util.checkNull(options, "options");
                nativeUpdate(type, nativePtr, jsonFilter, jsonUpdate, options.isUpsert(), callback);
                break;
            default:
                throw new IllegalArgumentException("Invalid update type: " + type);
        }
        return ResultHandler.handleResult(success, error);
    }

    public DocumentT findOneAndUpdate(final Bson filter, final Bson update) {
        return findOneAndUpdate(filter, update, documentClass);
    }

    public <ResultT> ResultT findOneAndUpdate(final Bson filter,
                                              final Bson update,
                                              final Class<ResultT> resultClass) {
        return findOneAndModify(FIND_ONE_AND_UPDATE, filter, update, null, resultClass);
    }

    public DocumentT findOneAndUpdate(final Bson filter,
                                      final Bson update,
                                      final FindOneAndModifyOptions options) {
        return findOneAndUpdate(filter, update, options, documentClass);
    }

    public <ResultT> ResultT findOneAndUpdate(final Bson filter,
                                              final Bson update,
                                              final FindOneAndModifyOptions options,
                                              final Class<ResultT> resultClass) {
        return findOneAndModify(FIND_ONE_AND_UPDATE_WITH_OPTIONS, filter, update, options, resultClass);
    }

    public DocumentT findOneAndReplace(final Bson filter, final Bson replacement) {
        return findOneAndReplace(filter, replacement, documentClass);
    }

    public <ResultT> ResultT findOneAndReplace(final Bson filter,
                                               final Bson replacement,
                                               final Class<ResultT> resultClass) {
        return findOneAndModify(FIND_ONE_AND_REPLACE, filter, replacement, null, resultClass);
    }

    public DocumentT findOneAndReplace(final Bson filter,
                                       final Bson replacement,
                                       final FindOneAndModifyOptions options) {
        return findOneAndReplace(filter, replacement, options, documentClass);
    }

    public <ResultT> ResultT findOneAndReplace(final Bson filter,
                                               final Bson replacement,
                                               final FindOneAndModifyOptions options,
                                               final Class<ResultT> resultClass) {
        return findOneAndModify(FIND_ONE_AND_REPLACE_WITH_OPTIONS, filter, replacement, options, resultClass);
    }

    public DocumentT findOneAndDelete(final Bson filter) {
        return findOneAndDelete(filter, documentClass);
    }

    public <ResultT> ResultT findOneAndDelete(final Bson filter,
                                              final Class<ResultT> resultClass) {
        return findOneAndModify(FIND_ONE_AND_DELETE, filter, new Document(), null, resultClass);
    }

    public DocumentT findOneAndDelete(final Bson filter,
                                      final FindOneAndModifyOptions options) {
        return findOneAndDelete(filter, options, documentClass);
    }

    public <ResultT> ResultT findOneAndDelete(final Bson filter,
                                              final FindOneAndModifyOptions options,
                                              final Class<ResultT> resultClass) {
        return findOneAndModify(FIND_ONE_AND_DELETE_WITH_OPTIONS, filter, new Document(), options, resultClass);
    }

    public String getServiceName() {
        return serviceName;
    }

    /**
     * Shared driver for the findOneAnd* family. {@code update} is ignored by
     * the delete variants (an empty document is passed in by the callers).
     */
    private <ResultT> ResultT findOneAndModify(final int type,
                                               final Bson filter,
                                               final Bson update,
                                               @Nullable final FindOneAndModifyOptions options,
                                               final Class<ResultT> resultClass) {
        AtomicReference<ResultT> success = new AtomicReference<>(null);
        AtomicReference<AppException> error = new AtomicReference<>(null);
        OsJNIResultCallback<ResultT> callback = new OsJNIResultCallback<ResultT>(success, error) {
            @Override
            protected ResultT mapSuccess(Object result) {
                return findSuccessMapper(result, resultClass);
            }
        };
        final String encodedFilter = JniBsonProtocol.encode(filter, codecRegistry);
        final String encodedUpdate = JniBsonProtocol.encode(update, codecRegistry);
        // Default to empty documents; overwritten below when options provide them.
        String encodedProjection = encodedEmptyDocument;
        String encodedSort = encodedEmptyDocument;
        if (options != null) {
            if (options.getProjection() != null) {
                encodedProjection = JniBsonProtocol.encode(options.getProjection(), codecRegistry);
            }
            if (options.getSort() != null) {
                encodedSort = JniBsonProtocol.encode(options.getSort(), codecRegistry);
            }
        }
        switch (type) {
            case FIND_ONE_AND_UPDATE:
                nativeFindOneAndUpdate(type, nativePtr, encodedFilter, encodedUpdate, encodedProjection, encodedSort, false, false, callback);
                break;
            case FIND_ONE_AND_UPDATE_WITH_OPTIONS:
                Util.checkNull(options, "options");
                nativeFindOneAndUpdate(type, nativePtr, encodedFilter, encodedUpdate, encodedProjection, encodedSort, options.isUpsert(), options.isReturnNewDocument(), callback);
                break;
            case FIND_ONE_AND_REPLACE:
                nativeFindOneAndReplace(type, nativePtr, encodedFilter, encodedUpdate, encodedProjection, encodedSort, false, false, callback);
                break;
            case FIND_ONE_AND_REPLACE_WITH_OPTIONS:
                Util.checkNull(options, "options");
                nativeFindOneAndReplace(type, nativePtr, encodedFilter, encodedUpdate, encodedProjection, encodedSort, options.isUpsert(), options.isReturnNewDocument(), callback);
                break;
            case FIND_ONE_AND_DELETE:
                nativeFindOneAndDelete(type, nativePtr, encodedFilter, encodedProjection, encodedSort, false, false, callback);
                break;
            case FIND_ONE_AND_DELETE_WITH_OPTIONS:
                Util.checkNull(options, "options");
                // NOTE(review): upsert/returnNewDocument are forwarded for a
                // delete; presumably ignored natively — confirm against the
                // native implementation.
                nativeFindOneAndDelete(type, nativePtr, encodedFilter, encodedProjection, encodedSort, options.isUpsert(), options.isReturnNewDocument(), callback);
                break;
            default:
                throw new IllegalArgumentException("Invalid modify type: " + type);
        }
        return ResultHandler.handleResult(success, error);
    }

    /** Decodes a JSON result into {@code resultClass}; a null result stays null. */
    private <T> T findSuccessMapper(@Nullable Object result, Class<T> resultClass) {
        if (result == null) {
            return null;
        } else {
            return JniBsonProtocol.decode((String) result, resultClass, codecRegistry);
        }
    }

    /**
     * Opens a change-stream request against this collection.
     *
     * @throws IOException if the streaming request fails.
     */
    private EventStream<DocumentT> watchInternal(int type, @Nullable List<?> ids, @Nullable BsonDocument matchFilter) throws IOException {
        List<Document> args = new ArrayList<>();
        Document watchArgs = new Document("database", namespace.getDatabaseName());
        watchArgs.put("collection", namespace.getCollectionName());
        switch (type) {
            case WATCH:
                break;
            case WATCH_IDS:
                watchArgs.put("ids", ids);
                break;
            case WATCH_WITH_FILTER:
                watchArgs.put("filter", matchFilter);
                break;
            default:
                throw new IllegalArgumentException("Invalid watch type: " + type);
        }
        args.add(watchArgs);
        String encodedArguments = JniBsonProtocol.encode(args, codecRegistry);
        OsJavaNetworkTransport.Request request = streamNetworkTransport.makeStreamingRequest("watch", encodedArguments, serviceName);
        OsJavaNetworkTransport.Response response = streamNetworkTransport.sendRequest(request);
        return new NetworkEventStream<>(response, codecRegistry, documentClass);
    }

    public EventStream<DocumentT> watch() throws IOException {
        return watchInternal(WATCH, null, null);
    }

    public EventStream<DocumentT> watch(final List<?> ids) throws IOException {
        return watchInternal(WATCH_IDS, ids, null);
    }

    public EventStream<DocumentT> watchWithFilter(Document matchFilter) throws IOException {
        return watchInternal(WATCH_WITH_FILTER, null, matchFilter.toBsonDocument(getDocumentClass(), getCodecRegistry()));
    }

    public EventStream<DocumentT> watchWithFilter(BsonDocument matchFilter) throws IOException {
        return watchInternal(WATCH_WITH_FILTER, null, matchFilter);
    }

    // --- JNI bindings -------------------------------------------------------

    private static native long nativeGetFinalizerMethodPtr();
    private static native void nativeCount(long remoteMongoCollectionPtr,
                                           String filter,
                                           long limit,
                                           OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeFindOne(int findOneType,
                                             long nativePtr,
                                             String filter,
                                             String projection,
                                             String sort,
                                             long limit,
                                             OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeInsertOne(long remoteMongoCollectionPtr,
                                               String document,
                                               OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeInsertMany(long remoteMongoCollectionPtr,
                                                String documents,
                                                OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeDelete(int deleteType,
                                            long remoteMongoCollectionPtr,
                                            String document,
                                            OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeUpdate(int updateType,
                                            long remoteMongoCollectionPtr,
                                            String filter,
                                            String update,
                                            boolean upsert,
                                            OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeFindOneAndUpdate(int findOneAndUpdateType,
                                                      long remoteMongoCollectionPtr,
                                                      String filter,
                                                      String update,
                                                      String projection,
                                                      String sort,
                                                      boolean upsert,
                                                      boolean returnNewDocument,
                                                      OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeFindOneAndReplace(int findOneAndReplaceType,
                                                       long remoteMongoCollectionPtr,
                                                       String filter,
                                                       String update,
                                                       String projection,
                                                       String sort,
                                                       boolean upsert,
                                                       boolean returnNewDocument,
                                                       OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
    private static native void nativeFindOneAndDelete(int findOneAndDeleteType,
                                                      long remoteMongoCollectionPtr,
                                                      String filter,
                                                      String projection,
                                                      String sort,
                                                      boolean upsert,
                                                      boolean returnNewDocument,
                                                      OsJavaNetworkTransport.NetworkTransportJNIResultCallback callback);
}
|
def dns_error_message(rcode):
    """Translate a DNS response code (RCODE) into its descriptive message.

    Any code without a defined description yields "Unknown error code".
    """
    descriptions = {
        0: "No error condition",
        1: "Format error - The name server was unable to interpret the query",
        2: "Server failure - The name server was unable to process this query due to a problem with the name server",
        3: "Name error - Meaningful only for responses from an authoritative name server, this code signifies that the domain name referenced in the query does not exist",
        4: "Not implemented - The name server does not support the requested kind of query",
        5: "Refused - The name server refuses to perform the specified operation for policy reasons. For example, a name server may not wish to provide the information to the particular requester, or a name server may not wish to perform a particular operation (e.g., zone transfer) for particular data."
    }
    if rcode in descriptions:
        return descriptions[rcode]
    return "Unknown error code"
# Demonstrate the lookup: two defined rcodes and one unknown code.
for rcode in (5, 3, 6):
    print(dns_error_message(rcode))
# Determine whether the user's input is numeric.
# BUG in the original: input() always returns str, so isinstance(user_input, str)
# was always True and "The user entered a number." was unreachable. Instead,
# try to parse the text as a number.
user_input = input("Please enter a number: ")
try:
    float(user_input)
except ValueError:
    print("The user entered a string.")
else:
    print("The user entered a number.")
<filename>MyMuduo/Lib/TimeStamp.h
#ifndef NLIB_TIMESTAMP_H
#define NLIB_TIMESTAMP_H
#include "header.h"
// Microsecond-resolution UTC timestamp (muduo-style). Comparison operators
// are generated via boost::equality_comparable / less_than_comparable from
// the operator</operator== defined below the class.
class TimeStamp :
public boost::equality_comparable<TimeStamp>,
public boost::less_than_comparable<TimeStamp>
{
public:
// Default constructor: an invalid timestamp (0 microseconds since epoch).
TimeStamp()
: m_nMicroSecondsSinceEpoch(0)
{
}
// Construct from an absolute count of microseconds since the Unix epoch.
explicit TimeStamp(
int64_t microSecondsSinceEpochArg)
: m_nMicroSecondsSinceEpoch(microSecondsSinceEpochArg)
{
}
// Swap the underlying tick count with another TimeStamp.
void swap(TimeStamp& that)
{
std::swap(
m_nMicroSecondsSinceEpoch,
that.m_nMicroSecondsSinceEpoch);
}
string toString() const;
// Formatted display (implementation in the .cpp file).
string toFormattedString(
bool showMicroseconds = true) const;
// A timestamp is valid only if strictly after the epoch.
bool valid() const
{
return m_nMicroSecondsSinceEpoch > 0;
}
// Raw microsecond count since the Unix epoch.
int64_t microSecondsSinceEpoch() const
{
return m_nMicroSecondsSinceEpoch;
}
// Truncate to whole seconds since the Unix epoch.
time_t secondsSinceEpoch() const
{
return (time_t)(m_nMicroSecondsSinceEpoch
/ s_nMicroSecondsPerSecond);
}
static TimeStamp now();
// Sentinel invalid timestamp (epoch).
static TimeStamp invalid()
{
return TimeStamp();
}
// Build a TimeStamp from a whole-second time_t value.
static TimeStamp fromUnixTime(time_t t)
{
return fromUnixTime(t, 0);
}
// Build a TimeStamp from seconds plus a microsecond offset.
static TimeStamp fromUnixTime(
time_t t,
int microseconds)
{
return
TimeStamp((int64_t)(t)
* s_nMicroSecondsPerSecond
+ microseconds);
}
static const int s_nMicroSecondsPerSecond
= 1000 * 1000;
private:
int64_t m_nMicroSecondsSinceEpoch;
};
// Strict ordering by the underlying microsecond tick count.
inline bool operator<(TimeStamp lhs, TimeStamp rhs)
{
    const int64_t a = lhs.microSecondsSinceEpoch();
    const int64_t b = rhs.microSecondsSinceEpoch();
    return a < b;
}
// Equality: identical microsecond tick counts.
inline bool operator==(TimeStamp lhs, TimeStamp rhs)
{
    return rhs.microSecondsSinceEpoch() == lhs.microSecondsSinceEpoch();
}
// 以秒为单位的差值
// Difference (high - low) expressed in seconds as a double.
inline double timeDifference(TimeStamp high, TimeStamp low)
{
    const int64_t deltaMicros =
        high.microSecondsSinceEpoch() - low.microSecondsSinceEpoch();
    return static_cast<double>(deltaMicros)
        / TimeStamp::s_nMicroSecondsPerSecond;
}
// Return a timestamp shifted forward by the given number of seconds
// (fractional seconds are truncated to whole microseconds).
inline TimeStamp addTime(TimeStamp timestamp, double seconds)
{
    const int64_t deltaMicros = static_cast<int64_t>(
        seconds * TimeStamp::s_nMicroSecondsPerSecond);
    return TimeStamp(timestamp.microSecondsSinceEpoch() + deltaMicros);
}
#endif
|
# === LICENSE STATEMENT ===
# Copyright (c) 2011 <NAME> <<EMAIL>>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice and
# this notice are preserved.
# === END LICENSE STATEMENT ===
import array
class DymoLabeler:
"""Create and work with a Dymo LabelManager PnP object.
This class contains both mid-level and high-level functions. In general,
the high-level functions should be used. However, special purpose usage
may require the mid-level functions. That is why they are provided.
However, they should be well understood before use. Look at the
high-level functions for help. Each function is marked in its docstring
with 'HLF' or 'MLF' in parentheses.
A partial reference of the protocol is the Technical Reference for the
LabelWriter 450:
<https://download.dymo.com/dymo/technical-data-sheets/LW%20450%20Series%20Technical%20Reference.pdf>
"""
_ESC = 0x1B
_SYN = 0x16
_MAX_BYTES_PER_LINE = 8 # 64 pixels on a 12mm tape
def __init__(self, dev):
"""Initialize the LabelManager object. (HLF)"""
self.cmd = []
self.response = False
self.bytesPerLine_ = None
self.dotTab_ = 0
self.dev = open(dev, "rb+")
self.maxLines = 200
def sendCommand(self):
"""Send the already built command to the LabelManager. (MLF)"""
if len(self.cmd) == 0:
return
cmdBin = array.array("B", self.cmd)
cmdBin.tofile(self.dev)
self.cmd = []
if not self.response:
return
self.response = False
responseBin = self.dev.read(8)
response = array.array("B", responseBin).tolist()
return response
def resetCommand(self):
"""Remove a partially built command. (MLF)"""
self.cmd = []
self.response = False
def buildCommand(self, cmd):
"""Add the next instruction to the command. (MLF)"""
self.cmd += cmd
def statusRequest(self):
"""Set instruction to get the device's status. (MLF)"""
cmd = [self._ESC, ord("A")]
self.buildCommand(cmd)
self.response = True
def dotTab(self, value):
"""Set the bias text height, in bytes. (MLF)"""
if value < 0 or value > self._MAX_BYTES_PER_LINE:
raise ValueError
cmd = [self._ESC, ord("B"), value]
self.buildCommand(cmd)
self.dotTab_ = value
self.bytesPerLine_ = None
def tapeColor(self, value):
"""Set the tape color. (MLF)"""
if value < 0:
raise ValueError
cmd = [self._ESC, ord("C"), value]
self.buildCommand(cmd)
def bytesPerLine(self, value):
"""Set the number of bytes sent in the following lines. (MLF)"""
if value < 0 or value + self.dotTab_ > self._MAX_BYTES_PER_LINE:
raise ValueError
if value == self.bytesPerLine_:
return
cmd = [self._ESC, ord("D"), value]
self.buildCommand(cmd)
self.bytesPerLine_ = value
def cut(self):
"""Set instruction to trigger cutting of the tape. (MLF)"""
cmd = [self._ESC, ord("E")]
self.buildCommand(cmd)
def line(self, value):
"""Set next printed line. (MLF)"""
self.bytesPerLine(len(value))
cmd = [self._SYN] + value
self.buildCommand(cmd)
def chainMark(self):
"""Set Chain Mark. (MLF)"""
self.dotTab(0)
self.bytesPerLine(self._MAX_BYTES_PER_LINE)
self.line([0x99] * self._MAX_BYTES_PER_LINE)
def skipLines(self, value):
"""Set number of lines of white to print. (MLF)"""
if value <= 0:
raise ValueError
self.bytesPerLine(0)
cmd = [self._SYN] * value
self.buildCommand(cmd)
def initLabel(self):
"""Set the label initialization sequence. (MLF)"""
cmd = [0x00] * 8
self.buildCommand(cmd)
def getStatus(self):
"""Ask for and return the device's status. (HLF)"""
self.statusRequest()
response = self.sendCommand()
print(response)
def printLabel(self, lines, margin=56 * 2):
    """Print the label described by `lines`, automatically splitting it
    into chunks of at most `maxLines` lines when it is too tall.

    FIX: the original deleted slices from the caller's list, silently
    consuming it; we now iterate over a local copy instead.
    """
    remaining = list(lines)
    while len(remaining) > self.maxLines + 1:
        self.rawPrintLabel(remaining[0 : self.maxLines], margin=0)
        del remaining[0 : self.maxLines]
    self.rawPrintLabel(remaining, margin=margin)
def rawPrintLabel(self, lines, margin=56 * 2):
    """Print the label described by `lines`. (HLF)

    Optimizes the bitmap for the Dymo printer: leading all-zero columns
    become a dot tab and trailing zero bytes are trimmed from each line,
    then the full label command is queued and sent.
    """
    # Shift leading blank columns into the dot tab to shorten every line.
    dottab = 0
    while [] not in lines and max(line[0] for line in lines) == 0:
        lines = [line[1:] for line in lines]
        dottab += 1
    # Trailing zero bytes print nothing; drop them line by line.
    for line in lines:
        while len(line) > 0 and line[-1] == 0:
            del line[-1]
    # FIX: the original read `self.initLabel` without calling it, so the
    # label-initialization bytes were never queued.
    self.initLabel()
    self.tapeColor(0)
    self.dotTab(dottab)
    for line in lines:
        self.line(line)
    if margin > 0:
        self.skipLines(margin)
    self.statusRequest()
    response = self.sendCommand()
    print(response)
|
from torchvision import transforms as tf
from torchvision.transforms import functional as F
import torch
import PIL
from PIL import Image
import matplotlib
import matplotlib.pyplot as plt
from torchvision.transforms import functional as tvF
from torch.nn import functional as F
from scipy.spatial.transform import Rotation as R
def plot(disp, n='test'):
    """Render `disp` as a Blues heatmap with a colorbar and save it as '<n>.png'."""
    fig, ax = plt.subplots(figsize=(6, 6), ncols=1)
    image = ax.imshow(disp[:, :], cmap='Blues')
    fig.colorbar(image, ax=ax)
    plt.savefig(f'{n}.png')
__all__ = ['Augmentation']
import random
import numpy as np
class Augmentation():
    """Paired real/render sample augmentation for dense-matching training.

    Applies (optionally) a random horizontal flip, color jitter,
    ImageNet normalization, and resizing; keeps the flow maps, flow
    mask and label map consistent with the geometric transform.
    """
    def __init__(self,
                 output_size=(300,300),
                 add_depth = False,
                 degrees = 0,
                 flip_p = 0,
                 color_jitter = [0.2, 0.2, 0.2, 0.05],
                 jitter_real = True,
                 jitter_render = True,
                 normalize = True,
                 return_non_normalized = True,
                 return_ready = True, input_size= (300,300) ):
        # NOTE(review): mutable default for color_jitter is shared across
        # instances; harmless here because it is only read — confirm.
        self.add_depth = add_depth
        # Bilinear resize for images, nearest-neighbor for depth/labels.
        self.up_in = torch.nn.UpsamplingBilinear2d(size=output_size)
        self.up_nn_in= torch.nn.UpsamplingNearest2d(size=output_size)
        # 1x2x3 affine that mirrors horizontally (with a unit translation term).
        self.affine_flip = torch.tensor( [[[-1,0,1],[0,1,1]]],
                                         dtype=torch.float32 )
        self.flip_p = flip_p
        self.degrees = degrees
        # Pixel-coordinate grid of shape (H, W, 2), channels ordered (y, x);
        # used by affine_grid() to convert a sampling grid into displacements.
        H,W = input_size
        grid_x = np.linspace(0,H-1,H)
        grid_x = np.repeat( grid_x[:,None],W, axis=1)
        grid_y = np.linspace(0,W-1,W)
        grid_y = np.repeat( grid_y[None,:],H, axis=0)
        self.grid_xy = np.stack( [grid_y, grid_x],axis=2)
        self.return_non_normalized = return_non_normalized
        self.jitter_real = jitter_real
        self.jitter_render = jitter_render
        self.normalize = normalize
        self.return_ready = return_ready
        if jitter_real or jitter_render:
            self._jitter = tf.ColorJitter(*color_jitter)
        if normalize:
            # ImageNet channel mean / std.
            self._norm = tf.Normalize(
                [0.485, 0.456, 0.406],
                [0.229, 0.224, 0.225])
    def apply(self, idx, u_map, v_map, flow_mask, gt_label_cropped, real_img, render_img, real_d, render_d):
        """Augment one sample and assemble the network-ready tensors.

        Parameters
        ----------
        idx : torch.tensor id
        u_map : HxW
        v_map : HxW
        flow_mask : bool HxW
        gt_label_cropped : int64 HxW
        real_img : HWC 0-255
        render_img : HWC 0-255
        """
        # Scale both images to [0, 1].
        render_img = render_img/255.0
        real_img = real_img/255.0
        # Restrict the flow mask to pixels carrying this object's label
        # (labels appear offset by +1 relative to idx — TODO confirm).
        gt_sel = (idx.repeat(1,*gt_label_cropped.shape[0:])+1)[0]
        obj_mask = gt_label_cropped == gt_sel
        flow_mask = flow_mask * obj_mask
        inp = ( u_map, v_map, flow_mask, gt_label_cropped, real_img, real_d)
        # Random horizontal flip of the real-side tensors only.
        if random.random() < self.flip_p:
            inp = self.affine_grid(self.affine_flip, *inp)
        # affine = self.get_affine()
        # inp = self.affine_grid( affine, *inp)
        render_img = render_img.permute(2,0,1)
        # NOTE(review): if jitter_real is False, real_img stays HWC and the
        # torch.cat below would mix layouts — confirm jitter_real is always on.
        if self.jitter_real:
            real_img = self._jitter( inp[4].permute(2,0,1) )
        if self.jitter_render:
            render_img = self._jitter( render_img ) # C,H,W
        if self.normalize:
            # Keep un-normalized copies for visualization when requested.
            if self.return_non_normalized:
                non_norm_real_img = real_img.clone()
                non_norm_render_img = render_img.clone()
            real_img = self._norm( real_img )
            render_img = self._norm( render_img ) # C,H,W
        # NOTE(review): non_norm_* are only bound when normalize AND
        # return_non_normalized are True; other flag combinations would
        # raise NameError at the return below — confirm intended usage.
        if self.return_ready:
            # Stack real+render along channels and resize to output_size.
            data = torch.cat([real_img, render_img], dim=0)
            data = self.up_in(data[None])[0]
            if self.add_depth:
                # Clamp depth to 20 m (assuming 0.1 mm units — TODO confirm)
                # and rescale to roughly [0, 2].
                d = torch.stack([inp[-1], render_d], dim=0)
                d = torch.clamp(d, 0,20000)
                d = self.up_nn_in(d[None])[0] / 10000
                data = torch.cat([data,d],dim=0)
            # self.up_nn_in( [], dim=0 )
            # NOTE(review): uses the pre-flip u_map/v_map rather than
            # inp[0]/inp[1]; flipped flow values are discarded — confirm.
            uv = torch.stack([u_map, v_map], dim=0) # C,H,W
            flow_mask = inp[2]
            flow_mask = flow_mask[None,:,:].repeat(2,1,1)
            gt_label_cropped = inp[3]
            return data, uv, flow_mask, gt_label_cropped, non_norm_real_img, non_norm_render_img
    def get_affine(self):
        # Random in-plane rotation within ±degrees, returned as a 1x2x3
        # affine for torch affine_grid. NOTE(review): the third column is
        # left at 1 (translation), inherited from torch.ones — confirm.
        angle = (random.random()-0.5) * self.degrees * 2
        rin = R.from_euler('z', angle, degrees=True).as_matrix()
        affine = torch.ones( (1,2,3) )
        affine[:,:2,:2] = torch.tensor( rin )[:2,:2]
        return affine
    def affine_grid(self, affine, u_map, v_map, flow_mask, gt_label_cropped, real_img, real_d):
        # Warp all real-side tensors by `affine` and correct the flow maps
        # for the induced per-pixel displacement.
        # NOTE(review): H/W are hard-coded to 480x640 regardless of
        # input_size — confirm inputs are always that shape.
        N = 1
        C = 2
        H = 480
        W = 640
        grid = F.affine_grid(affine, (N,C,H,W),align_corners=True)
        # NOTE(review): shifting the whole grid by -1 (a full normalized
        # unit) looks unusual for a pure flip — verify against affine_flip's
        # translation column.
        grid -= 1
        # Bilinear for the image, nearest for mask / labels / depth.
        real_img_rotate = F.grid_sample((real_img[None,:,:,:]).permute(0,3,1,2).type(torch.float32), grid, mode='bilinear', padding_mode='zeros', align_corners=True)
        mask_rotate = F.grid_sample((flow_mask[None,:,:,None].type(torch.float32)).permute(0,3,1,2), grid, mode='nearest', padding_mode='zeros', align_corners=True)
        gt_label_cropped_rotated = F.grid_sample((gt_label_cropped[None,:,:,None].type(torch.float32)).permute(0,3,1,2), grid, mode='nearest', padding_mode='zeros', align_corners=True)
        real_d_rotated = F.grid_sample((real_d[None,:,:,None].type(torch.float32)).permute(0,3,1,2), grid, mode='nearest', padding_mode='zeros', align_corners=True)[0,0,:,:]
        gt_label_cropped_rotated = gt_label_cropped_rotated[0,0,:,:]
        real_img_rotate = real_img_rotate.permute(2,3,1,0)[:,:,:,0]
        # Convert the normalized grid back to pixel coordinates, then take
        # the displacement against the identity pixel grid.
        grid[:,:,:,0] = (grid[:,:,:,0]+1)/2*(W-1)
        grid[:,:,:,1] = (grid[:,:,:,1]+1)/2*(H-1)
        disp = self.grid_xy - grid.numpy()[0]
        # uu = u_rotate[0,0] + disp[:,:,1]
        # vv = v_rotate[0,0] + disp[:,:,0]
        # Add the warp-induced displacement to the flow, then warp the flow
        # maps themselves so they stay aligned with the warped image.
        u_map = u_map + disp[:,:,1]
        v_map = v_map + disp[:,:,0]
        u_rotate = F.grid_sample((u_map[None,:,:,None]).permute(0,3,1,2).type(torch.float32), grid, mode='bilinear', padding_mode='zeros', align_corners=True)
        v_rotate = F.grid_sample((v_map[None,:,:,None]).permute(0,3,1,2).type(torch.float32), grid, mode='bilinear', padding_mode='zeros', align_corners=True)
        uu = u_rotate[0,0]
        vv = v_rotate[0,0]
        flow_mask_out = (mask_rotate == 1)[0,0]
        return ( uu, vv, flow_mask_out, gt_label_cropped_rotated, real_img_rotate, real_d_rotated)
<filename>util/binfmt_misc/s390x_check_s390x.go
// +build s390x
package binfmt_misc
// s390xSupported reports whether s390x binaries can run natively here.
// Always nil, because this file only compiles under the s390x build tag.
func s390xSupported() error {
	return nil
}
|
#!/bin/bash
# SLURM batch job: double-DDPG on RoboschoolInvertedPendulumSwingup-v1
# (seed 3, run 8, epsilon-greedy exploration, continuous action space).
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolInvertedPendulumSwingup-v1_doule_ddpg_softcopy_epsilon_greedy_seed3_run8_%N-%j.out # %N for node name, %j for jobID
# Load the cluster toolchain, then the TensorFlow (CPU) virtualenv.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
# Launch training; results land under Double_DDPG_Results_no_monitor.
python ./ddpg_discrete_action.py --env RoboschoolInvertedPendulumSwingup-v1 --random-seed 3 --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolInvertedPendulumSwingup-v1/doule_ddpg_softcopy_epsilon_greedy_seed3_run8 --continuous-act-space-flag
|
#!/bin/sh
# Regression input for rshell's comment handling: everything after the
# first unquoted '#' must be ignored, including connectors (;, ||, &&).
echo "#ls; ls || ls && ls # ls" | bin/rshell
# echo "((ps #) "
# echo "(echo a) # ls"
# echo ((echo a #|| echo b)"
# echo "#test -e bin/rshell [ bin/rshell]"
|
<gh_stars>0
import { IAuthorRepository } from 'src/core';
import { Repository } from 'typeorm';
export class MysqlAuthorRepository<T> implements IAuthorRepository<T> {
  private _repository: Repository<T>;

  constructor(repository: Repository<T>) {
    this._repository = repository;
  }

  /** Return every author row. */
  findAll(): Promise<T[]> {
    return this._repository.find();
  }

  /** Persist a new author and resolve to the saved entity. */
  create(author): Promise<T> {
    return this._repository.save(author);
  }

  /** Look an author up by exact name; resolves to undefined/null when absent. */
  findOneByName(name: string): Promise<T> {
    return this._repository.findOne({ where: { name: name } });
  }

  /**
   * Resolve to true exactly when an author with this name exists.
   *
   * FIX: the original returned true when the author was NOT found,
   * inverting the semantics promised by the method name. Both undefined
   * and null are treated as "not found", since TypeORM versions differ
   * in which value findOne resolves to for a miss.
   */
  async checkIfExists(name: string): Promise<boolean> {
    const found = await this.findOneByName(name);
    return found !== undefined && found !== null;
  }
}
|
// Redux action-type constants for the order domain, grouped by flow.

// Create a new order
export const CREATE_ORDER_REQUEST = "CREATE_ORDER_REQUEST";
export const CREATE_ORDER_SUCCESS = "CREATE_ORDER_SUCCESS";
export const CREATE_ORDER_FAIL = "CREATE_ORDER_FAIL";

// Orders belonging to the logged-in user
export const MY_ORDERS_REQUEST = "MY_ORDERS_REQUEST";
export const MY_ORDERS_SUCCESS = "MY_ORDERS_SUCCESS";
export const MY_ORDERS_FAIL = "MY_ORDERS_FAIL";

// All orders (admin listing)
export const ALL_ORDERS_REQUEST = "ALL_ORDERS_REQUEST";
export const ALL_ORDERS_SUCCESS = "ALL_ORDERS_SUCCESS";
export const ALL_ORDERS_FAIL = "ALL_ORDERS_FAIL";

// Update an order (RESET clears the update status flag)
export const UPDATE_ORDER_REQUEST = "UPDATE_ORDER_REQUEST";
export const UPDATE_ORDER_SUCCESS = "UPDATE_ORDER_SUCCESS";
export const UPDATE_ORDER_RESET = "UPDATE_ORDER_RESET";
export const UPDATE_ORDER_FAIL = "UPDATE_ORDER_FAIL";

// Delete an order (RESET clears the delete status flag)
export const DELETE_ORDER_REQUEST = "DELETE_ORDER_REQUEST";
export const DELETE_ORDER_SUCCESS = "DELETE_ORDER_SUCCESS";
export const DELETE_ORDER_RESET = "DELETE_ORDER_RESET";
export const DELETE_ORDER_FAIL = "DELETE_ORDER_FAIL";

// Single-order details
export const ORDER_DETAILS_REQUEST = "ORDER_DETAILS_REQUEST";
export const ORDER_DETAILS_SUCCESS = "ORDER_DETAILS_SUCCESS";
export const ORDER_DETAILS_FAIL = "ORDER_DETAILS_FAIL";

// Clear any stored error state
export const CLEAR_ERRORS = "CLEAR_ERRORS";
|
#!/bin/bash
# Prompt for a string and a digest algorithm via dialog(1), then print the
# digest and save it to Output.txt.
#
# FIXES vs. the original:
#  - `sh -c "echo '$theString' | md5"` broke (and allowed command injection)
#    whenever the input contained a single quote; the string is now piped
#    directly with printf, never re-parsed by a shell.
#  - each digest ran twice (once for stdout, once for the file); tee now
#    produces both outputs from a single run.
#  - `$(rm -f file)` command substitutions replaced by plain rm.
#  - comparisons use a quoted case statement instead of unquoted [ ... ].

dialog --clear --inputbox "Please enter a string you would like to Convert : " 10 60 2> theString.tmp
clear
theString=$(cat theString.tmp)
rm -f theString.tmp

dialog --clear --inputbox "Choose an option :\n
1) md5 sum\n
2) sha256\n
3) sha512" 10 60 2> choice.tmp
clear
encryptionChoice=$(cat choice.tmp)
rm -f choice.tmp

# printf '%s\n' matches echo's trailing newline, so digests are unchanged.
case "$encryptionChoice" in
    1)
        printf '%s\n' "$theString" | md5 | tee Output.txt
        ;;
    2)
        printf '%s\n' "$theString" | sha256 | tee Output.txt
        ;;
    3)
        printf '%s\n' "$theString" | sha512 | tee Output.txt
        ;;
    *)
        echo Please enter a valid choice
        ;;
esac
exit
|
#!/bin/sh
#Config xray
# Regenerate the Xray config from scratch on every start.
rm -rf /etc/xray/config.json
# NOTE(review): $PORT and $UUID are expanded from the environment into the
# heredoc; both must be set (and PORT numeric) or the JSON is invalid.
cat << EOF > /etc/xray/config.json
{
  "inbounds": [
    {
      "port": $PORT,
      "protocol": "vless",
      "settings": {
        "decryption": "none",
        "clients": [
          {
            "id": "$UUID"
          }
        ]
      },
      "streamSettings": {
        "network": "ws"
      }
    }
  ],
  "outbounds": [
    {
      "protocol": "freedom"
    }
  ]
}
EOF
#run xray
# exec replaces this shell so xray receives signals (e.g. SIGTERM) directly.
exec xray -c /etc/xray/config.json
|
#!/bin/bash
# FIX: was #!/bin/sh — this script depends on bash-only features (arrays,
# [[ ]], ${!array[@]}, and the ERR trap below), which a POSIX sh such as
# dash does not provide. Declare bash explicitly.
set -e
set -u
set -o pipefail

# Report the script location and line of any failing command (ERR trap).
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Map an xcframework slice path to its platform variant ("maccatalyst",
# "simulator", or empty for device/macOS builds).
variant_for_slice()
{
  case "$1" in
  "OpenSSL.xcframework/ios-arm64_x86_64-maccatalyst")
    echo "maccatalyst"
    ;;
  "OpenSSL.xcframework/macos-arm64_x86_64")
    echo ""
    ;;
  "OpenSSL.xcframework/ios-arm64_armv7")
    echo ""
    ;;
  "OpenSSL.xcframework/ios-arm64_i386_x86_64-simulator")
    echo "simulator"
    ;;
  esac
}
# Map an xcframework slice path to the space-separated list of CPU
# architectures that slice contains.
archs_for_slice()
{
  case "$1" in
  "OpenSSL.xcframework/ios-arm64_x86_64-maccatalyst")
    echo "arm64 x86_64"
    ;;
  "OpenSSL.xcframework/macos-arm64_x86_64")
    echo "arm64 x86_64"
    ;;
  "OpenSSL.xcframework/ios-arm64_armv7")
    echo "arm64 armv7"
    ;;
  "OpenSSL.xcframework/ios-arm64_i386_x86_64-simulator")
    echo "arm64 i386 x86_64"
    ;;
  esac
}
# Mirror the contents of $1 into $2 with rsync. --delete keeps the
# destination exact; the command is echoed first for build-log visibility.
copy_dir()
{
  local source="$1"
  local destination="$2"

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" \"${source}*\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" "${source}"/* "${destination}"
}
SELECT_SLICE_RETVAL=""

# Pick the xcframework slice matching the current build: every arch in
# $ARCHS must be present in the slice AND the slice variant must match the
# target (device / simulator / maccatalyst). Result is returned through
# SELECT_SLICE_RETVAL (empty when nothing matches).
select_slice() {
  local xcframework_name="$1"
  xcframework_name="${xcframework_name##*/}"
  local paths=("${@:2}")
  # Locate the correct slice of the .xcframework for the current architectures
  local target_path=""

  # Split archs on space so we can find a slice that has all the needed archs
  local target_archs=$(echo $ARCHS | tr " " "\n")

  # Derive the desired variant from the build environment.
  local target_variant=""
  if [[ "$PLATFORM_NAME" == *"simulator" ]]; then
    target_variant="simulator"
  fi
  if [[ ! -z ${EFFECTIVE_PLATFORM_NAME+x} && "$EFFECTIVE_PLATFORM_NAME" == *"maccatalyst" ]]; then
    target_variant="maccatalyst"
  fi
  for i in ${!paths[@]}; do
    local matched_all_archs="1"
    local slice_archs="$(archs_for_slice "${xcframework_name}/${paths[$i]}")"
    local slice_variant="$(variant_for_slice "${xcframework_name}/${paths[$i]}")"
    for target_arch in $target_archs; do
      # Reject the slice as soon as the variant or any arch mismatches.
      if ! [[ "${slice_variant}" == "$target_variant" ]]; then
        matched_all_archs="0"
        break
      fi

      if ! echo "${slice_archs}" | tr " " "\n" | grep -F -q -x "$target_arch"; then
        matched_all_archs="0"
        break
      fi
    done

    if [[ "$matched_all_archs" == "1" ]]; then
      # Found a matching slice
      echo "Selected xcframework slice ${paths[$i]}"
      SELECT_SLICE_RETVAL=${paths[$i]}
      break
    fi
  done
}
# Copy the matching slice of an .xcframework into the Pods xcframeworks
# build dir. Emits a warning (and returns) when no slice matches the
# current architectures/platform.
install_xcframework() {
  local basepath="$1"
  local name="$2"
  local package_type="$3"
  local paths=("${@:4}")

  # Locate the correct slice of the .xcframework for the current architectures
  select_slice "${basepath}" "${paths[@]}"
  local target_path="$SELECT_SLICE_RETVAL"
  if [[ -z "$target_path" ]]; then
    echo "warning: [CP] $(basename ${basepath}): Unable to find matching slice in '${paths[@]}' for the current build architectures ($ARCHS) and platform (${EFFECTIVE_PLATFORM_NAME-${PLATFORM_NAME}})."
    return
  fi
  local source="$basepath/$target_path"

  local destination="${PODS_XCFRAMEWORKS_BUILD_DIR}/${name}"

  if [ ! -d "$destination" ]; then
    mkdir -p "$destination"
  fi

  copy_dir "$source/" "$destination"
  echo "Copied $source to $destination"
}
# Install the OpenSSL-Universal slice that matches this build's platform.
install_xcframework "${PODS_ROOT}/OpenSSL-Universal/Frameworks/OpenSSL.xcframework" "OpenSSL-Universal" "framework" "ios-arm64_x86_64-maccatalyst" "ios-arm64_armv7" "ios-arm64_i386_x86_64-simulator"
|
<filename>magic-tumb-wechat-intf/src/main/java/com/iamdigger/magictumblr/wcintf/utils/SerializeUtil.java
package com.iamdigger.magictumblr.wcintf.utils;
import com.iamdigger.magictumblr.wcintf.bean.TextMsg;
import com.thoughtworks.xstream.XStream;
/**
* @author Sam
* @since 3.0.0
*/
public class SerializeUtil {

    /** Guards lazy initialization of {@link #textXStream}. */
    private final static Object IN_TEXT_LOCK = new Object();

    // FIX: must be volatile for double-checked locking to be correct —
    // without it another thread may observe a non-null but not yet fully
    // constructed/configured XStream instance (JMM reordering).
    private static volatile XStream textXStream = null;

    /**
     * Lazily build the shared XStream instance mapping WeChat's XML tags
     * onto {@link TextMsg} fields (double-checked locking).
     */
    private static void initTextStream() {
        if (null == textXStream) {
            synchronized (IN_TEXT_LOCK) {
                if (null == textXStream) {
                    XStream xStream = new XStream();
                    xStream.alias("xml", TextMsg.class);
                    xStream.aliasField("ToUserName", TextMsg.class, "toUserName");
                    xStream.aliasField("FromUserName", TextMsg.class, "fromUserName");
                    xStream.aliasField("CreateTime", TextMsg.class, "createTime");
                    xStream.aliasField("MsgType", TextMsg.class, "msgType");
                    xStream.aliasField("MsgId", TextMsg.class, "msgId");
                    xStream.aliasField("Content", TextMsg.class, "content");
                    xStream.aliasField("Event", TextMsg.class, "event");
                    xStream.aliasField("EventKey", TextMsg.class, "eventKey");
                    // Publish only after the instance is fully configured.
                    textXStream = xStream;
                }
            }
        }
    }

    /**
     * Deserialize a WeChat XML payload into a {@link TextMsg}.
     *
     * @param xml the raw XML document
     * @return the parsed message
     */
    public static TextMsg textFromXml(String xml) {
        initTextStream();
        return (TextMsg) textXStream.fromXML(xml);
    }

    /**
     * Serialize a {@link TextMsg} back into WeChat's XML format.
     *
     * @param text the message to serialize
     * @return the XML document
     */
    public static String textToXml(TextMsg text) {
        initTextStream();
        return textXStream.toXML(text);
    }
}
|
// Graphics library implementation
class GraphicsLibrary {
    private Element vPortrait;

    public GraphicsLibrary() {
        // Initialize the portrait element
        vPortrait = new Element();
    }

    /** Move the portrait element to the given coordinates. */
    public void setPortraitPos(int x, int y) {
        vPortrait.setPos(x, y);
    }

    /**
     * FIX: Main calls {@code graphics.getPortrait()}, but no such accessor
     * existed, so the program did not compile. Expose the element.
     */
    public Element getPortrait() {
        return vPortrait;
    }

    // Other graphics library functions can be implemented here
}
// Element class representing a graphical element positioned on screen.
class Element {
    private int x;
    private int y;

    /** Move this element to the given coordinates. */
    public void setPos(int newX, int newY) {
        x = newX;
        y = newY;
    }

    /** Current horizontal position. */
    public int getX() {
        return x;
    }

    /** Current vertical position. */
    public int getY() {
        return y;
    }
}
// Main program to demonstrate the usage of the graphics library
public class Main {
    public static void main(String[] args) {
        GraphicsLibrary graphics = new GraphicsLibrary();

        // Set the position of the portrait element
        graphics.setPortraitPos(100, 150);

        // Get the position of the portrait element and display it.
        // NOTE(review): requires GraphicsLibrary to expose getPortrait().
        Element portrait = graphics.getPortrait();
        System.out.println("Portrait position: (" + portrait.getX() + ", " + portrait.getY() + ")");
    }
}
<filename>mq-client-open/src/main/java/com/sohu/tv/mq/rocketmq/RocketMQConsumer.java
package com.sohu.tv.mq.rocketmq;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.net.HttpURLConnection;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import org.apache.rocketmq.client.consumer.DefaultMQPushConsumer;
import org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyContext;
import org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyStatus;
import org.apache.rocketmq.client.consumer.listener.ConsumeOrderlyContext;
import org.apache.rocketmq.client.consumer.listener.ConsumeOrderlyStatus;
import org.apache.rocketmq.client.consumer.listener.MessageListenerConcurrently;
import org.apache.rocketmq.client.consumer.listener.MessageListenerOrderly;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.consumer.DefaultMQPushConsumerImpl;
import org.apache.rocketmq.client.impl.consumer.PullMessageService;
import org.apache.rocketmq.client.impl.consumer.PullRequest;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.client.trace.AsyncTraceDispatcher;
import org.apache.rocketmq.client.trace.hook.ConsumeMessageTraceHookImpl;
import org.apache.rocketmq.common.ServiceState;
import org.apache.rocketmq.common.ServiceThread;
import org.apache.rocketmq.common.consumer.ConsumeFromWhere;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.protocol.RequestCode;
import org.apache.rocketmq.common.protocol.heartbeat.MessageModel;
import org.apache.rocketmq.common.utils.HttpTinyClient;
import org.apache.rocketmq.common.utils.HttpTinyClient.HttpResult;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.sohu.index.tv.mq.common.BatchConsumerCallback;
import com.sohu.index.tv.mq.common.ConsumerCallback;
import com.sohu.tv.mq.common.AbstractConfig;
import com.sohu.tv.mq.dto.ConsumerConfigDTO;
import com.sohu.tv.mq.dto.DTOResult;
import com.sohu.tv.mq.metric.ConsumeStatManager;
import com.sohu.tv.mq.netty.SohuClientRemotingProcessor;
import com.sohu.tv.mq.rocketmq.limiter.LeakyBucketRateLimiter;
import com.sohu.tv.mq.rocketmq.limiter.RateLimiter;
import com.sohu.tv.mq.rocketmq.limiter.SwitchableRateLimiter;
import com.sohu.tv.mq.rocketmq.limiter.TokenBucketRateLimiter;
import com.sohu.tv.mq.util.Constant;
/**
* rocketmq 消费者
*
* @Description: push封装
* @author copy from indexmq
* @date 2018年1月17日
*/
@SuppressWarnings("deprecation")
public class RocketMQConsumer extends AbstractConfig {
    // Optional callback consuming a whole batch of messages at once
    private BatchConsumerCallback<?> batchConsumerCallback;

    /**
     * The underlying RocketMQ push consumer
     */
    private DefaultMQPushConsumer consumer;

    @SuppressWarnings("rawtypes")
    private ConsumerCallback consumerCallback;

    /**
     * Whether failed messages are retried (sent back to the retry queue)
     */
    private boolean reconsume = true;

    /**
     * Debug flag
     */
    private boolean debug;

    // Subscription expression, e.g. "tag1 || tag2 || tag3"
    private String subExpression = "*";

    // Whether to consume in order (orderly listener)
    private boolean consumeOrderly = false;

    // Timestamp retry messages are reset to; -1 (default) means no skipping
    private volatile long retryMessageResetTo = -1;

    // Message-consumption rate limiter
    private RateLimiter rateLimiter;

    // Runs the periodic MQCloud config refresh task
    private ScheduledExecutorService clientConfigScheduledExecutorService;

    // Parameter type detected from the consumer callback's call() method
    private Class<?> consumerParameterTypeClass;

    // Max time (ms) to wait for in-flight messages during shutdown
    private long shutdownWaitMaxMillis = 30000;

    // Whether consume-thread statistics are collected
    private boolean enableStats = true;
    /**
     * An application creates one Consumer and keeps it alive (global or
     * singleton); the application must guarantee the ConsumerGroupName is
     * unique. Uses the default token-bucket rate limiter.
     */
    public RocketMQConsumer(String consumerGroup, String topic) {
        this(consumerGroup, topic, false);
    }
    /**
     * An application creates one Consumer and keeps it alive (global or
     * singleton); the application must guarantee the ConsumerGroupName is
     * unique. Optionally uses a leaky-bucket rate limiter instead of the
     * default token bucket.
     */
    public RocketMQConsumer(String consumerGroup, String topic, boolean useLeakyBucketRateLimiter) {
        super(consumerGroup, topic);
        consumer = new DefaultMQPushConsumer(consumerGroup);
        // Messages that time out are sent back to the retry queue; raise the
        // default 15-minute consume timeout to 2 hours (value is in minutes).
        consumer.setConsumeTimeout(2 * 60);
        // Initialize the rate limiter.
        if (useLeakyBucketRateLimiter) {
            initLeakyBucketRateLimiter();
        } else {
            initTokenBucketRateLimiter();
        }
        // Register this group for consume-thread statistics.
        ConsumeStatManager.getInstance().register(getGroup());
    }
    /** Configure the push consumer, subscribe, register listeners and start. */
    public void start() {
        try {
            // Apply the common client configuration.
            initConfig(consumer);
            if (getClusterInfoDTO().isBroadcast()) {
                consumer.setMessageModel(MessageModel.BROADCASTING);
            }
            consumer.subscribe(topic, subExpression);
            // Build the message-consumer wrapper.
            final MessageConsumer messageConsumer = new MessageConsumer(this);
            // Register either the orderly or the concurrent listener.
            if (consumeOrderly) {
                consumer.registerMessageListener(new MessageListenerOrderly() {
                    public ConsumeOrderlyStatus consumeMessage(List<MessageExt> msgs, ConsumeOrderlyContext context) {
                        return messageConsumer.consumeMessage(msgs, context);
                    }
                });
            } else {
                consumer.registerMessageListener(new MessageListenerConcurrently() {
                    public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
                            ConsumeConcurrentlyContext context) {
                        return messageConsumer.consumeMessage(msgs, context);
                    }
                });
            }
            // Detect the callback's message parameter type for deserialization.
            initConsumerParameterTypeClass();
            // Start the periodic MQCloud config refresh task.
            initScheduleTask();
            // Start consuming.
            consumer.start();
            // Post-start initialization (custom request processors).
            initAfterStart();
            logger.info("topic:{} group:{} start", topic, group);
        } catch (MQClientException e) {
            logger.error(e.getMessage(), e);
        }
    }
/**
* 从mqcloud更新动态配置
*/
private void initScheduleTask() {
// 数据采样线程
clientConfigScheduledExecutorService = Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
return new Thread(r, "updateConsumerConfigThread-" + getGroup());
}
});
clientConfigScheduledExecutorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
HttpResult result = HttpTinyClient.httpGet(
"http://" + getMqCloudDomain() + "/consumer/config/" + getGroup(), null, null, "UTF-8", 5000);
if (HttpURLConnection.HTTP_OK != result.code) {
logger.error("http response err: code:{},info:{}", result.code, result.content);
return;
}
DTOResult<ConsumerConfigDTO> dtoResult = JSON.parseObject(result.content, new TypeReference<DTOResult<ConsumerConfigDTO>>(){});
ConsumerConfigDTO consumerConfigDTO = dtoResult.getResult();
if(consumerConfigDTO == null) {
return;
}
// 1.更新重试跳过时间戳
if (consumerConfigDTO.getRetryMessageResetTo() != null &&
retryMessageResetTo != consumerConfigDTO.getRetryMessageResetTo()) {
setRetryMessageResetTo(consumerConfigDTO.getRetryMessageResetTo());
}
// 2.更新消费是否暂停
boolean needCheckPause = false;
if (consumerConfigDTO.getPause() != null) {
String pauseClientId = consumerConfigDTO.getPauseClientId();
// 停止所有实例
if (pauseClientId == null || pauseClientId.length() == 0) {
needCheckPause = true;
} else if (consumerConfigDTO.getPauseClientId()
.equals(consumer.getDefaultMQPushConsumerImpl().getmQClientFactory().getClientId())) { // 只停止当前实例
needCheckPause = true;
}
}
if (needCheckPause && consumer.getDefaultMQPushConsumerImpl().isPause() != consumerConfigDTO.getPause()) {
setPause(consumerConfigDTO.getPause());
}
// 3.更新限速
if (consumerConfigDTO.getEnableRateLimit() != null &&
isEnableRateLimit() != consumerConfigDTO.getEnableRateLimit()) {
setEnableRateLimit(consumerConfigDTO.getEnableRateLimit());
}
if (consumerConfigDTO.getPermitsPerSecond() != null) {
int rate = consumerConfigDTO.getPermitsPerSecond().intValue();
if (getRate() != rate) {
setRate(rate);
}
}
} catch (Throwable ignored) {
logger.warn("skipRetryMessage err:{}", ignored);
}
}
}, 5, 60, TimeUnit.SECONDS);
}
    /**
     * Graceful shutdown: stop rebalancing, stop pulling, wait (bounded by
     * {@link #getShutdownWaitMaxMillis()}) for in-flight messages, then
     * shut down the consumer, rate limiter and config-refresh executor.
     */
    public void shutdown() {
        DefaultMQPushConsumerImpl innerConsumer = consumer.getDefaultMQPushConsumerImpl();
        if (ServiceState.RUNNING != innerConsumer.getServiceState()) {
            logger.info("conusmer:{} state is {}, no need shutdown", getGroup(), innerConsumer.getServiceState());
            return;
        }
        // 1. Stop the rebalance thread first so no new queue changes are accepted.
        ServiceThread thread = getField(MQClientInstance.class, "rebalanceService", innerConsumer.getmQClientFactory());
        thread.shutdown();
        // 2. Mark the pull thread stopped (not closed outright) so messages
        // already pulled can still be consumed.
        PullMessageService pull = innerConsumer.getmQClientFactory().getPullMessageService();
        pull.makeStop();
        // 3. Consumption is considered finished when the pending pull-request
        // count equals the process-queue count; give up after the max wait.
        long start = System.currentTimeMillis();
        LinkedBlockingQueue<PullRequest> q = getField(PullMessageService.class, "pullRequestQueue", pull);
        int pullRequestSize = getPullRequestSize(q);
        while (pullRequestSize != innerConsumer.getRebalanceImpl().getProcessQueueTable().size()) {
            long use = System.currentTimeMillis() - start;
            if (use > getShutdownWaitMaxMillis()) {
                logger.warn("{} shutdown too long, use:{}ms, break!!, pullRequestQueueSize:{} processQueueTableSize:{}",
                        getGroup(), use, pullRequestSize, innerConsumer.getRebalanceImpl().getProcessQueueTable().size());
                break;
            }
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // NOTE(review): interrupt status is swallowed here, not
                // re-asserted — confirm this is intended during shutdown.
                logger.warn("ignore interrupted!!");
            }
            pullRequestSize = getPullRequestSize(q);
        }
        // 4. Normal shutdown sequence.
        consumer.shutdown();
        rateLimiter.shutdown();
        clientConfigScheduledExecutorService.shutdown();
    }
    /**
     * Initialization performed after the consumer has started: registers a
     * custom processor for GET_CONSUMER_RUNNING_INFO requests.
     */
    public void initAfterStart() {
        // Register the private request processor.
        MQClientInstance mqClientInstance = consumer.getDefaultMQPushConsumerImpl().getmQClientFactory();
        mqClientInstance.getMQClientAPIImpl().getRemotingClient()
                .registerProcessor(RequestCode.GET_CONSUMER_RUNNING_INFO,
                        new SohuClientRemotingProcessor(mqClientInstance), null);
    }
    /**
     * Count the pending pull requests that belong to this consumer group.
     *
     * @param q the pull-request queue (may be null when reflection failed)
     * @return the number of requests for this group, 0 when q is null
     */
    private int getPullRequestSize(LinkedBlockingQueue<PullRequest> q) {
        if (q == null) {
            return 0;
        }
        int size = 0;
        for (PullRequest pullRequest : q) {
            if (getGroup().equals(pullRequest.getConsumerGroup())) {
                ++size;
            }
        }
        return size;
    }
/**
* 获取类的字段实例
* @param clz
* @param field
* @param obj
* @return
*/
@SuppressWarnings("unchecked")
private <T> T getField(Class<?> clz, String field, Object obj){
try {
Field f = clz.getDeclaredField(field);
f.setAccessible(true);
return(T) f.get(obj);
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
    /**
     * Batch consumption size. Not recommended — keep the default.
     * @param consumeMessageBatchMaxSize
     */
    @Deprecated
    public void setConsumeMessageBatchMaxSize(int consumeMessageBatchMaxSize) {
        if (consumeMessageBatchMaxSize <= 0) {
            return;
        }
        consumer.setConsumeMessageBatchMaxSize(consumeMessageBatchMaxSize);
    }

    /** Where to start consuming when no offset exists. */
    public void setConsumeFromWhere(ConsumeFromWhere consumeFromWhere) {
        consumer.setConsumeFromWhere(consumeFromWhere);
    }

    /** Whether failed messages should be retried. */
    public void setReconsume(boolean reconsume) {
        this.reconsume = reconsume;
    }

    /** Single-message consumer callback. */
    @SuppressWarnings("rawtypes")
    public void setConsumerCallback(ConsumerCallback consumerCallback) {
        this.consumerCallback = consumerCallback;
    }

    /** Timestamp to start consuming from (used with CONSUME_FROM_TIMESTAMP). */
    public void setConsumeTimestamp(String consumeTimestamp) {
        consumer.setConsumeTimestamp(consumeTimestamp);
    }

    /** The underlying RocketMQ push consumer. */
    public DefaultMQPushConsumer getConsumer() {
        return consumer;
    }
    /**
     * Number of consume threads, default 20.
     *
     * @param num
     */
    public void setConsumeThreadMin(int num) {
        if (num <= 0) {
            return;
        }
        consumer.setConsumeThreadMin(num);
    }

    /**
     * Number of consume threads, default 20.
     * This parameter has no effect.
     * @param num
     */
    @Deprecated
    public void setConsumeThreadMax(int num) {
        if (num <= 0) {
            return;
        }
        consumer.setConsumeThreadMax(num);
    }

    /**
     * How many messages a single pull fetches, default 32.
     *
     * @param size
     */
    public void setPullBatchSize(int size) {
        if (size < 0) {
            return;
        }
        consumer.setPullBatchSize(size);
    }

    /**
     * Flow control: max cached message count per queue, default 1000.
     *
     * @param size
     */
    public void setPullThresholdForQueue(int size) {
        if (size < 0) {
            return;
        }
        consumer.setPullThresholdForQueue(size);
    }

    /**
     * Flow control: max cached message size (MB) per queue, default 100.
     *
     * @param size
     */
    public void setPullThresholdSizeForQueue(int size) {
        if (size < 0) {
            return;
        }
        consumer.setPullThresholdSizeForQueue(size);
    }

    /**
     * Flow control: max cached message count per topic, default -1 (unlimited).
     *
     * @param size
     */
    public void setPullThresholdForTopic(int size) {
        if (size < 0) {
            return;
        }
        consumer.setPullThresholdForTopic(size);
    }

    /**
     * Flow control: max cached message size (MB) per topic, default -1 (unlimited).
     *
     * @param size
     */
    public void setPullThresholdSizeForTopic(int size) {
        if (size < 0) {
            return;
        }
        consumer.setPullThresholdSizeForTopic(size);
    }

    /**
     * Interval between pulls in milliseconds, default 0.
     *
     * @param pullInterval
     */
    public void setPullInterval(int pullInterval) {
        consumer.setPullInterval(pullInterval);
    }
    /** The registered single-message callback, if any. */
    @SuppressWarnings("rawtypes")
    public ConsumerCallback getConsumerCallback() {
        return consumerCallback;
    }

    /** The registered batch callback, if any. */
    @SuppressWarnings("unchecked")
    public <T> BatchConsumerCallback<T> getBatchConsumerCallback() {
        return (BatchConsumerCallback<T>) batchConsumerCallback;
    }

    /** Register a callback that consumes a whole batch of messages. */
    @SuppressWarnings({"rawtypes"})
    public void setBatchConsumerCallback(BatchConsumerCallback batchConsumerCallback) {
        this.batchConsumerCallback = batchConsumerCallback;
    }

    /**
     * Since 1.8.3 the broadcast flag no longer needs to be set; it is
     * detected automatically.
     *
     * @param broadcast
     */
    @Deprecated
    public void setBroadcast(boolean broadcast) {
    }

    public String getSubExpression() {
        return subExpression;
    }

    /** Tag subscription expression, e.g. "tag1 || tag2"; default "*". */
    public void setSubExpression(String subExpression) {
        this.subExpression = subExpression;
    }

    public void setDebug(boolean debug) {
        this.debug = debug;
    }

    public boolean isDebug() {
        return debug;
    }

    public boolean isReconsume() {
        return reconsume;
    }

    /** Whether to consume in order (orderly listener). */
    public void setConsumeOrderly(boolean consumeOrderly) {
        this.consumeOrderly = consumeOrderly;
    }
    /** Identify this client as a consumer to the base config. */
    @Override
    protected int role() {
        return CONSUMER;
    }

    /** Hook message tracing into the consume path. */
    @Override
    protected void registerTraceDispatcher(AsyncTraceDispatcher traceDispatcher) {
        consumer.getDefaultMQPushConsumerImpl().registerConsumeMessageHook(
                new ConsumeMessageTraceHookImpl(traceDispatcher));
    }

    /**
     * traceEnabled is controlled by MQCloud
     *
     * @param traceEnabled
     */
    @Deprecated
    public void setTraceEnabled(boolean traceEnabled) {
    }
    /**
     * Maximum amount of time in minutes a message may block the consuming
     * thread.
     */
    public void setConsumeTimeout(long consumeTimeout) {
        if (consumeTimeout <= 0) {
            return;
        }
        consumer.setConsumeTimeout(consumeTimeout);
    }

    /**
     * Whether to use the VIP (fast) channel.
     *
     * @param vipChannelEnabled
     */
    public void setVipChannelEnabled(boolean vipChannelEnabled) {
        consumer.setVipChannelEnabled(vipChannelEnabled);
    }

    public long getRetryMessageResetTo() {
        return retryMessageResetTo;
    }

    /** Set the timestamp retry messages are reset to; -1 disables skipping. */
    public void setRetryMessageResetTo(long retryMessageResetTo) {
        logger.info("topic:{}'s consumer:{} retryMessageReset {}->{}", getTopic(), getGroup(), this.retryMessageResetTo, retryMessageResetTo);
        this.retryMessageResetTo = retryMessageResetTo;
    }

    /**
     * Maximum reconsume attempts, default 16.
     * @param maxReconsumeTimes
     */
    public void setMaxReconsumeTimes(int maxReconsumeTimes) {
        consumer.setMaxReconsumeTimes(maxReconsumeTimes);
    }

    public RateLimiter getRateLimiter() {
        return rateLimiter;
    }

    public void setRateLimiter(RateLimiter rateLimiter) {
        this.rateLimiter = rateLimiter;
    }

    /**
     * Set the consumption rate (permits per second, must be >= 1).
     * @param permitsPerSecond
     */
    public void setRate(int permitsPerSecond) {
        if (permitsPerSecond < 1) {
            logger.warn("topic:{}'s consumer:{} qps:{} must >= 1", getTopic(), getGroup(), permitsPerSecond);
            return;
        }
        rateLimiter.setRate(permitsPerSecond);
    }

    public int getRate() {
        return rateLimiter.getRate();
    }

    /** Pause or resume consumption, logging the transition. */
    public void setPause(boolean pause) {
        logger.info("topic:{}'s consumer:{} pause changed: {}->{}", getTopic(), getGroup(), isPause(), pause);
        consumer.getDefaultMQPushConsumerImpl().setPause(pause);
    }

    public boolean isPause() {
        return consumer.getDefaultMQPushConsumerImpl().isPause();
    }

    /** Toggle rate limiting (only effective with a SwitchableRateLimiter). */
    public void setEnableRateLimit(boolean enableRateLimit) {
        if (rateLimiter instanceof SwitchableRateLimiter) {
            ((SwitchableRateLimiter) rateLimiter).setEnabled(enableRateLimit);
        }
    }

    public boolean isEnableRateLimit() {
        if (rateLimiter instanceof SwitchableRateLimiter) {
            return ((SwitchableRateLimiter) rateLimiter).isEnabled();
        }
        return false;
    }

    public long getShutdownWaitMaxMillis() {
        return shutdownWaitMaxMillis;
    }

    public void setShutdownWaitMaxMillis(long shutdownWaitMaxMillis) {
        this.shutdownWaitMaxMillis = shutdownWaitMaxMillis;
    }
    /**
     * Initialize a leaky-bucket rate limiter (queue bounded at twice the
     * minimum consume-thread count, default rate LIMIT_CONSUME_TPS).
     */
    public void initLeakyBucketRateLimiter() {
        initRateLimiter(new LeakyBucketRateLimiter(group, 2 * consumer.getConsumeThreadMin(),
                Constant.LIMIT_CONSUME_TPS, TimeUnit.SECONDS));
    }

    /**
     * Initialize a token-bucket rate limiter (default rate LIMIT_CONSUME_TPS).
     */
    public void initTokenBucketRateLimiter() {
        initRateLimiter(new TokenBucketRateLimiter(Constant.LIMIT_CONSUME_TPS));
    }

    public boolean isEnableStats() {
        return enableStats;
    }

    public void setEnableStats(boolean enableStats) {
        this.enableStats = enableStats;
    }

    /**
     * Wrap the given limiter in a SwitchableRateLimiter so rate limiting
     * can be toggled at runtime.
     * @param rateLimiter
     */
    public void initRateLimiter(RateLimiter rateLimiter) {
        SwitchableRateLimiter switchableRateLimiter = new SwitchableRateLimiter();
        switchableRateLimiter.setName(group);
        switchableRateLimiter.setRateLimiter(rateLimiter);
        this.rateLimiter = switchableRateLimiter;
    }
protected Class<?> getConsumerParameterTypeClass() {
return consumerParameterTypeClass;
}
public void initConsumerParameterTypeClass() {
consumerParameterTypeClass = detectConsumerParameterTypeClass();
}
/**
 * Detects the message-body parameter type of whichever callback is
 * configured: the single-message callback when present, otherwise the batch
 * callback. Any reflection failure is logged and swallowed (returns null) so
 * detection never breaks consumer startup.
 * @return the detected parameter type, or null when it cannot be determined
 */
private Class<?> detectConsumerParameterTypeClass() {
    try {
        if(getConsumerCallback() != null) {
            return _getConsumerParameterTypeClass();
        }
        return _getBatchConsumerParameterTypeClass();
    } catch (Throwable e) {
        logger.warn("ignore, detect consumer parameter type failed:{}", e.toString());
    }
    return null;
}
/**
 * Scans the single-message callback for a public void call(T, MessageExt)
 * method and returns T, the declared message-body type.
 * @return the body parameter type, or null when no matching method is found
 */
private Class<?> _getConsumerParameterTypeClass() {
    Method[] methods = getConsumerCallback().getClass().getMethods();
    for (Method method : methods) {
        // Only the callback entry point named "call" is relevant.
        if (!"call".equals(method.getName())) {
            continue;
        }
        if (!Modifier.isPublic(method.getModifiers())) {
            continue;
        }
        if (!method.getReturnType().equals(Void.TYPE)) {
            continue;
        }
        Class<?>[] parameterTypes = method.getParameterTypes();
        // Expected shape: call(body, MessageExt) — exactly two parameters,
        // the second being the raw RocketMQ message.
        if (parameterTypes.length != 2) {
            continue;
        }
        if (MessageExt.class != parameterTypes[1]) {
            continue;
        }
        logger.info("consumer:{}'s parameterTypeClass:{}", getGroup(), parameterTypes[0].getName());
        return parameterTypes[0];
    }
    return null;
}
/**
 * Scans the batch callback for a public void call(List) method, then resolves
 * the element type T of List&lt;T&gt; from the callback's generic interface
 * (or generic superclass when no interface is declared).
 * NOTE(review): when several generic interfaces are implemented this assumes
 * the first one carries the type argument — confirm against the callback SPI.
 * @return the list element type, or null when it cannot be resolved
 */
private Class<?> _getBatchConsumerParameterTypeClass() {
    Method[] methods = getBatchConsumerCallback().getClass().getMethods();
    for (Method method : methods) {
        if (!"call".equals(method.getName())) {
            continue;
        }
        if (!Modifier.isPublic(method.getModifiers())) {
            continue;
        }
        if (!method.getReturnType().equals(Void.TYPE)) {
            continue;
        }
        Class<?>[] parameterTypes = method.getParameterTypes();
        // Expected shape: call(List<T>) — a single List parameter.
        if (parameterTypes.length != 1) {
            continue;
        }
        if (List.class != parameterTypes[0]) {
            continue;
        }
        // Erasure hides T on the method, so recover it from the declared
        // generic interface / superclass of the callback class.
        Type[] interfaceTypes = getBatchConsumerCallback().getClass().getGenericInterfaces();
        Type type;
        if (interfaceTypes.length == 0) {
            type = getBatchConsumerCallback().getClass().getGenericSuperclass();
        } else {
            type = interfaceTypes[0];
        }
        Class<?> clz = null;
        if (ParameterizedType.class.isAssignableFrom(type.getClass())) {
            clz = (Class<?>)(((ParameterizedType) type).getActualTypeArguments())[0];
        }
        logger.info("consumer:{}'s parameterTypeClass:{}", getGroup(), clz);
        return clz;
    }
    return null;
}
}
|
#include "LoginWebRequest.h"
#include "Common.h"
#include "Log.h"
#include "json/json.h"
#include "GameClient.h"
// Constructs a login web request bound to the given URL.
CLoginWebRequest::CLoginWebRequest(const char *pUrl)
    : m_url(pUrl)
{
}

CLoginWebRequest::~CLoginWebRequest()
{
}

// Factory; pair each Create() with Release() (OnDone self-releases).
CLoginWebRequest * CLoginWebRequest::Create(const char *pUrl)
{
    return new CLoginWebRequest(pUrl);
}

// Destroys a request produced by Create().
void CLoginWebRequest::Release(CLoginWebRequest* pSession )
{
    delete pSession;
}

// Target URL of this request.
const char * CLoginWebRequest::GetUrl() const
{
    return m_url.c_str();
}
// Handles the HTTP response body: parses the login JSON and notifies the
// game client of success or failure. Always reports 'bytes' as consumed.
//
// Expected payload: {"code": <int>, "accountId": <uint64>,
//                    "sessionKey": <string>, "loginIp": <string>}
//
// NOTE(review): assumes the whole JSON document arrives in a single OnData
// callback; if the transport can split the body across chunks, the chunks
// should be accumulated and parsed in OnDone instead — confirm.
unsigned int CLoginWebRequest::OnData(const void *pData, unsigned int bytes)
{
    if(NULL == pData){
        OutputDebug("NULL == pData");
        return bytes;
    }
    // BUG FIX: the buffer is a raw HTTP body chunk and is NOT guaranteed to
    // be NUL-terminated; the previous (char*)-only constructor could read
    // past the end of the buffer. Bound the string by 'bytes'.
    std::string strJson(static_cast<const char *>(pData), bytes);
    Json::Value jsonValue;
    Json::Reader jsonReader;
    jsonReader.parse(strJson, jsonValue, false);
    if(!jsonValue.isObject()) {
        OutputDebug("OnData is FAIL [%s] ", strJson.c_str());
        return bytes;
    }
    Json::Value& jsonCode = jsonValue["code"];
    if(jsonCode.isInt() && jsonCode.asInt() == TRUE) {
        // Success: forward account id, session key and login IP to the client.
        CGameClient::PTR_T pGameClient(CGameClient::Pointer());
        Json::Value& jsonAccountId = jsonValue["accountId"];
        uint64_t u64Account = jsonAccountId.asUInt64();
        Json::Value& jsonSessionKey = jsonValue["sessionKey"];
        std::string strSessionKey(jsonSessionKey.asString());
        Json::Value& jsonLoginIp = jsonValue["loginIp"];
        std::string strLoginIp(jsonLoginIp.asString());
        pGameClient->LoginWebSuccess(strLoginIp.c_str(), u64Account, strSessionKey.c_str());
    } else {
        CGameClient::PTR_T pGameClient(CGameClient::Pointer());
        pGameClient->LoginWebFail();
    }
    return bytes;
}
// Response headers are not needed; report them as consumed.
unsigned int CLoginWebRequest::OnHead(const char *pHead, unsigned int bytes)
{
    return bytes;
}

// No progress reporting; returning 0 lets the transfer continue.
int CLoginWebRequest::OnProgress(double dltotal, double dlnow)
{
    return 0;
}

// Transfer finished: clean up the transfer handle and self-destruct.
// NOTE(review): Release(this) deletes this object — no member access is
// valid after this call.
void CLoginWebRequest::OnDone(unsigned int dwStatus, const char *)
{
    EasyClearup();
    Release(this);
}
|
"""
Calculate the distance between two points in two-dimensional space
"""
import math
def calculate_distance(x1, y1, x2, y2):
    """Return the Euclidean distance between points (x1, y1) and (x2, y2).

    Args:
        x1, y1: coordinates of the first point.
        x2, y2: coordinates of the second point.

    Returns:
        The non-negative distance as a float.
    """
    # math.hypot is the idiomatic way to compute sqrt(dx**2 + dy**2) and
    # avoids overflow/underflow of the intermediate squares.
    return math.hypot(x2 - x1, y2 - y1)
if __name__ == "__main__":
    # Demo: distance between (1, 1) and (3, 3); expected sqrt(8) ~= 2.828.
    p1_x, p1_y = 1, 1
    p2_x, p2_y = 3, 3
    print(calculate_distance(p1_x, p1_y, p2_x, p2_y))
<reponame>paulocesarmelo/ap2-ufg
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package revisao;
/**
 * Plain data holder (POJO) for a company: its CNPJ registration number,
 * opening date, and associated clients and employees.
 *
 * @author PauloCésar
 */
public class Empresa {

    private String cnpj;            // company registration number (CNPJ)
    private Data abertura;          // date the company was opened
    private Cliente[] clientes;     // clients of the company
    private Empregado[] empregados; // employees of the company

    public String getCnpj() {
        return cnpj;
    }

    public void setCnpj(String cnpj) {
        this.cnpj = cnpj;
    }

    public Data getAbertura() {
        return abertura;
    }

    public void setAbertura(Data abertura) {
        this.abertura = abertura;
    }

    // NOTE(review): the array accessors share the array reference with the
    // caller, so external code can mutate internal state — confirm whether
    // defensive copies are wanted.
    public Cliente[] getClientes() {
        return clientes;
    }

    public void setClientes(Cliente[] clientes) {
        this.clientes = clientes;
    }

    public Empregado[] getEmpregados() {
        return empregados;
    }

    public void setEmpregados(Empregado[] empregados) {
        this.empregados = empregados;
    }
}
|
// Single-image uploader widget: binds to .upload-section/.upload-input
// elements, validates the chosen file, uploads it via AJAX, renders a
// preview thumbnail, and provides optional zoom and delete-confirmation
// modals. Exposes a single init() entry point.
// NOTE(review): config.allowType is declared but never checked — the actual
// gate is fileObj.type.match('image.*'); confirm whether extension filtering
// was intended.
var uploader = function () {
    var u = {};
    u.init = function () {
        var config = {
            url: '/image', // server-side upload endpoint
            data: {}, // extra fields POSTed along with the file
            zoom: false, // enable the zoom (enlarge) control on previews
            allowType: ["gif", "jpeg", "jpg", "bmp",'png'], // permitted image types
            maxSize :2, // maximum upload size, in MB
            before: function () {
                // alert('pre-upload callback');
            },
            success:function(data){
                // alert('upload-success callback');
                console.log(data);
            },
            error:function (e) {
                // alert('upload-failure callback');
                console.log(e);
            }
        };
        var imageInput = $('.upload-input');
        var imageSection = imageInput.parent('.upload-section');
        var imageBox = imageInput.parent().parent(".image-box");
        var inputName = imageInput.attr('name');
        // Widget-wide flag: is an upload currently in flight?
        var isUploading = false;
        // Trigger the upload flow when a file is chosen
        $('.upload-section').click(function () {
            $(this).children('.upload-input').change(function () {
                // Build the preview skeleton for the newly selected image.
                var imageSection = $("<section class='image-section image-loading'></section>");
                var imageShade = $("<div class='image-shade'></div>");
                var imageShow = $("<img class='image-show image-opcity' />");
                var imageInput = $("<input class='" + inputName + "' name='" + inputName + "[]' value='' type='hidden'>");
                var imageZoom = $("<div class='image-zoom'></div>");
                var imageDelete = $("<div class='image-delete'></div>");
                // Hide the upload box
                $(this).children('.upload-input').hide();
                $(this).parent('.image-box').prepend(imageSection);
                imageShade.appendTo(imageSection);
                imageDelete.appendTo(imageSection);
                // Attach the zoom control only when zooming is enabled
                if (config.zoom && config.zoom === true) {
                    imageZoom.appendTo(imageSection);
                }
                imageShow.appendTo(imageSection);
                imageInput.appendTo(imageSection);
                handleFileSelect();
            });
        });
        // Validate the selected file, then kick off the upload
        var handleFileSelect = function () {
            if (typeof FileReader == "undefined") {
                return false;
            }
            var postUrl = config.url;
            var maxSize = config.maxSize;
            if (!postUrl) {
                // todo: nicer dialog styling
                alert('请设置要上传的服务端地址');
                return false;
            }
            var files = imageInput[0].files;
            var fileObj = files[0];
            // Only image files may be uploaded
            if (!fileObj || !fileObj.type.match('image.*')) {
                return false;
            }
            var fileSize = (fileObj.size) / (1024 * 1024);
            if (fileSize > maxSize) {
                alert('上传图片不能超过' + maxSize + 'M,当前上传图片的大小为' + fileSize.toFixed(2) + 'M');
                return false;
            }
            if (isUploading == true) {
                alert('文件正在上传中,请稍候再试!');
                return false;
            }
            // Mark the upload as in progress
            isUploading = true;
            // Run the 'before' hook; abort when it explicitly returns false
            var callback = config.before;
            if (callback && callback() === false) {
                return false;
            }
            ajaxUpload();
        };
        var ajaxUpload = function () {
            // Grab the most recently inserted preview elements
            var imageSection = $('.image-section:first');
            var imageShow = $('.image-show:first');
            var formData = new FormData();
            var fileData = imageInput[0].files;
            if (fileData) {
                // Only single-image upload is supported for now
                formData.append(inputName, fileData[0]);
            }
            var postData = config.data;
            if (postData) {
                for (var i in postData) {
                    formData.append(i, postData[i]);
                }
            }
            // Submit the form data via ajax
            $.ajax({
                url: config.url,
                type: "post",
                data: formData,
                processData: false,
                contentType: false,
                dataType: 'json',
                success: function (data) {
                    if (data.code == 0) {
                        data = data.data;
                    }
                    if (!data.src) {
                        alert('服务器返回的json数据中必须包含src元素');
                        imageBox.children('.image-section').show();
                        imageSection.remove();
                        return false;
                    }
                    imageSection.removeClass("image-loading");
                    imageShow.removeClass("image-opcity");
                    imageShow.attr('src', data.src);
                    imageShow.siblings('input').val(data.src);
                    // Mark the upload as finished
                    isUploading = false;
                    // Run the success callback
                    var callback = config.success;
                    callback(data);
                },
                error: function (e) {
                    imageSection.remove();
                    imageBox.children('.image-section').show();
                    // Run the error callback
                    // NOTE(review): isUploading is never reset here, so a
                    // failed upload leaves the widget locked — confirm.
                    var callback = config.error;
                    callback(e);
                }
            });
        };
        var createDeleteModal = function () {
            var deleteModal = $("<aside class='delete-modal'><div class='modal-content'><p class='modal-tip'>您确定要删除作品图片吗?</p><p class='modal-btn'> <span class='confirm-btn'>确定</span><span class='cancel-btn'>取消</span></p></div></aside>");
            // Create the delete-confirmation modal
            deleteModal.appendTo('.image-box');
            // Show the modal
            imageBox.delegate(".image-delete", "click", function () {
                // Implicit global: remembers which preview to delete
                deleteImageSection = $(this).parent();
                deleteModal.show();
            });
            // Confirm deletion
            $(".confirm-btn").click(function () {
                deleteImageSection.remove();
                imageBox.children('.upload-section').show();
                deleteModal.hide();
            });
            // Cancel deletion
            $(".cancel-btn").click(function () {
                deleteModal.hide();
            });
        };
        var createImageZoom = function () {
            var zoomWindow = $("<div id='zoom-window'></div>");
            var zoomShade = $("<div id='zoom-shade'></div>");
            var zoomBox = $("<div id='zoom-box'></div>");
            var zoomContent = $("<img src='http://www.jq22.com/demo/jqueryfancybox201707292345/example/4_b.jpg'>");
            zoomWindow.append(zoomShade);
            zoomWindow.append(zoomBox);
            zoomContent.appendTo(zoomBox);
            $("body").append(zoomWindow);
            // Show the zoom window with the clicked preview's image
            imageBox.delegate(".image-zoom", "click", function () {
                var src = $(this).siblings('img').attr('src');
                zoomBox.find('img').attr('src', src);
                zoomWindow.show();
            });
            // Close the zoom window when the shade is clicked
            $("body").delegate("#zoom-shade", "click", function () {
                zoomWindow.hide();
            });
        };
        // Build the zoom window only when zooming is enabled
        if (config.zoom && config.zoom === true) {
            createImageZoom();
        }
        createDeleteModal();
    };
    return u;
}();
uploader.init();
|
#!/bin/bash
################################################################################
# This script is used to update metadata blocks from release and custom files.
#
# Exit codes:
#   125 - one or more builtin (release) metadata block files are missing
#   126 - no API key secret available
################################################################################

# Fail on any error, on unset variables, and on failures inside pipelines
set -euo pipefail

DATAVERSE_SERVICE_HOST=${DATAVERSE_SERVICE_HOST:-"dataverse"}
DATAVERSE_SERVICE_PORT=${DATAVERSE_SERVICE_PORT:-"8080"}
DATAVERSE_URL=${DATAVERSE_URL:-"http://${DATAVERSE_SERVICE_HOST}:${DATAVERSE_SERVICE_PORT}"}

# Check API key secret is available
if [ ! -s "${SECRETS_DIR}/api/key" ]; then
    echo "No API key present. Failing."
    exit 126
fi
# $(...) over backticks; quoted so a SECRETS_DIR containing spaces still works
API_KEY=$(cat "${SECRETS_DIR}/api/key")

# Find all TSV files
TSVS=$(find "${METADATA_DIR}" "${HOME_DIR}" -maxdepth 5 -iname '*.tsv')

# Check for builtin blocks to be present
BUILTIN=("astrophysics.tsv" "biomedical.tsv" "citation.tsv" "geospatial.tsv" "journals.tsv" "social_science.tsv")
fail=0
for mdb in "${BUILTIN[@]}"; do
    # -F matches the filename literally (dots are regex wildcards otherwise);
    # -q gives us the exit status only. The 'if !' form is set -e safe.
    if ! grep -qF "${mdb}" <<< "${TSVS}"; then
        echo "ERROR: could not find builtin (release) metadata block file ${mdb} within ${METADATA_DIR} or ${HOME_DIR}"
        fail=1
    fi
done

# Abort if any builtin metadata file has not been found or readable
if [ ${fail} -eq 1 ]; then
    echo "Aborting."
    exit 125
fi

# Load every discovered TSV metadata block into Dataverse, one curl per file
echo "${TSVS}" | xargs -n1 -I "%mdb%" sh -c "echo -n \"Loading %mdb%: \"; curl -sS -f -H \"Content-type: text/tab-separated-values\" -X POST --data-binary \"@%mdb%\" \"${DATAVERSE_URL}/api/admin/datasetfield/load?unblock-key=${API_KEY}\" 2>&1 | jq -M '.status'"
|
import config from './config'
import DibiRouter from '../src'
import {start} from './server'
// Integration smoke test: generate REST routes for all ('*') tables of the
// 'public' schema on the configured database and mount them on the test
// server. Passes as long as neither step throws.
describe('DiBiRouter: Start', function() {
    describe('Start server', function() {
        it('should init test_01 routes and server them', async function() {
            const routes= await DibiRouter(config.db, /*tables*/ '*', /*prefix*/ config.server.url, /*schema*/ 'public')
            start(routes)
        })
    })
})
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>

# <codecell>

# NOTE(review): this file is an IPython notebook export (see the <codecell>
# markers); the bare 'cd' below is an IPython magic and this module is not
# valid plain Python as-is.
import yaml
import pyncml
import netCDF4
import os
from StringIO import StringIO  # Python 2 only

# <codecell>

cd /usgs/data2/notebook/ROMS

# <codecell>

#map ROMS variables to CF standard_names
cf = {'zeta':'sea_surface_height_above_datum',
'temp':'sea_water_potential_temperature',
'salt':'sea_water_salinity',
'u':'x_sea_water_velocity',
'v':'y_sea_water_velocity',
'ubar':'barotropic_x_sea_water_velocity',
'vbar':'barotropic_y_sea_water_velocity',
'Hwave':'sea_surface_wave_significant_height'}

# <codecell>

# Inline copy of the dataset configuration; unused — meta.yaml is read below.
x="""
dataset:
id: "USGS_COAWST_MVCO_CBLAST_Ripples_SWAN_40m"
title: "USGS-CMG-COAWST Model: CBLAST2007 Ripples with SWAN-40m res"
summary: "Simulation of hydrodynamics and bottom stress south of Marthas Vineyard, MA using the COAWST modeling system. These results are from the 40m inner nest of a four-level nested simulation."
creator:
email: <EMAIL>
name: <NAME>
url: http://water.usgs.gov/fluxes
license: "The data may be used and redistributed for free but is not intended for legal use, since it may contain inaccuracies. Neither the data Contributor, nor the United States Government, nor any of their employees or contractors, makes any warranty, express or implied, including warranties of merchantability and fitness for a particular purpose, or assumes any legal liability for the accuracy, completeness, or usefulness, of this information."
references:
- http://www.whoi.edu/science/AOPE/dept/CBLASTmain.html
- http://water.usgs.gov/fluxes/mvco.html
- doi:10.1029/2011JC007035
acknowledgements:
- USGS-CMGP
- NSF
variables:
include:
- temp
- salt
exclude:
- ubar
- vbar
aggregation:
time_var: ocean_time
dir: /usgs/data0/mvco_ce/mvco_output/spatial_7_ar0fd
sample_file: his_case7_ar0fd_0001.nc
pattern: .*/his_case7_ar0fd_[0-9]{4}\.nc$
"""
#
# couldn't get this to work
#stream = open(StringIO(x))

# <codecell>

# so read file instead
# NOTE(review): yaml.load without an explicit Loader executes arbitrary tags;
# safe only because meta.yaml is a trusted local file.
stream = open("meta.yaml", 'r')
a = yaml.load(stream)

# <codecell>

a['dataset']
# <codecell>
def header():
    # Opening <netcdf> element of the NcML document plus document-wide
    # Conventions and cdm_data_type attributes.
    # NOTE(review): 'str' shadows the builtin throughout this script.
    str='<?xml version="1.0" encoding="UTF-8"?>\n<netcdf xmlns="http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2">\n'
    str += str_att('Conventions','CF-1.6, SGRID-0.1, ACDD-1.3')
    str += str_att('cdm_data_type','Grid')
    return str
# <codecell>
def footer(str):
    """Append the closing </netcdf> tag and return the completed document."""
    return str + '</netcdf>\n'
# <codecell>
def str_att(name,value):
    # Render one NcML <attribute> element; list values are comma-joined
    # (used for the 'references' and 'acknowledgements' lists).
    if isinstance(value, list):
        value = ','.join(value)
    return ' <attribute name="{:s}" type="String" value="{:s}"/>\n'.format(name,value)
# <codecell>
def add_global_atts(str, a):
    """Append one NcML <attribute> line per recognised global attribute.

    Walks the 'dataset' section of the YAML config and renders:
      - simple scalar/list keys verbatim;
      - creator/publisher sub-dicts as <key>_email/_url/_name attributes;
      - the contributor sub-dict additionally with contributor_role.

    Parameters:
        str: the NcML document built so far (appended to and returned).
        a:   parsed YAML configuration with a top-level 'dataset' mapping.

    Returns:
        The document string with the global attributes appended.
    """
    d = a['dataset']
    # .items() works on both Python 2 and 3 (.iteritems() is Python-2-only).
    for key, value in d.items():
        # handle simple attribute pairs first
        if key in ['id', 'license', 'summary', 'title', 'project',
                   'naming_authority', 'references', 'acknowledgements']:
            str += str_att(key, value)
        elif key in ['creator', 'publisher']:
            email = value.get("email", None)
            if email:
                str += str_att('_'.join([key, 'email']), email)
            url = value.get("url", None)
            if url:
                str += str_att('_'.join([key, 'url']), url)
            name = value.get("name", None)
            if name:
                str += str_att('_'.join([key, 'name']), name)
        elif key in ['contributor']:
            role = value.get("role", None)
            # BUG FIX: the original guarded the role attribute with the stale
            # 'email' variable from a previous iteration (a NameError when
            # 'contributor' is processed first); it must test 'role' itself.
            if role:
                str += str_att('_'.join([key, 'role']), role)
            email = value.get("email", None)
            if email:
                str += str_att('_'.join([key, 'email']), email)
            url = value.get("url", None)
            if url:
                str += str_att('_'.join([key, 'url']), url)
            name = value.get("name", None)
            if name:
                str += str_att('_'.join([key, 'name']), name)
    return str
# <codecell>
def add_var_atts(str,a):
    # Emit a <variable> element (standard_name, SGRID grid/location hints)
    # for every multidimensional variable in the sample file, honouring the
    # include/exclude lists from the YAML config.
    ncfile=os.path.join(a['aggregation']['dir'],a['aggregation']['sample_file'])
    nc = netCDF4.Dataset(ncfile)
    ncv = nc.variables
    # get a list of all variables more than 1D
    vars = [var for var, vart in ncv.items() if vart.ndim > 1]
    vars_all = set(vars)
    vars_include = set(a['variables']['include'])
    vars_exclude = set(a['variables']['exclude'])
    # NOTE(review): 'exclude' wins over 'include' — when both are given, the
    # include list is ignored. Confirm that precedence is intended.
    if a['variables']['exclude']:
        vars = list(vars_all - vars_all.intersection(vars_exclude))
    else:
        if a['variables']['include']:
            vars = list(vars_all.intersection(vars_include))
    # Classify variables onto the staggered ROMS C-grid by their dimensions.
    #rho_vars = [var for var, vart in ncv.items() if hasattr(vart,'coordinates') and 'lon_rho' in vart.coordinates and 'lat_rho' in vart.coordinates]
    rho_vars = [var for var, vart in ncv.items() if 'eta_rho' in vart.dimensions and 'xi_rho' in vart.dimensions]
    u_vars = [var for var, vart in ncv.items() if 'eta_u' in vart.dimensions and 'xi_u' in vart.dimensions]
    v_vars = [var for var, vart in ncv.items() if 'eta_v' in vart.dimensions and 'xi_v' in vart.dimensions]
    for var in vars:
        str += '<variable name="{:s}">\n'.format(var)
        # Variables without a CF mapping simply get no standard_name.
        try:
            str += str_att('standard_name',cf[var])
        except:
            pass
        str += str_att('grid','grid')
        str += str_att('content_coverage_type','modelResult')
        if var in rho_vars:
            str += str_att('location','face')
        elif var in u_vars:
            str += str_att('location','edge1')
        elif var in v_vars:
            str += str_att('location','edge2')
        str += '</variable>\n\n'
    return str
# <codecell>
def write_grid_var(str):
    # Append the static SGRID topology variable describing the staggered
    # ROMS C-grid (psi nodes, rho faces, u/v edges, s_rho/s_w vertical).
    grid_var="""<variable name="grid" type="int">
<attribute name="cf_role" value="grid_topology"/>
<attribute name="topology_dimension" type="int" value="2"/>
<attribute name="node_dimensions" value="xi_psi eta_psi"/>
<attribute name="face_dimensions"
value="xi_rho: xi_psi (padding: both) eta_rho: eta_psi (padding: both)"/>
<attribute name="edge1_dimensions" value="xi_u: xi_psi eta_u: eta_psi (padding: both)"/>
<attribute name="edge2_dimensions" value="xi_v: xi_psi (padding: both) eta_v: eta_psi"/>
<attribute name="node_coordinates" value="lon_psi lat_psi"/>
<attribute name="face_coordinates" value="lon_rho lat_rho"/>
<attribute name="edge1_coordinates" value="lon_u lat_u"/>
<attribute name="edge2_coordinates" value="lon_v lat_v"/>
<attribute name="vertical_dimensions" value="s_rho: s_w (padding: none)"/>
</variable>\n """
    str += grid_var
    return str
# <codecell>
def add_aggregation_scan(str, a):
    """Append a joinExisting NcML aggregation that scans the current
    directory for files matching the configured filename pattern, joining
    them along the configured time variable."""
    agg = a['aggregation']
    opening = '<aggregation dimName="{:s}" type="joinExisting">\n'.format(agg['time_var'])
    scan = '<scan location="." regExp="{:s}" subdirs="false"/>\n</aggregation>\n'.format(agg['pattern'])
    return str + opening + scan
# <codecell>
# Assemble the NcML document: header + aggregation scan + footer.
# (The global-attribute, variable and grid sections are currently disabled.)
str = header()
#str = add_global_atts(str,a)
#str = add_var_atts(str,a)
#str = write_grid_var(str)
str = add_aggregation_scan(str,a)
str = footer(str)

# <codecell>

print str

# <codecell>

# Write the document next to the aggregated model output.
with open('{:s}/test5.ncml'.format(a['aggregation']['dir']),'w') as text_file:
    text_file.write("{:s}".format(str))

# <codecell>
|
<html>
<head>
    <title>Stock Price Histogram</title>
    <!-- Plotly.js (CDN) renders the histogram client-side -->
    <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
</head>
<body>
    <div id="graph"></div>
    <script>
        // NOTE(review): x_data and y_data are template placeholders that must
        // be substituted with real arrays before this page is served; as
        // written the script throws a ReferenceError.
        var stockData = [x_data, y_data];
        var data = [{
            x: stockData[0],
            y: stockData[1],
            type: 'histogram'
        }]
        Plotly.newPlot('graph', data);
    </script>
</body>
</html>
#! /bin/bash
# Install and smoke-test podman on Ubuntu 18.04 (e.g. under Travis CI) from
# the openSUSE Kubic "libcontainers:stable" package repository.
#
# .. seealso::
#    - https://github.com/junaruga/podman-experiment/blob/master/.travis.yml
#    - https://travis-ci.community/t/podman-libpod-support/6823/6
#    - https://podman.io/getting-started/installation.html
#
set -ex

# Register the Kubic APT repository and its signing key, then install podman.
sudo sh -c "echo 'deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_18.04/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list"
curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_18.04/Release.key | sudo apt-key add -
sudo apt-get update -qq
sudo apt-get -qq -y install podman

# Show version and info.
podman --version
podman version
podman info --debug
podman pull docker.io/pycontribs/centos:8
apt-cache show podman
dpkg-query -L podman

# Hack podman's configuration files.
# /etc/containers/registries.conf does not exist.
# https://clouding.io/kb/en/how-to-install-and-use-podman-on-ubuntu-18-04/
ls -1 /etc/containers/registries.conf || true
sudo mkdir -p /etc/containers
echo -e "[registries.search]\nregistries = ['docker.io', 'quay.io']" | sudo tee /etc/containers/registries.conf

# vim:sw=2:ts=2:et:
|
<reponame>hylophile/frontend
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import React, { Component } from 'react';
import { bindActionCreators } from 'redux';
import { Button, Form } from 'react-bootstrap';
import {
DashboardLink as DashLink,
DashboardLayout as Layout,
UserRoles
} from 'components/Dashboard';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { actions as rolesActions } from 'reducers/roles';
import { actions as dashActions } from 'reducers/dashboard';
import { actions as usersActions } from 'reducers/users';
import { getAllUsers } from 'selectors/users';
/**
 * Dashboard form that assigns an existing user to a role.
 *
 * BUG FIX: state.userId previously mixed the string '0' (initial state) with
 * Number values (set by handleChange), so the `userId !== '0'` / `=== '0'`
 * checks misfired after the placeholder option was re-selected: the Save
 * button stayed visible and submit dispatched createRoleUser with userId 0.
 * userId is now a Number everywhere; 0 means "no user selected".
 */
export class RoleAddUser extends Component {
    static propTypes = {
        actions: PropTypes.object.isRequired,
        name: PropTypes.string,
        layoutOptions: PropTypes.object.isRequired,
        roleId: PropTypes.number,
        collection: PropTypes.array
    };

    constructor(props) {
        super(props);
        this.state = { userId: 0, submitting: false };
        this.handleChange = this.handleChange.bind(this);
        this.handleSubmit = this.handleSubmit.bind(this);
    }

    componentDidMount() {
        const { actions } = this.props;
        // Preload the user list for the dropdown.
        actions.requestUsers({ page: 1, limit: 100 });
    }

    /** Tracks the selected user and refreshes that user's current roles. */
    handleChange(e) {
        const { actions } = this.props;
        const userId = Number(e.target.value);
        this.setState({ userId });
        // Refresh User Roles for <UserRoles /> component
        actions.requestUserRoles({ userId });
    }

    /** Assigns the selected user to the role, then returns to the Roles list. */
    handleSubmit(e) {
        e.preventDefault();
        this.setState({ submitting: true });
        const { userId } = this.state;
        const { actions, roleId } = this.props;
        if (userId === 0) {
            // Nothing selected — re-enable the form instead of submitting.
            this.setState({ submitting: false });
        } else {
            actions.createRoleUser({ roleId, userId, active: true });
            actions.selectDashboard({ name: 'Roles' });
        }
    }

    render() {
        const { collection, name, layoutOptions, roleId } = this.props;
        const { userId, submitting } = this.state;
        const isUserIdSelected = userId !== 0;
        return (
            <Layout
                pageTitle="Add Role - Dashboard"
                header={`Roles > Assign ${name} Role`}
                options={layoutOptions}
            >
                <FontAwesomeIcon icon="chevron-left" />
                <DashLink to="#roles" name="Roles">
                    Back
                </DashLink>
                <Form noValidate onSubmit={this.handleSubmit}>
                    <Form.Group>
                        <Form.Label>Select User to Assign to {name}s Role</Form.Label>
                        <Form.Control
                            as="select"
                            name="userId"
                            onChange={e => this.handleChange(e)}
                            value={userId}
                        >
                            <option value="0" key="x">
                                Choose a User
                            </option>
                            {collection.map((user, index) => {
                                return (
                                    <option value={user.id} key={index}>
                                        {user.UserProfile.name} ({user.emailAddress})
                                    </option>
                                );
                            })}
                        </Form.Control>
                    </Form.Group>
                    {isUserIdSelected && (
                        <Button
                            className="button-animation"
                            variant="primary"
                            type="submit"
                            disabled={submitting}
                        >
                            <span>Save</span>
                        </Button>
                    )}
                </Form>
                <br />
                {isUserIdSelected && (
                    <UserRoles
                        layoutOptions={{ header: false, title: true, border: 'info' }}
                        userId={userId}
                        roleId={roleId}
                    />
                )}
            </Layout>
        );
    }
}
// Expose the full user list to the component as props.collection.
const mapStateToProps = state => ({
    collection: getAllUsers(state)
});

// Bundle role, dashboard and user action creators under props.actions.
const mapDispatchToProps = dispatch => ({
    actions: bindActionCreators(
        {
            ...rolesActions,
            ...dashActions,
            ...usersActions
        },
        dispatch
    )
});

export default connect(mapStateToProps, mapDispatchToProps)(RoleAddUser);
|
#include "../BoardDefines.h"
//#include <Wire.h>
#include <pins_arduino.h>
//#include "../pinout.h";
// Keep track of last rotary value
int lastCount = 100;
// Updated by the ISR (Interrupt Service Routine)
// NOTE(review): virtualPosition is never referenced in this file — confirm it
// is used by other translation units before removing.
volatile int virtualPosition = 100;

// ------------------------------------------------------------------
// INTERRUPT INTERRUPT INTERRUPT INTERRUPT INTERRUPT
// ------------------------------------------------------------------
// Last encoder position observed by encoderScanIsr().
volatile int counter = 0;
volatile static unsigned long last_interrupt_time = 0;
volatile byte aFlag = 0; // let's us know when we're expecting a rising edge on pinA to signal that the encoder has arrived at a detent
volatile byte bFlag = 0; // let's us know when we're expecting a rising edge on pinB to signal that the encoder has arrived at a detent (opposite direction to when aFlag is set)
volatile int encoderPos = 0; //this variable stores our current value of encoder position. Change to int or uin16_t instead of byte if you want to record a larger range than 0-255
volatile int oldEncPos = 0; //stores the last encoder position value so we can compare to the current reading and see if it has changed (so we know when to print to the serial monitor)
volatile byte reading = 0; //somewhere to store the direct values we read from our interrupt pins before checking to see if we have moved a whole detent
// Returns the ISR-maintained encoder position, remembering the last value
// seen so changes can be detected (debug print hook left in place).
int getEncoderPos() {
    if(oldEncPos != encoderPos) {
        // DebugPort.print("ENC: ");
        // DebugPort.println(encoderPos);
        oldEncPos = encoderPos;
    }
    return encoderPos;
}

// ISR for the encoder CLK (pin A) edge: reads pins A/B directly from PIND
// and decrements the position when both pins are high and a detent in this
// direction was pending (aFlag).
void isrEncoderClk(){
    VENT_DEBUG_FUNC_START();
    cli(); //stop interrupts happening before we read pin values
    reading = PIND & 0xC; // read all eight pin values then strip away all but pinA and pinB's values
    if(reading == B00001100 && aFlag) { //check that we have both pins at detent (HIGH) and that we are expecting detent on this pin's rising edge
        encoderPos --; //decrement the encoder's position count
        bFlag = 0; //reset flags for the next turn
        aFlag = 0; //reset flags for the next turn
    }
    else if (reading == B00000100) bFlag = 1; //signal that we're expecting pinB to signal the transition to detent from free rotation
    sei(); //restart interrupts
    VENT_DEBUG_FUNC_END();
}
// ISR for the encoder DT (pin B) edge: mirror of isrEncoderClk(), increments
// the position for the opposite rotation direction.
void isrEncoderDt(){
    VENT_DEBUG_FUNC_START();
    cli(); //stop interrupts happening before we read pin values
    reading = PIND & 0xC; //read all eight pin values then strip away all but pinA and pinB's values
    if (reading == B00001100 && bFlag) { //check that we have both pins at detent (HIGH) and that we are expecting detent on this pin's rising edge
        encoderPos ++; //increment the encoder's position count
        bFlag = 0; //reset flags for the next turn
        aFlag = 0; //reset flags for the next turn
    }
    else if (reading == B00001000) aFlag = 1; //signal that we're expecting pinA to signal the transition to detent from free rotation
    sei(); //restart interrupts
    VENT_DEBUG_FUNC_END();
}

// Set by the push-switch ISR; consumed (and cleared) by
// encoderScanUnblocked().
bool switch_position_changed = false;

// ISR for the rotary push-switch: only records that the switch moved.
void isr_processSwitch() {
    VENT_DEBUG_FUNC_START();
    switch_position_changed = true;
    VENT_DEBUG_FUNC_END();
}
// Converts the change in encoder position since the last call into an
// RT_INC / RT_DEC event; RT_NONE when the knob has not moved.
RT_Events_T encoderScanIsr() {
    RT_Events_T retVal = RT_NONE;
    VENT_DEBUG_FUNC_START();
    counter = getEncoderPos();
    if (lastCount != counter) {
        if (lastCount < counter )
            retVal = RT_INC;
        else
            retVal = RT_DEC;
    }
    lastCount = counter;
    VENT_DEBUG_FUNC_END();
    return retVal;
}

// Debounces the "start edit" switch: calls within DBNC_INTVL_SW ms of the
// previously accepted press return early.
// NOTE(review): beyond updating the debounce timestamp, this handler
// currently performs no action — confirm whether logic is missing.
void isr_processStartEdit() {
    static unsigned long lastSwitchTime = 0;
    unsigned long switchTime = millis();
    VENT_DEBUG_FUNC_START();
    if ((switchTime - lastSwitchTime) < DBNC_INTVL_SW) {
        VENT_DEBUG_FUNC_END();
        return;
    }
    lastSwitchTime = switchTime;
    VENT_DEBUG_FUNC_END();
}
// millis() timestamp of the last button LOW reading.
unsigned long lastButtonPress = 0;
// NOTE(review): currentStateCLK / lastStateCLK appear unused in this file.
int currentStateCLK;
int lastStateCLK;
// Cleared when any rotation or press is seen; drives Encoder_Scan()'s wait loop.
boolean no_input = true;
// NOTE(review): shadowed by the local btnState in encoderScanUnblocked();
// this global is never read.
int btnState;

// Single non-blocking scan of the encoder: reports rotation (RT_INC/RT_DEC)
// or a debounced button press (RT_BT_PRESS), clicking the buzzer on any event.
RT_Events_T encoderScanUnblocked()
{
    RT_Events_T eRTState_EnCoder = RT_NONE;
    VENT_DEBUG_FUNC_START();
    // Read the current state of CLK
    eRTState_EnCoder = encoderScanIsr();
    if ((eRTState_EnCoder == RT_INC) || (eRTState_EnCoder == RT_DEC))
        no_input = false;
    // Read the button state
    int btnState = digitalRead(DISP_ENC_SW);
    //If we detect LOW signal, button is pressed
    if (btnState == LOW)
    {
        //if 50ms have passed since last LOW pulse, it means that the
        //button has been pressed, released and pressed again
        if ((millis() - lastButtonPress > 50) && switch_position_changed) {
            eRTState_EnCoder = RT_BT_PRESS;
            switch_position_changed = false;
            no_input = false;
        }
        // Remember last button press event
        lastButtonPress = millis();
    }
    RT_Events_T returnState = eRTState_EnCoder;
    if (eRTState_EnCoder != RT_NONE)
    {
        // Short audible click as feedback for any input event.
        digitalWrite(BUZZER_PIN, HIGH);
        delay(1);
        digitalWrite(BUZZER_PIN, LOW);
        // lcd.setCursor(5,0);
        // lcd.print(returnState);
    }
    // lcd.setCursor(7,0);
    // lcd.print(" ");
    // lcd.print(returnState);
    // VENT_DEBUG_FUNC_END();
    return returnState;
}
// Blocks until the user rotates the knob or presses the button, then returns
// that event. Busy-waits on encoderScanUnblocked() via the no_input flag.
RT_Events_T Encoder_Scan(void)
{
    RT_Events_T eRTState = RT_NONE;
    VENT_DEBUG_FUNC_START();
    no_input = true;
    while(no_input)
    {
        eRTState = encoderScanUnblocked();
    }
    VENT_DEBUG_FUNC_END();
    return(eRTState);
}
|
from django.db import models
from accounts.models import Professional
class Room(models.Model):
    """A room with public/private access control and a per-room ban list."""
    # Explicit auto-increment primary key (same as Django's default).
    id = models.AutoField(primary_key=True)
    # URL-safe identifier; blank=True suggests it is filled programmatically
    # (e.g. from 'name') rather than via forms — TODO confirm.
    slug = models.SlugField(max_length=80, unique=True, blank=True)
    # Human-readable room name.
    name = models.CharField(max_length=60, unique=True, blank=True)
    ACCESS_CHOICES = [('Public', 'Public'), ('Private', 'Private')]
    access = models.CharField(max_length=9, choices=ACCESS_CHOICES, blank=True)
    # Soft-delete / activation flag.
    is_active = models.BooleanField(default=True)
    # Professionals barred from this room.
    black_list = models.ManyToManyField(Professional, related_name='rooms_forbidden', blank=True)
// Content script: after the background page acknowledges the handshake and
// the document is ready, download the Mint "cash" transactions export and
// forward its text content to the background page.
// NOTE(review): onDocumentReady() and downloadFile() must be provided by
// another content script loaded on this page — confirm load order.
chrome.extension.sendMessage({}, () => {
    console.log(`connection to background page has been established`);
    onDocumentReady()
        .then(() => {
            console.log(`document is ready`);
            return downloadFile({url: "https://mint.intuit.com/transactionDownload.event?queryNew=&offset=0&filterType=cash&comparableType=8"})
        })
        .then(blob => blob.text())
        .then(fileContent => {
            console.log(`file downloaded, sending to the background`);
            chrome.extension.sendMessage({
                event: "file-downloaded",
                payload: {
                    fileContent
                }
            })
        })
});
|
<filename>webapp/NoteTaking/src/main/java/edu/neu/coe/csye6225/service/UserVerification.java
package edu.neu.coe.csye6225.service;
import edu.neu.coe.csye6225.entity.User;
import java.util.Base64;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
/** Parses HTTP Basic Authorization headers into credential-bearing users. */
public class UserVerification {

    /**
     * Decodes an HTTP Basic Authorization header into a {@link User} carrying
     * the supplied username and password.
     *
     * @param auth raw Authorization header value, e.g. "Basic dXNlcjpwYXNz"
     * @return a User with the decoded credentials, or {@code null} when the
     *         header is missing, not Basic authentication, or malformed
     */
    public static User addVerification(String auth) {
        // isBlank already covers null and empty, so the former extra
        // isEmpty check was redundant.
        if (StringUtils.isBlank(auth))
            return null;
        String basic = "Basic";
        // BUG FIX: the scheme must lead the header. contains() accepted
        // "Basic" anywhere in the string and then mis-sliced the token below.
        if (!auth.startsWith(basic))
            return null;
        List<String> list;
        String codeString = auth.substring(basic.length()).trim();
        try {
            codeString = new String(Base64.getDecoder().decode(codeString));
            // BUG FIX: per RFC 7617 only the FIRST colon separates user from
            // password, and passwords may contain colons and spaces — so
            // split with a limit of 2 and do not strip whitespace (the old
            // replace(" ", "") corrupted such passwords).
            list = java.util.Arrays.asList(codeString.split(":", 2));
        } catch (Exception e) {
            // Invalid Base64 or any other decode failure -> unauthenticated.
            return null;
        }
        if (list.size() != 2) {
            return null;
        }
        User user = new User();
        user.setUsername(list.get(0));
        user.setPassword(list.get(1));
        return user;
    }
}
|
"""Train a linear-regression model on user data and report its fit quality."""
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score

# prepare the data
# TODO: replace the placeholder with the real 2-D user-data array: one row
# per sample, all columns but the last are features, the last column is the
# continuous target. (The original '< array of user data >' placeholder was
# a syntax error.)
user_data = np.array([])  # e.g. np.array([[f1, f2, ..., label], ...])
features = user_data[:, :-1]
labels = user_data[:, -1]

# split the data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(features, labels, test_size=0.2)

# create and fit the model
model = LinearRegression()
model.fit(X_train, y_train)

# make predictions
predictions = model.predict(X_test)

# evaluate the model
# BUG FIX: accuracy_score is a classification metric and raises ValueError on
# the continuous output of LinearRegression; R^2 is the standard regression
# score.
score = r2_score(y_test, predictions)
print('Model R^2 score: ', score)
import play.db.jpa.JPAApi;
import scala.concurrent.ExecutionContext;
/**
 * JPA repository for {@code Actor} entities; inherits the generic async CRUD
 * operations from {@link BaseRepository}, listing actors sorted by first name.
 */
public class ActorsRepository extends BaseRepository<Actor, Long> {
    // NOTE(review): @Inject requires javax.inject.Inject, which is not among
    // the visible imports — confirm it exists in the real source file.
    @Inject
    public ActorsRepository(JPAApi jpaApi, DatabaseExecutionContext context) {
        super(jpaApi, context, Actor.class, "firstName");
    }
}
import play.db.jpa.JPAApi;
import scala.concurrent.ExecutionContext;

import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
public abstract class BaseRepository<T, ID> {
private final JPAApi jpaApi;
private final DatabaseExecutionContext context;
private final Class<T> entityClass;
private final String defaultSortField;
public BaseRepository(JPAApi jpaApi, DatabaseExecutionContext context, Class<T> entityClass, String defaultSortField) {
this.jpaApi = jpaApi;
this.context = context;
this.entityClass = entityClass;
this.defaultSortField = defaultSortField;
}
public CompletionStage<T> findById(ID id) {
return jpaApi.withTransaction(entityManager -> {
T entity = entityManager.find(entityClass, id);
return entity;
});
}
public CompletionStage<List<T>> findAll() {
return jpaApi.withTransaction(entityManager -> {
String jpql = "SELECT e FROM " + entityClass.getSimpleName() + " e ORDER BY e." + defaultSortField;
TypedQuery<T> query = entityManager.createQuery(jpql, entityClass);
List<T> entities = query.getResultList();
return entities;
});
}
public CompletionStage<T> save(T entity) {
return jpaApi.withTransaction(entityManager -> {
entityManager.persist(entity);
return entity;
});
}
public CompletionStage<T> update(T entity) {
return jpaApi.withTransaction(entityManager -> {
T updatedEntity = entityManager.merge(entity);
return updatedEntity;
});
}
public CompletionStage<Void> delete(ID id) {
return jpaApi.withTransaction(entityManager -> {
T entity = entityManager.find(entityClass, id);
if (entity != null) {
entityManager.remove(entity);
}
});
}
} |
// Copyright 2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.secmgr.authzcontroller;
import com.google.enterprise.secmgr.authncontroller.SessionSnapshot;
import com.google.enterprise.secmgr.common.Resource;
import com.google.enterprise.secmgr.modules.AuthzResult;
import java.io.IOException;
import java.util.Collection;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;
/**
 * Top-level entry point for authorization decisions.
 */
@ParametersAreNonnullByDefault
public interface AuthorizationController {

  /**
   * Determines the authorization state of the given resources for the user
   * described by the session snapshot.
   *
   * @param resources the resources to be authorized
   * @param snapshot a snapshot of the user's authentication session
   * @param enableFastAuthz flag enabling the fast-authorization path
   * @return the authorization result for the given resources
   * @throws IOException if an I/O error occurs while deciding authorization
   */
  @Nonnull
  AuthzResult authorize(Collection<Resource> resources, SessionSnapshot snapshot,
      boolean enableFastAuthz)
      throws IOException;
}
|
import org.mockito.Mockito;
public class Entity {
private Repository repository;
private Value originalValue;
private Value modifiedValue;
private boolean persisted;
public Entity(Repository repository) {
this.repository = repository;
}
public Value modify() {
originalValue = new Value(/* initialize with current state of entity */);
// Modify the entity
// ...
modifiedValue = new Value(/* initialize with modified state of entity */);
return originalValue;
}
public void persist() throws Exception {
try {
repository.insert(/* pass the modified entity to the repository for persistence */);
persisted = true;
} catch (Exception e) {
throw e; // Propagate the exception
}
}
public Value get() {
return modifiedValue; // Return the current state of the entity
}
public boolean isPersisted() {
return persisted; // Return true if the entity has been persisted
}
public boolean isDirty() {
return modifiedValue != null && !modifiedValue.equals(originalValue);
}
} |
def ticket_price(age):
    """Return the ticket price in pounds for the given age.

    Age bands: 1-17 -> 1.50, 18-64 -> 2.20, 65-99 -> 1.20.
    Returns None for ages outside the supported 1-99 range.
    """
    if 0 < age < 18:
        return 1.50
    if 18 <= age < 65:
        return 2.20
    if 65 <= age < 100:
        return 1.20
    return None


if __name__ == "__main__":
    # Ask the user to enter their age.
    age = int(input("Please enter your age :"))
    price = ticket_price(age)
    if price is None:
        # Robustness fix: the original script printed nothing at all for
        # ages <= 0 or >= 100; report the unsupported input explicitly.
        print("Sorry, no ticket price is available for that age.")
    else:
        # Depending on the age group display a different ticket price.
        print("The ticket costs : {:.2f}£".format(price))
|
/*
* Copyright 2006-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.brte.sample.example.support;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.aspectj.lang.JoinPoint;
/**
 * Advice class providing common logging templates for batch execution.
 * (Javadoc translated from the Korean original.)
 *
 * @author Batch Execution Development Team
 * @since 2012.07.30
 * @see <pre>
 *  == Modification Information ==
 *   Date         Author                      Description
 *   ----------   --------                    ---------------------------
 *   2012.07.30   Batch Execution Dev Team    Initial creation
 * </pre>
 */
public class EgovLogAdvice {

    private static Log log = LogFactory.getLog(EgovLogAdvice.class);

    /**
     * Basic logging template: logs the target class name, the short
     * join-point description and every argument of the advised method.
     *
     * @param pjp the join point being advised
     * @throws Throwable declared to satisfy the generic AOP advice contract
     */
    public void doBasicLogging(JoinPoint pjp) throws Throwable {
        Object[] args = pjp.getArgs();
        // Fix: StringBuilder instead of StringBuffer — the buffer is
        // method-local, so StringBuffer's synchronization is pure overhead.
        StringBuilder output = new StringBuilder();
        output.append(pjp.getTarget().getClass().getName()).append(": ");
        output.append(pjp.toShortString()).append(": ");
        for (Object arg : args) {
            output.append(arg).append(" ");
        }
        log.info("Basic: " + output.toString());
    }

    /**
     * Strongly-typed logging template for a processed item.
     *
     * @param item the item that was processed
     */
    public void doStronglyTypedLogging(Object item) {
        log.info("Processed: " + item);
    }
}
|
#!/bin/bash
# Auth:bell@greedlab.com
#
# Print the CFBundleVersion (build number) from an Info.plist file.
# Exit codes: 0 = success, 1 = bad usage or empty value, 2 = file not found.

function usage() {
    echo "Usage: getAppBuild.sh [-p <plist file path>]"
    echo "Options:"
    echo "-p plist file path"
    echo "-h help"
    echo "Example:"
    # Fix: the previous example was copy-pasted from replace.sh and did not
    # match this script's options.
    echo "getAppBuild.sh -p 'path/to/Info.plist'"
    exit 1
}

PLIST_FILE_PATH=""

while getopts p:h opt; do
    case $opt in
        p)
            PLIST_FILE_PATH=$OPTARG;;
        h)
            usage;;
        \?)
            usage;;
    esac
done

# Quote every path expansion so paths containing spaces work correctly.
if [[ -f "${PLIST_FILE_PATH}" ]]; then
    app_build=$(/usr/libexec/PlistBuddy -c "Print CFBundleVersion" "${PLIST_FILE_PATH}")
    if [[ -n "${app_build}" ]]; then
        echo "${app_build}"
    else
        exit 1
    fi
else
    exit 2
fi
|
<gh_stars>0
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.transports.http.transport.netty;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import java.util.UUID;
import java.util.concurrent.Callable;
import org.kaaproject.kaa.server.common.server.KaaCommandProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* AbstractCommand abstract Class.
* Implements some base setters and getters and define abstract processing flow.
* AbstractCommand implements callable interface and used from DefaultHandler to
* decode HTTP request, process command in Executor and encode HTTP response.
* Following flow is applied:
* parse() - decode HTTP request in Netty inbound pipeline flow
* process() - process command in executor thread
* getHttpResponse() - encode HTTP response in Netty outbound pipeline flow.
*
* @author <NAME>
*/
public abstract class AbstractCommand implements Callable<AbstractCommand>, KaaCommandProcessor<HttpRequest, HttpResponse> {

    protected static final Logger LOG = LoggerFactory //NOSONAR
            .getLogger(AbstractCommand.class);

    // NOTE(review): static and mutable, shared across all commands; static
    // members are not polymorphic, so getName()/getCommandName() return ""
    // unless a subclass shadows COMMAND_NAME and these accessors — confirm
    // against the concrete command implementations.
    protected static String COMMAND_NAME = "";

    /** The decoded inbound HTTP request this command processes. */
    private HttpRequest httpRequest;

    /** Session UUID */
    private UUID sessionUuid;

    /** Time of SYNC processing */
    private long syncTime = 0;

    /** integer representing ID of HTTP request */
    private int commandId;

    /**
     * @return the commandId
     */
    @Override
    public int getCommandId() {
        return commandId;
    }

    /**
     * @param commandId the commandId to set
     */
    @Override
    public void setCommandId(int commandId) {
        this.commandId = commandId;
    }

    /**
     * HttpRequest getter.
     * @return HttpRequest
     */
    @Override
    public HttpRequest getRequest() {
        return httpRequest;
    }

    /**
     * HttpRequest setter.
     * @param httpRequest - HTTP request
     */
    @Override
    public void setRequest(HttpRequest httpRequest) {
        this.httpRequest = httpRequest;
    }

    /**
     * Static method getCommandName.
     * Used to represent command part of URI.
     * @return - String CommandName
     */
    public static String getCommandName() {
        return COMMAND_NAME;
    }

    /**
     * Command Name getter.
     * @return String
     */
    @Override
    public String getName() {
        return COMMAND_NAME;
    }

    /**
     * Default CommandProcessor Class constructor.
     */
    public AbstractCommand() {
    }

    /**
     * parse() - used to decoder HttpRequest, find necessary CommandProcessor
     * and create CommandProcessor instance using CommanFactory.getProcessor()
     * @throws Exception - if HttpRequest parse failed or CommandProcessor not found.
     */
    public abstract void parse() throws Exception; //NOSONAR

    /**
     * process() - is run in executor thread and process requests.
     * @throws Exception - if some error occurred during processing.
     */
    public abstract void process() throws Exception; //NOSONAR

    /**
     * getHttpResponse() - encode processing result into HTTP Response.
     * @return HttpResponse.
     */
    @Override
    public abstract HttpResponse getResponse();

    /**
     * Intentional no-op: the response for this command type is produced by
     * getResponse() rather than injected from outside.
     */
    @Override
    public void setResponse(HttpResponse response) {
        // Nothing to do
    }

    /**
     * isNeedConnectionClose() - used to indicate is it necessary to close Channel after
     * HTTP response returned to client.
     * @return boolean - true to Close connection.
     */
    public abstract boolean isNeedConnectionClose();

    /**
     * Callable entry point: runs process() on the executor thread, with
     * trace logging around the call, and returns this command for chaining.
     */
    @Override
    public AbstractCommand call() throws Exception {
        LOG.trace("{} : Process start", getCommandName());
        process();
        LOG.trace("{}: Process finish", getCommandName());
        return this;
    }

    /**
     * @return the sessionUuid
     */
    public UUID getSessionUuid() {
        return sessionUuid;
    }

    /**
     * @param sessionUuid the sessionUuid to set
     */
    public void setSessionUuid(UUID sessionUuid) {
        this.sessionUuid = sessionUuid;
    }

    /**
     * @return the syncTime
     */
    @Override
    public long getSyncTime() {
        return syncTime;
    }

    /**
     * @param syncTime the syncTime to set
     */
    @Override
    public void setSyncTime(long syncTime) {
        this.syncTime = syncTime;
    }

    /** @return identifier of the next protocol in the processing chain */
    public abstract int getNextProtocol();
}
|
#!/bin/sh
#
# Build the co2mon Docker image and stream it to TARGET_HOST over ssh,
# showing transfer progress with pv.
#
# Installing pv:
#   brew install pv
#
set -eu

TARGET_HOST="${1}"
CONTAINER_NAME="co2mon"

# Fail early if pv is missing. Fix: the original used `(echo ...; exit 1)`,
# where `exit 1` only leaves the subshell and the script survived solely
# because of `set -e`; an explicit if/exit terminates the script itself.
if ! command -v pv > /dev/null; then
    echo "pvが見当たりません" >&2
    exit 1
fi

# Consistency fix: use CONTAINER_NAME instead of repeating the literal name.
./docker_build.sh &&
    (docker image save "${CONTAINER_NAME}" |
        pv |
        ssh "${TARGET_HOST}" docker image load)
|
#!/usr/bin/env bash
#TEST: Configure 4 repositories and sign a single zone using 2 of these different repositories.
#
# NOTE(review): `return` is used at top level, so this file is expected to be
# *sourced* by the OpenDNSSEC test harness rather than executed directly —
# confirm before invoking it standalone.

# Swap in the MySQL-specific configuration when MySQL support is available.
if [ -n "$HAVE_MYSQL" ]; then
	ods_setup_conf conf.xml conf-mysql.xml
fi &&

# Reset the test environment and initialise three SoftHSM tokens.
ods_reset_env &&
ods_softhsm_init_token 1 "OpenDNSSEC2" "1111" "1111" &&
ods_softhsm_init_token 2 "OpenDNSSEC3" "2222" "2222" &&
ods_softhsm_init_token 3 "OpenDNSSEC4" "3333" "3333" &&

# Start OpenDNSSEC, wait (up to 60s) for the zone "ods" to be signed,
# check the signed zone file exists, then stop cleanly and succeed.
ods_start_ods-control &&
syslog_waitfor 60 'ods-signerd: .*\[STATS\] ods' &&
test -f "$INSTALL_ROOT/var/opendnssec/signed/ods" &&
ods_stop_ods-control &&
return 0

# Any failure in the chain above falls through to cleanup and failure.
ods_kill
return 1
|
<reponame>JustinZuidgeest/WeatherstationJava<filename>StorageServer/src/QueryParser/QueryWorker.java
package QueryParser;
import SocketConnection.ConnectionWorker;
import SocketConnection.Main;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
 * The QueryWorker class is responsible for parsing the requested data into a String format to be sent back to the PHP
 * server for processing.
 */
public class QueryWorker implements Runnable{

    /** Connection that requested this query; receives the parsed result. */
    private ConnectionWorker connectionWorker;
    /** Country codes requested in the query. */
    private String[] countries;
    /** Maximum number of measurements to return per country. */
    private int count;
    /** Path of the history file to read, or null for a live-update query. */
    private String filepath;

    public QueryWorker(ConnectionWorker connectionWorker, String[] countries, int count, String filepath){
        this.connectionWorker = connectionWorker;
        this.countries = countries;
        this.count = count;
        this.filepath = filepath;
    }

    @Override
    public void run() {
        long startTime = System.currentTimeMillis();
        //Check if the filepath is null (and therefore an update query was requested)
        if (filepath == null){
            parseUpdate();
        }else{
            getAvarages();
        }
        long estimatedTime = System.currentTimeMillis() - startTime;
        System.out.println("Query Thread Done. Estimated execution time: " + estimatedTime + "ms\n");
    }

    /**
     * Uses the IOWorker instance to retrieve data from the latest update file and creates WeatherMeasurement objects from the data.
     */
    private void parseUpdate(){
        //Yield thread while IOWorker happens to be updating its most recent data
        while(!Main.ioWorker.getQueryable()){
            Thread.yield();
        }
        ArrayList<String> csvLines = Main.ioWorker.getUpdateList();
        System.out.println("Fetched new data. Size: " + csvLines.size());
        HashMap<String, HashMap> stationList = Main.ioWorker.getStationList();
        ArrayList<ArrayList> queryMeasurements = new ArrayList<>();
        for (String country : countries) {
            ArrayList<WeatherMeasurement> countryMeasurements = new ArrayList<>();
            for (String line : csvLines) {
                //Split every line of the csv file at the , symbol
                String[] splitLine = line.split(",");
                HashMap<String, String> stationData = stationList.get(splitLine[0]);
                //Find the country associated with the weatherstation
                String stationCountry = stationData.get("CNT");
                //If the weatherstation is inside a country that was requested in the query, make a new WeatherMeasurement object
                if (stationCountry.equals(country)) {
                    String stationName = stationData.get("LOC");
                    // NOTE(review): columns 1 and 6 are assumed to be
                    // temperature and windspeed respectively — confirm
                    // against the IOWorker CSV layout.
                    float windchill = calculateWindchill(Float.parseFloat(splitLine[1]), Float.parseFloat(splitLine[6]));
                    WeatherMeasurement tempMeasurement = new WeatherMeasurement(
                            splitLine[0], stationName, stationCountry, stationData.get("LAT"), stationData.get("LONG"),
                            splitLine[1],windchill, splitLine[2], splitLine[3], splitLine[4], splitLine[5], splitLine[6],
                            splitLine[7], splitLine[8], splitLine[9], splitLine[10]);
                    countryMeasurements.add(tempMeasurement);
                }
            }
            queryMeasurements.add(countryMeasurements);
        }
        //Pass the ArrayList filled with WeatherMeasurement objects for every country to the String parse function
        parseToString(queryMeasurements);
    }

    /**
     * Calculates the averages per station from a retrieved file.
     * (Method name spelling "getAvarages" kept for compatibility.)
     */
    private void getAvarages(){
        //Read all the lines of the file into an ArrayList
        ArrayList<String> allLines = Main.ioWorker.readFile(filepath);
        //Notify the PHP script if no data could be extracted for the specified file
        if (allLines == null){
            connectionWorker.setReturnQuery("No Data");
            return;
        }
        HashMap<String, ArrayList<Float[]>> stationReadings = new HashMap<>();
        //Loop through all the lines in the file
        for (String line : allLines){
            String[] splitLine = line.split(",");
            //If the stationID key already exists, add the measurements to an ArrayList inside a hashmap
            if (stationReadings.containsKey(splitLine[0])){
                Float temp = Float.parseFloat(splitLine[1]);
                Float wind = Float.parseFloat(splitLine[2]);
                Float pressure = Float.parseFloat(splitLine[3]);
                Float[] tempFloat = {temp, wind, pressure};
                stationReadings.get(splitLine[0]).add(tempFloat);
            //If the stationID doesn't exist yet, put a new ArrayList in the hashmap using the stationID as key
            }else{
                Float temp = Float.parseFloat(splitLine[1]);
                Float wind = Float.parseFloat(splitLine[2]);
                Float pressure = Float.parseFloat(splitLine[3]);
                Float[] tempFloat = {temp, wind, pressure};
                ArrayList<Float[]> tempArray = new ArrayList<>();
                tempArray.add(tempFloat);
                stationReadings.put(splitLine[0], tempArray);
            }
        }
        //The ArrayList that will be sent to be parsed for the PHP request
        ArrayList<String> historyArray = new ArrayList<>();
        //Loop through the hashmap containing station measurements, calculating averages per station and appending those
        //averages to the StringBuilder
        for(Map.Entry<String, ArrayList<Float[]>> entry : stationReadings.entrySet()){
            StringBuilder builder = new StringBuilder();
            // NOTE(review): this local `count` shadows the `count` field used
            // elsewhere for result limiting; intentional here (it counts
            // readings per station), but easy to confuse.
            int count = 0;
            float totalTemp = 0;
            float totalPressure = 0;
            float totalWind = 0;
            for (Float[] stationReading : entry.getValue()){
                totalTemp += stationReading[0];
                totalWind += stationReading[1];
                totalPressure += stationReading[2];
                count++;
            }
            float avarageTemp = totalTemp / count;
            float avaragePressure = totalPressure / count;
            float avarageWind = totalWind / count;
            builder.append(entry.getKey());
            builder.append(",");
            // String.format("%.2f") uses the default locale, which may emit a
            // decimal comma; the replace(",", ".") normalises it back to a dot
            // (the value is wrapped in quotes so the CSV split is unaffected,
            // and the substring below strips the quotes again).
            String tempSTR = String.format("\"%.2f\"", avarageTemp).replace(",", ".");
            builder.append(tempSTR.substring(1, tempSTR.length() -1));
            builder.append(",");
            String windSTR = String.format("\"%.2f\"", avarageWind).replace(",", ".");
            builder.append(windSTR.substring(1, windSTR.length() -1));
            builder.append(",");
            String pressureSTR = String.format("\"%.2f\"", avaragePressure).replace(",", ".");
            builder.append(pressureSTR.substring(1, pressureSTR.length() -1));
            historyArray.add(builder.toString());
        }
        parseHistory(historyArray);
    }

    /**
     * Uses the IOWorker instance to retrieve data from the requested day and creates WeatherMeasurement objects from the data.
     *
     * @param csvLines per-station average lines in "id,temp,wind,pressure" form
     */
    private void parseHistory(ArrayList<String> csvLines){
        HashMap<String, HashMap> stationList = Main.ioWorker.getStationList();
        ArrayList<ArrayList> queryMeasurements = new ArrayList<>();
        for (String country : countries) {
            ArrayList<WeatherMeasurement> countryMeasurements = new ArrayList<>();
            for (String line : csvLines) {
                //Split every line of the csv file at the , symbol
                String[] splitLine = line.split(",");
                HashMap<String, String> stationData = stationList.get(splitLine[0]);
                //Find the country associated with the weatherstation
                String stationCountry = stationData.get("CNT");
                //If the weatherstation is inside a country that was requested in the query, make a new WeatherMeasurement object
                if (stationCountry.equals(country)) {
                    String stationName = stationData.get("LOC");
                    float windchill = calculateWindchill(Float.parseFloat(splitLine[1]), Float.parseFloat(splitLine[2]));
                    // History lines only carry temp/wind/pressure, so the
                    // remaining measurement fields are passed as null.
                    WeatherMeasurement tempMeasurement = new WeatherMeasurement(
                            splitLine[0], stationName, stationCountry, stationData.get("LAT"), stationData.get("LONG"),
                            splitLine[1], windchill, null, splitLine[3], null, null, splitLine[2],
                            null, null, null, null);
                    countryMeasurements.add(tempMeasurement);
                }
            }
            queryMeasurements.add(countryMeasurements);
        }
        //Pass the ArrayList filled with WeatherMeasurement objects for every country to the String parse function
        parseToString(queryMeasurements);
    }

    /**
     * Calculates the windchill based on the JAG/TI method employed by the KNMI.
     *
     * @param temperature The temperature in C
     * @param windspeed The windspeed in km/h
     *
     * @return The windchill temperature in C
     */
    private float calculateWindchill(float temperature, float windspeed){
        return (float) (13.12 + (0.6215 * temperature) - (11.37 * (Math.pow(windspeed, 0.16)))
                + (0.3965 * temperature * (Math.pow(windspeed, 0.16))));
    }

    /**
     * Takes the ArrayList filled with WeatherMeasurement objects and turns them into a String to send back to PHP
     * according to the query received.
     *
     * @param queryMeasurements The ArrayList filled with WeatherMeasurement objects for every country requested
     */
    private void parseToString(ArrayList<ArrayList> queryMeasurements){
        StringBuilder queryBuilder = new StringBuilder();
        String parsedQuery;
        //Create the header line that PHP will use to label the data
        queryBuilder.append("LOCATION,COUNTRY,WINDCHILL,AIRPRESSURE,LAT,LONG;");
        //Loop for every country present in the ArrayList
        for (ArrayList<WeatherMeasurement> countryMeasurements : queryMeasurements){
            //Convert the ArrayList of a country into an Array for sorting
            WeatherMeasurement[] countryMeasurementsArray = new WeatherMeasurement[countryMeasurements.size()];
            countryMeasurementsArray = countryMeasurements.toArray(countryMeasurementsArray);
            //Sort the measurements of the country by their windchill using the WindchillSorter class
            Arrays.sort(countryMeasurementsArray, new WindchillSorter());
            StringBuilder tempBuilder = new StringBuilder();
            //For every measurement, create a string of the location, country, windchill and airpressure of that measurement
            //separated by a comma and closed by a ; symbol. The amount of measurement Strings per country is dictated by the
            //count variable passed during creation of this class
            for (int i=0; i < countryMeasurementsArray.length; i++){
                if (i >= count) break;
                WeatherMeasurement tempMeasurement = countryMeasurementsArray[i];
                tempBuilder.append(tempMeasurement.getLocation());
                tempBuilder.append(",");
                tempBuilder.append(tempMeasurement.getCountry());
                tempBuilder.append(",");
                // Strip the surrounding quotes added when formatting windchill.
                String windchillSTR = tempMeasurement.getWindchillString();
                tempBuilder.append(windchillSTR.substring(1, windchillSTR.length() -1));
                tempBuilder.append(",");
                tempBuilder.append(tempMeasurement.getAirStation());
                tempBuilder.append(",");
                tempBuilder.append(tempMeasurement.getLat());
                tempBuilder.append(",");
                tempBuilder.append(tempMeasurement.getLng());
                tempBuilder.append(";");
            }
            queryBuilder.append(tempBuilder.toString());
        }
        //Cut the last ; symbol from the String and send it back to the ConnectionWorker thread that requested the
        //parsed query
        parsedQuery = queryBuilder.toString().substring(0, queryBuilder.length() - 1);
        connectionWorker.setReturnQuery(parsedQuery);
    }
}
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sdb.compiler;
import org.apache.jena.sdb.SDB ;
import org.apache.jena.sdb.core.SDBRequest ;
import org.apache.jena.sdb.core.sqlnode.SqlNode ;
import org.apache.jena.sdb.shared.SDBInternalError ;
import org.apache.jena.sparql.core.Quad ;
/**
 * Compiles a block of quads into an SQL node tree.
 *
 * <p>Builds a list of SQL stages from the quads (optionally letting a
 * {@code PatternTable} consume runs of quads), joins the stages with the
 * constants node produced by the slot compiler, and inserts DISTINCT when
 * querying the RDF merge (union) of all named graphs.
 */
public class QuadBlockCompilerMain implements QuadBlockCompiler
{
    SDBRequest request ;
    SlotCompiler slotCompiler ;
    // Optional hook: when set, may replace runs of quads with a custom stage.
    static public PatternTable patternTable = null ;

    public QuadBlockCompilerMain(SDBRequest request, SlotCompiler slotCompiler)
    {
        //super(request, slotCompiler) ;
        this.request = request ;
        this.slotCompiler = slotCompiler ;
    }

    @Override
    public SlotCompiler getSlotCompiler()
    { return slotCompiler ; }

    //@Override
    @Override
    final
    public SqlNode compile(QuadBlock quads)
    {
        SqlNode sqlNode = slotCompiler.start(quads) ;
        quads = new QuadBlock(quads) ;      // Copy it because it's modified.
        // ---- Stage builder
        SqlStageList sList = new SqlStageList() ;
        // Potential concurrent modification - need to use an explicit index:
        // patternTable.process() may remove quads from the list, in which
        // case the index is deliberately NOT advanced.
        for ( int i = 0 ; i < quads.size() ; )
        {
            Quad q = quads.get(i) ;
            if ( patternTable != null && patternTable.trigger(q) )
            {
                // Removes current quad
                SqlStage stage = patternTable.process(i, quads) ;
                if ( stage != null )
                {
                    // Sanity check: the pattern table must have consumed at
                    // least the triggering quad, or we would loop forever.
                    if ( quads.get(i) == q )
                        throw new SDBInternalError("Pattern table returned a stage but did not remove the first quad") ;
                    sList.add(stage) ;
                    continue ;
                }
            }
            sList.add(new SqlStageBasicQuad(q)) ;
            i++ ;
        }
        // ---- and now turn the stages into SqlNodes
        SqlNode sqlStages = sList.build(request, slotCompiler) ;
        // --- Join the initial node (constants).
        sqlNode = SqlBuilder.innerJoin(request, sqlNode, sqlStages) ;
        sqlNode = slotCompiler.finish(sqlNode, quads) ;
        // Insert DISTINCT if accessing the RDF merge of all named graphs
        // An RDF Merge is the DISTINCT results of query over the union of all graphs.
        // Or in TransformSDB
        boolean needDistinct = false ;
        // Either it's the uniongraph ...
        if ( quads.getGraphNode().equals(Quad.unionGraph) )
            needDistinct = true ;
        // Or it's the union graph via redirected defaultGraph
        else if ( Quad.isDefaultGraphGenerated(quads.getGraphNode()) &&
                  request.getContext().isTrue(SDB.unionDefaultGraph))
            needDistinct = true ;
        if ( needDistinct )
        {
            // DISTINCT -- over the named variables but not * (which includes the graph node).
            String renameName = request.genId("A") ;
            //sqlNode = SqlRename.view(renameName, sqlNode) ;
            sqlNode = SqlBuilder.view(request, sqlNode) ;
            sqlNode = SqlBuilder.distinct(request, sqlNode) ;
        }
        return sqlNode ;
    }
}
|
import os
import sys

# Make the project directory importable before any Django import runs.
path = '/home/foodcy/foodspark' # use your own PythonAnywhere username here
if path not in sys.path:
    sys.path.append(path)

# Django requires this to locate the project settings module.
os.environ['DJANGO_SETTINGS_MODULE'] = 'foodspark.settings'

from django.core.wsgi import get_wsgi_application
from django.contrib.staticfiles.handlers import StaticFilesHandler

# Configure the application to serve static files
# NOTE(review): StaticFilesHandler is meant for development; in production
# static files are normally served by the web server — confirm intended.
application = StaticFilesHandler(get_wsgi_application())
<reponame>devdbrandy/iReporter<filename>src/middleware/validator.js<gh_stars>1-10
import { Request, Response, NextFunction } from 'express';
import createError from 'http-errors';
import { validationResult } from 'express-validator/check';
// Strip a single trailing "s" to turn a plural route name into its singular form.
const singular = (param) => (param.endsWith('s') ? param.slice(0, -1) : param);
/**
 * Builds an express-validator schema rule for a free-text field:
 * a minimum-length check plus trimming of leading/trailing spaces.
 *
 * Fix: the error message read "atleast" — corrected to "at least" for
 * consistency with validateNameRule and the login/signup messages.
 *
 * @param {string} param - human-readable field name used in the message
 * @param {number} minLength - minimum number of characters required
 * @returns {object} express-validator rule object
 */
const validateTextRule = (param, minLength) => (
  {
    isLength: {
      errorMessage: `${param} should be at least ${minLength} chars long.`,
      options: { min: minLength },
    },
    ltrim: { options: [[' ', '']] },
    rtrim: { options: [[' ', '']] },
  }
);
/**
 * Builds an express-validator schema rule for a name field:
 * alphabetic characters only, minimum 3 chars, trimmed on both sides.
 *
 * @param {string} param - human-readable field name used in the message
 * @returns {object} express-validator rule object
 */
const validateNameRule = (param) => {
  const trim = { options: [[' ', '']] };
  return {
    isAlpha: true,
    isLength: {
      errorMessage: `${param} should be at least 3 chars long.`,
      options: { min: 3 },
    },
    ltrim: trim,
    rtrim: trim,
  };
};
/**
 * Declarative validation schemas keyed by request kind, in the shape
 * consumed by express-validator's schema-checking middleware.
 */
export const validator = {
  // Credentials check for an existing user.
  login: {
    username: {
      isAlphanumeric: {
        errorMessage: 'Username is invalid.',
      },
      isLength: {
        errorMessage: 'Username should be at least 3 chars long.',
        options: { min: 3 },
      },
    },
    password: {
      isLength: {
        errorMessage: 'Password should be at least 6 chars long.',
        options: { min: 6 },
      },
    },
  },
  // New-account registration rules.
  signup: {
    firstname: validateNameRule('First name'),
    lastname: validateNameRule('Last name'),
    username: {
      isAlphanumeric: {
        errorMessage: 'Username is invalid',
      },
      isLength: {
        errorMessage: 'Username should be at least 3 chars long.',
        options: { min: 3 },
      },
      ltrim: { options: [[' ', '']] },
      rtrim: { options: [[' ', '']] },
    },
    othernames: {
      ltrim: { options: [[' ', '']] },
      rtrim: { options: [[' ', '']] },
    },
    phoneNumber: {
      customSanitizer: {
        options: (value, { req }) => {
          const sanitizedValue = value.replace(/\s/g, ''); // remove empty space
          return sanitizedValue;
        },
      },
      isMobilePhone: {
        errorMessage: 'Phone number is invalid.',
      },
    },
    email: {
      errorMessage: 'Provide a valid email address.',
      isEmail: true,
    },
    password: {
      isLength: {
        errorMessage: 'Password should be at least 6 chars long.',
        options: { min: 6 },
      },
      // Password must match the separately submitted confirmation field.
      custom: {
        options: (value, { req, param }) => {
          if (value !== req.body.passwordConfirmation) {
            throw new Error("Passwords do not match: 'passwordConfirmation'.");
          }
          return value;
        },
      },
    },
  },
  // Profile-update rules for an existing user.
  user: {
    firstname: validateNameRule('First name'),
    lastname: validateNameRule('Last name'),
    othernames: {
      ltrim: { options: [[' ', '']] },
      rtrim: { options: [[' ', '']] },
    },
    phoneNumber: {
      isMobilePhone: {
        errorMessage: 'Phone number is invalid.',
      },
      rtrim: { options: [[' ', '-']] },
    },
  },
  // Incident-record creation rules.
  record: {
    location: {
      errorMessage: 'Invalid coordinates value.',
      isLatLong: true,
    },
    title: validateTextRule('Title', 5),
    comment: validateTextRule('Comment', 10),
    media: {
      errorMessage: 'Media collection is required.',
      exists: true,
      isArray: {
        errorMessage: 'Invalid media collection.',
      },
    },
    // Status must be one of the allowed record lifecycle states.
    status: {
      custom: {
        options: (value) => {
          const status = ['draft', 'published', 'under-investigation', 'resolved', 'rejected'];
          if (!status.includes(value)) {
            throw new Error(`Invalid string value: '${value}'. Allowed values: ${status}.`);
          }
          return value;
        },
      },
    },
  },
  // Status-change rules (narrower set than record creation).
  recordStatus: {
    status: {
      custom: {
        options: (value) => {
          const status = ['under-investigation', 'resolved', 'rejected'];
          if (!status.includes(value)) {
            throw new Error(`Invalid string value: '${value}'. Allowed values: ${status}.`);
          }
          return value;
        },
      },
    },
  },
};
/**
 * Middleware that collects express-validator results for the request and
 * forwards a 400 error (carrying the validation errors) when any rule failed.
 *
 * @param {Request} req - Request object
 * @param {Response} res - Response object
 * @param {NextFunction} next - call to next middleware
 * @returns {boolean} result of calling next()
 */
export function validateRequest(req, res, next) {
  const errors = validationResult(req);
  if (errors.isEmpty()) {
    return next();
  }
  return next(createError(400, '', { errors: errors.array() }));
}
/**
 * Middleware that checks the :type route parameter against the allowed
 * record types, 404s on anything else, and stores the singular form of the
 * type on the request for downstream handlers.
 *
 * @param {Request} req - Request object
 * @param {Response} res - Response object
 * @param {NextFunction} next - call to next middleware
 * @returns {NextFunction} returns next()
 */
export function validateType(req, res, next) {
  const allowedTypes = ['red-flags', 'interventions'];
  const { type } = req.params;
  if (!allowedTypes.includes(type)) {
    return next(createError(404, 'Provided route is invalid.'));
  }
  req.type = singular(type);
  return next();
}
/**
 * Middleware that only lets authenticated admin users through; everyone
 * else receives a 403 error.
 *
 * @param {Request} req - Request object
 * @param {Response} res - Response object
 * @param {NextFunction} next - call to next middleware
 * @returns {NextFunction} returns next()
 */
export const isAdmin = (req, res, next) => {
  if (req.user.isAdmin) {
    return next();
  }
  const message = 'Your account is not authorized to access the requested resource.';
  return next(createError(403, message));
};
|
<reponame>ch1huizong/learning
class recorder(auto_object):
    # Number of events recorded so far (also used as the sequence number).
    count = 0
    # NOTE(review): attr(list) presumably gives each instance its own list via
    # the auto_object machinery — confirm against attr()'s semantics; a plain
    # class-level list would be shared across instances.
    events = attr(list)

    def record(self, event):
        """Append (sequence_number, event) to this recorder's event log."""
        self.count += 1
        self.events.append((self.count, event))
|
'use strict';
const { isExclude } = require('../utils');
/**
 * Removes every path marked as excluded (per isExclude) from the swagger
 * document. Mutates the given object and returns it.
 */
const filterPaths = (swagger) => {
  if (swagger.paths) {
    for (const key of Object.keys(swagger.paths)) {
      if (isExclude(swagger.paths[key])) {
        delete swagger.paths[key];
      }
    }
  }
  return swagger;
};
module.exports = filterPaths;
|
# Build a new dict whose values are the originals repeated twice,
# keeping the keys unchanged.
dictionary = {1: 'a', 2: 'b', 3: 'c'}
doubled = {key: value * 2 for key, value in dictionary.items()}
print(doubled)  # {1: 'aa', 2: 'bb', 3: 'cc'}
<gh_stars>0
package com.xiaomo.main;
import java.util.Date;
import org.nutz.dao.Dao;
import org.nutz.dao.util.Daos;
import org.nutz.integration.quartz.NutQuartzCronJobFactory;
import org.nutz.ioc.Ioc;
import org.nutz.mvc.NutConfig;
import org.nutz.mvc.Setup;
import com.xiaomo.main.bean.User;
/**
 * Application setup hook for the Nutz MVC framework: prepares the database
 * schema, seeds the default root user, and starts the Quartz scheduler.
 */
public class MainSetup implements Setup {

    public void init(NutConfig conf) {
        Ioc ioc = conf.getIoc();
        Dao dao = ioc.get(Dao.class);
        // If createTablesInPackage is missing, check the nutz version.
        // Daos.createTablesInPackage(dao, "com.xiaomo.main", false);
        Date date = new Date();
        dao.create(User.class, false);
        Daos.migration(dao, User.class, true, false);
        // Initialize the default root user (only when the table is empty).
        if (dao.count(User.class) == 0) {
            User user = new User();
            user.setName("admin");
            // NOTE(review): "<PASSWORD>" is a scrubbed placeholder — a real
            // (hashed) password must be supplied before deployment.
            user.setPassword("<PASSWORD>");//<PASSWORD>
            user.setSalt("apbb3v");
            user.setType("super");
            user.setCreateTime(date);
            user.setUpdateTime(date);
            dao.insert(user);
        }
        // Fetch NutQuartzCronJobFactory to trigger initialization and start
        // of the scheduled cron jobs.
        ioc.get(NutQuartzCronJobFactory.class);
    }

    public void destroy(NutConfig nc) {
        // TODO Auto-generated method stub
    }
}
|
<reponame>djstaros/qmcpack<filename>nexus/tests/unit/test_optional_dependencies.py
def test_scipy_available():
    """Check that the optional scipy dependency was detected by nexus."""
    import versions
    assert(versions.scipy_available)
#end def test_scipy_available

def test_h5py_available():
    """Check that the optional h5py dependency was detected by nexus."""
    import versions
    assert(versions.h5py_available)
#end def test_h5py_available

def test_matplotlib_available():
    """Check that the optional matplotlib dependency was detected by nexus."""
    import versions
    assert(versions.matplotlib_available)
#end def test_matplotlib_available

def test_pydot_available():
    """Check that the optional pydot dependency was detected by nexus."""
    import versions
    assert(versions.pydot_available)
#end def test_pydot_available

def test_spglib_available():
    """Check that the optional spglib dependency was detected by nexus."""
    import versions
    assert(versions.spglib_available)
#end def test_spglib_available

def test_pycifrw_available():
    """Check that the optional PyCifRW dependency was detected by nexus."""
    import versions
    assert(versions.pycifrw_available)
#end def test_pycifrw_available

def test_seekpath_available():
    """Check that the optional seekpath dependency was detected by nexus."""
    import versions
    assert(versions.seekpath_available)
#end def test_seekpath_available
|
<reponame>glensand/shared_whiteboard<filename>src/network/Stream.h
/* Copyright (C) 2020 - 2021 <NAME> - All Rights Reserved
* You may use, distribute and modify this code under the
* terms of the MIT license.
*
* You should have received a copy of the MIT license with
* this file. If not, please write to: <EMAIL>, or visit : https://github.com/glensand/visual-studio-compatibility
*/
#pragma once
#include <type_traits>
#include <memory>
#include <functional>
namespace Net
{
template <typename T>
using OnReadCallback = std::function<void(T&& object)>;
using OnActionCallback = std::function<void(size_t availableBytes)>;
class Stream
{
public:
    virtual ~Stream() = default;

    /**
     * \brief It tries to asynchronously write to the stream the information accumulated in the stream buffer,
     * it does not block the thread from which it is called; at the end of the operation, it calls the callback function
     * \param onFlushCallback callback function, will be called at the end of the operation
     */
    virtual void FlushAsync(const OnActionCallback& onFlushCallback) = 0;

    /**
     * \brief Tries to synchronously write to the stream the information accumulated in the stream buffer,
     * blocks the thread from which it is called
     */
    virtual void Flush() = 0;

    /**
     * \brief Synchronously checks the connection
     * \return connection condition
     */
    [[nodiscard]] virtual bool IsOpen() const = 0;

    /**
     * \brief Closes the connection; all data still queued is removed completely
     */
    virtual void Close() = 0;

    /**
     * \brief Casts the object to WT, and writes it to the stream as a WT value
     * \tparam T base argument type
     * \tparam WT type to which argument will be cast before writing
     * \param object data to be written
     */
    template <typename WT, typename T>
    void WriteAs(const T& object);

    /**
     * \brief Reads some data as RT, casts it into T and returns it
     * \tparam T required argument type
     * \tparam RT type of the argument as stored in the stream
     * \return argument which was read
     */
    template <typename RT, typename T>
    T ReadAs();

    /**
     * \brief Writes the argument to the stream <as is>; integral types are written
     * byte-wise, any other type must provide a SerializeTo(Stream&) method
     * \tparam T type of the value to write
     * \param object value to write
     */
    template <typename T>
    void Write(const T& object);

    /**
     * \brief Asynchronously writes the argument, then invokes the callback
     * \tparam T type of the value to write
     * \param object value to write (captured by copy for the async operation)
     * \param callback invoked after the write completes
     */
    template <typename T>
    void WriteAsync(const T& object, const std::function<void()>& callback);

    /**
     * \brief Reads an argument from the stream; integral types are read byte-wise,
     * any other type must provide a DeserializeFrom(Stream&) method
     * \tparam T type of the value to read
     * \return the value read
     */
    template <typename T>
    T Read();

    /**
     * \brief Tries to asynchronously read data of the specified type from the stream
     * \tparam T data type; must be integral, or a class type with the DeserializeFrom method implemented
     * \param callback receives the value once it has been read
     */
    template <typename T>
    void ReadAsync(const OnReadCallback<T>& callback);

    /**
     * \brief Writes raw data to the stream
     * \param data pointer to the data block
     * \param count number of bytes to write
     */
    virtual void Write(const void* data, size_t count) = 0;

    /**
     * \brief Reads raw data from the stream
     * \param data destination buffer (must hold at least count bytes)
     * \param count number of bytes to read
     */
    virtual void Read(void* data, size_t count) = 0;

protected:
    /**
     * \brief Asynchronously starts the passed function
     * \param func function to be launched asynchronously
     */
    virtual void LaunchAsync(const std::function<void()>& func) = 0;
};
template <typename WT, typename T>
void Stream::WriteAs(const T& object)
{
    // Narrow/convert to the wire type first, then emit its raw bytes.
    const auto asWireType = static_cast<WT>(object);
    Write(&asWireType, sizeof(asWireType));
}
template <typename RT, typename T>
T Stream::ReadAs()
{
    // Pull the raw wire-typed value, then convert to the requested type.
    RT raw;
    Read(&raw, sizeof(raw));
    return static_cast<T>(raw);
}
template <typename T>
void Stream::Write(const T& object)
{
    // Non-integral types are expected to serialize themselves; integral
    // values are written byte-wise via the raw overload.
    if constexpr (!std::is_integral_v<T>)
    {
        object.SerializeTo(*this);
    }
    else
    {
        Write(&object, sizeof(object));
    }
}
template <typename T>
void Stream::WriteAsync(const T& object, const std::function<void()>& callback)
{
LaunchAsync([=]()
{
Write(object);
callback();
});
}
template <typename T>
T Stream::Read()
{
    T result;
    // Mirror of Write(): non-integral types deserialize themselves,
    // integral values are filled byte-wise via the raw overload.
    if constexpr (!std::is_integral_v<T>)
    {
        result.DeserializeFrom(*this);
    }
    else
    {
        Read(&result, sizeof(result));
    }
    return result;
}
template <typename T>
void Stream::ReadAsync(const OnReadCallback<T>& callback)
{
LaunchAsync([this]()
{
callback(Read<T>);
});
}
using StreamPtr = std::unique_ptr<Stream>;
}
|
<reponame>InsideZhou/southern-quiet
package me.insidezhou.southernquiet.throttle.lua;
import me.insidezhou.southernquiet.throttle.Throttle;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.script.DefaultRedisScript;
import java.util.Collections;
import java.util.List;
/**
* 使用redis lua脚本实现的计数器节流器
*/
public class RedisLuaCountBasedThrottle implements Throttle {

    private final StringRedisTemplate stringRedisTemplate;

    // The script object is immutable once built, so a single shared instance suffices.
    private static final DefaultRedisScript<Boolean> redisScript = new DefaultRedisScript<>(
        LocalResourceUtil.getSource("/lua/RedisLuaCountBasedThrottle.lua"),
        Boolean.class);

    // Single-element key list: the throttle name is the Redis key the script operates on.
    private final List<String> keys;

    /**
     * Creates a count-based throttle backed by a Redis Lua script.
     *
     * @param stringRedisTemplate template used to execute the Lua script
     * @param throttleName        Redis key identifying this throttle
     */
    public RedisLuaCountBasedThrottle(StringRedisTemplate stringRedisTemplate, String throttleName) {
        this.stringRedisTemplate = stringRedisTemplate;
        this.keys = Collections.singletonList(throttleName);
    }

    /**
     * Opens the throttle based on a count: since the last successful open, at least the
     * given number of attempts must have been throttled before it can open again.
     *
     * @param threshold number of throttled attempts required before reopening
     * @return true if the throttle opened, false if the attempt was throttled
     */
    @Override
    public boolean open(long threshold) {
        Boolean execute = stringRedisTemplate.execute(redisScript, keys, String.valueOf(threshold));
        // execute() may return null (e.g. when invoked inside a pipeline/transaction);
        // Boolean.TRUE.equals() treats that as "throttled" without an unboxing NPE.
        return Boolean.TRUE.equals(execute);
    }
}
|
<filename>src/main/java/com/topcoder/api/entities/User.java<gh_stars>0
/**
* Copyright (c) 2019 TopCoder, Inc. All rights reserved.
*/
package com.topcoder.api.entities;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.util.HashSet;
import java.util.Set;
/**
* The user entity, it is annotated with JPA @Entity and mapped with the "users" database table.
* It extends IdentifiableEntity and additionally defines the 'handle' field and provides a getter and a setter for it.
*
* @author TCSCODER
* @version 1.0
*/
@Entity
@Table(name = "users")
@Getter
@Setter
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
public class User extends IdentifiableEntity {

    /**
     * The user handle.
     */
    @NotNull
    @Column(unique = true)
    private String handle;

    /**
     * The department this user belongs to; required and eagerly fetched.
     * "users" is ignored during JSON serialization to break the User <-> Department cycle.
     */
    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "department_id", nullable = false)
    @JsonIgnoreProperties("users")
    private Department department;

    /**
     * The roles assigned to this user, mapped through the user_role join table.
     */
    @ManyToMany(cascade = CascadeType.ALL)
    @JoinTable(
        name = "user_role",
        joinColumns = @JoinColumn(name = "user_id"),
        inverseJoinColumns = @JoinColumn(name = "role_id"))
    @JsonIgnoreProperties("users")
    private Set<Role> roles = new HashSet<>();

    /**
     * Adds a role to this user and updates the inverse side of the association.
     * NOTE(review): this delegates to role.addRole(this); confirm that Role.addRole(User)
     * maintains the inverse collection and does not call back into this method (which
     * would recurse infinitely).
     */
    public void addRole(Role role) {
        this.roles.add(role);
        role.addRole(this);
    }
}
|
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.comments;
import ideal.library.elements.*;
import ideal.library.texts.*;
import ideal.runtime.elements.*;
// This can be either a comment or whitespace.
public class comment extends debuggable implements deeply_immutable_data {

    // Whether this is a comment or whitespace (and which flavor).
    public final comment_type type;

    // The content of the comment; excludes the comment delimiters
    public final string content;

    // The image of the content; includes the delimiters
    public final string image;

    // All three parts are required; instances are deeply immutable.
    public comment(comment_type type, string content, string image) {
        assert type != null;
        assert content != null;
        assert image != null;
        this.type = type;
        this.content = content;
        this.image = image;
    }
}
|
var Promise = require('bluebird');
/**
 * Inserts the given rows into `table`, first resolving any 'lookup'-typed
 * column values against the table's foreign-key linkage.
 *
 * @param table    name of the target table (key into this.foreignLinkage)
 * @param options  array of row objects; a value of shape {type: 'lookup', ...}
 *                 is completed with table/field info before insertion
 * @param callback optional node-style callback (also returns a promise)
 */
function insert(table, options, callback) {
  // FIX: the original indexed options[option] / foreignLinkage[option] with
  // the whole Object.keys() ARRAY instead of the current key, so linkage was
  // never actually filled in.
  var self = this;
  var foreignLinkage = this.foreignLinkage[table];
  for (var o = 0; o < options.length; ++o) {
    var keys = Object.keys(options[o]);
    for (var i = 0; i < keys.length; ++i) {
      var key = keys[i];
      var value = options[o][key];
      // typeof null === 'object', so guard against null before reading .type
      if (value !== null && typeof value === 'object' && value.type === 'lookup') {
        if (!value.table) {
          if (foreignLinkage && foreignLinkage[key]) {
            value.table = foreignLinkage[key].table;
            value.field = foreignLinkage[key].field;
          }
          else {
            return Promise.reject('missing foreign key linkage!').
              nodeify(callback);
          }
        }
      }
    }
  }
  // FIX: the .then() callback previously ran with `this` unbound, and the two
  // intermediate .catch(callback) calls could invoke the callback more than
  // once; .nodeify alone reports both success and failure exactly once.
  return lookup.call(this, options, 0)
    .then(function() {
      return self.driver.massInsert(options);
    })
    .nodeify(callback);
}
/**
 * Resolves every 'lookup'-typed column value in `options` via this.driver.lookup.
 * Individual lookup failures are logged and ignored (best-effort, as before).
 *
 * @param options  array of row objects to scan
 * @param i        starting key offset within the first row (subsequent rows
 *                 start from their first key)
 * @param callback optional node-style callback (also returns a promise)
 */
function lookup(options, i, callback) {
  // FIX: the original indexed options[option] with the whole key array (always
  // undefined) and recursed through a misspelled `loockup`, so no lookup was
  // ever performed. This version scans the rows directly and resolves all
  // pending lookups in parallel.
  var self = this;
  var lookups = [];
  for (var o = 0; o < options.length; ++o) {
    var keys = Object.keys(options[o]);
    for (; i < keys.length; ++i) {
      var value = options[o][keys[i]];
      if (value !== null && typeof value === 'object' && value.type === 'lookup') {
        lookups.push(self.driver.lookup(value)
          .catch(function(err) {
            console.log(err);
          }));
      }
    }
    // NOTE(review): the offset `i` is interpreted as applying to the first row
    // only; every following row is scanned from its first key.
    i = 0;
  }
  return Promise.all(lookups).nodeify(callback);
}
|
import {
  checkNpmVersions
} from 'meteor/tmeasday:check-npm-versions';

// Warn at startup if the host app's installed npm dependencies do not satisfy
// the version ranges this Meteor package ('steedos:app-workflow') relies on.
checkNpmVersions({
  cookies: "^0.6.2",
  mkdirp: "^0.3.5",
  eval: ">=0.1.2",
}, 'steedos:app-workflow');
#!/bin/bash
#
# $1: path to minimal example binaries...
# if lws is built with -DLWS_WITH_MINIMAL_EXAMPLES=1
# that will be ./bin from your build dir
#
# $2: path for logs and results. The results will go
# in a subdir named after the directory this script
# is in
#
# $3: offset for test index count
#
# $4: total test count
#
# $5: path to ./minimal-examples dir in lws
#
# Test return code 0: OK, 254: timed out, other: error indication
# Shared helpers: provides dotest, spawn (sets $SPID) and the $FAILS counter.
. $5/selftests-library.sh

COUNT_TESTS=22

# Remote tests against warmcat.com: h2 and --h1 variants, optionally
# pipelined (-p) and/or staggered (-s).
dotest $1 $2 warmcat
dotest $1 $2 warmcat-pipe -p
dotest $1 $2 warmcat-h1 --h1
dotest $1 $2 warmcat-h1-pipe --h1 -p
dotest $1 $2 warmcat-stag -s
dotest $1 $2 warmcat-pipe-stag -p -s
dotest $1 $2 warmcat-h1-stag --h1 -s
dotest $1 $2 warmcat-h1-pipe-stag --h1 -p -s
dotest $1 $2 warmcat-post --post
dotest $1 $2 warmcat-post-pipe --post -p
dotest $1 $2 warmcat-post-pipe-stag --post -p -s
dotest $1 $2 warmcat-h1-post --post --h1
dotest $1 $2 warmcat-h1-post-pipe --post --h1 -p
dotest $1 $2 warmcat-h1-post-pipe-stag --post --h1 -p -s

# Local tests: each spawn restarts the minimal tls server (killing the
# previous instance identified by $SPID), then runs one client variant.
spawn "" $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost -l
spawn $SPID $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost-pipe -l -p
spawn $SPID $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost-h1 -l --h1
spawn $SPID $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost-h1-pipe -l --h1 -p
spawn $SPID $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost-stag -l -s
spawn $SPID $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost-pipe-stag -l -p -s
spawn $SPID $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost-h1-stag -l --h1 -s
spawn $SPID $5/http-server/minimal-http-server-tls $1/lws-minimal-http-server-tls
dotest $1 $2 localhost-h1-pipe-stag -l --h1 -p -s

# Clean up the last spawned server and report the accumulated failure count.
kill $SPID 2>/dev/null
wait $SPID 2>/dev/null

exit $FAILS
|
<filename>src/main/java/com/testvagrant/ekam/commons/remote/constants/Hub.java<gh_stars>1-10
package com.testvagrant.ekam.commons.remote.constants;
/**
 * Identifiers for the supported remote device-cloud / hub providers.
 * Pure constants holder — not meant to be instantiated.
 */
public class Hub {
    public static final String P_CLOUDY = "pcloudy";
    public static final String QUALITY_KIOSK = "qualitykiosk";
    public static final String BROWSERSTACK = "browserstack";
    public static final String KOBITON = "kobiton";
    public static final String SAUCE_LABS = "saucelabs";
    public static final String PERFECTO = "perfecto";

    // Utility class: suppress the implicit public constructor.
    private Hub() {
    }
}
|
def search_keyword(data_file, keyword):
    """Return whether `keyword` occurs anywhere in the given file.

    Args:
        data_file (str): name of the file containing the data to be searched
        keyword (str): the substring to look for

    Returns:
        bool: True if the keyword is found on any line, False otherwise.
    """
    with open(data_file) as handle:
        # any() short-circuits on the first matching line, just like the
        # original early-return loop.
        return any(keyword in line for line in handle)
<filename>examples/src/components/SearchPanel.tsx
import { Component } from 'react';
import * as React from 'react';
import { BasePanel, IBasePanelProps } from './BasePanel';
import { GithubRetrievalError } from '../githubTraversal';
export interface ISearchPanelProps extends IBasePanelProps {
    // Applies the given Github login; awaited by searchLogin and expected to
    // reject with GithubRetrievalError when the account cannot be retrieved.
    updateGithubLogin: (login: string) => void
}

export interface ISearchPanelState {
    // Last retrieval failure, or undefined when the previous search succeeded.
    error: Error | undefined
}

/**
 * Panel with a single text input used to look up a Github account by login.
 * Displays an inline error message when the account cannot be found.
 */
export class SearchPanel extends Component<ISearchPanelProps, ISearchPanelState> {

    // Reads the login from the #loginInput field and asks the parent to load it.
    async searchLogin() {
        try {
            await this.props.updateGithubLogin((document.getElementById('loginInput') as HTMLInputElement).value);
            // Clear any stale error once the lookup succeeds.
            this.setState({ ...this.state, error: undefined });
        } catch (error) {
            // Only retrieval failures are surfaced in the UI; everything is logged.
            if (error instanceof GithubRetrievalError) {
                this.setState({ ...this.state, error: error });
            }
            console.error(error);
        }
    }

    render() {
        return <BasePanel hide={this.props.hide}>
            <div id="searchPanel">
                <form>
                    <input type="text" id="loginInput" placeholder="Github pseudo" />
                    <input type="button" onClick={this.searchLogin.bind(this)} value="Search" />
                </form>
                {
                    this.state.error instanceof GithubRetrievalError ?
                        <p className="error">{"Can't find github account for pseudo "}<i>{this.state.error.login}</i></p> :
                        null
                }
            </div>
        </BasePanel>
    }

    readonly state: ISearchPanelState = {
        error: undefined
    };
}
package com.zetatwo
import scala.annotation.tailrec
object Day03 {

  // (x, y) position on the spiral memory grid; tile 1 sits at the origin.
  type Coordinate = (Int, Int)

  def main(args: Array[String]): Unit = {
    val input: Int = io.StdIn.readLine().trim.toInt
    printf("Result 1: %d", distance(input))
    printf("Result 2: %d", sums(input))
  }

  // For a tile index, returns (sidelength, startindex) of the square "ring"
  // containing it: the ring's side length and the index of its first tile.
  def layerparams(tileindex: Int): (Int, Int) = {
    tileindex match {
      case 1 => (1, 1)
      case _ =>
        // The largest odd number <= sqrt(tileindex) is the side of the last
        // completed ring; the containing ring is two tiles wider.
        val layerapprox = math.sqrt(tileindex).toInt
        val sidelength = if (layerapprox % 2 == 1) layerapprox else layerapprox - 1
        (sidelength + 2, sidelength * sidelength + 1)
    }
  }

  // Manhattan distance to the centre for a tile `start` steps along a side of
  // a ring with the given side length (half the side out, plus the offset
  // from the side's midpoint).
  def sidedistance(start: Int, sidelength: Int): Int = {
    math.abs((sidelength-1)/2) + math.abs(start - (sidelength-1)/2)
  }

  // Distance for a tile given its ring's side length and first index.
  def distance(tileindex: Int, sidelength: Int, startidx: Int): Int = {
    val offset = tileindex - startidx + 1
    val sideoffset = sidelength - 1
    sidedistance(offset % sideoffset, sidelength)
  }

  // Part 1: Manhattan distance from tile `input` back to tile 1.
  def distance(input: Int): Int = {
    input match {
      case 1 => 0
      case _ =>
        val (sidelength, startidx) = layerparams(input)
        distance(input, sidelength, startidx)
    }
  }

  // One step of the spiral walk (right, up, left, down around each ring);
  // corners are handled first, then the four sides.
  def nextcoord(coord: Coordinate, sidelength: Int): Coordinate = {
    val r = (sidelength-1)/2
    coord match {
      case (0, 0) => (1, 0)
      case (x, y) if (x == y) && (y == r) => (x-1, y) // Top right
      case (x, y) if (-x == y) && (y == r) => (x, y - 1) // Top left
      case (x, y) if (-x == -y) && (-y == r) => (x + 1, y) // Bottom left
      case (x, y) if (x == -y) && (-y == r) => (x + 1, y) // Bottom right
      case (x, y) if x == r => (x, y + 1) // Right side
      case (x, y) if -x == r => (x, y - 1) // Left side
      case (x, y) if y == r => (x - 1, y) // Top side
      case (x, y) if -y == r => (x + 1, y) // Bottom side
    }
  }

  // Part 2 cell value: sum of all already-filled neighbours (8-neighbourhood).
  def nextval(newcoord: Coordinate, cells: Map[Coordinate, Int]): Int = {
    val (x, y) = newcoord
    cells.getOrElse( (x + 1, y ), 0) +
    cells.getOrElse((x + 1, y + 1), 0) +
    cells.getOrElse((x , y + 1), 0) +
    cells.getOrElse((x - 1, y + 1), 0) +
    cells.getOrElse((x - 1, y ), 0) +
    cells.getOrElse((x - 1, y - 1), 0) +
    cells.getOrElse((x , y - 1), 0) +
    cells.getOrElse((x + 1, y - 1), 0)
  }

  // Widen the ring when the walk reaches the first tile of a new ring
  // (x > 0 on the diagonal where x + y - 1 == 0).
  def nextsidelength(newcoord: Coordinate, sidelength: Int): Int = {
    val (x, y) = newcoord
    if (x > 0 && x+y-1 == 0) 2*x+1 else sidelength
  }

  // Part 2: walk the spiral writing neighbour-sums; return the first value
  // strictly greater than `input`.
  def sums(input: Int): Int = {
    @tailrec
    def loop(limit: Int, cells: Map[Coordinate, Int], coord: Coordinate, sidelength: Int): Int = {
      val newcoord = nextcoord(coord, sidelength)
      val newval = nextval(newcoord, cells)
      val newsidelength = nextsidelength(newcoord, sidelength)
      if (newval > limit)
        newval
      else
        loop(limit, cells + (newcoord -> newval), newcoord, newsidelength)
    }
    loop(input, Map((0,0) -> 1), (0,0), 1)
  }
}
|
#!/bin/bash
#PBS -N uparse_closed
#PBS -S /bin/bash
#PBS -q UCTlong
#PBS -l nodes=1:series600:ppn=4
#PBS -V
#PBS -M email@adress
#PBS -d /specify/directory
source /home/kviljoen/activate_qiime.sh #change as appropriate

inDir=/input_directory/uparse_downstream/ #specify input directory
outDir=/output_directory/ #specify output directory

# Map the de novo OTU representative sequences onto Greengenes 13_8 97% OTUs
# (closed-reference assignment at 97% identity, forward strand only).
usearch9 -usearch_global $inDir/otus_repsetOUT.fa -db /scratch/DB/bio/qiime/greengenes/gg_13_8_otus/rep_set/97_otus.fasta -id 0.97 -strand plus -uc $outDir/de_novo_repset_to_GG_13_8_map.uc

#NOTE1: now download the resulting .uc file which maps de novo IDs to GG IDs and OTU rownames with GG IDs (for those that map) in R
#NOTE2: next upload the .txt filtered otu table with GG IDs

#convert uploaded otu .txt table to .biom format - the output will be used for PICRUSt
# NOTE(review): newer biom-format releases expect --table-type="OTU table"
# (capitalised); confirm the installed biom version accepts "otu table".
biom convert -i $outDir/GG_13_8_closed_reference_OTU_table_from_de_novo_pipeline.txt -o $outDir/GG_13_8_closed_reference_OTU_table_from_de_novo_pipeline.biom --table-type="otu table"
#
#summarize .biom otu table
biom summarize-table -i $outDir/GG_13_8_closed_reference_OTU_table_from_de_novo_pipeline.biom -o $outDir/GG_13_8_closed_reference_OTU_table_from_de_novo_pipeline.biom.summary
|
import React from "react";
import { API_HOST } from "react-native-dotenv";
import {
AsyncStorage,
View,
Text,
TouchableNativeFeedback,
StyleSheet
} from "react-native";
import axios from "../utils/axios";
// Screen showing the user's answer statistics; tapping a row opens the
// matching record list ("Record" route) filtered by answer type.
export default class StatScreen extends React.Component {
  static navigationOptions = {
    title: "ESTADISTICAS"
  };

  constructor(props) {
    super(props);
    // The stats object is handed over through react-navigation params.
    const stat = props.navigation.getParam("stat");
    this.state = {
      stat: stat
    };
  }

  render() {
    const { stat } = this.state;
    return (
      <View style={styles.container}>
        <Text style={styles.developmentModeText}>Tus Estadisticas</Text>
        <TouchableNativeFeedback
          onPress={this.handleType("ALL")}>
          <Text style={styles.developmentModeText}>
            Respuestas Totales {stat.cant_total_questions}
          </Text>
        </TouchableNativeFeedback>
        <TouchableNativeFeedback
          onPress={this.handleType("CORRECT")}>
          <Text style={styles.developmentModeText}>
            Correctas {stat.cant_correct_questions}
          </Text>
        </TouchableNativeFeedback>
        <TouchableNativeFeedback
          onPress={this.handleType("INCORRECT")}>
          <Text style={styles.developmentModeText}>
            Incorrectas {stat.cant_incorrect_questions}
          </Text>
        </TouchableNativeFeedback>
        <TouchableNativeFeedback
          onPress={this.handleType("UNKNOW")}>
          <Text style={styles.developmentModeText}>
            No respondidas {stat.cant_unknown_questions}
          </Text>
        </TouchableNativeFeedback>
      </View>
    );
  }

  // Returns an async press handler for the given record type
  // ("ALL" | "CORRECT" | "INCORRECT" | "UNKNOW").
  handleType = type => async () => {
    // NOTE(review): this wipes *all* of AsyncStorage before fetching records --
    // confirm the broad clear() (rather than removing specific keys) is intentional.
    await AsyncStorage.clear();
    axios.get("/record/" + type).then(response => {
      let options = response.data;
      this.props.navigation.navigate("Record", { options });
    });
  };
}
// Screen styles: vertically centered column of bold, grey, centered stat rows.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    marginVertical: 20,
    justifyContent: "center"
  },
  developmentModeText: {
    marginBottom: 5,
    color: "rgba(0,0,0,0.4)",
    fontSize: 25,
    lineHeight: 25,
    textAlign: "center",
    fontWeight: "bold"
  }
});
|
<filename>node_modules/react-icons-kit/metrize/slidersVertical.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.slidersVertical = void 0;
var slidersVertical = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M176,328c0,13.25-10.75,24-24,24s-24-10.75-24-24s10.75-24,24-24S176,314.75,176,328z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M176,328c0,13.25-10.75,24-24,24s-24-10.75-24-24s10.75-24,24-24S176,314.75,176,328z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M384,280c0,13.25-10.75,24-24,24s-24-10.75-24-24s10.75-24,24-24S384,266.75,384,280z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M384,280c0,13.25-10.75,24-24,24s-24-10.75-24-24s10.75-24,24-24S384,266.75,384,280z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M240,184c0,13.25-10.75,24-24,24s-24-10.75-24-24s10.75-24,24-24S240,170.75,240,184z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M240,184c0,13.25-10.75,24-24,24s-24-10.75-24-24s10.75-24,24-24S240,170.75,240,184z"
},
"children": []
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M208,223.188V384h16V223.188c-2.594,0.531-5.25,0.812-8,0.812S210.594,223.719,208,223.188z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M208,223.188V384h16V223.188c-2.594,0.531-5.25,0.812-8,0.812S210.594,223.719,208,223.188z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M224,144.812V128h-16v16.812c2.594-0.531,5.25-0.812,8-0.812S221.406,144.281,224,144.812z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M224,144.812V128h-16v16.812c2.594-0.531,5.25-0.812,8-0.812S221.406,144.281,224,144.812z"
},
"children": []
}]
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M352,319.188V384h16v-64.812c-2.594,0.531-5.266,0.812-8,0.812S354.594,319.719,352,319.188z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M352,319.188V384h16v-64.812c-2.594,0.531-5.266,0.812-8,0.812S354.594,319.719,352,319.188z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M368,240.812V128h-16v112.812c2.594-0.531,5.266-0.812,8-0.812S365.406,240.281,368,240.812z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M368,240.812V128h-16v112.812c2.594-0.531,5.266-0.812,8-0.812S365.406,240.281,368,240.812z"
},
"children": []
}]
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M144,367.188V384h16v-16.812c-2.594,0.531-5.25,0.812-8,0.812S146.594,367.719,144,367.188z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M144,367.188V384h16v-16.812c-2.594,0.531-5.25,0.812-8,0.812S146.594,367.719,144,367.188z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M160,288.812V128h-16v160.812c2.594-0.531,5.25-0.812,8-0.812S157.406,288.281,160,288.812z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M160,288.812V128h-16v160.812c2.594-0.531,5.25-0.812,8-0.812S157.406,288.281,160,288.812z"
},
"children": []
}]
}]
}]
}, {
"name": "path",
"attribs": {
"d": "M320,224c0,13.266-10.75,24-24,24s-24-10.734-24-24c0-13.25,10.75-24,24-24S320,210.75,320,224z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M320,224c0,13.266-10.75,24-24,24s-24-10.734-24-24c0-13.25,10.75-24,24-24S320,210.75,320,224z"
},
"children": []
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M304,184.812V128h-16v56.812c2.594-0.531,5.266-0.812,8-0.812S301.406,184.281,304,184.812z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M304,184.812V128h-16v56.812c2.594-0.531,5.266-0.812,8-0.812S301.406,184.281,304,184.812z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M288,263.188V384h16V263.188c-2.594,0.531-5.266,0.812-8,0.812S290.594,263.719,288,263.188z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M288,263.188V384h16V263.188c-2.594,0.531-5.266,0.812-8,0.812S290.594,263.719,288,263.188z"
},
"children": []
}]
}]
}]
}]
}]
};
exports.slidersVertical = slidersVertical; |
import cv2
import math
import pymunk as pm
import numpy as np
from PIL import Image
def merge_dicts(*dicts):
    """Merge the given dictionaries. Key-value pairs in later dictionaries will replace pairs in
    earlier ones.

    :param dicts: any number of dictionaries
    :return: a new dict containing all pairs, later dicts winning on conflicts
    """
    ret = {}
    for d in dicts:
        # FIX: the original used d.iteritems(), which only exists on Python 2;
        # dict.update() is equivalent and works on both 2 and 3.
        ret.update(d)
    return ret
def pil_grid(images, grid_size, margin=0):
    """Create a PIL Image grid of the given images.

    :param images: A sequence of Image objects to tile
    :param grid_size: Grid size (w x h)
    :param margin: How many blank pixels to place between each image
    :return: A new RGB Image with each input pasted into its grid cell
    """
    # Cell size is the maximum width/height over all images so nothing is cropped.
    max_dims = [-1, -1]
    for image in images:
        max_dims[0] = max(image.size[0], max_dims[0])
        max_dims[1] = max(image.size[1], max_dims[1])

    grid_w, grid_h = grid_size
    ret_size = (max_dims[0] * grid_w + margin * (grid_w-1),
                max_dims[1] * grid_h + margin * (grid_h-1))
    ret = Image.new('RGB', ret_size)
    for i, image in enumerate(images):
        grid_x = i % grid_w
        # FIX: use floor division. Under Python 3 the original "/" produced a
        # float row index, and Image.paste() requires integer coordinates
        # ("//" behaves identically on Python 2 for these non-negative ints).
        grid_y = (i - grid_x) // grid_w
        ret.paste(image, (grid_x * (margin + max_dims[0]), grid_y * (margin + max_dims[1])))
    return ret
def tight_crop(image):
    """Produce a tightly-cropped version of the image, adding an alpha channel if needed.

    :param image: PIL image
    :return: PIL image cropped to the bounding box of its non-zero alpha pixels
    """
    rgba = image if image.mode == 'RGBA' else image.convert('RGBA')
    alpha = np.array(rgba)[:, :, 3]
    # Bounding rectangle of every pixel with non-zero alpha.
    x, y, w, h = cv2.boundingRect(cv2.findNonZero(alpha))
    return rgba.crop((x, y, x + w, y + h))
def compute_pm_hull_vertices(image):
    """Get PyMunk vertices that enclose the alpha channel of a given RGBA image."""
    alpha = np.array(image)[:, :, 3]
    hull = cv2.convexHull(cv2.findNonZero(alpha)).squeeze()
    # Image rows count downward while PyMunk's y-axis points up: mirror vertically.
    hull[:, 1] = image.size[1] - hull[:, 1]
    return hull
def create_sine_fn(period, amplitude, x_offset, y_offset):
    """Return f(x) = amplitude * sin(2*pi*(x - x_offset)/period) + y_offset."""
    period = float(period)
    amplitude = float(amplitude)
    x_offset = float(x_offset)
    y_offset = float(y_offset)

    def sine(x):
        return amplitude * math.sin((x - x_offset) * (2 * math.pi / period)) + y_offset

    return sine
def create_triangle_fn(period, amplitude, x_offset, y_offset):
    """Return a triangle-wave function with the given period, amplitude and offsets."""
    p = float(period)
    a = float(amplitude)
    x0 = float(x_offset)
    y0 = float(y_offset)

    def triangle(x):
        # Shift into a window where folding with |fmod(.) - p/2| yields a triangle wave.
        phase = math.fmod(x - x0, p) + 7*p/4
        return 4*a/p * (math.fabs(math.fmod(phase, p) - p/2) - p/4) + y0

    return triangle
def get_closest_axis_vector(v):
    """Map v to whichever of (0, 1), (0, -1), (1, 0), (-1, 0) is closest to v by angle.

    :param v: A PyMunk Vec2d object
    :return: A PyMunk Vec2d object
    """
    # Angle of v measured from the positive x-axis, in degrees (-180, 180].
    angle = math.degrees(np.arctan2(v[1], v[0]))
    if -135 <= angle < -45:
        axis = (0, -1)
    elif -45 <= angle < 45:
        axis = (1, 0)
    elif 45 <= angle < 135:
        axis = (0, 1)
    else:
        axis = (-1, 0)
    return pm.Vec2d(*axis)
#!/bin/bash
# Publish every local tag of the ironclust compiled base image to the registry.
docker push --all-tags spikeinterface/ironclust-compiled-base
|
package com.medex.database;
import java.util.List;
import javax.persistence.Query;
import org.hibernate.Session;
import org.hibernate.Transaction;
import com.medex.model.Pharmacy;
//This class is specifically for the pharmacies database operations
public class PharmacyDB {

    /**
     * Persists a new pharmacy row.
     * On failure the transaction is rolled back and the stack trace is printed
     * (matching the error-handling convention used throughout this class).
     *
     * @param pharmacy the pharmacy to insert
     */
    public void insertPharmacy(Pharmacy pharmacy)
    {
        Transaction transaction = null;
        try (Session session = HibernateUtil.getPharmacySessionFactory().openSession())
        {
            transaction = session.beginTransaction();
            // Hibernate maps the entity to the pharmacies table via its annotations.
            session.save(pharmacy);
            transaction.commit();
        }
        catch (Exception e)
        {
            if (transaction != null)
            {
                transaction.rollback();
            }
            e.printStackTrace();
        }
    }

    /**
     * Inserts or updates the given pharmacy (saveOrUpdate semantics).
     *
     * @param pharmacy the pharmacy to save or update
     */
    public void updatePharmacy(Pharmacy pharmacy)
    {
        Transaction transaction = null;
        try (Session session = HibernateUtil.getPharmacySessionFactory().openSession())
        {
            transaction = session.beginTransaction();
            session.saveOrUpdate(pharmacy);
            transaction.commit();
        }
        catch (Exception e)
        {
            if (transaction != null)
            {
                transaction.rollback();
            }
            e.printStackTrace();
        }
    }

    /**
     * Deletes the pharmacy with the given id.
     *
     * @param id primary key of the pharmacy to delete
     */
    public void deletePharmacy(int id)
    {
        Transaction transaction = null;
        Pharmacy pharmacy = null;
        try (Session session = HibernateUtil.getPharmacySessionFactory().openSession())
        {
            transaction = session.beginTransaction();
            // Load the managed entity first so Hibernate can delete it.
            pharmacy = session.get(Pharmacy.class, id);
            session.delete(pharmacy);
            transaction.commit();
        }
        catch (Exception e)
        {
            if (transaction != null)
            {
                transaction.rollback();
            }
            e.printStackTrace();
        }
    }

    /**
     * Retrieves all pharmacies.
     *
     * @return the list of pharmacies, or null if the query failed
     */
    public List<Pharmacy> getPharmacies()
    {
        Transaction transaction = null;
        List<Pharmacy> pharmacies = null;
        try (Session session = HibernateUtil.getPharmacySessionFactory().openSession())
        {
            transaction = session.beginTransaction();
            // Typed HQL query: each returned row is a Pharmacy instance.
            pharmacies = session.createQuery("from Pharmacy", Pharmacy.class).list();
            transaction.commit();
        }
        catch (Exception e)
        {
            // FIX: every sibling method rolls back and logs on failure; this one
            // previously let the exception escape with the transaction left open.
            if (transaction != null)
            {
                transaction.rollback();
            }
            e.printStackTrace();
        }
        return pharmacies;
    }

    /**
     * Retrieves a single pharmacy by id.
     *
     * @param id primary key of the pharmacy
     * @return the pharmacy, or null if not found or on error
     */
    public Pharmacy getPharmacy(int id)
    {
        Transaction transaction = null;
        Pharmacy pharmacy = null;
        try (Session session = HibernateUtil.getPharmacySessionFactory().openSession())
        {
            transaction = session.beginTransaction();
            // Typed query avoids the raw Query/List and the explicit cast.
            List<Pharmacy> results = session
                .createQuery(" FROM Pharmacy H WHERE H.id = :id", Pharmacy.class)
                .setParameter("id", id)
                .getResultList();
            // id is unique, so the list holds at most one element.
            if (results != null && !results.isEmpty())
            {
                pharmacy = results.get(0);
            }
            transaction.commit();
        }
        catch (Exception e)
        {
            if (transaction != null)
            {
                transaction.rollback();
            }
            e.printStackTrace();
        }
        return pharmacy;
    }

    /**
     * Retrieves the pharmacy matching the given credentials.
     * NOTE(review): this compares the password directly in the query, which only
     * works if passwords are stored in the same form they are submitted.
     *
     * @param username login name
     * @param password login password
     * @return the matching pharmacy, or null if none matched or on error
     */
    public Pharmacy getPharmacyLogin(String username, String password)
    {
        Transaction transaction = null;
        Pharmacy pharmacy = null;
        try (Session session = HibernateUtil.getPharmacySessionFactory().openSession())
        {
            transaction = session.beginTransaction();
            List<Pharmacy> results = session
                .createQuery("FROM Pharmacy H WHERE H.username = :username and H.password = :password", Pharmacy.class)
                .setParameter("username", username)
                .setParameter("password", password)
                .getResultList();
            if (results != null && !results.isEmpty())
            {
                pharmacy = results.get(0);
            }
            transaction.commit();
        }
        catch (Exception e)
        {
            if (transaction != null)
            {
                transaction.rollback();
            }
            e.printStackTrace();
        }
        return pharmacy;
    }
}
|
/*
* Copyright 2017 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rivieracode.lib.vault;
import java.security.NoSuchAlgorithmException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Shows the reference implementations for the SHA512Hasher.
*
* @author <NAME>
*/
public class SHA512HasherTest {

    /** Creates the test fixture. */
    public SHA512HasherTest() {
    }

    /** Runs once before the first test of this class. */
    @BeforeClass
    public static void setUpClass() {
    }

    /** Runs once after the last test of this class. */
    @AfterClass
    public static void tearDownClass() {
    }

    /** Runs before each individual test. */
    @Before
    public void setUp() {
    }

    /** Runs after each individual test. */
    @After
    public void tearDown() {
    }

    /**
     * The hasher must be one-way and verifiable: the digest differs from the
     * plaintext, a wrong guess is rejected, and the original is accepted.
     *
     * @throws java.security.NoSuchAlgorithmException
     */
    @Test
    public void testHash512() throws Exception {
        final String plaintext = "<PASSWORD>";
        final SHA512Hasher sha512 = new SHA512Hasher();
        final String digest = sha512.hash(plaintext);
        assertNotEquals(plaintext, digest);
        assertFalse(sha512.check("WrongPassword", digest));
        assertTrue(sha512.check(plaintext, digest));
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve the directory containing this script so it can be run from anywhere.
COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR
# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"
# Bring up the docker test environment with 4 datanodes.
start_docker_env 4
#Due to the limitation of the current auditparser test, it should be the
#first test in a clean cluster.
execute_robot_test om auditparser
execute_robot_test scm basic/basic.robot
# Tear down the environment and produce the combined robot report.
stop_docker_env
generate_report
|
# Directory-stack shortcuts.
alias pu='pushd'
alias po='popd'
# Quick upward / previous-directory navigation.
alias ...='cd ../..'
alias -- -='cd -'
# Privilege-escalation shorthands.
alias _='sudo'
alias please='sudo'
#alias g='grep -in'
# Choose the 'history' timestamp format from $HIST_STAMPS (fc flags:
# -f = mm/dd/yyyy, -E = dd.mm.yyyy, -i = yyyy-mm-dd, plain -l = no stamps).
case "$HIST_STAMPS" in
    "mm/dd/yyyy") alias history='fc -fl 1' ;;
    "dd.mm.yyyy") alias history='fc -El 1' ;;
    "yyyy-mm-dd") alias history='fc -il 1' ;;
    *)            alias history='fc -l 1' ;;
esac
# Directory listings.
alias lsa='ls -lah'
alias l='ls -lah'
alias ll='ls -lh'
alias la='ls -lAh'
# Misc tool shortcuts.
alias afind='ack-grep -il'
alias subl='/Applications/Sublime\ Text.app/Contents/SharedSupport/bin/subl'
alias jsc='/System/Library/Frameworks/JavaScriptCore.framework/Versions/Current/Resources/jsc'
# git shortcuts.
alias gl='git pull'
alias gp='git push'
alias gd='git diff'
alias gc='git commit'
alias gca='git commit -a'
alias gco='git checkout'
alias gb='git branch'
alias gs='git status'
# Remove files git reports as deleted from the index.
alias grm="git status | grep deleted | awk '{print \$3}' | xargs git rm"
# Show commits since the last CHANGELOG update, then the changelog itself.
alias changelog='git log `git log -1 --format=%H -- CHANGELOG*`..; cat CHANGELOG*'
|
/**
* Signature/interface for an `Offset` object
* @see https://developer.apple.com/documentation/apple_news/offset
*/
export interface Offset {
  /** Horizontal offset component. */
  x: number;
  /** Vertical offset component. */
  y: number;
}
|
#!/usr/bin/env bash
. "test/test-migrate-fixtures.sh"
. "test/testlib.sh"
begin_test "migrate info (default branch)"
(
set -e
setup_multiple_local_branches
original_head="$(git rev-parse HEAD)"
diff -u <(git lfs migrate info 2>&1 | tail -n 2) <(cat <<-EOF
*.md 140 B 1/1 files(s) 100%
*.txt 120 B 1/1 files(s) 100%
EOF)
migrated_head="$(git rev-parse HEAD)"
assert_ref_unmoved "HEAD" "$original_head" "$migrated_head"
)
end_test
begin_test "migrate info (bare repository)"
(
set -e
setup_multiple_remote_branches
git lfs migrate info --everything
)
end_test
begin_test "migrate info (given branch)"
(
set -e
setup_multiple_local_branches
original_master="$(git rev-parse refs/heads/master)"
original_feature="$(git rev-parse refs/heads/my-feature)"
diff -u <(git lfs migrate info my-feature 2>&1 | tail -n 2) <(cat <<-EOF
*.md 170 B 2/2 files(s) 100%
*.txt 120 B 1/1 files(s) 100%
EOF)
migrated_master="$(git rev-parse refs/heads/master)"
migrated_feature="$(git rev-parse refs/heads/my-feature)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/heads/my-feature" "$original_feature" "$migrated_feature"
)
end_test
begin_test "migrate info (default branch with filter)"
(
set -e
setup_multiple_local_branches
original_head="$(git rev-parse HEAD)"
diff -u <(git lfs migrate info --include "*.md" 2>&1 | tail -n 1) <(cat <<-EOF
*.md 140 B 1/1 files(s) 100%
EOF)
migrated_head="$(git rev-parse HEAD)"
assert_ref_unmoved "refs/heads/master" "$original_head" "$migrated_head"
)
end_test
begin_test "migrate info (given branch with filter)"
(
set -e
setup_multiple_local_branches
original_master="$(git rev-parse refs/heads/master)"
original_feature="$(git rev-parse refs/heads/my-feature)"
diff -u <(git lfs migrate info --include "*.md" my-feature 2>&1 | tail -n 1) <(cat <<-EOF
*.md 170 B 2/2 files(s) 100%
EOF)
migrated_master="$(git rev-parse refs/heads/master)"
migrated_feature="$(git rev-parse refs/heads/my-feature)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/heads/my-feature" "$original_feature" "$migrated_feature"
)
end_test
begin_test "migrate info (default branch, exclude remote refs)"
(
set -e
setup_single_remote_branch
git show-ref
original_remote="$(git rev-parse refs/remotes/origin/master)"
original_master="$(git rev-parse refs/heads/master)"
diff -u <(git lfs migrate info 2>&1 | tail -n 2) <(cat <<-EOF
*.md 50 B 1/1 files(s) 100%
*.txt 30 B 1/1 files(s) 100%
EOF)
migrated_remote="$(git rev-parse refs/remotes/origin/master)"
migrated_master="$(git rev-parse refs/heads/master)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/remotes/origin/master" "$original_remote" "$migrated_remote"
)
end_test
begin_test "migrate info (given branch, exclude remote refs)"
(
set -e
setup_multiple_remote_branches
original_remote="$(git rev-parse refs/remotes/origin/master)"
original_master="$(git rev-parse refs/heads/master)"
original_feature="$(git rev-parse refs/heads/my-feature)"
diff -u <(git lfs migrate info my-feature 2>&1 | tail -n 2) <(cat <<-EOF
*.md 52 B 2/2 files(s) 100%
*.txt 50 B 2/2 files(s) 100%
EOF)
migrated_remote="$(git rev-parse refs/remotes/origin/master)"
migrated_master="$(git rev-parse refs/heads/master)"
migrated_feature="$(git rev-parse refs/heads/my-feature)"
assert_ref_unmoved "refs/remotes/origin/master" "$original_remote" "$migrated_remote"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/heads/my-feature" "$original_feature" "$migrated_feature"
)
end_test
begin_test "migrate info (given ref, --skip-fetch)"
(
set -e
setup_single_remote_branch
original_remote="$(git rev-parse refs/remotes/origin/master)"
original_master="$(git rev-parse refs/heads/master)"
git tag pseudo-remote "$original_remote"
# Remove the refs/remotes/origin/master ref, and instruct 'git lfs migrate' to
# not fetch it.
git update-ref -d refs/remotes/origin/master
diff -u <(git lfs migrate info --skip-fetch 2>&1 | tail -n 2) <(cat <<-EOF
*.md 190 B 2/2 files(s) 100%
*.txt 150 B 2/2 files(s) 100%
EOF)
migrated_remote="$(git rev-parse pseudo-remote)"
migrated_master="$(git rev-parse refs/heads/master)"
assert_ref_unmoved "refs/remotes/origin/master" "$original_remote" "$migrated_remote"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
)
end_test
begin_test "migrate info (include/exclude ref)"
(
set -e
setup_multiple_remote_branches
original_master="$(git rev-parse refs/heads/master)"
original_feature="$(git rev-parse refs/heads/my-feature)"
diff -u <(git lfs migrate info \
--include-ref=refs/heads/my-feature \
--exclude-ref=refs/heads/master 2>&1 | tail -n 2) <(cat <<-EOF
*.md 31 B 1/1 files(s) 100%
*.txt 30 B 1/1 files(s) 100%
EOF)
migrated_master="$(git rev-parse refs/heads/master)"
migrated_feature="$(git rev-parse refs/heads/my-feature)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/heads/my-feature" "$original_feature" "$migrated_feature"
)
end_test
begin_test "migrate info (include/exclude ref args)"
(
set -e
setup_multiple_remote_branches
original_master="$(git rev-parse refs/heads/master)"
original_feature="$(git rev-parse refs/heads/my-feature)"
diff -u <(git lfs migrate info \
my-feature ^master 2>&1 | tail -n 2) <(cat <<-EOF
*.md 31 B 1/1 files(s) 100%
*.txt 30 B 1/1 files(s) 100%
EOF)
migrated_master="$(git rev-parse refs/heads/master)"
migrated_feature="$(git rev-parse refs/heads/my-feature)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/heads/my-feature" "$original_feature" "$migrated_feature"
)
end_test
begin_test "migrate info (include/exclude ref with filter)"
(
set -e
setup_multiple_remote_branches
original_master="$(git rev-parse refs/heads/master)"
original_feature="$(git rev-parse refs/heads/my-feature)"
diff -u <(git lfs migrate info \
--include="*.txt" \
--include-ref=refs/heads/my-feature \
--exclude-ref=refs/heads/master 2>&1 | tail -n 1) <(cat <<-EOF
*.txt 30 B 1/1 files(s) 100%
EOF)
migrated_master="$(git rev-parse refs/heads/master)"
migrated_feature="$(git rev-parse refs/heads/my-feature)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/heads/my-feature" "$original_feature" "$migrated_feature"
)
end_test
begin_test "migrate info (nested sub-trees, no filter)"
(
set -e
setup_single_local_branch_deep_trees
original_master="$(git rev-parse refs/heads/master)"
diff -u <(git lfs migrate info 2>/dev/null) <(cat <<-EOF
*.txt 120 B 1/1 files(s) 100%
EOF)
migrated_master="$(git rev-parse refs/heads/master)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
)
end_test
begin_test "migrate info (above threshold)"
(
set -e
setup_multiple_local_branches
original_head="$(git rev-parse HEAD)"
diff -u <(git lfs migrate info --above=130B 2>&1 | tail -n 1) <(cat <<-EOF
*.md 140 B 1/1 files(s) 100%
EOF)
migrated_head="$(git rev-parse HEAD)"
assert_ref_unmoved "HEAD" "$original_head" "$migrated_head"
)
end_test
begin_test "migrate info (above threshold, top)"
(
set -e
setup_multiple_local_branches
original_head="$(git rev-parse HEAD)"
diff -u <(git lfs migrate info --above=130B --top=1 2>&1 | tail -n 1) <(cat <<-EOF
*.md 140 B 1/1 files(s) 100%
EOF)
migrated_head="$(git rev-parse HEAD)"
assert_ref_unmoved "HEAD" "$original_head" "$migrated_head"
)
end_test
begin_test "migrate info (given unit)"
(
set -e
setup_multiple_local_branches
original_head="$(git rev-parse HEAD)"
diff -u <(git lfs migrate info --unit=kb 2>&1 | tail -n 2) <(cat <<-EOF
*.md 0.1 1/1 files(s) 100%
*.txt 0.1 1/1 files(s) 100%
EOF)
migrated_head="$(git rev-parse HEAD)"
assert_ref_unmoved "HEAD" "$original_head" "$migrated_head"
)
end_test
begin_test "migrate info (doesn't show empty info entries)"
(
set -e
setup_multiple_local_branches
original_head="$(git rev-parse HEAD)"
[ "0" -eq "$(git lfs migrate info --above=1mb 2>/dev/null | wc -l)" ]
migrated_head="$(git rev-parse HEAD)"
assert_ref_unmoved "HEAD" "$original_head" "$migrated_head"
)
end_test
begin_test "migrate info (empty set)"
(
set -e
setup_multiple_local_branches
migrate="$(git lfs migrate info \
--include-ref=refs/heads/master \
--exclude-ref=refs/heads/master 2>/dev/null
)"
[ "0" -eq "$(echo -n "$migrate" | wc -l | awk '{ print $1 }')" ]
)
end_test
begin_test "migrate info (--everything)"
(
set -e
setup_multiple_local_branches
git checkout master
original_master="$(git rev-parse refs/heads/master)"
original_feature="$(git rev-parse refs/heads/my-feature)"
diff -u <(git lfs migrate info --everything 2>&1 | tail -n 2) <(cat <<-EOF
*.md 170 B 2/2 files(s) 100%
*.txt 120 B 1/1 files(s) 100%
EOF)
migrated_master="$(git rev-parse refs/heads/master)"
migrated_feature="$(git rev-parse refs/heads/my-feature)"
assert_ref_unmoved "refs/heads/master" "$original_master" "$migrated_master"
assert_ref_unmoved "refs/heads/my-feature" "$original_feature" "$migrated_feature"
)
end_test
begin_test "migrate info (ambiguous reference)"
(
set -e
setup_multiple_local_branches
# Create an ambiguously named reference sharing the name as the SHA-1 of
# "HEAD".
sha="$(git rev-parse HEAD)"
git tag "$sha"
git lfs migrate info --everything
)
end_test
begin_test "migrate info (--everything with args)"
(
set -e
setup_multiple_local_branches
[ "$(git lfs migrate info --everything master 2>&1)" = \
"fatal: cannot use --everything with explicit reference arguments" ]
)
end_test
begin_test "migrate info (--everything with --include-ref)"
(
set -e
setup_multiple_local_branches
[ "$(git lfs migrate info --everything --include-ref=refs/heads/master 2>&1)" = \
"fatal: cannot use --everything with --include-ref or --exclude-ref" ]
)
end_test
# NOTE(review): a stray 'exit 0' preceded this test, terminating the script
# early and silently skipping it. It has been removed so the test runs.
begin_test "migrate info (--everything with --exclude-ref)"
(
  set -e

  setup_multiple_local_branches

  # --everything is mutually exclusive with explicit ref filters.
  [ "$(git lfs migrate info --everything --exclude-ref=refs/heads/master 2>&1)" = \
    "fatal: cannot use --everything with --include-ref or --exclude-ref" ]
)
end_test
|
######## setenv.sh ########
#
# Set project specific configuration in setenv.sh
#
# Example:
# - Change filemgr URL to http://localhost:1234
#   FILEMGR_URL=http://localhost:1234
#
# - Set custom job directory
# PROJECT_JOB_DIR=/usr/local/project/data/jobs
#
############################
# Root of the BigTranslate installation; component paths below derive from it.
export BIGTRANSLATE_HOME=/usr/local/bigtranslate
# Service endpoints: file manager, workflow manager, resource manager.
export FILEMGR_URL=http://localhost:9000
export WORKFLOW_URL=http://localhost:9001
export RESMGR_URL=http://localhost:9002
# Component home directories.
export FILEMGR_HOME=$BIGTRANSLATE_HOME/filemgr
export PGE_HOME=$BIGTRANSLATE_HOME/pge
export PCS_HOME=$BIGTRANSLATE_HOME/pcs
export FMPROD_HOME=$BIGTRANSLATE_HOME/tomcat/webapps/fmprod/WEB-INF/classes/
# Tika server classpath: language keys must precede the server jar.
export TIKA_SERVER_CLASSPATH=$BIGTRANSLATE_HOME/tika-server/language-keys/:$BIGTRANSLATE_HOME/tika-server/tika-server-1.13.jar
#!/bin/bash
# Build the demo applications and platform, optionally open a browser for the
# live visualization, then run the dual-CPU harness under the Imperas tools.
# Check Installation supports this example
checkinstall.exe -p install.pkg --nobanner || exit
CROSS=OR1K
# Cross-compile the applications and build module/harness without VLNV layout.
make -C application CROSS=${CROSS}
make -C module NOVLNV=1
make -C harness NOVLNV=1
ANSWER=
echo "Do you want to open a browser to see the visualization [y/N]"
read ANSWER
if [ "${ANSWER}" = "y" ]; then
  if [ -x /usr/bin/firefox ]; then
    # Give the harness a moment to start its HTTP server before the page loads.
    (sleep 1; firefox http://localhost:8000) &
  elif [ "$(uname)" = "MINGW32_NT-6.1" ]; then
    (sleep 1; explorer http://localhost:8000) &
  else
    echo "Use 'firefox http://localhost:8000' to see visualization"
  fi
fi
# Run both CPUs with their respective ELF images; --httpvis serves the
# visualization on port 8000. Extra command-line arguments are forwarded.
harness/harness.${IMPERAS_ARCH}.exe \
    --program top/u1/cpu1=application/application1.${CROSS}.elf \
    --program top/u1/cpu2=application/application2.${CROSS}.elf \
    --verbose --output imperas.log \
    --stoponcontrolc \
    --Q 0.0001 \
    \
    --httpvis \
    $*
|
<filename>src/main/java/org/usfirst/frc157/FRC2019/OutriggerTask.java
/*----------------------------------------------------------------------------*/
/* Copyright (c) 2018 FIRST. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package org.usfirst.frc157.FRC2019;
/**
* Add your docs here.
*/
/**
 * A single positioning task for the robot's outrigger: move at {@link #speed}
 * toward {@link #position} and report completion once within
 * {@link #tolerance} of the target.
 */
public class OutriggerTask {
    /** Motor speed to run the outrigger at while executing this task. */
    public double speed;
    /** Target encoder position for the outrigger. */
    public double position;
    /** Scheduling priority of this task relative to others. */
    public int priority;
    /** Allowed deviation from {@link #position} for the task to count as done. */
    public double tolerance;
    /** Whether this task is part of a landing sequence. */
    public boolean landing = false;
    /** Whether the task scheduler has accepted this task. */
    public boolean accepted = false;
    /** Whether the task has completed. */
    public boolean finished = false;

    /**
     * Creates an outrigger movement task.
     *
     * @param position  target encoder position
     * @param tolerance allowed deviation from the target; widened from
     *                  {@code int} to {@code double} to match the field it is
     *                  stored in (existing int-literal callers still compile
     *                  via the implicit widening conversion)
     * @param priority  scheduling priority
     * @param speed     motor speed for the move
     */
    public OutriggerTask(double position, double tolerance, int priority, double speed)
    {
        this.position = position;
        this.tolerance = tolerance;
        this.priority = priority;
        this.speed = speed;
    }

    /**
     * Reports whether {@code current} lies within the inclusive tolerance band
     * around the target position.
     *
     * @param current current encoder position
     * @return {@code true} when {@code position - tolerance <= current <= position + tolerance}
     */
    public boolean isFinished(int current)
    {
        return (position - tolerance) <= current && current <= (position + tolerance);
    }
}
|
#!/bin/bash
# Remove generated artifacts from the instruction generator, HLS project,
# SDx project and data directories. 'rm -f' is used throughout so the script
# is idempotent and does not print errors when a target was already cleaned.

# inst_gen outputs
rm -f ./inst_gen/openpose.insts
rm -f ./inst_gen/params.h
rm -f ./inst_gen/weight_offset.dat
rm -f ./inst_gen/bias_offset.dat

# HLS_project outputs
rm -rf ./HLS_project/HLS_kernel/output
rm -f ./HLS_project/2D*
rm -f ./HLS_project/common*
rm -f ./HLS_project/params.h
rm -rf ./HLS_project/pose_prj

# SDx_project outputs (generated sources plus the make-managed build tree)
rm -f ./SDx_project/src/hw_kernel.cpp
rm -f ./SDx_project/src/hw_kernel0.cpp
rm -f ./SDx_project/src/params.h
cd ./SDx_project/System
make clean
cd -

# data outputs
rm -f ./data/bias_reorg.bin
rm -f ./data/weight_reorg.bin
|
#!/bin/bash
# Print each node's non-terminated pod table as reported by 'kubectl describe'.
set -euo pipefail
echo -e "Iterating...\n"
# One node name per line: --no-headers plus a single NAME custom column.
nodes=$(kubectl get node --no-headers -o custom-columns=NAME:.metadata.name)
for node in $nodes; do
  echo "Node: $node"
  # Drop everything up to and including the 'Non-terminated Pods' heading,
  # leaving only the per-pod resource table for this node.
  kubectl describe node "$node" | sed '1,/Non-terminated Pods/d'
  echo
done
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.domain.order;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.ofbiz.base.util.Debug;
import org.opentaps.base.entities.OrderShipmentInfoSummary;
import org.opentaps.domain.party.Party;
import org.opentaps.foundation.entity.Entity;
import org.opentaps.foundation.entity.EntityNotFoundException;
import org.opentaps.foundation.repository.RepositoryException;
/**
* Order Item Ship Group entity.
*/
public class OrderItemShipGroup extends org.opentaps.base.entities.OrderItemShipGroup {

    private static final String MODULE = OrderItemShipGroup.class.getName();

    // Lazily-loaded, cached related entities; each is populated on first access
    // by the corresponding getter and reused afterwards.
    private Party supplier;
    // NOTE(review): declared but never referenced in this class — confirm
    // whether it is used via reflection/templates before removing.
    private Timestamp estimatedShipDate;
    private List<Shipment> primaryShipments;
    private List<OrderItemShipGrpInvRes> shipGroupInventoryReservations;
    private List<OrderShipmentInfoSummary> shipmentInfoSummaries;
    private Set<Map<String, Object>> groupedShipmentInfoSummaries;

    /**
     * Default constructor.
     */
    public OrderItemShipGroup() {
        super();
    }

    /**
     * Get the specification object which contains enumerations and logical checking for Orders.
     * @return the <code>OrderSpecificationInterface</code>
     */
    public OrderSpecificationInterface getOrderSpecification() {
        return getRepository().getOrderSpecification();
    }

    /**
     * Is this order item ship group set with an "unknown" address.
     * @return a <code>Boolean</code> value
     */
    public Boolean hasUnknownPostalAddress() {
        return getOrderSpecification().getUnknownShippingAddress().equals(getContactMechId());
    }

    /**
     * Gets the supplier <code>Party</code> for this order item ship group.
     * Returns <code>null</code> when no supplier party id is set or the party
     * cannot be found.
     * @return the supplier party domain object
     * @exception RepositoryException if an error occurs
     */
    public Party getSupplier() throws RepositoryException {
        if (supplier == null && getSupplierPartyId() != null) {
            try {
                supplier = getRepository().getPartyById(getSupplierPartyId());
            } catch (EntityNotFoundException e) {
                // remove this object supplierPartyId in case we want recall this method
                // (in-memory only: the entity is not persisted here)
                setSupplierPartyId(null);
            }
        }
        return supplier;
    }

    /**
     * Gets the primary shipments for this order item ship group.
     * Returns the order domain object instead of the base entity.
     * @return list of <code>OrderShipment</code>
     * @throws RepositoryException if an error occurs
     */
    @Override
    public List<Shipment> getPrimaryShipments() throws RepositoryException {
        if (primaryShipments == null) {
            primaryShipments = getRelated(Shipment.class, "PrimaryShipment");
        }
        return primaryShipments;
    }

    /**
     * Gets the inventory reservations for this order item ship group.
     * Returns the order domain object instead of the base entity.
     * @return list of <code>OrderItemShipGrpInvRes</code>
     * @throws RepositoryException if an error occurs
     */
    @Override
    public List<OrderItemShipGrpInvRes> getOrderItemShipGrpInvReses() throws RepositoryException {
        if (shipGroupInventoryReservations == null) {
            shipGroupInventoryReservations = getRelated(OrderItemShipGrpInvRes.class, "OrderItemShipGrpInvRes");
        }
        return shipGroupInventoryReservations;
    }

    /**
     * Gets the inventory reservations for this order item ship group.
     * This is an alias for {@link #getOrderItemShipGrpInvReses}
     * @return list of <code>OrderItemShipGrpInvRes</code>
     * @throws RepositoryException if an error occurs
     */
    public List<OrderItemShipGrpInvRes> getInventoryReservations() throws RepositoryException {
        return this.getOrderItemShipGrpInvReses();
    }

    /**
     * Gets the shipments info summary for this order item ship group.
     * @return list of <code>OrderShipmentInfoSummary</code>
     * @throws RepositoryException if an error occurs
     * @see #getGroupedShipmentInfoSummaries
     */
    public List<OrderShipmentInfoSummary> getShipmentInfoSummaries() throws RepositoryException {
        if (shipmentInfoSummaries == null) {
            shipmentInfoSummaries = getRepository().getRelatedOrderShipmentInfoSummaries(this);
        }
        return shipmentInfoSummaries;
    }

    /**
     * Gets a subset of the list of <code>OrderShipmentInfoSummary</code>.
     * Groups the values by:
     *  - shipmentPackageSeqId
     *  - trackingCode
     *  - boxNumber
     *  - carrierPartyId
     * @return a <code>Set</code> containing the grouped <code>OrderShipmentInfoSummary</code>
     * @throws RepositoryException if an error occurs
     * @see #getShipmentInfoSummaries
     */
    public Set<Map<String, Object>> getGroupedShipmentInfoSummaries() throws RepositoryException {
        if (groupedShipmentInfoSummaries == null) {
            groupedShipmentInfoSummaries = Entity.getDistinctFieldValues(getShipmentInfoSummaries(), Arrays.asList("shipmentPackageSeqId", "trackingCode", "boxNumber", "carrierPartyId"));
        }
        return groupedShipmentInfoSummaries;
    }

    // Narrows the inherited repository reference to the order repository type.
    private OrderRepositoryInterface getRepository() {
        return OrderRepositoryInterface.class.cast(repository);
    }
}
|
#!/bin/bash
# Bump the Astrobee version to VERSION (MAJOR.MINOR[.PATCH]): update the
# Debian changelog, the CMake version variables, RELEASE.md and the doxygen
# config, then open RELEASE.md in $EDITOR for the release description.
if [ $# -ne 1 ]
then
  echo "Usage: $0 VERSION"
  # FIX: was 'exit 0' — a usage error must signal failure to callers.
  exit 1
fi
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Split VERSION on '.' into its major and minor components.
MINOR=$(awk -F . '{print $2}' <<< "$1")
MAJOR=$(awk -F . '{print $1}' <<< "$1")
# Repository root is three levels above this script; abort if the cd fails.
cd "$DIR/../../.." || exit 1
dch -c debian/changelog -v "$1"
sed -i -e "s/set(ASTROBEE_VERSION_MAJOR .*)/set(ASTROBEE_VERSION_MAJOR $MAJOR)/g" -e "s/set(ASTROBEE_VERSION_MINOR .*)/set(ASTROBEE_VERSION_MINOR $MINOR)/g" CMakeLists.txt
# Insert a fresh release section just under the top-level heading.
sed -i -e "s/\# Astrobee Robot Software v1/\# Astrobee Robot Software v1\n\n\#\# Release $1\n\nINSERT DESCRIPTION HERE/g" RELEASE.md
sed -i -e "s/^PROJECT_NUMBER.*/PROJECT_NUMBER = $1/g" freeflyer.doxyfile
# $EDITOR intentionally unquoted: it may contain flags (e.g. "code -w").
$EDITOR RELEASE.md
|
#!/bin/bash
# Debug_GDB_Dhrystone.sh
# Launch the Imperas ISS on the Dhrystone RISC-V benchmark with a GDB console
# attached (initialized from dhrystone.gdb). Extra arguments are forwarded.

# Check Environment
if [ -z "${IMPERAS_HOME}" ]; then
    echo "IMPERAS_HOME not set. Please check environment setup."
    # FIX: was a bare 'exit' (status 0) — signal failure explicitly.
    exit 1
fi

${IMPERAS_ISS} --verbose --output imperas.log \
    --program ../../../Applications/dhrystone/dhrystone.RISCV32-O0-g.elf \
    --processorvendor riscv.ovpworld.org --processorname riscv --variant RV32GCB \
    --numprocessors 1 \
    --override iss/cpu0/verbose=1 \
    --gdbconsole --gdbinit dhrystone.gdb \
    "$@"
import React, { Component } from "react";
import { markTodo, getIncompleteTodos, getAllTodos } from "../queries";
import { Mutation } from "react-apollo";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faCheck } from "@fortawesome/free-solid-svg-icons";
import { Button } from "react-bootstrap";
// Button that marks a todo as completed via the `markTodo` GraphQL mutation.
// All props are forwarded verbatim as the mutation variables.
class MarkTodo extends Component {
  constructor(props) {
    super(props);
    // No local state is used; the empty object is kept for compatibility.
    this.state = {};
  }
  // Fires the mutation with this component's props as variables, then
  // refetches both todo lists so the UI reflects the completed item.
  marktodo_completed(update_todos) {
    update_todos({
      variables: this.props,
      refetchQueries: [{ query: getIncompleteTodos }, { query: getAllTodos }]
    });
  }
  render() {
    return (
      <Mutation mutation={markTodo}>
        {(update_todos, { data }) => (
          <Button
            onClick={e => {
              e.preventDefault();
              this.marktodo_completed(update_todos);
            }}
          >
            <FontAwesomeIcon icon={faCheck} style={{ color: "green" }} />
          </Button>
        )}
      </Mutation>
    );
  }
}
|
<gh_stars>0
package com.github.simondan.svl.app.communication;
import android.content.*;
import androidx.security.crypto.*;
import com.github.simondan.svl.communication.auth.*;
import de.adito.ojcms.rest.auth.api.AuthenticationRequest;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.time.Instant;
import java.util.Optional;
import static com.github.simondan.svl.communication.auth.SVLAuthenticationResponse.USER_ROLE;
import static de.adito.ojcms.rest.auth.api.AuthenticationResponse.*;
/**
* @author <NAME>, 16.11.2019
*/
final class SecurePreferencesCredentialsStore implements ICredentialsStore
{
private static final String TOKEN_KEY = "tokenKey";
private static final String USER_MAIL_KEY = "userMailKey";
private static final String NEXT_PASSWORD_KEY = "<PASSWORD>PasswordKey";
private static final String USER_ROLE_KEY = "nextPasswordKey";
private static final String RESTORE_TIMESTAMP_KEY = "restoreTimestampKey";
private static final String MASTER_KEY;
private final SharedPreferences sharedPreferences;
static
{
try
{
MASTER_KEY = MasterKeys.getOrCreate(MasterKeys.AES256_GCM_SPEC);
}
catch (GeneralSecurityException | IOException pE)
{
throw new RuntimeException(pE);
}
}
SecurePreferencesCredentialsStore(Context pContext)
{
try
{
sharedPreferences = EncryptedSharedPreferences.create(
"secret_shared_prefs",
MASTER_KEY,
pContext,
EncryptedSharedPreferences.PrefKeyEncryptionScheme.AES256_SIV,
EncryptedSharedPreferences.PrefValueEncryptionScheme.AES256_GCM
);
}
catch (GeneralSecurityException | IOException pE)
{
throw new RuntimeException(pE);
}
}
@Override
public boolean isUserMailInitialized()
{
return sharedPreferences.contains(USER_MAIL_KEY);
}
@Override
public boolean areCredentialsInitialized()
{
return sharedPreferences.contains(USER_MAIL_KEY) &&
sharedPreferences.contains(NEXT_PASSWORD_KEY) &&
sharedPreferences.contains(TOKEN_KEY);
}
@Override
public String getUserMail()
{
return _read(USER_MAIL_KEY);
}
@Override
public String getActiveToken()
{
return _read(TOKEN_KEY);
}
@Override
public AuthenticationRequest buildAuthenticationRequest()
{
final String userMail = getUserMail();
return new AuthenticationRequest(userMail, _read(NEXT_PASSWORD_KEY));
}
@Override
public EUserRole getUserRole()
{
return EUserRole.valueOf(_read(USER_ROLE_KEY));
}
@Override
public Instant getLastRestoreCodeTimestamp()
{
return Instant.ofEpochMilli(Long.parseLong(_read(RESTORE_TIMESTAMP_KEY)));
}
@Override
public void setUserMail(String pUserMail)
{
final SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString(USER_MAIL_KEY, pUserMail);
editor.apply();
}
@Override
public void saveNewAuthData(SVLAuthenticationResponse pAuthenticationResponse)
{
final SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString(TOKEN_KEY, pAuthenticationResponse.getValue(TOKEN));
editor.putString(NEXT_PASSWORD_KEY, pAuthenticationResponse.getValue(NEXT_PASSWORD));
editor.putString(USER_ROLE_KEY, pAuthenticationResponse.getValue(USER_ROLE).name());
editor.apply();
}
@Override
public void setLastRestoreCodeTimestamp(Instant pTimestamp)
{
final SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString(RESTORE_TIMESTAMP_KEY, String.valueOf(pTimestamp.toEpochMilli()));
editor.apply();
}
@Override
public void reset()
{
final SharedPreferences.Editor editor = sharedPreferences.edit();
editor.remove(USER_MAIL_KEY);
editor.remove(TOKEN_KEY);
editor.remove(NEXT_PASSWORD_KEY);
editor.remove(USER_ROLE_KEY);
editor.apply();
}
private String _read(final String pKey)
{
return Optional.ofNullable(sharedPreferences.getString(pKey, null))
.orElseThrow(() -> new RuntimeException("No value for key " + pKey));
}
}
|
# Minimal spam classifier: bag-of-words features + multinomial naive Bayes.
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
# read data; expects columns 'text' (raw message) and 'label' (target class)
df = pd.read_csv('data.csv')
# transform data into features (sparse token-count matrix over the corpus)
vectorizer = CountVectorizer()
x = vectorizer.fit_transform(df['text'])
# train Naive Bayes model
clf = MultinomialNB()
clf.fit(x, df['label'])
# predict on unseen data — must reuse the fitted vectorizer's vocabulary,
# hence transform() rather than fit_transform()
example = vectorizer.transform(['I am a spam email'])
prediction = clf.predict(example)
print(prediction[0])
#!/bin/bash
# Job name:
#SBATCH --job-name=en+de+fr+ko_non_token
#
# Project:
#SBATCH --account=nn9447k
#SBATCH --partition=accel --gres=gpu:1
#
# Wall time limit:
#SBATCH --time=7-00:00:00
#
# Other parameters:
#SBATCH --mem-per-cpu=16G
#SBATCH --ntasks=1
#SBATCH --mail-type=ALL
#SBATCH --mail-user=shifei.chen.2701@student.uu.se
## Set up job environment:
set -o errexit  # Exit the script on any error
set -o nounset  # Treat any unset variables as an error
module --quiet purge  # Reset the modules to the system default
module load PyTorch/1.3.1-fosscuda-2019b-Python-3.7.4
module list
# $USERWORK is provided by the cluster environment; the venv lives there.
cd $USERWORK
source thesis_env/bin/activate
# {EXP} is expanded by xnmt to the experiment name from the YAML config.
export DEFAULT_REPORT_PATH="output/reports/{EXP}"
# Run the merged en+de+fr+ko non-tokenized experiment on the GPU.
xnmt --dynet-gpu exp_settings/non_token/merged+ko.yaml
|
# Generate the TeXDoclet example PDF: run javadoc with the TeXDoclet doclet,
# compile the resulting .tex twice (second pass resolves cross-references),
# and copy the PDF into ../javadoc. '-p'/'-f' flags make reruns silent.
mkdir -p texdoclet_output
rm -f texdoclet_output/TeXDoclet.aux
javadoc -docletpath ../../target/TeXDoclet.jar \
	-doclet org.stfm.texdoclet.TeXDoclet \
	-noindex \
	-hyperref \
	-texinit texdoclet_include/preamble.tex \
	-texsetup texdoclet_include/setup.tex \
	-texintro texdoclet_include/intro.tex \
	-texfinish texdoclet_include/finish.tex \
	-imagespath ".." \
	-output texdoclet_output/TeXDoclet.tex \
	-sourcepath ../../src/main/java \
	-subpackages org \
	-sectionlevel section \
	-docclass article \
	-nosummaries
cd texdoclet_output || exit 1
pdflatex TeXDoclet.tex
pdflatex TeXDoclet.tex
mkdir -p ../javadoc
cp TeXDoclet.pdf ../javadoc
####### CSE 312 grading script #######
####### Author : <NAME> (<EMAIL>) #######
####### Winter 2022, CSE 312 @ UW instructed by <NAME> #######
# Description: A script which takes in the grades csv from Gradescope and computes their grades
# according to the syllabus. The late deductions on the psets are already factored in to
# their gradescope records, but Concept Checks are yet have late penalty applied.
# Returns a csv of name, UW id, pset percent, quiz percent, cc percent, and overall percent
# according to the weighting in syllabus
import numpy as np
import gradelib as glib # NOTE: install first with `pip install git+https://github.com/eldridgejm/gradelib`
import pandas as pd
########## read in the records ##########
records = pd.read_csv('~/312_grades.csv')
# FIX: removed `dict = records.to_dict()` — it was never used and shadowed the
# builtin `dict`.
# all the fields we need in the final result
results = {'name' : list() ,
            'uwid' : list(),
            'pset_per' : list(),
            'quiz_per' : list(),
            'cc_per' : np.zeros(len(records)).tolist(),
            'overall_per': list()
            }
for i in range(len(records)):
    results['name'].append( records['First Name'][i] + " " + records['Last Name'][i] )
    results['uwid'].append( records['Email'][i].split('@')[0])
##################
####### external library #######
#read and create gradebook. We don't allow any lateness fudge on submissions
gradebook = glib.Gradebook.from_gradescope('~/312_grades.csv', lateness_fudge=0)
########## Define the different types of assignments ##########
#Handle Extra Credit (EC)
ecs = gradebook.assignments.containing('extra credit')
gradebook_ec = gradebook.keep_assignments(ecs)
#gradebook without EC problems
gradebook_abs = gradebook.remove_assignments(ecs)
####### Applying late policy to assignmets which don't have
####### late penalty counted (Concept checks / CCs for our case) #######
ccs = gradebook_abs.assignments.containing('concept check')
gradebook_ccs = gradebook_abs.keep_assignments(ccs)
cc_dict = gradebook_ccs.points.fillna(0).to_dict()
cc_max = gradebook_ccs.maximums.to_dict()
# FIX: renamed from `max`, which shadowed the builtin `max` for the rest of
# the script.
cc_points_possible = sum(cc_max.values())
late_dict = gradebook_ccs.late.to_dict()
# Late concept checks earn 50% credit; on-time ones earn full credit.
# NOTE(review): assumes the row (student) order of `late_dict` matches the
# CSV order used to build `results` — holds for gradelib's DataFrame-backed
# dicts, but verify if the library version changes.
for assignment in late_dict:
    for i,sid in enumerate(late_dict[assignment].keys()):
        if (late_dict[assignment][sid]):
            results['cc_per'][i] += (cc_dict[assignment][sid]) * 0.5
        else:
            results['cc_per'][i] += (cc_dict[assignment][sid])
#Handle EC CCs or more EC assignments
ec_dict = gradebook_ec.points.fillna(0).to_dict()
for assignment in ec_dict:
    for i,sid in enumerate(ec_dict[assignment]):
        results['cc_per'][i] += (ec_dict[assignment][sid])
#get fraction (between 0 and 1) score
for i in range(len(results['cc_per'])):
    results['cc_per'][i] = results['cc_per'][i] / cc_points_possible
##################
########## Assignments for which Late penalty is already accounted for or is 0 ###########
# forgiving all lates in other assignments
gradebook_abs = (
    gradebook_abs
    .forgive_lates(99999999)
)
# Other scores:
psets = gradebook_abs.assignments.containing('pset')
gradebook_psets = gradebook_abs.keep_assignments(psets)
pset_score = gradebook_psets.score(psets).fillna(0).to_list()
quizzes = gradebook_abs.assignments.containing('quiz')
gradebook_quizzes = gradebook_abs.keep_assignments(quizzes)
quiz_score = gradebook_quizzes.score(quizzes).fillna(0).to_list()
for i in range(len(pset_score)):
    results['pset_per'].append(pset_score[i])
    results['quiz_per'].append(quiz_score[i])
##################
####### Overall percentage/ fraction score #######
# Syllabus weighting: 50% psets, 40% quizzes, 10% concept checks.
for i in range(len(records)):
    results['overall_per'].append( results['pset_per'][i] * 0.5 + results['quiz_per'][i] * 0.4 +
                                    results['cc_per'][i] * 0.1 )
##################
####### Save as csv #######
result_csv = pd.DataFrame.from_dict(results)
result_csv.to_csv('~/312_results.csv', index=False)
##################
##########################################
|
#!/usr/bin/env bash
# shellcheck disable=SC2230
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2016-01-22 20:54:53 +0000 (Fri, 22 Jan 2016)
#
# https://github.com/harisekhon/bash-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback
#
# https://www.linkedin.com/in/harisekhon
#
# This really only checks basic syntax, if you're made command errors this won't catch it
set -euo pipefail
# enable command tracing when DEBUG is set in the environment
[ -n "${DEBUG:-}" ] && set -x
srcdir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# shellcheck source=lib/utils.sh
# provides section(), section2(), start_timer, time_taken and isExcluded used below
. "$srcdir/lib/utils.sh"
# With no arguments and no *.sh files under the current directory there is
# nothing to check: `return` succeeds only if this file is being sourced
# (its failure in an executed script is silenced), otherwise fall through
# to `exit 0`.
if [ $# -eq 0 ]; then
    if [ -z "$(find "${1:-.}" -type f -iname '*.sh')" ]; then
        return 0 &>/dev/null || :
        exit 0
    fi
fi
section "Shell Syntax Checks"
# Syntax-check a single shell script: warn on a hardcoded bash shebang,
# run `bash -n`, then (when available) run shellcheck from the script's
# own directory so relative `source` hints resolve.
check_shell_syntax(){
    echo -n "checking shell syntax: $1 "
    # quotes in middle disrupt warning on our own script
    if grep -q '#!/bin/bas[h]' "$1"; then
        echo "WARNING: '#!""/bin/bash' detected, consider using '#!/usr/bin/env bash' instead"
    fi
    bash -n "$1"
    if type -P shellcheck &>/dev/null; then
        local script_name="${1##*/}"
        local script_dir
        script_dir="$(dirname "$1")"
        # run from the script's directory so source-path hints are safe from any $PWD
        if ! pushd "$script_dir" &>/dev/null; then
            echo "ERROR: failed to pushd to $script_dir"
            exit 1
        fi
        # -x allows to follow source hints for files not given as arguments;
        # shellcheck findings are reported but never fail the run
        shellcheck -x "$script_name" || :
        if ! popd &>/dev/null; then
            echo "ERROR: failed to popd from $script_dir"
        fi
    fi
    echo "=> OK"
}
recurse_dir(){
    # Recursively syntax-check every *.sh file under $1 (default: current dir).
    # BUG FIX: the old `for x in $(find ...)` word-split the find output, so
    # any filename containing whitespace was mangled; reading line-by-line
    # from process substitution handles such names correctly.
    while IFS= read -r x; do
        isExcluded "$x" && continue
        # also skip anything matching the optional EXCLUDED regex
        [[ "$x" =~ ${EXCLUDED:-} ]] && continue
        check_shell_syntax "$x"
    done < <(find "${1:-.}" -type f -iname '*.sh' | sort)
}
start_time="$(start_timer)"
# shellcheck is optional — without it only `bash -n` checks run
if ! type -P shellcheck &>/dev/null; then
    echo "WARNING: shellcheck not installed, will only do basic checks"
    echo
fi
# check the files/directories given as arguments, or recurse from the
# current directory when none were given
if [ $# -gt 0 ]; then
    for x in "$@"; do
        if [ -d "$x" ]; then
            recurse_dir "$x"
        else
            check_shell_syntax "$x"
        fi
    done
else
    recurse_dir .
fi
time_taken "$start_time"
section2 "All Shell programs passed syntax check"
echo
|
<filename>posuto-server/src/models/PostModel.js
import mongoose from 'mongoose';

// Embedded sub-document describing a post's dimensions (width/height)
// and its x/y/z coordinates.
const PositionSchema = new mongoose.Schema({
  width: Number,
  height: Number,
  x: Number,
  y: Number,
  z: Number,
});

// A user-created post; `timestamps: true` adds createdAt/updatedAt fields.
const postSchema = new mongoose.Schema(
  {
    // short display title, trimmed, at most 50 chars
    title: {
      type: String,
      trim: true,
      maxlength: 50,
    },
    contents: String,
    position: PositionSchema,
    // whether the post can be dragged in the UI — TODO confirm with client code
    isDraggable: {
      type: Boolean,
      default: true,
    },
    // owning user; references the 'Users' model and is mandatory
    createdBy: {
      type: mongoose.SchemaTypes.ObjectId,
      ref: 'Users',
      required: true,
    },
  },
  { timestamps: true },
);

// Uniqueness index kept disabled intentionally — left as documentation.
// postSchema.index({ Users: 1, title: 1 }, { unique: true });

const PostModel = mongoose.model('Posts', postSchema);

export default PostModel;
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.22 at 02:23:57 PM MST
//
package net.opengis.cat.csw._202;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
* Requests the actual values of some specified request parameter
* or other data element.
*
* <p>Java class for GetDomainType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="GetDomainType">
* <complexContent>
* <extension base="{http://www.opengis.net/cat/csw/2.0.2}RequestBaseType">
* <sequence>
* <choice>
* <element name="PropertyName" type="{http://www.w3.org/2001/XMLSchema}anyURI"/>
* <element name="ParameterName" type="{http://www.w3.org/2001/XMLSchema}anyURI"/>
* </choice>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "GetDomainType", propOrder = {
    "propertyName",
    "parameterName"
})
public class GetDomainType
    extends RequestBaseType
{

    /** Value of the {@code PropertyName} element (an xsd:anyURI); may be null. */
    @XmlElement(name = "PropertyName")
    @XmlSchemaType(name = "anyURI")
    protected String propertyName;

    /** Value of the {@code ParameterName} element (an xsd:anyURI); may be null. */
    @XmlElement(name = "ParameterName")
    @XmlSchemaType(name = "anyURI")
    protected String parameterName;

    /**
     * Returns the requested property name.
     *
     * @return the {@code PropertyName} value, possibly {@code null}
     */
    public String getPropertyName() {
        return this.propertyName;
    }

    /**
     * Sets the requested property name.
     *
     * @param value the new {@code PropertyName} value; may be {@code null}
     */
    public void setPropertyName(String value) {
        this.propertyName = value;
    }

    /**
     * Returns the requested parameter name.
     *
     * @return the {@code ParameterName} value, possibly {@code null}
     */
    public String getParameterName() {
        return this.parameterName;
    }

    /**
     * Sets the requested parameter name.
     *
     * @param value the new {@code ParameterName} value; may be {@code null}
     */
    public void setParameterName(String value) {
        this.parameterName = value;
    }
}
|
import re

# Per-tag regexes. The old single pattern
#   <link.*?href=...["\'].*?type=["\']text/css["\'].*?>
# had two bugs: its non-anchored `.*?` spans could run past a tag's `>` and
# match attributes from a NEIGHBOURING tag, and it required href to appear
# before type, missing any <link> written in the other order.
_LINK_TAG_RE = re.compile(r'<link\b[^>]*>', re.IGNORECASE)
_HREF_RE = re.compile(r'href\s*=\s*["\']([^"\']+)["\']', re.IGNORECASE)
_IS_CSS_RE = re.compile(
    r'(?:type\s*=\s*["\']text/css["\']|rel\s*=\s*["\']stylesheet["\'])',
    re.IGNORECASE)


def extract_css_paths(html_file):
    """Print and return the unique stylesheet paths referenced by an HTML file.

    Args:
        html_file: path to the HTML file to scan.

    Returns:
        A list of unique href values from <link> tags that declare either
        type="text/css" or rel="stylesheet" (order not guaranteed — the
        original behavior of deduplicating via set() is preserved).
    """
    with open(html_file, 'r') as file:
        html_content = file.read()
    css_paths = []
    for tag in _LINK_TAG_RE.findall(html_content):
        # attribute order inside the tag no longer matters
        if _IS_CSS_RE.search(tag):
            href = _HREF_RE.search(tag)
            if href:
                css_paths.append(href.group(1))
    unique_css_paths = list(set(css_paths))
    for path in unique_css_paths:
        print(path)
    return unique_css_paths


if __name__ == '__main__':
    # Example usage — guarded so importing this module has no side effects
    extract_css_paths('index.html')
package com.github.robindevilliers.cascade.annotations;
import com.github.robindevilliers.cascade.Completeness;
import java.lang.annotation.*;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Inherited
public @interface CompletenessLevel {
Completeness value();
} |
import React from 'react';
interface ITableHeadItemProps {
children?: string;
isAction?: boolean;
}
export const TableHeadItem = ({
children = '',
isAction = false,
}: ITableHeadItemProps): JSX.Element => {
if (isAction) {
return <th className="px-6 py-3 bg-gray-50" aria-label="Actions" />;
}
return (
<th
className="px-6 py-3 bg-gray-50 text-left text-xs leading-4 font-medium text-gray-500 uppercase tracking-wider max-w-xs"
aria-label={children}
>
{children}
</th>
);
};
|
import requests
from bs4 import BeautifulSoup

# fetch the contents of the page with an HTTP GET
response = requests.get('https://example.com')
# build a parse tree from the returned HTML
parsed = BeautifulSoup(response.content, 'html.parser')
# show the formatted document
print(parsed.prettify())
# Recreates the prisons table with a UUID primary key (id: :uuid)
# instead of Rails' default integer key.
class ReplacePrisonsWithTableThatUsesUuidPrimaryKey < ActiveRecord::Migration[4.2]
  def change
    create_table :prisons, id: :uuid do |t|
      t.string :name, null: false
      # NOMIS identifier, capped at 3 characters
      t.string :nomis_id, limit: 3, null: false
      t.boolean :enabled, default: true, null: false
      # default 28 — presumably days ahead that bookings are allowed; confirm with callers
      t.integer :booking_window, default: 28, null: false
      t.text :address
      t.string :estate
      t.string :email_address
      t.string :phone_no
      # free-form JSON, defaults to an empty object
      t.json :slot_details, default: {}, null: false
      t.timestamps null: false
    end
  end
end
|
<reponame>premss79/zignaly-webapp
import React from "react";
import { Helmet } from "react-helmet";
import { Provider } from "react-redux";
import { store } from "./src/store/store.js";
export default ({ element }) => {
// Instantiating store in `wrapRootElement` handler ensures:
// - there is fresh store for each SSR page
// - it will be called only once in browser, when React mounts
// Add notranslate meta to prevent Chrome translation tool mutating React virtual DOM.
// See: https://github.com/facebook/react/issues/11538
return (
<Provider store={store}>
<Helmet>
<meta content="notranslate" name="google" />
</Helmet>
{element}
</Provider>
);
};
|
#!/bin/bash
# Copyright 2018 SMF Authors
#
# Installs build dependencies for the detected Linux distribution.
set -e
# trace commands when running under CI
if [ -n "${CI}" ]; then
    set -x
fi
# shellcheck disable=SC1091
# provides $ID, $VERSION_ID and $UBUNTU_CODENAME used below
source /etc/os-release
# Install a specific gcc/g++ major version via apt; in CI also make that
# version the system default compiler through update-alternatives.
function apt_gcc {
    local version=$1
    local c_compiler="gcc-${version}"
    local cxx_compiler="g++-${version}"
    apt-get install -y "${c_compiler}" "${cxx_compiler}"
    if [ -n "${CI}" ]; then
        update-alternatives \
            --install /usr/bin/gcc gcc "/usr/bin/${c_compiler}" 800 \
            --slave /usr/bin/g++ g++ "/usr/bin/${cxx_compiler}"
    fi
}
# Install build dependencies on Debian-family distros via apt.
function debs() {
    apt-get update -y
    local extra=""
    if [ -n "${USE_CLANG}" ]; then
        extra=clang
    else
        # ensure gcc is installed so we can test its version
        apt-get install -y build-essential
        if ! command -v add-apt-repository; then
            apt-get -y install software-properties-common
        fi
        apt-get update -y
        # prefer gcc-9 when the distro ships it; otherwise require at
        # least gcc 8, adding the toolchain PPA if needed
        if [ "$(apt-cache search '^gcc-9$' | awk '{print $1}')" == "gcc-9" ]; then
            apt_gcc 9
        else
            gcc_ver=$(gcc -dumpfullversion -dumpversion)
            if dpkg --compare-versions "${gcc_ver}" lt 8.0; then
                # as of may 29, 2019, ubuntu:disco did not have this ppa enabled
                add-apt-repository -y ppa:ubuntu-toolchain-r/test
                apt_gcc 8
            fi
        fi
    fi
    # xenial's packaged cmake is too old for CI; fetch a binary release there
    if [ "${UBUNTU_CODENAME}" == "xenial" ] && [ -n "${CI}" ]; then
        cmake_version="3.14.0-rc2"
        cmake_full_name="cmake-${cmake_version}-Linux-x86_64.sh"
        apt-get install -y wget
        wget https://github.com/Kitware/CMake/releases/download/v${cmake_version}/${cmake_full_name} -O /tmp/${cmake_full_name}
        chmod +x /tmp/${cmake_full_name}
        /tmp/${cmake_full_name} --skip-license --prefix=/usr
    else
        apt-get install -y cmake
    fi
    # toolchain and library packages; ${extra} is intentionally unquoted so
    # an empty value contributes no argument
    apt-get install -y \
        build-essential \
        libtool \
        m4 \
        ninja-build \
        automake \
        pkg-config \
        xfslibs-dev \
        systemtap-sdt-dev \
        valgrind \
        ragel ${extra}
}
# Install build dependencies on RPM-family distros via yum/dnf.
function rpms() {
    yumdnf="yum"
    if command -v dnf > /dev/null; then
        yumdnf="dnf"
    fi
    # NOTE(review): $SUDO is never assigned anywhere in this script — it
    # expands to nothing unless exported by the caller; verify intentional.
    case ${ID} in
        centos|rhel)
            MAJOR_VERSION="$(echo "$VERSION_ID" | cut -d. -f1)"
            $SUDO yum-config-manager --add-repo https://dl.fedoraproject.org/pub/epel/"$MAJOR_VERSION"/x86_64/
            $SUDO yum install --nogpgcheck -y epel-release
            $SUDO rpm --import /etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-"$MAJOR_VERSION"
            $SUDO rm -f /etc/yum.repos.d/dl.fedoraproject.org*
            # CentOS 7's system gcc is too old; pull in devtoolset-8 and
            # remember that via dts_ver for the message/enable step below
            if test "$ID" = centos -a "$MAJOR_VERSION" = 7 ; then
                yum install -y centos-release-scl
                yum install -y devtoolset-8
                dts_ver=8
            fi
            ;;
    esac
    # NOTE(review): unlike debs(), `extra` is not initialised to "" here; it
    # relies on being unset/empty when USE_CLANG is empty (safe only because
    # `set -u` is not enabled in this script)
    if [ -n "${USE_CLANG}" ]; then
        extra=clang
    fi
    cmake="cmake"
    case ${ID} in
        centos|rhel)
            MAJOR_VERSION="$(echo "$VERSION_ID" | cut -d. -f1)"
            # EPEL ships modern cmake as "cmake3" on EL7
            if test "$MAJOR_VERSION" = 7 ; then
                cmake="cmake3"
            fi
    esac
    ${yumdnf} install -y \
        ${cmake} \
        gcc-c++ \
        ninja-build \
        m4 \
        libtool \
        make \
        ragel \
        xfsprogs-devel \
        systemtap-sdt-devel \
        libasan \
        libubsan \
        libatomic \
        valgrind-devel \
        doxygen ${extra}
    # when devtoolset was installed: tell an interactive user how to enable
    # it, or enable it directly for non-interactive (CI) runs
    if [ -n "$dts_ver" ]; then
        if test -t 1; then
            # interactive shell
            cat <<EOF
Your GCC is too old. Please run following command to add DTS to your environment:
scl enable devtoolset-8 bash
Or add following line to the end of ~/.bashrc to add it permanently:
source scl_source enable devtoolset-8
see https://www.softwarecollections.org/en/scls/rhscl/devtoolset-8/ for more details.
EOF
        else
            # non-interactive shell
            # shellcheck disable=SC1090
            source /opt/rh/devtoolset-"$dts_ver"/enable
        fi
    fi
}
# Dispatch on the distro ID sourced from /etc/os-release
case $ID in
    debian|ubuntu|linuxmint)
        debs
        ;;
    centos|fedora)
        rpms
        ;;
    *)
        echo "$ID not supported. Install dependencies manually."
        exit 1
        ;;
esac
|
# Install OVOS Skills: clone each skill repo into the mycroft skills
# directory and install its Python requirements.
echo "**** Installing Skills ****"
mkdir -p /home/mycroft/.local/share/mycroft/skills/
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/skill-ovos-setup skill-ovos-setup.openvoiceos)
# BUG FIX: requirements were previously installed from
# skill-ovos-pairing.openvoiceos, a directory that is never cloned; the
# setup skill is cloned into skill-ovos-setup.openvoiceos, so install there.
(cd /home/mycroft/.local/share/mycroft/skills/skill-ovos-setup.openvoiceos && pip3 install -r requirements.txt)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/skill-ovos-mycroftgui skill-ovos-mycroftgui.openvoiceos)
(cd /home/mycroft/.local/share/mycroft/skills/skill-ovos-mycroftgui.openvoiceos && pip3 install -r requirements.txt)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/skill-ovos-homescreen skill-ovos-homescreen.openvoiceos)
(cd /home/mycroft/.local/share/mycroft/skills/skill-ovos-homescreen.openvoiceos && pip3 install -r requirements.txt)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/skill-balena-wifi-setup skill-balena-wifi-setup.openvoiceos)
(cd /home/mycroft/.local/share/mycroft/skills/skill-balena-wifi-setup.openvoiceos && pip3 install -r requirements.txt)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/skill-weather skill-weather.openvoiceos)
(cd /home/mycroft/.local/share/mycroft/skills/skill-weather.openvoiceos && pip3 install -r requirements.txt)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/MycroftAI/skill-date-time skill-date-time.mycroftai)
(cd /home/mycroft/.local/share/mycroft/skills/skill-date-time.mycroftai && pip3 install -r requirements.txt)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/ovos-skills-info ovos-skills-info.openvoiceos)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/skill-ovos-timer skill-ovos-timer.openvoiceos)
(cd /home/mycroft/.local/share/mycroft/skills/skill-ovos-timer.openvoiceos && pip3 install -r requirements.txt)
(cd /home/mycroft/.local/share/mycroft/skills && git clone https://github.com/OpenVoiceOS/skill-camera skill-camera.openvoiceos)
# Hand ownership to uid/gid 32011 — presumably the mycroft service user;
# the third chown already covers the first two (kept, harmless/idempotent)
chown -R 32011:32011 /home/mycroft/.local/share/mycroft/skills/
chown -R 32011:32011 /home/mycroft/.mycroft
chown -R 32011:32011 /home/mycroft/
chmod -R ugo+x /usr/local/sbin
chmod -R ugo+x /opt/ovos
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.