text
stringlengths 1
1.05M
|
|---|
/** @module ecs/raw/emitter/Emitter */
import { initFromConfig } from "/scripts/util/config.js"
/**
 * Component that emits a value when enabled and its (optional) timer expires.
 * Config keys `create`, `timer`, `enabled` fall back to the static defaults
 * via initFromConfig.
 */
export default class Emitter {
  static defaultCreate = () => null
  static defaultTimer = null
  static defaultEnabled = true

  constructor(config = {}) {
    initFromConfig(this, config, Emitter, "create", "timer", "enabled")
  }

  /** Advance the timer, then emit (resetting the timer) if ready; null otherwise. */
  emitIfReady(dt, ts) {
    this.update(dt, ts)
    if (!this.ready) {
      return null
    }
    this.timer?.reset()
    return this.emit(dt, ts)
  }

  /** Tick the timer forward by `dt`; chainable. */
  update(dt) {
    this.timer?.update(dt)
    return this
  }

  /** Invoke the configured factory; null when none is set. */
  emit(dt, ts) {
    return this.create?.(dt, ts)
  }

  /** True when enabled and the timer (if any) has expired (no timer = always ready). */
  get ready() {
    return this.enabled && (this.timer?.expired ?? true)
  }
}
|
<filename>backend/tests/wallet_tests/services/system/test_sync_until_specific_version.py
from tests.wallet_tests.services.system.utils import (
check_balance,
add_incoming_transaction_to_blockchain,
add_outgoing_transaction_to_blockchain,
check_number_of_transactions,
)
from tests.wallet_tests.services.system.utils import (
setup_outgoing_transaction,
setup_incoming_transaction,
setup_inventory_with_initial_transaction,
)
from wallet.services.system import sync_db
from wallet.storage import Transaction
# Counterparty (external) blockchain addresses used as senders/receivers
# in the sync scenarios below.
OTHER_ADDRESS_1 = "257e50b131150fdb56aeab4ebe4ec2b9"
OTHER_ADDRESS_2 = "176b73399b04d9231769614cf22fb5df"
OTHER_ADDRESS_3 = "0498D148D9A4DCCF893A480B32FF08DA"
OTHER_ADDRESS_4 = "A95A3513300B2C8C1F530CF17D6819F7"
OTHER_ADDRESS_5 = "D5AD1B71EFD4EE463BAF01FC7F281A8B"
# Wallet sub-addresses identifying the local user accounts under test.
SUB_ADDRESS_1 = "8e298f642d08d1af"
SUB_ADDRESS_2 = "a4d5bd88ec5be7a8"
SUB_ADDRESS_3 = "3b3b97168de2f9de"
SUB_ADDRESS_4 = "c59a28326b9caa2a"
SUB_ADDRESS_5 = "6e17b494e79dab75"
def test_sync_until_specific_version(patch_blockchain):
    """
    Setup:
    DB:
    1. inventory account with 1 incoming initial transaction of 1000 coins
    2. 2 users accounts with outgoing transaction
    3. 1 user account with incoming transaction --> highest version - 10139
    Blockchain:
    1. 1 inventory incoming transaction
    2. 2 users incoming transactions --> 1 transaction with higher version than DB highest version (10151)
    3. 3 users outgoing transactions --> 1 transaction with lower version than DB highest version (10132)
    Action: sync_db() expected:
    1. The on-chain transaction with version 10132 (below the DB's highest
       synced version) is back-filled into the LRW DB
    2. The on-chain transaction with version 10151 (above the DB's highest
       synced version) is NOT added to the LRW DB
    """
    setup_inventory_with_initial_transaction(
        patch_blockchain, 1000, mock_blockchain_initial_balance=880
    )

    # Transactions present in both DB and blockchain — must survive the sync.
    NO_CHANGE_VERSION_1 = 10131
    setup_outgoing_transaction(
        patch_blockchain=patch_blockchain,
        sender_sub_address=SUB_ADDRESS_1,
        amount=100,
        receiver_address=OTHER_ADDRESS_1,
        sequence=0,
        version=NO_CHANGE_VERSION_1,
        name="test_account",
    )
    NO_CHANGE_VERSION_2 = 10137
    setup_outgoing_transaction(
        patch_blockchain=patch_blockchain,
        sender_sub_address=SUB_ADDRESS_2,
        amount=75,
        receiver_address=OTHER_ADDRESS_2,
        sequence=1,
        version=NO_CHANGE_VERSION_2,
        name="test_account_2",
    )
    HIGHEST_VERSION_IN_DB = 10139
    setup_incoming_transaction(
        patch_blockchain=patch_blockchain,
        receiver_sub_address=SUB_ADDRESS_3,
        amount=80,
        sender_address=OTHER_ADDRESS_3,
        sequence=12,
        version=HIGHEST_VERSION_IN_DB,
        name="test_account_3",
    )

    # Blockchain-only transaction ABOVE the DB's highest version — must NOT be synced.
    HIGHER_THAN_DB_HIGHEST_VERSION = 10151
    add_incoming_transaction_to_blockchain(
        patch_blockchain,
        SUB_ADDRESS_4,
        25,
        OTHER_ADDRESS_4,
        7,
        HIGHER_THAN_DB_HIGHEST_VERSION,
    )
    # Blockchain-only transaction BELOW the DB's highest version — must be back-filled.
    ADDED_VERSION = 10132
    add_outgoing_transaction_to_blockchain(
        patch_blockchain,
        SUB_ADDRESS_5,
        50,
        OTHER_ADDRESS_5,
        2,
        ADDED_VERSION,
    )

    # Pre-sync: 1000 - 100 - 75 + 80 = 905 over 4 DB transactions.
    check_balance(905)
    check_number_of_transactions(4)

    sync_db()

    # Post-sync: the 10132 outgoing (-50) was back-filled --> 5 transactions, 855.
    check_number_of_transactions(5)
    check_balance(855)

    assert (
        Transaction.query.filter_by(blockchain_version=NO_CHANGE_VERSION_1).first()
        is not None
    )
    assert (
        Transaction.query.filter_by(blockchain_version=NO_CHANGE_VERSION_2).first()
        is not None
    )
    assert (
        Transaction.query.filter_by(blockchain_version=HIGHEST_VERSION_IN_DB).first()
        is not None
    )
    assert (
        Transaction.query.filter_by(blockchain_version=ADDED_VERSION).first()
        is not None
    )
    assert (
        Transaction.query.filter_by(
            blockchain_version=HIGHER_THAN_DB_HIGHEST_VERSION
        ).first()
        is None
    )
|
package io.opensphere.controlpanels.layers.groupby;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.impl.GroupCategorizer;
/**
* The Class GroupByAvailableActiveLayersTreeBuilder.
*/
public class GroupByAvailableActiveLayersTreeBuilder extends GroupByDefaultTreeBuilder
{
    @Override
    public String getGroupByName()
    {
        return "Active";
    }

    @Override
    public GroupCategorizer getGroupCategorizer()
    {
        return new GroupCategorizer()
        {
            @Override
            public List<String> getAllCategories()
            {
                // Copy so the sort does not disturb the underlying category set.
                List<String> sortedCategories = New.list(getCategories());
                Collections.sort(sortedCategories, new Comparator<String>()
                {
                    @Override
                    public int compare(String left, String right)
                    {
                        // Case-insensitive ordering via upper-cased comparison.
                        return left.toUpperCase().compareTo(right.toUpperCase());
                    }
                });
                return sortedCategories;
            }

            @Override
            public Set<String> getGroupCategories(DataGroupInfo dgi)
            {
                // Bucket each group by its current activation state and
                // remember the bucket for getAllCategories().
                boolean active = dgi.activationProperty().isActiveOrActivating();
                String bucket = active ? "Active" : "Inactive";
                getCategories().add(bucket);
                return Collections.singleton(bucket);
            }

            @Override
            public Set<String> getTypeCategories(DataTypeInfo dti)
            {
                // Individual data types are not categorized by this builder.
                return Collections.emptySet();
            }
        };
    }
}
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# Flutter tool environment for the Xcode build phase; all paths are
# machine-specific (regenerated by `flutter build`/`flutter run`).
export "FLUTTER_ROOT=/Users/rxlabz/dev/tools/flutter"
export "FLUTTER_APPLICATION_PATH=/Users/rxlabz/dev/projects/audioplayer2/audioplayer/audioplayer"
export "FLUTTER_BUILD_DIR=build"
export "FLUTTER_FRAMEWORK_DIR=/Users/rxlabz/dev/tools/flutter/bin/cache/artifacts/engine/darwin-x64"
export "FLUTTER_BUILD_NAME=0.6.0"
# NOTE(review): BUILD_NUMBER mirrors the version string; Flutter normally
# expects a numeric build number here — confirm against pubspec.yaml.
export "FLUTTER_BUILD_NUMBER=0.6.0"
-- --------------------------------------------------------
-- 主机: 192.168.215.70
-- 服务器版本: 5.5.18 - MySQL Community Server (GPL)
-- 服务器操作系统: Win32
-- HeidiSQL 版本: 9.3.0.4984
-- --------------------------------------------------------
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8mb4 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-- 导出 表 newaudimedia.media 结构
-- NOTE(review): the original statement declared PRIMARY KEY (`uid`) without a
-- `uid` column, so the CREATE TABLE could not execute. A surrogate
-- auto-increment key is declared here — confirm the type against the source schema.
CREATE TABLE IF NOT EXISTS `media` (
`uid` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',
`media_name` varchar(200) COLLATE utf8_unicode_ci NOT NULL COMMENT '媒体名称',
`media_category` varchar(200) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '媒体大类',
`media_type` varchar(200) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '媒体类别',
`region` varchar(200) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '区域',
`city` varchar(200) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '城市',
`media_level` varchar(200) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '媒体级别',
`media_impact_index` varchar(200) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT '媒体影响力指数',
`updated_at` datetime DEFAULT NULL COMMENT '更新时间',
PRIMARY KEY (`uid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci COMMENT='媒体';
-- 数据导出被取消选择。
/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IF(@OLD_FOREIGN_KEY_CHECKS IS NULL, 1, @OLD_FOREIGN_KEY_CHECKS) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
|
<gh_stars>10-100
package com.telenav.osv.upload.operation;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.util.Consumer;
import androidx.core.util.Pair;
import com.telenav.osv.common.event.SimpleEventBus;
import com.telenav.osv.data.score.model.ScoreHistory;
import com.telenav.osv.data.sequence.model.details.SequenceDetails;
import com.telenav.osv.data.sequence.model.details.reward.SequenceDetailsRewardBase;
import com.telenav.osv.data.sequence.model.details.reward.SequenceDetailsRewardPoints;
import com.telenav.osv.item.KVFile;
import com.telenav.osv.network.KVApi;
import com.telenav.osv.network.model.generic.ResponseNetworkBase;
import com.telenav.osv.network.model.metadata.ResponseModelUploadMetadata;
import com.telenav.osv.network.util.NetworkRequestConverter;
import com.telenav.osv.upload.progress.model.UploadUpdateDisk;
import com.telenav.osv.upload.progress.model.UploadUpdateProgress;
import com.telenav.osv.utils.Log;
import com.telenav.osv.utils.StringUtils;
import com.telenav.osv.utils.Utils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Map;
import io.reactivex.Completable;
import okhttp3.MultipartBody;
import okhttp3.RequestBody;
/**
* The operation which will upload a metadata file to the network.
* <p> This will generate a stream via {@link #getStream()} method which is the only public entry point to this operation.
* @author horatiuf
* @see UploadOperationBase
* @see #getStream()
*/
public class UploadOperationMetadata extends UploadOperationBase {

    /**
     * Metadata throwable message for when the file was not found when creating the stream before network call.
     */
    public static final String THROWABLE_MESSAGE_METADATA_FILE_NOT_FOUND = "Metadata file not found";

    /**
     * Identifier for the current class.
     */
    private static final String TAG = UploadOperationMetadata.class.getSimpleName();

    /**
     * The name used for encoding a {@code ScoreHistory} coverage.
     */
    private static final String SCORE_HISTORY_NAME_COVERAGE = "coverage";

    /**
     * The name used for encoding a {@code ScoreHistory} photo count.
     */
    private static final String SCORE_HISTORY_NAME_PHOTO_COUNT = "photo";

    /**
     * The name used for encoding a {@code ScoreHistory} obd photo count.
     */
    private static final String SCORE_HISTORY_NAME_OBD_PHOTO_COUNT = "obdPhoto";

    /**
     * The name used for encoding a {@code Sequence} metadata file compressed by zip.
     */
    private static final String METADATA_NAME_ZIP = "track.txt.gz";

    /**
     * The name used for encoding a {@code Sequence} metadata file compressed by txt.
     */
    private static final String METADATA_NAME_TXT_FILE = "track.txt";

    /**
     * The name used for encoding a {@code Sequence} metadata file in a request body of type multipart body.
     */
    private static final String METADATA_NAME_MULTI_PART_BODY = "metaData";

    /**
     * The value for when the sequence was recorded with obd required by the network in this format.
     */
    private static final int VALUE_OBD_TRUE = 1;

    /**
     * The value for when the obd was <i>not</i> recorded required by the network in this format.
     */
    private static final int VALUE_OBD_FALSE = 0;

    /**
     * The source representing from where the network operation is made for identification purposes.
     */
    private final String SOURCE = "Android";

    /**
     * The OS version required for the network operation.
     */
    private final String OS_VERSION = Build.VERSION.RELEASE;

    /**
     * Reference to the metadata file used in both pre and post request processing.
     */
    private KVFile metadataFile;

    /**
     * Action which represent specific handling cases to be performed on success behaviour.
     */
    private Consumer<Long> consumerSuccess;

    /**
     * The reward information of the sequence, used for networking information passing.
     * @see SequenceDetailsRewardBase
     */
    @Nullable
    private SequenceDetailsRewardBase rewardBase;

    /**
     * The details of the sequence, used for networking information processing.
     * @see SequenceDetails
     */
    private SequenceDetails details;

    /**
     * The sequence folder which will be used for metadata file search.
     */
    private KVFile sequenceFolder;

    /**
     * Default constructor for the current class.
     */
    UploadOperationMetadata(@NonNull String accessToken,
                            @NonNull KVApi api,
                            @NonNull KVFile sequenceFolder,
                            @NonNull SequenceDetails details,
                            @NonNull SimpleEventBus eventBus,
                            @Nullable SequenceDetailsRewardBase rewardBase,
                            @Nullable Consumer<Long> consumerSuccess,
                            @Nullable Consumer<Throwable> consumerError) {
        super(accessToken, api, eventBus, consumerError);
        this.details = details;
        this.rewardBase = rewardBase;
        this.consumerSuccess = consumerSuccess;
        this.sequenceFolder = sequenceFolder;
        // NOTE(review): consumerError was already passed to super above — this
        // re-assignment is presumably redundant; confirm against UploadOperationBase.
        this.consumerError = consumerError;
    }

    /**
     * @return {@code Completable} composed of:
     * <ul>
     * <li> network call by using internally {@link #uploadMetadataStream()}</li>
     * <li> error logging and handling </li>
     * </ul>
     */
    public Completable getStream() {
        return Completable.defer(this::uploadMetadataStream)
                .doOnError(throwable -> Log.d(TAG, String.format("getStream. Status: error. Message: %s.", throwable.getLocalizedMessage())));
    }

    @Override
    public void dispose() {
        // No resources to release; the stream is managed by the caller's subscription.
    }

    /**
     * @return Completable which will process for complete either:
     * <ul>
     * <li> nothing since the id is already set</li>
     * <li> process of data for the upload metadata network request
     * {@link KVApi#createSequence(RequestBody, RequestBody, RequestBody, RequestBody, RequestBody, RequestBody, RequestBody, RequestBody, RequestBody, MultipartBody.Part)}.
     * <p>On success the response will be processed internally via {@link #requestOnlineIdAndMetadataUploadNetworkCompletable(String, String, MultipartBody.Part)} method.
     * </li>
     * </ul>
     */
    private Completable uploadMetadataStream() {
        long onlineSequenceId = details.getOnlineId();
        //checks if the online id is set or not, in case it is it will return a complete without any processing
        if (SequenceDetails.ONLINE_ID_NOT_SET != onlineSequenceId) {
            Log.d(TAG, "uploadMetadataStream. Status: found online id. Message: Returning on complete. Nothing will be performed.");
            removeMetadataFileIfExists();
            return Completable.complete();
        }
        //get the metadata part
        MultipartBody.Part metadataPart = getMetadataPart();
        //if metadata file does not exist there is no point in calling the network, therefore there will be signaled an error.
        if (metadataPart == null) {
            Throwable throwable = new Throwable(THROWABLE_MESSAGE_METADATA_FILE_NOT_FOUND);
            if (consumerError != null) {
                consumerError.accept(throwable);
            }
            return Completable.error(throwable);
        }
        Pair<String, String> scoreData = getScoreData(rewardBase);
        return requestOnlineIdAndMetadataUploadNetworkCompletable(scoreData.first, scoreData.second, metadataPart);
    }

    /**
     * @param score the score in {@code String} format.
     * @param encodedScore the encoded score which is required by the network request.
     * @param metadataPart the metadata part of the request which will be uploaded to the server.
     * @return the {@code Completable} which will process the api request and the response for online id and upload metadata.
     * <p> This will in turn also implement a retry mechanism for when the request will signal an error by having a finite retry given by the {@link #RETRIES_LIMIT} field from
     * the base class.</p>
     */
    private Completable requestOnlineIdAndMetadataUploadNetworkCompletable(String score, String encodedScore, MultipartBody.Part metadataPart) {
        // NOTE(review): SOURCE is passed twice — presumably two distinct API fields
        // (client source and platform) share the same value; confirm against KVApi.
        return api.createSequence(
                NetworkRequestConverter.generateTextRequestBody(accessToken),
                NetworkRequestConverter.generateTextRequestBody(SOURCE),
                NetworkRequestConverter.generateTextRequestBody(getLocationAsString(details.getInitialLocation())),
                NetworkRequestConverter.generateTextRequestBody(String.valueOf(details.isObd() ? VALUE_OBD_TRUE : VALUE_OBD_FALSE)),
                NetworkRequestConverter.generateTextRequestBody(score),
                NetworkRequestConverter.generateTextRequestBody(SOURCE),
                NetworkRequestConverter.generateTextRequestBody(OS_VERSION),
                NetworkRequestConverter.generateTextRequestBody(details.getAppVersion()),
                NetworkRequestConverter.generateTextRequestBody(encodedScore),
                metadataPart)
                .flatMapCompletable(response -> handleDefaultResponse(response, handleUploadMetadataSuccessResponse(), null))
                .retryWhen(this::handleDefaultRetryFlowableWithTimer);
    }

    /**
     * Process the success response by calling {@link #consumerSuccess} if set and {@link #removeMetadataFileIfExists()} internally.
     */
    private Consumer<ResponseNetworkBase> handleUploadMetadataSuccessResponse() {
        return (response) -> {
            if (consumerSuccess != null) {
                ResponseModelUploadMetadata responseModelUploadMetadata = (ResponseModelUploadMetadata) response;
                consumerSuccess.accept(responseModelUploadMetadata.osv.sequence.id);
            }
            removeMetadataFileIfExists();
        };
    }

    /**
     * Remove the metadata file in exists. This will be performed before a {@link Completable#complete()} manual callback.
     * <p> The method will internally call progress updates with the size removed from the disk in case the remove of the file is successful.</p>
     */
    private void removeMetadataFileIfExists() {
        //loads the file for metadata which is in zip format
        loadMetadataFileIntoMemory();
        //if metadata file exists remove and send a progress update
        if (metadataFile.exists()) {
            long metadataSize = Utils.fileSize(metadataFile);
            boolean metadataFileRemove = metadataFile.delete();
            Log.d(TAG,
                    String.format("removeMetadataFileIfExists. Status : %s. Message: Remove physical metadata file from the device. Size: %s.", metadataFileRemove, metadataSize));
            updateEventBus.post(new UploadUpdateDisk(metadataSize));
            updateEventBus.post(new UploadUpdateProgress(metadataSize, metadataSize));
        }
    }

    /**
     * Loads either the zip or the txt file of the metadata into memory in case it is not already
     */
    private void loadMetadataFileIntoMemory() {
        if (metadataFile != null && metadataFile.exists()) {
            return;
        }
        // Prefer the zip variant; fall back to the plain txt file.
        metadataFile = new KVFile(sequenceFolder, METADATA_NAME_ZIP);
        if (!metadataFile.exists()) {
            metadataFile = new KVFile(sequenceFolder, METADATA_NAME_TXT_FILE);
        }
    }

    /**
     * @param rewardBase the sequence reward information from which the reward will be extracted.
     * @return the score data in both normal (simple String) and encoded format.
     */
    private Pair<String, String> getScoreData(SequenceDetailsRewardBase rewardBase) {
        int score = 0;
        String encodedScore = StringUtils.EMPTY_STRING;
        if (rewardBase != null) {
            score = (int) rewardBase.getValue();
            if (rewardBase instanceof SequenceDetailsRewardPoints) {
                // Renamed from `SequenceDetailsRewardPoints` — the local previously
                // shadowed its own class name.
                SequenceDetailsRewardPoints rewardPoints = (SequenceDetailsRewardPoints) rewardBase;
                Map<Integer, ScoreHistory> scoreHistory = rewardPoints.getScoreHistory();
                if (scoreHistory != null) {
                    encodedScore = getEncodedScoreHistory(scoreHistory);
                }
            }
        }
        return new Pair<>(String.valueOf(score), encodedScore);
    }

    /**
     * @return {@code MultipartBody.Part} representing the part of the request which create for the metadata file a request body or null if the metadata file does not exist.
     */
    @Nullable
    private MultipartBody.Part getMetadataPart() {
        //loads the file for metadata which is in zip format
        loadMetadataFileIntoMemory();
        //creates the media type network body which process a zip file
        if (metadataFile.exists()) {
            @NetworkRequestConverter.MediaTypesDef String mediaType =
                    metadataFile.getName().equals(METADATA_NAME_ZIP) ?
                            NetworkRequestConverter.REQUEST_MEDIA_TYPE_ZIP :
                            NetworkRequestConverter.REQUEST_MEDIA_TYPE_PLAIN_TEXT;
            //creates the media type network body which process a txt file
            return NetworkRequestConverter.generateMultipartBodyPart(mediaType, METADATA_NAME_MULTI_PART_BODY, metadataFile, null);
        }
        return null;
    }

    /**
     * @param histories the score histories which will be encoded via a json array into a string. Each history will represent on entry in the array.
     * @return {@code String} which is internally a Json array object with score histories mapped by network specific required fields.
     */
    private String getEncodedScoreHistory(Map<Integer, ScoreHistory> histories) {
        JSONArray array = new JSONArray();
        for (ScoreHistory history : histories.values()) {
            JSONObject obj = new JSONObject();
            try {
                obj.put(SCORE_HISTORY_NAME_COVERAGE, String.valueOf(history.getCoverage()));
                obj.put(SCORE_HISTORY_NAME_PHOTO_COUNT, String.valueOf(history.getPhotoCount()));
                obj.put(SCORE_HISTORY_NAME_OBD_PHOTO_COUNT, String.valueOf(history.getObdPhotoCount()));
            } catch (JSONException e) {
                e.printStackTrace();
                Log.d(TAG, String.format("getEncodedScoreHistory. Status: error. Message: %s.", e.getLocalizedMessage()));
            }
            array.put(obj);
        }
        return array.toString();
    }
}
|
package com.infamous.zod.storage.repository.impl;
import com.infamous.framework.persistence.DataStoreManager;
import com.infamous.framework.persistence.dao.AbstractDAO;
import com.infamous.framework.persistence.dao.EntityDAO;
import com.infamous.zod.storage.model.StorageFile;
import com.infamous.zod.storage.model.StorageFileKey;
import com.infamous.zod.storage.repository.StorageFileDAO;
import com.infamous.zod.storage.repository.StorageFileDataStore;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.TypedQuery;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@SuppressWarnings("unchecked")
@Component
public class StorageFileDAOImpl extends AbstractDAO<StorageFile, StorageFileKey> implements
    EntityDAO<StorageFile, StorageFileKey>, StorageFileDAO {

    @Autowired
    public StorageFileDAOImpl(DataStoreManager dataStoreManager) {
        super(dataStoreManager, StorageFile.class, StorageFileDataStore.DS_NAME);
    }

    /**
     * Fetches the files whose ids match the given keys (enabled or not).
     */
    @Override
    public List<StorageFile> findById(List<StorageFileKey> storageFileKeys) {
        List<String> ids = storageFileKeys.stream()
            .map(StorageFileKey::getId)
            .collect(Collectors.toList());
        TypedQuery<Object[]> query = (TypedQuery<Object[]>) createNativeQuery(
            "SELECT id, fileName, enabled FROM StorageFile WHERE id IN (:ids)");
        query.setParameter("ids", ids);
        List<Object[]> rows = query.getResultList();
        return rows.stream().map(this::buildStoreFile).collect(Collectors.toList());
    }

    /**
     * Fetches every enabled file.
     */
    @Override
    public List<StorageFile> findAll() {
        List<Object[]> rows = (List<Object[]>) findByNativeQuery(
            "SELECT id, fileName, enabled FROM StorageFile WHERE enabled = 1");
        return rows.stream().map(this::buildStoreFile).collect(Collectors.toList());
    }

    /**
     * Fetches the single file with the given checksum (the checksum column is
     * included in the projection so it is populated on the result).
     */
    @Override
    public StorageFile findByChecksum(String checksum) {
        TypedQuery<Object[]> query = (TypedQuery<Object[]>) createNativeQuery(
            "SELECT DISTINCT id, fileName, enabled, checksum FROM StorageFile WHERE checksum = (:checksum)");
        query.setParameter("checksum", checksum);
        return buildStoreFile(query.getSingleResult());
    }

    /**
     * Fetches the single file with the given file name.
     */
    @Override
    public StorageFile findByFileName(String fileName) {
        TypedQuery<Object[]> query = (TypedQuery<Object[]>) createNativeQuery(
            "SELECT DISTINCT id, fileName, enabled FROM StorageFile WHERE fileName = (:fileName)");
        query.setParameter("fileName", fileName);
        return buildStoreFile(query.getSingleResult());
    }

    /**
     * Maps a projection row (id, fileName, enabled[, checksum]) onto a StorageFile.
     */
    private StorageFile buildStoreFile(Object[] row) {
        StorageFile file = StorageFile.builder()
            .id((String) row[0])
            .fileName((String) row[1])
            .enabled((Boolean) row[2])
            .build();
        boolean hasChecksumColumn = row.length > 3;
        if (hasChecksumColumn) {
            file.setChecksum((String) row[3]);
        }
        return file;
    }
}
|
<reponame>axinc-ai/kaggle-facial-keypoints
import torch
import sys
from torch.autograd import Variable
def torch2onnx(filename, image_size, output_path="resnet_facial_feature.onnx"):
    """Export a saved PyTorch model to ONNX.

    :param filename: path to a ``torch.save()``'d model (a full module object,
        not a ``state_dict`` — ``torch.load`` must return something callable).
    :param image_size: square input resolution; the dummy input is
        single-channel ``(1, 1, image_size, image_size)``.
    :param output_path: destination .onnx file. Defaults to the historic
        hard-coded name so existing callers are unaffected.
    """
    try:
        # map_location remaps cuda:0 tensors so CPU-only machines can load the file.
        model = torch.load(filename, map_location={"cuda:0": "cpu"})
    except IOError:
        print("Could not find " + filename)
        # Exit non-zero: the original exited with 0, signalling success to the
        # shell even though the export never happened.
        sys.exit(1)
    model.train(False)  # inference mode (disables dropout / batch-norm updates)
    dummy = Variable(torch.randn(1, 1, image_size, image_size))
    torch.onnx.export(
        model, dummy, output_path,
        verbose=True, opset_version=10
    )
    print("Export is done")


if __name__ == "__main__":
    filename = input("PyTorch model save file name : ")
    image_size = int(input("96 | 226 : "))
    torch2onnx(filename, image_size)
|
#!/usr/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# Fetch ClassyVision at a pinned commit and vendor its package three levels up.
rm -rf ClassyVision
git clone https://github.com/facebookresearch/ClassyVision.git
cd ClassyVision || exit
git reset --hard 4785d5ee19d3bcedd5b28c1eb51ea1f59188b54d
cd ../ || exit
rm -rf ../../../classy_vision
cp -r ClassyVision/classy_vision ../../../classy_vision

# Fetch fairscale at a pinned commit and vendor it alongside.
rm -rf fairscale
git clone https://github.com/facebookresearch/fairscale.git
cd fairscale || exit
git reset --hard df7db85cef7f9c30a5b821007754b96eb1f977b6
cd ../ || exit
rm -rf ../../../fairscale
cp -r fairscale/fairscale ../../../fairscale

# Run the build inside the CUDA container. "$PWD" is quoted so the bind-mount
# survives working directories containing spaces (was unquoted before).
docker run --rm -v "$PWD/../../..":/inside pytorch/conda-cuda bash inside/dev/packaging/vissl_pip/inside.sh
|
<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import CloneSvg from '@rsuite/icon-font/lib/legacy/Clone';
/**
 * Legacy `clone` icon built from the @rsuite/icon-font glyph.
 * The default export keeps the generated module's public shape.
 */
export default createSvgIcon({
  as: CloneSvg,
  ariaLabel: 'clone',
  category: 'legacy',
  displayName: 'Clone'
});
|
<gh_stars>1-10
#include "include/quick_scanner/quick_scanner_plugin.h"

// This must be included before many other Windows headers.
#include <windows.h>

#include <winrt/Windows.Foundation.h>
#include <winrt/Windows.Foundation.Collections.h>
#include <winrt/Windows.Devices.Enumeration.h>
#include <winrt/Windows.Devices.Scanners.h>
#include <winrt/Windows.Storage.h>

// For getPlatformVersion; remove unless needed for your plugin implementation.
#include <VersionHelpers.h>

#include <flutter/method_channel.h>
#include <flutter/plugin_registrar_windows.h>
#include <flutter/standard_method_codec.h>

#include <algorithm>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>
#include <string>
#include <vector>
using namespace winrt;
using namespace Windows::Foundation;
using namespace Windows::Foundation::Collections;
using namespace Windows::Devices::Enumeration;
using namespace Windows::Devices::Scanners;
using namespace Windows::Storage;
namespace {
// Windows implementation of the quick_scanner plugin: watches for image
// scanners and scans a page from a chosen device into a folder.
class QuickScannerPlugin : public flutter::Plugin {
 public:
  static void RegisterWithRegistrar(flutter::PluginRegistrarWindows *registrar);

  QuickScannerPlugin();

  virtual ~QuickScannerPlugin();

 private:
  // Called when a method is called on this plugin's channel from Dart.
  void HandleMethodCall(
      const flutter::MethodCall<flutter::EncodableValue> &method_call,
      std::unique_ptr<flutter::MethodResult<flutter::EncodableValue>> result);

  // Watches scanner arrivals/removals; created in the constructor.
  DeviceWatcher deviceWatcher{ nullptr };

  // Tokens used to revoke the watcher callbacks in the destructor.
  winrt::event_token deviceWatcherAddedToken;
  void DeviceWatcher_Added(DeviceWatcher sender, DeviceInformation info);
  winrt::event_token deviceWatcherRemovedToken;
  void DeviceWatcher_Removed(DeviceWatcher sender, DeviceInformationUpdate infoUpdate);

  // Device ids of currently-known scanners (kept in sync by the watcher callbacks).
  std::vector<std::string> scanners_{};

  // Coroutine that scans from `device_id` into `directory` and completes `result`.
  winrt::fire_and_forget ScanFileAsync(std::string device_id, std::string directory,
      std::unique_ptr<flutter::MethodResult<flutter::EncodableValue>> result
  );
};
// static
void QuickScannerPlugin::RegisterWithRegistrar(
    flutter::PluginRegistrarWindows *registrar) {
  // Channel name must match the Dart side ("quick_scanner").
  auto channel =
      std::make_unique<flutter::MethodChannel<flutter::EncodableValue>>(
          registrar->messenger(), "quick_scanner",
          &flutter::StandardMethodCodec::GetInstance());
  auto plugin = std::make_unique<QuickScannerPlugin>();
  // The lambda captures a raw pointer; the registrar takes ownership of the
  // plugin below, so the pointer outlives the channel callbacks.
  channel->SetMethodCallHandler(
      [plugin_pointer = plugin.get()](const auto &call, auto result) {
        plugin_pointer->HandleMethodCall(call, std::move(result));
      });
  registrar->AddPlugin(std::move(plugin));
}
QuickScannerPlugin::QuickScannerPlugin() {
  // Watch only image scanners; the two handlers keep `scanners_` in sync.
  // Note: the watcher is created here but only started via the "startWatch" call.
  deviceWatcher = DeviceInformation::CreateWatcher(DeviceClass::ImageScanner);
  deviceWatcherAddedToken = deviceWatcher.Added({ this, &QuickScannerPlugin::DeviceWatcher_Added });
  deviceWatcherRemovedToken = deviceWatcher.Removed({ this, &QuickScannerPlugin::DeviceWatcher_Removed });
}
QuickScannerPlugin::~QuickScannerPlugin() {
  // Passing the stored token back to the event accessor revokes the handler
  // (C++/WinRT revoke overload of the event member function).
  deviceWatcher.Added(deviceWatcherAddedToken);
  deviceWatcher.Removed(deviceWatcherRemovedToken);
  deviceWatcher = nullptr;
}
// Dispatches Dart method-channel calls. Supported methods: getPlatformVersion,
// startWatch, stopWatch, getScanners, scanFile.
void QuickScannerPlugin::HandleMethodCall(
    const flutter::MethodCall<flutter::EncodableValue> &method_call,
    std::unique_ptr<flutter::MethodResult<flutter::EncodableValue>> result) {
  if (method_call.method_name().compare("getPlatformVersion") == 0) {
    // Report a coarse Windows version string ("Windows 10+"/"8"/"7").
    std::ostringstream version_stream;
    version_stream << "Windows ";
    if (IsWindows10OrGreater()) {
      version_stream << "10+";
    } else if (IsWindows8OrGreater()) {
      version_stream << "8";
    } else if (IsWindows7OrGreater()) {
      version_stream << "7";
    }
    result->Success(flutter::EncodableValue(version_stream.str()));
  } else if (method_call.method_name().compare("startWatch") == 0) {
    deviceWatcher.Start();
    result->Success(nullptr);
  } else if (method_call.method_name().compare("stopWatch") == 0) {
    deviceWatcher.Stop();
    result->Success(nullptr);
  } else if (method_call.method_name().compare("getScanners") == 0) {
    // Snapshot the known scanner ids into a Dart-friendly list.
    flutter::EncodableList list{};
    for (auto scanner : scanners_) {
      list.push_back(flutter::EncodableValue(scanner));
    }
    result->Success(list);
  } else if (method_call.method_name().compare("scanFile") == 0) {
    auto args = std::get<flutter::EncodableMap>(*method_call.arguments());
    auto device_id = std::get<std::string>(args[flutter::EncodableValue("deviceId")]);
    auto directory = std::get<std::string>(args[flutter::EncodableValue("directory")]);
    // Fire-and-forget coroutine; it takes ownership of `result` and completes
    // it asynchronously when the scan finishes or fails.
    ScanFileAsync(device_id, directory, std::move(result));
    //result->Success(nullptr);
  } else {
    result->NotImplemented();
  }
}
// Watcher callback: track a newly arrived scanner unless it is already known.
void QuickScannerPlugin::DeviceWatcher_Added(DeviceWatcher sender, DeviceInformation info) {
  const auto id = winrt::to_string(info.Id());
  std::cout << "DeviceWatcher_Added " << id << std::endl;
  const bool known = std::find(scanners_.begin(), scanners_.end(), id) != scanners_.end();
  if (!known) {
    scanners_.push_back(id);
  }
}
// Watcher callback: forget a departed scanner if we were tracking it.
void QuickScannerPlugin::DeviceWatcher_Removed(DeviceWatcher sender, DeviceInformationUpdate infoUpdate) {
  const auto id = winrt::to_string(infoUpdate.Id());
  std::cout << "DeviceWatcher_Removed " << id << std::endl;
  const auto pos = std::find(scanners_.begin(), scanners_.end(), id);
  if (pos != scanners_.end()) {
    scanners_.erase(pos);
  }
}
// Scans one page from `device_id` (flatbed) into `directory` and completes
// `result` with the scanned file's path, or an error.
winrt::fire_and_forget QuickScannerPlugin::ScanFileAsync(std::string device_id, std::string directory,
    std::unique_ptr<flutter::MethodResult<flutter::EncodableValue>> result) {
  // The whole body is inside try/catch: previously FromIdAsync and the
  // configuration calls ran before the try block, so a failure there escaped
  // the coroutine and left `result` never completed on the Dart side.
  try {
    auto scanner = co_await ImageScanner::FromIdAsync(winrt::to_hstring(device_id));
    if (scanner.IsScanSourceSupported(ImageScannerScanSource::Feeder)) {
      // TODO: feeder support.
    }
    auto flatbedConfiguration = scanner.FlatbedConfiguration();
    auto supportGrayscale = flatbedConfiguration.IsColorModeSupported(ImageScannerColorMode::Grayscale);
    auto supportColor = flatbedConfiguration.IsColorModeSupported(ImageScannerColorMode::Color);
    if (!supportGrayscale && !supportColor) {
      flatbedConfiguration.ColorMode(ImageScannerColorMode::Monochrome);
    } else {
      // TODO other mode
      flatbedConfiguration.ColorMode(ImageScannerColorMode::Color);
    }
    auto storageFolder = co_await StorageFolder::GetFolderFromPathAsync(winrt::to_hstring(directory));
    auto scanResult = co_await scanner.ScanFilesToFolderAsync(ImageScannerScanSource::Flatbed, storageFolder);
    auto scannedFiles = scanResult.ScannedFiles();
    if (scannedFiles.Size() == 0) {
      // Guard: First().Current() on an empty collection would crash.
      result->Error("scan_empty", "Scan completed but produced no files");
      co_return;
    }
    auto path = scannedFiles.First().Current().Path();
    result->Success(flutter::EncodableValue(winrt::to_string(path)));
  } catch (winrt::hresult_error const& ex) {
    result->Error(std::to_string(ex.code()), winrt::to_string(ex.message()));
  }
}
} // namespace
// C entry point invoked by the Flutter tool's generated plugin registrant.
void QuickScannerPluginRegisterWithRegistrar(
    FlutterDesktopPluginRegistrarRef registrar) {
  QuickScannerPlugin::RegisterWithRegistrar(
      flutter::PluginRegistrarManager::GetInstance()
          ->GetRegistrar<flutter::PluginRegistrarWindows>(registrar));
}
|
/* TODO(davedash): Clean this up, it's copied straight from Remora */
/* Namespace object for the collection subscribe/unsubscribe UI; the members
 * listed in the block comment below are assigned elsewhere before use. */
var collections = {};
/**
* These members need to be set on the collections object:
* subscribe_url
* unsubscribe_url
* adding_text
* removing_text
* add_text
* remove_text
*
* Optional:
* adding_img
* removing_img
* remove_img
* add_img
*/
(function() {
/* When a collection heading is present, clicking its image deep-links the
 * page to that collection via the URL hash. */
var $c = $('h2.collection[data-collectionid]');
if ($c.length) {
    $c.find('img').click(function() {
        window.location.hash = 'id=' + $c.attr('data-collectionid');
    }); // semicolon added — the original relied on ASI after `})`
}
/** Helpers for recently_viewed. **/
/* NOTE(review): assigned without `var`, so this becomes an implicit global
 * (window.RECENTLY_VIEWED_LIMIT). Possibly read by other scripts — confirm
 * before scoping it to this IIFE. */
RECENTLY_VIEWED_LIMIT = 5;
/* jQuery extras */
jQuery.extend({
    /* Return the enumerable keys of `obj` as an array. */
    keys: function(obj) {
        var a = [];
        $.each(obj, function(k) { a.push(k); });
        return a;
    },
    /* Return the enumerable values of `obj` as an array. */
    values: function(obj) {
        var a = [];
        $.each(obj, function(k, v) { a.push(v); });
        return a;
    },
    /* Return the [key, value] pairs of `obj` as an array of 2-element arrays. */
    items: function(obj) {
        var a = [];
        $.each(obj, function(k, v) { a.push([k, v]); }); // semicolon added for consistency (was ASI)
        return a;
    },
    /* Same as the built-in jQuery.map, but doesn't flatten returned arrays.
     * Sometimes you really do want a list of lists.
     */
    fmap: function(arr, callback) {
        var a = [];
        $.each(arr, function(index, el) { a.push(callback(el, index)); });
        return a;
    },
    /* Turn a list of (key, value) pairs into an object. */
    dict: function(pairs) {
        var o = {};
        $.each(pairs, function(i, pair) { o[pair[0]] = pair[1]; });
        return o;
    }
});
/* Return a new array of all the unique elements in `arr`. Order is preserved,
 * duplicates are dropped from the end of the array.
 *
 * `keyfunc` is called once per element before determining uniqueness, so it
 * can be used to pull out a piece of a larger object. It defaults to the
 * identity function.
 */
var unique = function(arr, keyfunc) {
    if (keyfunc === undefined) {
        // Fix: the original redeclared the parameter with `var`, shadowing it.
        keyfunc = function(e) { return e; };
    }
    /* Iterate a reversed copy so dupes at the back are overwritten by the
     * earlier (kept) occurrence. The original called arr.reverse(), which
     * mutated the caller's array as a side effect; a copy avoids that. */
    var o = {};
    $.each(arr.slice().reverse(), function(index, element) {
        o[keyfunc(element)] = [index, element];
    });
    /* Sort by the original indexes, then return the elements. */
    var s = $.values(o).sort(function(a, b) { return a[0] - b[0]; });
    return $.fmap(s.reverse(), function(e) { return e[1]; });
};
/* Maintains a list of unique objects in (local)Storage sorted by date-added
 * (descending).
 *
 * Options:
 *   limit: max number of items to keep in Storage (default: 10)
 *   storageKey: the key used for Storage (default: "recently-viewed")
 *   uniqueFunc: the function passed to `unique` to determine uniqueness
 *               of items (default: the whole object, without date-added)
 */
RecentlyViewed = function(options) {
    $.extend(this, {
        limit: 10,
        storage: z.Storage(),
        storageKey: 'recently-viewed',
        uniqueFunc: function(e) { return e[1]; }
    }, options);
};
RecentlyViewed.prototype = {
    /* Add a new object to the recently viewed items.
     *
     * Returns the new list of saved items.
     */
    add: function(obj) {
        var entries = this._list();
        /* Date.parse turns Date into an integer so entries sort numerically. */
        entries.push([Date.parse(new Date()), obj]);
        /* Newest first. */
        entries.sort(function(a, b) { return b[0] - a[0]; });
        return this._save(unique(entries, this.uniqueFunc));
    },
    /* Fetch the list of saved objects (date stamps stripped). */
    list: function() {
        return $.fmap(this._list(), function(entry) { return entry[1]; });
    },
    /* Save an array to Storage, maintaining the storage limit. */
    _save: function(entries) {
        var trimmed = entries.slice(0, this.limit);
        this.storage.set(this.storageKey, JSON.stringify(trimmed));
        return trimmed;
    },
    /* Fetch the internal list of (date, object) tuples. */
    _list: function() {
        var raw = this.storage.get(this.storageKey);
        return (raw === null || raw === undefined) ? [] : JSON.parse(raw);
    },
    /* Drop everything we have stored under our key. */
    clear: function() {
        this.storage.remove(this.storageKey);
    }
};
/* Build the "Recently Viewed" sidebar and remember the collection currently
 * being viewed (when the page exposes #add-to-recents data attributes). */
collections.recently_viewed = function() {
    var recentlyViewed = new RecentlyViewed({
        storageKey: 'recently-viewed-collections',
        uniqueFunc: function(e) { return e[1].uuid; }
    });
    var add_recent = $('#add-to-recents');
    if (add_recent.size()) {
        // Pull disp/url/uuid from the element's data-* attributes.
        var o = $.dict($.fmap(['disp', 'url', 'uuid'], function(key){
            return [key, $.trim(add_recent.attr('data-' + key))];
        }));
        var current_uuid = o.uuid;
        // If the collection has a visible name.
        if (o.disp) {
            recentlyViewed.add(o);
        }
    } else {
        var current_uuid = '';
    }
    // Render every remembered collection except the one being viewed.
    var list = $.map(recentlyViewed.list(), function(e) {
        if (e.uuid != current_uuid) {
            return $('<li></li>').append(
                $('<a class="collectionitem" href="' + e.url + '"></a>')
                    .text(e.disp)
            )[0];
        }
    });
    if (list.length != 0) {
        list = list.slice(0, RECENTLY_VIEWED_LIMIT);
        var $ul = $('<ul class="addon-collections"></ul>').append($(list));
        $('#recently-viewed')
            .append($ul)
            .append('<a id="clear-recents" href="#">' +
                    gettext('clear recently viewed') +
                    "</a>")
            .show();
        $('#clear-recents').click(function (e) {
            e.preventDefault();
            recentlyViewed.clear();
            $('#recently-viewed').hide();
        });
    }
};
/** Helpers for hijack_favorite_button. **/
/* Sum of an array of numbers. */
var sum = function(arr) {
    var ret = 0;
    $.each(arr, function(_, i) { ret += i; });
    return ret;
};
/* Show the post-subscription modal unless the user opted out via cookie. */
var modal = function(content) {
    if ($.cookie('collections-leave-me-alone'))
        return;
    var e = $('<div class="modal-subscription">' + content + '</div>');
    e.appendTo(document.body).jqm().jqmAddClose('a.close-button').jqmShow();
    e.find('#bothersome').change(function(){
        // Leave me alone for 1 year (doesn't handle leap years).
        $.cookie('collections-leave-me-alone', true,
                 {expires: 365, path: collections.cookie_path});
        e.jqmHide();
    });
};
/* Replace form.favorite's normal submit with an XHR toggle that favorites or
 * unfavorites a collection and swaps the button label/image in place. */
collections.hijack_favorite_button = function() {
    var c = collections;
    /* Hijack form.favorite for some ajax fun. */
    $('form.favorite').submit(function(event){
        event.preventDefault();
        // The XHR endpoint lives at "<form action>/ajax".
        var action = $(this).attr('action') + "/ajax";
        // `this` is the form.
        var fav_button = $(this).find('button');
        var previous = fav_button.html();
        var is_fav = fav_button.hasClass('fav');
        /* Kind should be in ['adding', 'removing', 'add', 'remove'] */
        var button = function(kind) {
            var text = c[kind + '_text'];
            /* The listing page doesn't have an inline image, detail page does. */
            if (fav_button.find('img').length) {
                var img = c[kind + '_img'];
                fav_button.html('<img src="' + img + '"/>' + text);
            } else {
                fav_button.html(text);
            }
        };
        /* We don't want the button to shrink when the contents
         * inside change. */
        fav_button.css('min-width', fav_button.outerWidth());
        fav_button.addClass('loading-fav').attr('disabled', 'disabled');
        button(is_fav ? 'removing' : 'adding');
        fav_button.css('min-width', fav_button.outerWidth());
        $.ajax({
            type: "POST",
            data: $(this).serialize(),
            url: action,
            success: function(content){
                if (is_fav) {
                    fav_button.removeClass('fav');
                    button('add');
                } else{
                    // First-time favorite: offer the subscription modal.
                    modal(content);
                    fav_button.addClass('fav');
                    button('remove');
                }
                // Holla back at the extension.
                bandwagonRefreshEvent();
            },
            error: function(){
                // Restore whatever the button said before we touched it.
                fav_button.html(previous);
            },
            complete: function(){
                // NOTE(review): attr('disabled', '') appears to rely on legacy
                // jQuery treating '' as falsy for the disabled property;
                // removeAttr('disabled') is the modern form — confirm before
                // changing.
                fav_button.attr('disabled', '');
                fav_button.removeClass('loading-fav');
            }
        });
    });
};
// Populate the recently-viewed sidebar on page load.
$(document).ready(collections.recently_viewed);
$(document).ready(function() {
    /* Hijack the voting forms to submit over xhr.
     *
     * On success we update the vote counts,
     * and show/hide the 'Remove' link.
     */
    var vote_in_progress = false;  // guards against double submits
    var callback = function(e) {
        e.preventDefault();
        if (vote_in_progress) return;
        vote_in_progress=true;
        var the_form = $(this);
        $.post($(this).attr('action'), $(this).serialize(), function(content, status, xhr) {
            vote_in_progress = false
            if (xhr.status == 200) {
                var barometer = the_form.closest('.barometer');
                var oldvote = $('input.voted', barometer);
                var newvote = $('input[type="submit"]', the_form);
                //If the vote cancels an existing vote, cancel said vote
                if (oldvote.length) {
                    oldvote.get(0).value--;
                    oldvote.removeClass('voted');
                }
                //Render new vote if it wasn't a double
                if (oldvote.get(0) !== newvote.get(0)) {
                    newvote.get(0).value++;
                    newvote.addClass('voted');
                }
            }
        });
    };
    if (z.anonymous) {
        // Anonymous users get a rate/login dropdown instead of a live vote.
        $('.barometer form').submit(function(e) {
            e.preventDefault();
            var the_form = this;
            var dropdown = $('.collection-rate-dropdown', $(the_form).closest('.barometer'));
            if ($(the_form).hasClass('downvote')) {
                dropdown.addClass('left');
            } else {
                dropdown.removeClass('left');
            }
            dropdown.detach().appendTo(the_form).show();
            // Clear popup when we click outside it.
            setTimeout(function(){
                function cb(e) {
                    // NOTE(review): _root is an implicit global — probably
                    // should be `var _root`.
                    _root = dropdown.get(0);
                    // Bail if the click was somewhere on the popup.
                    if (e.type == 'click' &&
                        _root == e.target ||
                        _.indexOf($(e.target).parents(), _root) != -1) {
                        return;
                    }
                    dropdown.hide();
                    $(document.body).unbind('click newPopup', cb);
                }
                $(document.body).bind('click newPopup', cb);
            }, 0);
        });
    } else {
        $('.barometer form').submit(callback);
    }
});
$(document).ready(function(){
    // Runtime configuration for hijack_favorite_button: labels, spinner and
    // button images, and the opt-out cookie path.
    var c = collections;
    c.adding_img = '/img/icons/white-loading-16x16.gif';
    c.adding_text = gettext('Adding to Favorites…');
    c.removing_img = '/img/icons/white-loading-16x16.gif';
    c.removing_text = gettext('Removing Favorite…');
    c.add_img = '/img/icons/buttons/plus-orange-16x16.gif';
    c.add_text = gettext('Add to Favorites');
    c.remove_img = '/img/icons/buttons/minus-orange-16x16.gif';
    c.remove_text = gettext('Remove from Favorites');
    c.cookie_path = '/';
    collections.hijack_favorite_button();
});
/* Autocomplete for collection add form. */
// Fix: addon_ac was an implicit global; declare it with `var` (only read in
// the two lines below).
var addon_ac = $('#addon-ac');
if (addon_ac.length) {
    addon_ac.autocomplete({
        minLength: 3,
        width: 300,
        source: function(request, response) {
            // Remote source; the endpoint URL is stashed on the input element.
            $.getJSON($('#addon-ac').attr('data-src'), {
                q: request.term
            }, response);
        },
        focus: function(event, ui) {
            $('#addon-ac').val(ui.item.name);
            return false;
        },
        select: function(event, ui) {
            // Remember id/icon on the input so the #addon-select handler can
            // read them back.
            $('#addon-ac').val(ui.item.name).attr('data-id', ui.item.id)
                .attr('data-icon', ui.item.icon);
            return false;
        }
    }).data('autocomplete')._renderItem = function(ul, item) {
        // Skip suggestions that are already in the add-ons list.
        if (!$("#addons-list input[value=" + item.id + "]").length) {
            return $('<li>')
                .data('item.autocomplete', item)
                .append('<a><img src="' + item.icon + '">' + item.name + '</a>')
                .appendTo(ul);
        }
    };
}
// Enter in the autocomplete field acts like clicking "select".
$('#addon-ac').keydown(function(e) {
    if (e.which == 13) {
        e.preventDefault();
        $('#addon-select').click();
    }
});
$('#addon-select').click(function(e) {
    e.preventDefault();
    var id = $('#addon-ac').attr('data-id');
    var name = $('#addon-ac').val();
    var icon = $('#addon-ac').attr('data-icon');
    // Verify that we aren't listed already
    if ($('input[name=addon][value='+id+']').length) {
        return false;
    }
    if (id && name && icon) {
        // Append a "Pending" row for the selected add-on.
        var tr = template('<tr>' +
            '<td class="item">' +
            '<input name="addon" value="{id}" type="hidden">' +
            '<img src="{icon}"><h3>{name}</h3>' +
            '<p class="comments">' +
            '<textarea name="addon_comment"></textarea>' +
            '</p></td>' +
            '<td>' + gettext('Pending') + '</td>' +
            '<td><a title="' + gettext('Add a comment') + '" class="comment">' + gettext('Comment') + '</a></td>' +
            '<td class="remove"><a title="' + gettext('Remove this add-on from the collection') + '" class="remove">' + gettext('Remove') + '</a></td>' +
            '</tr>'
        );
        var str = tr({id: id, name: name, icon: icon});
        $('#addon-select').closest('tbody').append(str);
    }
    $('#addon-ac').val('');
});
var table = $('#addon-ac').closest('table');
table.delegate(".remove", "click", function() {
    // Drop the pending add-on row entirely.
    $(this).closest('tr').remove();
})
.delegate(".comment", "click", function() {
    // Reveal this row's comment box and focus it.
    var row = $(this).closest('tr');
    row.find('.comments').show();
    $('.comments textarea', row).focus();
});
})();
/* Contributor management on the collection "contributors" admin page.
 * Fix: a jQuery object is always truthy, so the original
 * `if ($('body.collections-contributors'))` ran on every page; test
 * `.length` so this block only runs on the contributors page. */
if ($('body.collections-contributors').length) {
    // Row template for a newly added contributor.
    var user_row = template('<tr>' +
        '<td>' +
        '<input name="contributor" value="{id}" type="hidden">' +
        '{name}' +
        '</td><td>{email}</td>' +
        '<td class="contributor">Contributor</td>' +
        '<td class="remove"><a title="' + gettext("Remove this user as a contributor") + '" class="remove">' + gettext("Remove") + '</a></td>' +
        '</tr>'
    );
    $('#contributor-ac-button').click(function(e) {
        e.preventDefault();
        var email = $('#contributor-ac').val();
        var src = $('#contributor-ac').attr('data-src');
        var my_id = $('#contributor-ac').attr('data-owner');
        var $contributor_error = $('#contributor-ac-error');
        if (!email) {
            $contributor_error.html(gettext('An email address is required.')).addClass('error');
            return;
        }
        $('#contributor-ac').addClass("ui-autocomplete-loading");
        // TODO(potch): Add a fancy failure case.
        $.get(src, {q: email}, function(d) {
            $('#contributor-ac').removeClass("ui-autocomplete-loading");
            if (d.status) {
                // Reject duplicates and the owner adding themselves.
                if ($('input[name=contributor][value='+d.id+']').length == 0 &&
                    my_id != d.id) {
                    var str = user_row({id: d.id, name: d.name, email: email});
                    $('#contributor-ac-button').closest('tbody').append(str);
                    $contributor_error.html('').removeClass('error');
                } else if (d.id == my_id) {
                    $contributor_error.html(gettext('You cannot add yourself as a contributor.')).addClass('error');
                } else {
                    $contributor_error.html(gettext('You have already added that user.')).addClass('error');
                }
            } else {
                $contributor_error.html(d.message).addClass('error');
            }
            $('#contributor-ac').val('');
        });
    });
    var table = $('#contributors-list');
    table.delegate(".remove", "click", function() {
        $(this).closest('tr').remove();
    });
    // "Change owner" confirmation popup.
    $("#change-owner").popup(".make-owner", {
        callback: function (obj) {
            var $popup = this,
                ct = $(obj.click_target);
            $popup.delegate("#change-owner-cancel", "click", function(e) {
                e.preventDefault();
                $popup.hideMe();
            });
            // Remember which contributor would become the new owner.
            $popup.attr("data-newowner", ct.parents(".contributor")
                .children("input[name='contributor']").val()
            );
            return { pointTo: ct };
        }
    });
    $("#change-owner-submit").click(function(e) {
        e.preventDefault();
        var owner_id = $("#change-owner").attr("data-newowner");
        $("#users-edit form").append('<input type="hidden" name="new_owner" value="' + owner_id + '">').submit();
    });
}
/* Icon removal: POST to the link's href, swap in the returned default icon,
 * then hide the remove link. Never follows the href itself. */
$(document).ready(function() {
    $('#remove_icon').click(function() {
        var $link = $(this);
        $.post($link.attr('href'), {}, function(d) {
            $('#icon_upload .icon_preview img').attr('src', d.icon);
        });
        $link.hide();
        return false;
    });
});
$(document).ready(function () {
    // Slug management for the collection create/edit forms.
    var name_val = $('#id_name').val();
    $(document).bind('unicode_loaded', function() {
        // Mark the slug customized only if it differs from the auto-generated
        // slug for the current name.
        $('#id_slug').attr('data-customized', (!!$('#id_slug').val() &&
            ($('#id_slug').val() != makeslug(name_val))) ? 1 : 0);
        slugify();
    });
    $('#details-edit form, .collection-create form')
        .delegate('#id_name', 'keyup', slugify)
        .delegate('#id_name', 'blur', slugify)
        .delegate('#edit_slug', 'click', show_slug_edit)
        .delegate('#id_slug', 'change', function() {
            $('#id_slug').attr('data-customized', 1);
            if (!$('#id_slug').val()) {
                // An emptied slug reverts to auto-generation.
                $('#id_slug').attr('data-customized', 0);
                slugify();
            }
        });
    /* Add to collection initialization */
    // Original (logged-out) widget markup; used as the error fallback below.
    var loginHtml = $("#add-to-collection").html();
    $("#add-to-collection").popup(".widgets .collection-add", {
        width: 200,
        offset: {x: 8},
        callback: function(obj) {
            var $widget = this,
                ct = $(obj.click_target),
                list_url = ct.attr('data-listurl'),
                remove_url = ct.attr('data-removeurl'),
                add_url = ct.attr('data-addurl'),
                form_url = ct.attr('data-newurl'),
                addon_id = ct.attr('data-addonid');
            if (z.anonymous) {
                return {pointTo: ct};
            }
            // Fetch and render the user's collection list for this add-on.
            function loadList(e) {
                if (e) e.preventDefault();
                ct.addClass("ajax-loading");
                // Make a call to /collections/ajax/list with addon_id
                $.ajax({
                    url: list_url,
                    data: {'addon_id': addon_id},
                    success: renderList,
                    error: function() {
                        renderList(loginHtml);
                    },
                    dataType: 'html'
                });
            }
            // Replace the widget's contents and re-position it.
            function renderList(data) {
                $widget.html(data);
                $widget.show();
                ct.removeClass("ajax-loading");
                $("a.outlink", $widget).click(stopPropagation);
                if (!$(".errorlist li", $widget).length)
                    $widget.setWidth(200);
                $widget.setPos(ct);
                $widget.render();
            }
            // Toggle the add-on's membership in the clicked collection.
            function handleToggle(e) {
                e.preventDefault();
                var tgt = $(this);
                var data = {'addon_id': addon_id,
                            'id': tgt.attr('data-id')};
                var url = this.className == "selected" ? remove_url
                                                       : add_url;
                if (tgt.hasClass('ajax-loading')) return;
                tgt.addClass('ajax-loading');
                $.post(url, data, function(data) {
                    $widget.html(data);
                    $("a.outlink", $widget).click(stopPropagation);
                }, 'html');
            }
            // Submit the "new collection" mini-form inside the widget.
            var handleSubmit = function(e) {
                e.preventDefault();
                var tgt = $(this);
                if (ct.hasClass('ajax-loading')) return;
                ct.addClass('ajax-loading');
                // NOTE(review): form_data is an implicit global — probably
                // should be `var form_data`.
                form_data = $('#add-to-collection form').serialize();
                $.post(form_url + '?addon_id=' + addon_id, form_data, renderList, 'html');
            };
            // Swap the widget contents for the "new collection" form.
            var handleNew = function(e) {
                e.preventDefault();
                var tgt = $(this);
                $.get(form_url, {'addon_id': addon_id}, function(d) {
                    $widget.html(d);
                    $widget.setWidth(410);
                    $widget.setPos(ct);
                    $("#id_name").focus();
                });
            };
            $widget.hideMe();
            // Re-bind instead of stacking duplicate handlers on reopen.
            $widget.unbind('click.popup', stopPropagation);
            $widget.bind('click.popup', stopPropagation);
            $widget.delegate('#ajax_collections_list li', 'click', handleToggle)
                .delegate('#ajax_new_collection', 'click', handleNew)
                .delegate('#collections-new-cancel', 'click', loadList)
                .delegate('#add-to-collection form', 'submit', handleSubmit)
                .delegate('#id_name', 'keyup', slugify)
                .delegate('#id_name', 'blur', slugify)
                .delegate('#edit_slug', 'click', show_slug_edit)
                .delegate('#id_slug', 'change', function() {
                    $('#id_slug').attr('data-customized', 1);
                    if (!$('#id_slug').val()) {
                        $('#id_slug').attr('data-customized', 0);
                        slugify();
                    }
                });
            loadList();
            return false;
        }
    });
    function stopPropagation(e) {
        e.stopPropagation();
    }
});
/* Favorite / follow / share widgets on listing and detail pages. */
$(document).ready(function () {
    // Add to favorites functionality
    $(".widget.favorite").click(function(e) {
        e.preventDefault();
        var widget = $(this);
        var data = {'addon_id': widget.attr('data-addonid')};
        var faved = widget.hasClass("faved");
        // Favoriting and unfavoriting hit different endpoints.
        var url = faved ? widget.attr('data-removeurl') : widget.attr('data-addurl');
        var condensed = widget.hasClass("condensed");
        if (widget.hasClass('ajax-loading')) return;  // request in flight
        widget.addClass('ajax-loading');
        $.ajax({
            url: url,
            data: data,
            type: 'post',
            success: function(data) {
                widget.removeClass('ajax-loading');
                // Condensed widgets only carry a tooltip; full widgets a label.
                if (faved) {
                    widget.removeClass("faved");
                    if (condensed) widget.attr('title', gettext('Add to favorites'));
                    else widget.text(widget.attr('data-unfavedtext'));
                } else {
                    widget.addClass("faved");
                    if (condensed) widget.attr('title', gettext('Remove from favorites'));
                    else widget.text(widget.attr('data-favedtext'));
                }
                widget.trigger("tooltip_change");
            },
            error: function(xhr) {
                widget.removeClass('ajax-loading');
            }
        });
    });
    // Collection following
    $(".collection_widgets .watch").click(function(e) {
        e.preventDefault();
        var widget = $(this);
        if (widget.hasClass('ajax-loading')) return;
        widget.addClass('ajax-loading');
        var follow_text = gettext("Follow this Collection");
        $.ajax({
            url: $(this).attr('href'),
            type: 'POST',
            success: function(data) {
                widget.removeClass('ajax-loading');
                if (data.watching) {
                    widget.addClass("watching");
                    follow_text = gettext("Stop following");
                } else {
                    widget.removeClass("watching");
                }
                if (widget.hasClass('condensed')) {
                    widget.attr("title", follow_text);
                    widget.trigger("tooltip_change");
                } else {
                    widget.text(follow_text);
                }
            },
            error: function() {
                widget.removeClass('ajax-loading');
            }
        });
    });
    // New sharing interaction
    $("#sharing-popup").popup(".share.widget", {
        width: 280,
        offset: {x: 8},
        callback: function(obj) {
            var ret = {};
            var el = $(obj.click_target);
            var $popup = this;
            var base_url = el.attr('data-base-url');
            var counts = $.parseJSON(el.attr("data-share-counts"));
            $popup.hideMe();
            if (counts) {
                // Fill in per-service share counts and links.
                // Fix: `s` was an implicit global; declare it with `var`.
                for (var s in counts) {
                    if (!counts.hasOwnProperty(s)) continue;
                    var c = counts[s];
                    var $li = $("li." + s, this);
                    $(".share-count", $li).text(c);
                    $(".uniquify", $li).attr("href", base_url + s);
                }
            } else {
                return false;
            }
            ret.pointTo = obj.click_target;
            return ret;
        }
    });
    // Jump past the edit form to any notification box.
    if ($('#details-edit').length && $('div.notification-box').length) {
        $(document).scrollTop($("div.primary").position().top);
    }
});
|
package io.opensphere.hud.glswing;
import java.awt.AWTEvent;
import java.awt.Container;
import java.awt.Toolkit;
import java.io.Serializable;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import io.opensphere.core.control.ui.InternalComponentRegistry;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.ref.WeakReference;
/**
* A Singleton class to handle AWTEvents and make sure that the panels with the
* highest Z-Order which can use the events gets the events.
*/
/**
 * A Singleton class to handle AWTEvents and make sure that the panels with the
 * highest Z-Order which can use the events gets the events.
 */
public final class GLSwingEventManager
{
    /** Events which need to be forwarded for GLSwing interaction. */
    private static long ourEventMask = AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_MOTION_EVENT_MASK
            | AWTEvent.MOUSE_WHEEL_EVENT_MASK;

    /** The singleton instance. */
    private static GLSwingEventManager ourInstance = new GLSwingEventManager();

    /** Listener for AWT events ({@code null} when NEWT windows are in use). */
    private final GLSwingAWTEventListener myAWTListener;

    /** A list of frames registered with me for forwarded AWT events. */
    private final List<WeakReference<GLSwingInternalFrame>> myFrames = New.list();

    /**
     * Get the singleton instance.
     *
     * @return The event manager.
     */
    public static GLSwingEventManager getInstance()
    {
        return ourInstance;
    }

    /** Disallow instantiation. */
    private GLSwingEventManager()
    {
        final String newtString = System.getProperty("opensphere.pipeline.jogl.nativeWindows");
        // Fix: the original called Boolean.getBoolean(newtString), which looks
        // up a system property *named* after the property's value (and so was
        // effectively always false). Parse the value itself instead.
        final boolean newt = Boolean.parseBoolean(newtString);
        if (newt)
        {
            // Native (NEWT) windows deliver their own events; no AWT listener.
            myAWTListener = null;
        }
        else
        {
            myAWTListener = new GLSwingAWTEventListener(this);
        }
        if (myAWTListener != null)
        {
            final Toolkit kit = Toolkit.getDefaultToolkit();
            kit.addAWTEventListener(myAWTListener, ourEventMask);
        }
    }

    /**
     * De-register for forwarded AWT events.
     *
     * @param frame The frame no longer interested in events.
     */
    public void deregisterFrame(GLSwingInternalFrame frame)
    {
        synchronized (myFrames)
        {
            final Collection<WeakReference<GLSwingInternalFrame>> removes = New.set();
            for (final WeakReference<GLSwingInternalFrame> ref : myFrames)
            {
                if (frame == ref.get())
                {
                    removes.add(ref);
                }
            }
            myFrames.removeAll(removes);
        }
        if (myAWTListener != null)
        {
            myAWTListener.clearHeld();
        }
    }

    /** Handle any necessary cleanup for when a frame has been popped. */
    public void framePopped()
    {
        if (myAWTListener != null)
        {
            myAWTListener.clearHeld();
        }
    }

    /**
     * Perform any required initialization including setting the event
     * listener's component registry.
     *
     * @param registry The component registry to be used by my event listener.
     */
    public void init(InternalComponentRegistry registry)
    {
        // Fix: guard against the NEWT case, where no listener was created and
        // the original code would have thrown a NullPointerException.
        if (myAWTListener != null)
        {
            myAWTListener.setComponentRegistry(registry);
        }
    }

    /**
     * Register for forwarded AWT events.
     *
     * @param frame The frame interested in events.
     */
    public void registerFrame(GLSwingInternalFrame frame)
    {
        synchronized (myFrames)
        {
            myFrames.add(new WeakReference<>(frame));
        }
    }

    /** Tell all active frames to validate their render order. */
    public void validateRenderOrders()
    {
        final List<WeakReference<GLSwingInternalFrame>> framesCopy = New.list(myFrames.size());
        synchronized (myFrames)
        {
            framesCopy.addAll(myFrames);
        }
        for (final WeakReference<GLSwingInternalFrame> frame : framesCopy)
        {
            final GLSwingInternalFrame glFrame = frame.get();
            if (glFrame != null)
            {
                glFrame.validateRenderOrder();
            }
        }
    }

    /**
     * Get a copy of all valid frames which I manage, ordered front-to-back.
     * Stale (garbage-collected) references encountered along the way are
     * pruned from the registration list.
     *
     * @return Currently managed frames.
     */
    Set<GLSwingInternalFrame> getFrames()
    {
        final List<WeakReference<GLSwingInternalFrame>> removes = New.list();
        final Set<GLSwingInternalFrame> frames = new TreeSet<>(new FrameZOrderComparator());
        final List<WeakReference<GLSwingInternalFrame>> framesCopy = New.list(myFrames.size());
        synchronized (myFrames)
        {
            framesCopy.addAll(myFrames);
        }
        for (final WeakReference<GLSwingInternalFrame> frame : framesCopy)
        {
            final GLSwingInternalFrame glFrame = frame.get();
            if (glFrame == null)
            {
                removes.add(frame);
            }
            else
            {
                frames.add(glFrame);
            }
        }
        if (!removes.isEmpty())
        {
            synchronized (myFrames)
            {
                myFrames.removeAll(removes);
            }
        }
        return frames;
    }

    /**
     * Comparator for internal frames which orders the frames by their relative
     * z-order. This is used to ensure that frames which are in front will get
     * events before frames which are behind.
     */
    private static class FrameZOrderComparator implements Serializable, Comparator<GLSwingInternalFrame>
    {
        /** serialVersionUID. */
        private static final long serialVersionUID = 1L;

        @Override
        public int compare(GLSwingInternalFrame frame1, GLSwingInternalFrame frame2)
        {
            final Container frameParent = frame1.getHUDFrame().getInternalFrame().getParent();
            if (frameParent != null)
            {
                final int z1 = frameParent.getComponentZOrder(frame1.getHUDFrame().getInternalFrame());
                final int z2 = frameParent.getComponentZOrder(frame2.getHUDFrame().getInternalFrame());
                if (z1 < z2)
                {
                    return -1;
                }
                else if (z2 < z1)
                {
                    return 1;
                }
            }
            return 0;
        }
    }
}
|
# Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from absl.testing import parameterized
import portpicker
import tensorflow as tf
from pybind11_abseil import status
from tensorflow_federated.python.core.api import computations
from tensorflow_federated.python.core.api import test_case
from tensorflow_federated.python.core.impl.executors import executor_bindings
from tensorflow_federated.python.core.impl.executors import serialization_bindings
from tensorflow_federated.python.core.impl.executors import value_serialization
from tensorflow_federated.python.core.impl.types import computation_types
from tensorflow_federated.python.core.impl.types import placements
from tensorflow_federated.python.core.impl.types import type_conversions
# Short aliases for the computation type constructors used throughout.
TensorType = computation_types.TensorType
StructType = computation_types.StructType
SequenceType = computation_types.SequenceType
def _test_map_integers(tensor):
  """Map an integer tensor via a lookup table."""
  # Used for testing resources: the hash table is a TF resource captured by
  # the mapping function. Maps k -> 4 - k for k in [0, 5); anything else -> -1.
  keys = list(range(5))
  initializer = tf.lookup.KeyValueTensorInitializer(
      keys=keys,
      values=list(reversed(keys)),
      key_dtype=tf.int64,
      value_dtype=tf.int64)
  table = tf.lookup.StaticHashTable(initializer, default_value=-1)
  return table.lookup(tensor)
class TensorFlowExecutorBindingsTest(parameterized.TestCase,
test_case.TestCase):
def test_create(self):
try:
executor_bindings.create_tensorflow_executor()
except Exception as e: # pylint: disable=broad-except
self.fail(f'Exception: {e}')
def test_create_value(self):
executor = executor_bindings.create_tensorflow_executor()
# 1. Test a simple tensor.
expected_type_spec = TensorType(shape=[3], dtype=tf.int64)
value_pb, _ = value_serialization.serialize_value([1, 2, 3],
expected_type_spec)
value = executor.create_value(value_pb)
self.assertIsInstance(value, executor_bindings.OwnedValueId)
self.assertEqual(value.ref, 0)
self.assertEqual(str(value), '0')
self.assertEqual(repr(value), r'<OwnedValueId: 0>')
materialized_value = executor.materialize(value.ref)
deserialized_value, type_spec = value_serialization.deserialize_value(
materialized_value)
self.assert_types_identical(type_spec, expected_type_spec)
self.assertAllEqual(deserialized_value, [1, 2, 3])
# 2. Test a struct of tensors, ensure that we get a different ID.
expected_type_spec = StructType([
('a', TensorType(shape=[3], dtype=tf.int64)),
('b', TensorType(shape=[], dtype=tf.float32))
])
value_pb, _ = value_serialization.serialize_value(
collections.OrderedDict(a=tf.constant([1, 2, 3]), b=tf.constant(42.0)),
expected_type_spec)
value = executor.create_value(value_pb)
self.assertIsInstance(value, executor_bindings.OwnedValueId)
# Assert the value ID was incremented.
self.assertEqual(value.ref, 1)
self.assertEqual(str(value), '1')
self.assertEqual(repr(value), r'<OwnedValueId: 1>')
materialized_value = executor.materialize(value.ref)
deserialized_value, type_spec = value_serialization.deserialize_value(
materialized_value)
# Note: here we've lost the names `a` and `b` in the output. The output
# is a more _strict_ type.
self.assert_type_assignable_from(expected_type_spec, type_spec)
deserialized_value = type_conversions.type_to_py_container(
deserialized_value, expected_type_spec)
self.assertAllClose(deserialized_value,
collections.OrderedDict(a=(1, 2, 3), b=42.0))
# 3. Test creating a value from a computation.
@computations.tf_computation(tf.int32, tf.int32)
def foo(a, b):
return tf.add(a, b)
value_pb, _ = value_serialization.serialize_value(foo)
value = executor.create_value(value_pb)
self.assertIsInstance(value, executor_bindings.OwnedValueId)
# Assert the value ID was incremented again.
self.assertEqual(value.ref, 2)
self.assertEqual(str(value), '2')
self.assertEqual(repr(value), '<OwnedValueId: 2>')
# Note: functions are not materializable, no addition assertions.
@parameterized.named_parameters(
('range', tf.data.Dataset.range(5)),
('shuffled_range', tf.data.Dataset.range(5).shuffle(3)),
('mapped_with_resource_range',
tf.data.Dataset.range(5).map(_test_map_integers)),
('mapped_range', tf.data.Dataset.range(5).map(lambda x: x)),
('batched_range', tf.data.Dataset.range(5).batch(2,
drop_remainder=False)),
('tensor_slices', tf.data.Dataset.from_tensor_slices(list(range(5)))),
)
def test_create_value_sequence(self, dataset):
executor = executor_bindings.create_tensorflow_executor()
sequence_type = SequenceType(dataset.element_spec)
arg_value_pb, _ = value_serialization.serialize_value(
dataset, sequence_type)
arg = executor.create_value(arg_value_pb)
@computations.tf_computation(sequence_type)
def sum_examples(ds):
return ds.reduce(
tf.constant(0, ds.element_spec.dtype),
lambda s, x: s + tf.reduce_sum(x))
comp_pb = serialization_bindings.Value(
computation=sum_examples.get_proto(sum_examples))
comp = executor.create_value(comp_pb)
result = executor.create_call(comp.ref, arg.ref)
output_pb = executor.materialize(result.ref)
result, result_type_spec = value_serialization.deserialize_value(output_pb)
self.assert_types_identical(result_type_spec,
TensorType(sequence_type.element.dtype))
self.assertEqual(result, sum(range(5)))
def test_create_struct(self):
executor = executor_bindings.create_tensorflow_executor()
expected_type_spec = TensorType(shape=[3], dtype=tf.int64)
value_pb, _ = value_serialization.serialize_value(
tf.constant([1, 2, 3]), expected_type_spec)
value = executor.create_value(value_pb)
self.assertEqual(value.ref, 0)
# 1. Create a struct from duplicated values.
struct_value = executor.create_struct([value.ref, value.ref])
self.assertEqual(struct_value.ref, 1)
materialized_value = executor.materialize(struct_value.ref)
deserialized_value, type_spec = value_serialization.deserialize_value(
materialized_value)
struct_type_spec = computation_types.to_type(
[expected_type_spec, expected_type_spec])
self.assert_types_equivalent(type_spec, struct_type_spec)
deserialized_value = type_conversions.type_to_py_container(
deserialized_value, struct_type_spec)
self.assertAllClose([(1, 2, 3), (1, 2, 3)], deserialized_value)
# 2. Create a struct from the struct and another value.
new_struct_value = executor.create_struct([struct_value.ref, value.ref])
materialized_value = executor.materialize(new_struct_value.ref)
deserialized_value, type_spec = value_serialization.deserialize_value(
materialized_value)
struct_type_spec = computation_types.to_type(
[struct_type_spec, expected_type_spec])
self.assert_types_equivalent(type_spec, struct_type_spec)
deserialized_value = type_conversions.type_to_py_container(
deserialized_value, struct_type_spec)
self.assertAllClose([[(1, 2, 3), (1, 2, 3)], (1, 2, 3)], deserialized_value)
def test_create_selection(self):
executor = executor_bindings.create_tensorflow_executor()
expected_type_spec = TensorType(shape=[3], dtype=tf.int64)
value_pb, _ = value_serialization.serialize_value(
tf.constant([1, 2, 3]), expected_type_spec)
value = executor.create_value(value_pb)
self.assertEqual(value.ref, 0)
# 1. Create a struct from duplicated values.
struct_value = executor.create_struct([value.ref, value.ref])
self.assertEqual(struct_value.ref, 1)
materialized_value = executor.materialize(struct_value.ref)
deserialized_value, type_spec = value_serialization.deserialize_value(
materialized_value)
struct_type_spec = computation_types.to_type(
[expected_type_spec, expected_type_spec])
self.assert_types_equivalent(type_spec, struct_type_spec)
deserialized_value = type_conversions.type_to_py_container(
deserialized_value, struct_type_spec)
self.assertAllClose([(1, 2, 3), (1, 2, 3)], deserialized_value)
# 2. Select the first value out of the struct.
new_value = executor.create_selection(struct_value.ref, 0)
materialized_value = executor.materialize(new_value.ref)
deserialized_value, type_spec = value_serialization.deserialize_value(
materialized_value)
self.assert_types_equivalent(type_spec, expected_type_spec)
deserialized_value = type_conversions.type_to_py_container(
deserialized_value, struct_type_spec)
self.assertAllClose((1, 2, 3), deserialized_value)
def test_call_with_arg(self):
executor = executor_bindings.create_tensorflow_executor()
value_pb, _ = value_serialization.serialize_value(
tf.constant([1, 2, 3]), TensorType(shape=[3], dtype=tf.int64))
value_ref = executor.create_value(value_pb)
arg = executor.create_struct((value_ref.ref, value_ref.ref))
@computations.tf_computation(tf.int64, tf.int64)
def foo(a, b):
return tf.add(a, b)
comp_pb = serialization_bindings.Value(computation=foo.get_proto(foo))
comp = executor.create_value(comp_pb)
result = executor.create_call(comp.ref, arg.ref)
result_value_pb = executor.materialize(result.ref)
result_tensor, _ = value_serialization.deserialize_value(result_value_pb)
self.assertAllEqual(result_tensor, [2, 4, 6])
def test_call_no_arg(self):
executor = executor_bindings.create_tensorflow_executor()
@computations.tf_computation
def foo():
return tf.constant(123.0)
comp_pb = serialization_bindings.Value(computation=foo.get_proto(foo))
comp = executor.create_value(comp_pb)
result = executor.create_call(comp.ref, None)
result_value_pb = executor.materialize(result.ref)
result_tensor, _ = value_serialization.deserialize_value(result_value_pb)
self.assertEqual(result_tensor, 123.0)
  def test_materialize_on_unkown_fails(self):
    """Materializing a value id that was never created raises NOT_FOUND.

    NOTE(review): the method name has a typo ("unkown"); left as-is to avoid
    renaming a discoverable test.
    """
    executor = executor_bindings.create_tensorflow_executor()
    with self.assertRaisesRegex(status.StatusNotOk, 'NOT_FOUND'):
      executor.materialize(0)
class ReferenceResolvingExecutorBindingsTest(test_case.TestCase):
  """Tests for the reference-resolving executor wrapping a TF executor."""

  def test_create(self):
    """Constructing the executor should not raise."""
    try:
      executor_bindings.create_reference_resolving_executor(
          executor_bindings.create_tensorflow_executor())
    except Exception as e:  # pylint: disable=broad-except
      self.fail(f'Exception: {e}')

  def test_create_value(self):
    """Tensors, structs and computations get sequentially incremented ids."""
    executor = executor_bindings.create_reference_resolving_executor(
        executor_bindings.create_tensorflow_executor())
    # 1. Test a simple tensor.
    expected_type_spec = TensorType(shape=[3], dtype=tf.int64)
    value_pb, _ = value_serialization.serialize_value([1, 2, 3],
                                                      expected_type_spec)
    value = executor.create_value(value_pb)
    self.assertIsInstance(value, executor_bindings.OwnedValueId)
    self.assertEqual(value.ref, 0)
    self.assertEqual(str(value), '0')
    self.assertEqual(repr(value), r'<OwnedValueId: 0>')
    materialized_value = executor.materialize(value.ref)
    deserialized_value, type_spec = value_serialization.deserialize_value(
        materialized_value)
    self.assert_types_identical(type_spec, expected_type_spec)
    self.assertAllEqual(deserialized_value, [1, 2, 3])
    # 2. Test a struct of tensors, ensure that we get a different ID.
    expected_type_spec = StructType([
        ('a', TensorType(shape=[3], dtype=tf.int64)),
        ('b', TensorType(shape=[], dtype=tf.float32))
    ])
    value_pb, _ = value_serialization.serialize_value(
        collections.OrderedDict(a=tf.constant([1, 2, 3]), b=tf.constant(42.0)),
        expected_type_spec)
    value = executor.create_value(value_pb)
    self.assertIsInstance(value, executor_bindings.OwnedValueId)
    # Assert the value ID was incremented.
    self.assertEqual(value.ref, 1)
    self.assertEqual(str(value), '1')
    self.assertEqual(repr(value), r'<OwnedValueId: 1>')
    materialized_value = executor.materialize(value.ref)
    deserialized_value, type_spec = value_serialization.deserialize_value(
        materialized_value)
    # Note: here we've lost the names `a` and `b` in the output. The output
    # is a more _strict_ type.
    self.assert_type_assignable_from(expected_type_spec, type_spec)
    deserialized_value = type_conversions.type_to_py_container(
        deserialized_value, expected_type_spec)
    self.assertAllClose(deserialized_value,
                        collections.OrderedDict(a=(1, 2, 3), b=42.0))

    # 3. Test creating a value from a computation.
    @computations.tf_computation(tf.int32, tf.int32)
    def foo(a, b):
      return tf.add(a, b)

    value_pb, _ = value_serialization.serialize_value(foo)
    value = executor.create_value(value_pb)
    self.assertIsInstance(value, executor_bindings.OwnedValueId)
    # Assert the value ID was incremented again.
    self.assertEqual(value.ref, 2)
    self.assertEqual(str(value), '2')
    self.assertEqual(repr(value), '<OwnedValueId: 2>')
    # Note: functions are not materializable, no addition assertions.

  def test_create_struct(self):
    """Structs can be built from values and from other structs."""
    executor = executor_bindings.create_reference_resolving_executor(
        executor_bindings.create_tensorflow_executor())
    expected_type_spec = TensorType(shape=[3], dtype=tf.int64)
    value_pb, _ = value_serialization.serialize_value(
        tf.constant([1, 2, 3]), expected_type_spec)
    value = executor.create_value(value_pb)
    self.assertEqual(value.ref, 0)
    # 1. Create a struct from duplicated values.
    struct_value = executor.create_struct([value.ref, value.ref])
    self.assertEqual(struct_value.ref, 1)
    materialized_value = executor.materialize(struct_value.ref)
    deserialized_value, type_spec = value_serialization.deserialize_value(
        materialized_value)
    struct_type_spec = computation_types.to_type(
        [expected_type_spec, expected_type_spec])
    self.assert_types_equivalent(type_spec, struct_type_spec)
    deserialized_value = type_conversions.type_to_py_container(
        deserialized_value, struct_type_spec)
    self.assertAllClose([(1, 2, 3), (1, 2, 3)], deserialized_value)
    # 2. Create a struct from the struct and another value.
    new_struct_value = executor.create_struct([struct_value.ref, value.ref])
    materialized_value = executor.materialize(new_struct_value.ref)
    deserialized_value, type_spec = value_serialization.deserialize_value(
        materialized_value)
    struct_type_spec = computation_types.to_type(
        [struct_type_spec, expected_type_spec])
    self.assert_types_equivalent(type_spec, struct_type_spec)
    deserialized_value = type_conversions.type_to_py_container(
        deserialized_value, struct_type_spec)
    self.assertAllClose([[(1, 2, 3), (1, 2, 3)], (1, 2, 3)], deserialized_value)

  def test_create_selection(self):
    """Selecting index 0 out of a struct yields the original tensor."""
    executor = executor_bindings.create_reference_resolving_executor(
        executor_bindings.create_tensorflow_executor())
    expected_type_spec = TensorType(shape=[3], dtype=tf.int64)
    value_pb, _ = value_serialization.serialize_value(
        tf.constant([1, 2, 3]), expected_type_spec)
    value = executor.create_value(value_pb)
    self.assertEqual(value.ref, 0)
    # 1. Create a struct from duplicated values.
    struct_value = executor.create_struct([value.ref, value.ref])
    self.assertEqual(struct_value.ref, 1)
    materialized_value = executor.materialize(struct_value.ref)
    deserialized_value, type_spec = value_serialization.deserialize_value(
        materialized_value)
    struct_type_spec = computation_types.to_type(
        [expected_type_spec, expected_type_spec])
    self.assert_types_equivalent(type_spec, struct_type_spec)
    deserialized_value = type_conversions.type_to_py_container(
        deserialized_value, struct_type_spec)
    self.assertAllClose([(1, 2, 3), (1, 2, 3)], deserialized_value)
    # 2. Select the first value out of the struct.
    new_value = executor.create_selection(struct_value.ref, 0)
    materialized_value = executor.materialize(new_value.ref)
    deserialized_value, type_spec = value_serialization.deserialize_value(
        materialized_value)
    self.assert_types_equivalent(type_spec, expected_type_spec)
    # Fixed: convert using the selected element's type (the assert above
    # checks `expected_type_spec`), not the struct's type.
    deserialized_value = type_conversions.type_to_py_container(
        deserialized_value, expected_type_spec)
    self.assertAllClose((1, 2, 3), deserialized_value)

  def test_call_with_arg(self):
    """Calling a two-arg TF computation on a struct argument computes a + b."""
    executor = executor_bindings.create_reference_resolving_executor(
        executor_bindings.create_tensorflow_executor())
    value_pb, _ = value_serialization.serialize_value(
        tf.constant([1, 2, 3]), TensorType(shape=[3], dtype=tf.int64))
    value_ref = executor.create_value(value_pb)
    arg = executor.create_struct((value_ref.ref, value_ref.ref))

    @computations.tf_computation(tf.int64, tf.int64)
    def foo(a, b):
      return tf.add(a, b)

    comp_pb = serialization_bindings.Value(computation=foo.get_proto(foo))
    comp = executor.create_value(comp_pb)
    result = executor.create_call(comp.ref, arg.ref)
    result_value_pb = executor.materialize(result.ref)
    result_tensor, _ = value_serialization.deserialize_value(result_value_pb)
    self.assertAllEqual(result_tensor, [2, 4, 6])

  def test_call_no_arg(self):
    """A no-arg TF computation can be called with `None` as the argument."""
    executor = executor_bindings.create_reference_resolving_executor(
        executor_bindings.create_tensorflow_executor())

    @computations.tf_computation
    def foo():
      return tf.constant(123.0)

    comp_pb = serialization_bindings.Value(computation=foo.get_proto(foo))
    comp = executor.create_value(comp_pb)
    result = executor.create_call(comp.ref, None)
    result_value_pb = executor.materialize(result.ref)
    result_tensor, _ = value_serialization.deserialize_value(result_value_pb)
    self.assertEqual(result_tensor, 123.0)

  def test_materialize_on_unkown_fails(self):
    """Materializing a value id that was never created raises NOT_FOUND.

    NOTE(review): this uses a plain TensorFlow executor rather than the
    reference-resolving wrapper the class otherwise tests -- confirm whether
    that is intentional. (Method-name typo "unkown" kept as-is.)
    """
    executor = executor_bindings.create_tensorflow_executor()
    with self.assertRaisesRegex(status.StatusNotOk, 'NOT_FOUND'):
      executor.materialize(0)
class FederatingExecutorBindingsTest(test_case.TestCase):
  """Tests for constructing the federating executor binding."""

  def test_construction_placements_casters(self):
    """Cardinalities must be a Mapping from PlacementLiteral to int."""
    with self.subTest('placement_literal_keys'):
      try:
        executor_bindings.create_federating_executor(
            executor_bindings.create_tensorflow_executor(),
            {placements.CLIENTS: 10})
      except Exception as e:  # pylint: disable=broad-except
        self.fail(f'Exception: {e}')
    with self.subTest('fails_non_dict'):
      with self.assertRaisesRegex(TypeError, 'must be a `Mapping`'):
        executor_bindings.create_federating_executor(
            executor_bindings.create_tensorflow_executor(),
            [(placements.CLIENTS, 10)])
    with self.subTest('fails_non_placement_keys'):
      with self.assertRaisesRegex(TypeError, '`PlacementLiteral`'):
        executor_bindings.create_federating_executor(
            executor_bindings.create_tensorflow_executor(), {'clients': 10})
      with self.assertRaisesRegex(TypeError, '`PlacementLiteral`'):
        executor_bindings.create_federating_executor(
            executor_bindings.create_tensorflow_executor(), {10: 10})
    with self.subTest('fails_non_int_value'):
      with self.assertRaisesRegex(TypeError, r'`int` values'):
        executor_bindings.create_federating_executor(
            executor_bindings.create_tensorflow_executor(),
            {placements.CLIENTS: 0.5})
class RemoteExecutorBindingsTest(test_case.TestCase):
  """Tests for constructing the remote executor binding."""

  def test_insecure_channel_construction(self):
    """An Executor can be built over an insecure gRPC channel (no RPCs made)."""
    remote_ex = executor_bindings.create_remote_executor(
        executor_bindings.create_insecure_grpc_channel('localhost:{}'.format(
            portpicker.pick_unused_port())),
        cardinalities={placements.CLIENTS: 10})
    self.assertIsInstance(remote_ex, executor_bindings.Executor)
class ComposingExecutorBindingsTest(test_case.TestCase):
  """Tests for constructing the composing executor binding."""

  def test_construction(self):
    """A composing executor builds from a server executor and child list."""
    server = executor_bindings.create_tensorflow_executor()
    children = [
        executor_bindings.create_composing_child(
            executor_bindings.create_tensorflow_executor(),
            {placements.CLIENTS: 0})
    ]
    composing_ex = executor_bindings.create_composing_executor(server, children)
    self.assertIsInstance(composing_ex, executor_bindings.Executor)
# Run all test classes in this module.
if __name__ == '__main__':
  test_case.main()
|
import { Component, Input } from '@angular/core';
import { SpotifyService } from '../../../services/spotify.service';
import { AuthService } from '../../../services/auth.service';
import { User } from '../../../models/user.model';
@Component({
  selector: 'link-spotify-form',
  template: `
    <div *ngIf="!linkedToSpotify">
      <p>This league requires you have a Spotify account</p>
      <button (click)="signIntoSpotify()">Link to Spotify</button>
    </div>
    <div *ngIf="linkedToSpotify">
      <p>This league will use your Spotify account</p>
    </div>
  `
})
export class LinkSpotifyForm {
  // True once the signed-in user has a media source whose `source` is
  // 'spotify'. Primitive `boolean` instead of the `Boolean` wrapper object.
  private linkedToSpotify: boolean = false;

  constructor(
    private spotifyService: SpotifyService,
    private authService: AuthService
  ) {
    // Arrow functions keep `this` lexically bound, so no `self` alias is
    // needed. `forEach` instead of `map`: the callback is run purely for its
    // side effect and the mapped array was being discarded.
    authService.user.subscribe((user: User) => {
      user.mediaSource.forEach((mediaSource) => {
        if (mediaSource.source === 'spotify') {
          this.linkedToSpotify = true;
        }
      });
    });
  }

  /** Request a Spotify authorization URI and redirect the browser to it. */
  signIntoSpotify() {
    this.spotifyService.signIntoSpotify().subscribe(
      (spotifyAuthUri) => {
        window.location.href = spotifyAuthUri;
      },
      (error) => {
        // Errors belong on stderr; the previous console.log hid them from
        // error-level log filters.
        console.error(error);
      }
    );
  }
}
|
-- phpMyAdmin SQL Dump
-- version 4.6.5.2
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Czas generowania: 30 Gru 2017, 13:35
-- Wersja serwera: 10.1.21-MariaDB
-- Wersja PHP: 5.6.30
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Baza danych: `studies`
--
-- --------------------------------------------------------
--
-- Struktura tabeli dla tabeli `backoffice`
--
CREATE TABLE `backoffice` (
`idd` int(11) NOT NULL,
`user` text COLLATE utf8_polish_ci NOT NULL,
`passw` text COLLATE utf8_polish_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_polish_ci;
--
-- Zrzut danych tabeli `backoffice`
--
-- Fixed: the redacted password placeholder lost its opening quote, which
-- made this INSERT a syntax error.
INSERT INTO `backoffice` (`idd`, `user`, `passw`) VALUES
(1, 'admin', '<PASSWORD>');
-- --------------------------------------------------------
--
-- Struktura tabeli dla tabeli `candidates`
--
CREATE TABLE `candidates` (
`id` int(11) NOT NULL,
`name` text COLLATE utf8_polish_ci NOT NULL,
`surname` text COLLATE utf8_polish_ci NOT NULL,
`pass` text COLLATE utf8_polish_ci NOT NULL,
`date_of_birth` date NOT NULL,
`city` text COLLATE utf8_polish_ci NOT NULL,
`street` text COLLATE utf8_polish_ci NOT NULL,
`no_house` varchar(6) COLLATE utf8_polish_ci NOT NULL,
`no_premises` varchar(6) COLLATE utf8_polish_ci NOT NULL,
`postal_code` varchar(6) COLLATE utf8_polish_ci NOT NULL,
`telephone` varchar(11) COLLATE utf8_polish_ci NOT NULL,
`e_mail` text COLLATE utf8_polish_ci NOT NULL,
`high_school` text COLLATE utf8_polish_ci NOT NULL,
`amount_of_points` int(11) NOT NULL,
`field_of_study` text COLLATE utf8_polish_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_polish_ci ROW_FORMAT=DYNAMIC;
--
-- Zrzut danych tabeli `candidates`
--
INSERT INTO `candidates` (`id`, `name`, `surname`, `pass`, `date_of_birth`, `city`, `street`, `no_house`, `no_premises`, `postal_code`, `telephone`, `e_mail`, `high_school`, `amount_of_points`, `field_of_study`) VALUES
(16, 'dfsfds', 'fsfsf', '$2y$10$DIwBQfc4DBxhOMR8I1HuHuXvfcTRYQfkvDPwe3i0VoPEApltt3.E.', '2017-11-04', 'łąłąłą', 'ąłłąłąłą', '23', '23', '44-444', '444-444-444', '<EMAIL>', 'łąłą', 23, 'Informatyka'),
(20, 'dfsfds', 'fsfsf', '$2y$10$oYHAsoRP1vtylt5gDoPMWOvSbLV61KdmLQ59yZOeKWhB7/DX9u3le', '2017-11-04', 'łąłąłą', 'ąłąłąłą', '23', '23', '44-444', '444-444-444', '<EMAIL>', 'dsfsfsf', 23, 'Informatyka'),
(24, 'sddsf', 'sdfdsf', '$2y$10$tXD6G0ZqESG4xbM9Zk1IKeioNoYYy7K6LKIb3QTYN6iZmLmLz2SAa', '2017-12-02', 'dfgdfg', 'fdgdgfd', '23', '23', '44-444', '444-444-444', '<EMAIL>', 'sdsf fsdsfsfs', 44, 'Matematyka'),
(27, 'sad asdasd', 'sadasdasd', '$2y$10$dZK82G64mpoes.90lPETSel0TR7rJGUBJWvjzFyMwX3EB8FojFpye', '2017-12-02', 'sfd sdf', 'dsfsf', '33', '23', '44-444', '444-444-444', '<EMAIL>', 'sf sf sf sf', 23, 'Zarządzanie projektami'),
(28, 'Dominik', 'sdfdsf', '$2y$10$uQ2WUsrtlfJu6zbKUN4Bpu6LWexJcmWNtVCGazdEG6aL9kiIIawyS', '2017-12-03', 'we c', 'sdf ', '2', 'd', '44-444', '444-444-444', '<EMAIL>', 'sf sf sf', 23, 'Zarządzanie'),
(29, 'Mirek', 'Nowak', '$2y$10$1c3R7KgfCTC3Ynda5fWm.e33NpXk9CnnGXkTcNApv0LZKBQsWiCVa', '2017-12-01', 'kraków', 'wąska', '2', '4', '30-811', '543-222-222', '<EMAIL>', 'ZSE nr 1 kraków', 55, 'Elektronika'),
(30, 'askdjkajk', 'jkdksjfk', '$2y$10$rnmjnjSSEivH/MEziRCa1e2CdXj6.xMT99izIME2ltATkvNU8d9/6', '2017-12-02', 'sfds', 'sfds', '23', '23', '44-444', '444-444-444', '<EMAIL>', 'dsfk knfk k sf ', 23, 'Zarządzanie'),
(31, 'fsj', 'jsdkfj', '$2y$10$Dmw.Ie2otI0S6IOXbHxJN.L7U4GEdcEz64or74oPRB4Yog/EpS9OK', '2017-12-09', 'sdf', 'sdf', '32', '32', '44-444', '444-444-444', '<EMAIL>', 'sf sfs dsd', 32, 'Matematyka'),
(32, 'sfds', 'fdsf', '$2y$10$Qrwne4TCWPEp.tTbOTiBsOwYlmK905zQQtaa.3BsIPGDEWL072rQG', '2017-12-02', 'sdfsf', 'sdfs', '32', '23', '44-444', '444-444-444', '<EMAIL>', 'sfd sd fsf', 32, 'Elektrotechnika');
--
-- Indeksy dla zrzutów tabel
--
--
-- Indexes for table `backoffice`
--
ALTER TABLE `backoffice`
ADD PRIMARY KEY (`idd`);
--
-- Indexes for table `candidates`
--
ALTER TABLE `candidates`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT dla tabeli `backoffice`
--
ALTER TABLE `backoffice`
MODIFY `idd` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT dla tabeli `candidates`
--
ALTER TABLE `candidates`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=33;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
import json
from tqdm import tqdm
from torch.utils.tensorboard import SummaryWriter
def write_loss_to_tensorboard(log_file_path: str):
    """Replay a JSON-lines training log into TensorBoard scalar summaries.

    Each non-empty line of ``log_file_path`` must be a JSON object containing
    the keys ``train_loss``, ``train_loss_bbox_unscaled``,
    ``train_loss_ce_unscaled``, ``test_loss`` and ``test_loss_bbox_unscaled``.
    The line index is used as the epoch number.

    Args:
        log_file_path: Path to the log file, one JSON object per line.
    """
    tb_writer = SummaryWriter()  # Writes to the default ./runs directory.
    try:
        with open(log_file_path, 'r') as file:
            lines = file.readlines()
        for epoch, line in tqdm(enumerate(lines)):
            line = line.strip()
            if line == '':
                # NOTE(review): a blank line aborts the whole replay
                # (original behavior kept) -- confirm this is intended
                # rather than `continue`.
                break
            info = json.loads(line)
            tb_writer.add_scalar('Loss/train', info['train_loss'], epoch)
            tb_writer.add_scalar('Loss_bbox/train', info['train_loss_bbox_unscaled'], epoch)
            tb_writer.add_scalar('Loss_ce/train', info['train_loss_ce_unscaled'], epoch)
            tb_writer.add_scalar('Loss/test', info['test_loss'], epoch)
            tb_writer.add_scalar('Loss_bbox/test', info['test_loss_bbox_unscaled'], epoch)
    finally:
        # Fixed: the writer was leaked (events possibly unflushed) whenever
        # the file read or JSON parsing raised mid-loop.
        tb_writer.close()
|
<reponame>Richienb/p-tap<gh_stars>0
'use strict';
/**
 * Create a pass-through "tap": the returned async function runs `handler`
 * on its input, waits for it to settle, then resolves with the original,
 * untouched input value.
 */
const pTap = (handler) => async (input) => {
	await handler(input);
	return input;
};
module.exports = pTap;
// TODO: Remove this for the next major release
module.exports.default = pTap;
// Error-side tap: await the handler for its side effect, then rethrow the
// ORIGINAL error so the rejection keeps propagating unchanged.
module.exports.catch = tapHandler => async error => {
	await tapHandler(error);
	throw error;
};
|
const { logger } = require('@vtfk/logger')
const withTokenAuth = require('../lib/token-auth')
const { getStudent } = require('../lib/api/students')
const { getTeachers } = require('../lib/api/teachers')
const repackTeacher = require('../lib/repack-teacher')
/**
 * HTTP handler: returns the contact teachers for the student identified by
 * `context.bindingData.id`.
 *
 * Responses:
 *   200 - array of repacked teacher objects
 *   404 - no student matched the provided id
 *   500 - unexpected error while looking up data
 */
const returnStudents = async function (context, req) {
  const { id } = context.bindingData

  try {
    // Get students matching the provided username
    const student = await getStudent(context, id)
    if (!student) {
      // Log tag unified to 'students' to match the other log lines below.
      logger('warn', ['pifu-api', 'students', 'get contactteachers', 'student not found', id])
      context.res = {
        // Fixed: 404 (Not Found) rather than 403 -- the resource is missing,
        // the caller is not forbidden from seeing it.
        status: 404,
        body: `Student not found: ${id}`
      }
      return
    }

    const { kontaktlarergruppeIds } = student

    // Get teachers matching the contact class ids
    const teachers = await getTeachers(context, {
      groupIds: { $in: kontaktlarergruppeIds }
    })

    logger('info', ['pifu-api', 'students', 'get contactteachers', student.username])
    const repackedTeachers = teachers.map((teacher) => repackTeacher(context, teacher))
    context.res = {
      body: repackedTeachers
    }
  } catch (error) {
    logger('error', ['pifu-api', 'students', 'get contactteachers', 'error', error.message])
    context.res = {
      status: 500,
      body: error.message
    }
  }
}

module.exports = (context, request) => withTokenAuth(context, request, returnStudents)
|
package com.wix.sms.bulksms.model
/**
 * Response status codes.
 * @see <a href="http://developer.bulksms.com/eapi/submission/send_sms/#returns">Returns</a>
 */
object StatusCodes {
  /** A normal message submission, with no error encountered so far. */
  val inProgress = "0"
  val scheduled = "1"
  val internalFatalError = "22"
  val authenticationFailure = "23"
  val dataValidationFailed = "24"
  val insufficientCredits = "25"
  val upstreamCreditsNotAvailable = "26"
  val dailyQuotaExceeded = "27"
  val upstreamQuotaExceeded = "28"
  /** Correctly spelled name; prefer this one. */
  val temporarilyUnavailable = "40"
  /** Misspelled historical name, kept so existing callers still compile. */
  @deprecated("Use temporarilyUnavailable instead", "next release")
  val temporarilyUnavilable = temporarilyUnavailable
  val maximumBatchSizeExceeded = "201"
}
|
/**
*
* RoadMap
*
*/
import React from 'react';
// import styled from 'styled-components';
import { FormattedMessage } from 'react-intl';
import messages from './messages';
class RoadMap extends React.Component { // eslint-disable-line react/prefer-stateless-function
constructor(props){
super(props);
this.state = {
currentEvent: "l1"
}
}
mouseEnter(evt){
this.setState({
currentEvent: evt
})
}
mouseLeave(){
this.setState({
currentEvent: "l1"
})
}
render() {
return (
<div>
<div className="roadmap desktop-view">
<h3>ROADMAP</h3>
<p className="mid-dot-top">·</p>
<div className="inner-container">
<div className="left-side">
<div className="box">
<div className="inner-box active">
<div className="data">
<h3><FormattedMessage {...messages.q12018head}/></h3>
<p><FormattedMessage {...messages.q12018title}/></p>
<ul>
<li><FormattedMessage {...messages.q12018l1}/></li>
<li><FormattedMessage {...messages.q12018l2}/></li>
</ul>
</div>
<div className="image">
<img src={require("../../images/gifs/solv_logo_animated.gif")}/>
</div>
</div>
<div className="hr-box">
<hr/>
</div>
</div>
<div className="box">
<div className="inner-box">
<div className="data">
<h3><FormattedMessage {...messages.q32018head}/></h3>
<p><FormattedMessage {...messages.q32018title}/></p>
<ul>
<li><FormattedMessage {...messages.q32018l1}/> </li>
<li><FormattedMessage {...messages.q32018l2}/></li>
</ul>
</div>
<div className="image">
<img src={require("../../images/gifs/Key-render-gif.gif")}/>
</div>
</div>
<div className="hr-box">
<hr/>
</div>
</div>
<div className="box">
<div className="inner-box">
<div className="data">
<h3><FormattedMessage {...messages.q12019head}/></h3>
<p><FormattedMessage {...messages.q12019title}/></p>
<ul>
<li><FormattedMessage {...messages.q12019l1}/></li>
</ul>
</div>
<div className="image">
<img src={require("../../images/gifs/Heart-Logo-gif.gif")}/>
</div>
</div>
<div className="hr-box">
<hr/>
</div>
</div>
<div className="box">
<div className="inner-box">
<div className="data">
<h3><FormattedMessage {...messages.q42019head}/></h3>
<p><FormattedMessage {...messages.q42019title}/></p>
<ul>
<li><FormattedMessage {...messages.q42019l1}/></li>
<li><FormattedMessage {...messages.q42019l2}/></li>
</ul>
</div>
<div className="image">
<img src={require("../../images/gifs/Gaming-logo-gif.gif")}/>
</div>
</div>
<div className="hr-box">
<hr/>
</div>
</div>
</div>
<div className="right-side">
<div className="box">
<div className="hr-box">
<hr/>
</div>
<div className="inner-box">
<div className="image">
<img src={require("../../images/gifs/Ethereum-render.gif")}/>
</div>
<div className="data">
<h3><FormattedMessage {...messages.q22018head}/></h3>
<p><FormattedMessage {...messages.q22018title}/></p>
<ul>
<li><FormattedMessage {...messages.q22018l1}/></li>
</ul>
</div>
</div>
</div>
<div className="box">
<div className="hr-box">
<hr/>
</div>
<div className="inner-box">
<div className="image">
<img src={require("../../images/gifs/Reward-logo-Gif.gif")}/>
</div>
<div className="data">
<h3><FormattedMessage {...messages.q42018head}/></h3>
<p><FormattedMessage {...messages.q42018title}/></p>
<ul>
<li><FormattedMessage {...messages.q42018l1}/></li>
<li><FormattedMessage {...messages.q42018l2}/></li>
</ul>
</div>
</div>
</div>
<div className="box">
<div className="hr-box">
<hr/>
</div>
<div className="inner-box"><div className="image">
<img src={require("../../images/gifs/Water-Logo-gif.gif")}/>
</div>
<div className="data">
<h3><FormattedMessage {...messages.q32019head}/></h3>
<p><FormattedMessage {...messages.q32019title}/></p>
<ul>
<li><FormattedMessage {...messages.q32019l1}/></li>
<li><FormattedMessage {...messages.q32019l2}/></li>
</ul>
</div>
</div>
</div>
</div>
</div>
<p className="mid-dot-bottom">·</p>
</div>
{/*<div className="roadmap">*/}
{/*<div className="outer-container">*/}
{/*<div className="inner-container">*/}
{/*<div className="inner-row one">*/}
{/*<div className="box">*/}
{/*<p>Q2</p>*/}
{/*<img onMouseEnter={this.mouseEnter.bind(this, "l2")}*/}
{/*onMouseLeave={this.mouseLeave.bind(this)} className="img-1"*/}
{/*src={require("../../images/gifs/Ethereum-render.gif")}/>*/}
{/*</div>*/}
{/*<div className="box">*/}
{/*<p>Q1</p>*/}
{/*<img onMouseEnter={this.mouseEnter.bind(this, "l1")}*/}
{/*onMouseLeave={this.mouseLeave.bind(this)} className="img-2"*/}
{/*src={require("../../images/solution-logo.png")}/>*/}
{/*</div>*/}
{/*</div>*/}
{/*<div className="first-dot"></div>*/}
{/*<hr className="hr1"/>*/}
{/*<div className="inner-row two">*/}
{/*<div className="box">*/}
{/*<p>Q4</p>*/}
{/*<img onMouseEnter={this.mouseEnter.bind(this, "l4")}*/}
{/*onMouseLeave={this.mouseLeave.bind(this)} className="img-3"*/}
{/*src={require("../../images/gifs/Key-render-gif.gif")}/>*/}
{/*</div>*/}
{/*<div className="box">*/}
{/*<p>Q3</p>*/}
{/*<img onMouseEnter={this.mouseEnter.bind(this, "l3")}*/}
{/*onMouseLeave={this.mouseLeave.bind(this)} className="img-4"*/}
{/*src={require("../../images/gifs/Reward-logo-Gif.gif")}/>*/}
{/*</div>*/}
{/*</div>*/}
{/*<hr className="hr2"/>*/}
{/*<div className="inner-row three">*/}
{/*<div className="box">*/}
{/*<p>Q7</p>*/}
{/*<img onMouseEnter={this.mouseEnter.bind(this, "l7")}*/}
{/*onMouseLeave={this.mouseLeave.bind(this)} className="img-5"*/}
{/*src={require("../../images/gifs/Gaming-logo-gif.gif")}/>*/}
{/*</div>*/}
{/*<div className="box">*/}
{/*<p>Q6</p>*/}
{/*<img onMouseEnter={this.mouseEnter.bind(this, "l6")}*/}
{/*onMouseLeave={this.mouseLeave.bind(this)} className="img-6"*/}
{/*src={require("../../images/gifs/Water-Logo-gif.gif")}/>*/}
{/*</div>*/}
{/*<div className="box">*/}
{/*<p>Q5</p>*/}
{/*<img onMouseEnter={this.mouseEnter.bind(this, "l5")}*/}
{/*onMouseLeave={this.mouseLeave.bind(this)} className="img-7"*/}
{/*src={require("../../images/gifs/Heart-Logo-gif.gif")}/>*/}
{/*</div>*/}
{/*</div>*/}
{/*<div className="last-dot"></div>*/}
{/*<hr className="hr3"/>*/}
{/*<div className="inner-row four">*/}
{/*</div>*/}
{/*</div>*/}
{/*<div className="des-container">*/}
{/*<div className="outer-container">*/}
{/*<ul className={this.state.currentEvent === "l1" ? "l1 animated fadeInUp" : 'animated hide'}>*/}
{/*<li><p className="date">Q1 - 2018</p></li>*/}
{/*<li><p className="title">Whitepaper & Marketing</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*</ul>*/}
{/*<ul className={this.state.currentEvent === "l2" ? "l2 animated fadeInUp" : 'animated hide'}>*/}
{/*<li><p className="date">Q2 - 2018</p></li>*/}
{/*<li><p className="title">PRE-SALE Stage Ⅰ ⅠⅠ ⅠⅠⅠ</p>*/}
{/*</li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*</ul>*/}
{/*<ul className={this.state.currentEvent === "l3" ? "l3 animated fadeInUp" : 'animated hide'}>*/}
{/*<li><p className="date">Q3 - 2018</p></li>*/}
{/*<li><p className="title">Stage ⅠⅠⅠ & Solution Key</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*</ul>*/}
{/*<ul className={this.state.currentEvent === "l4" ? "l4 animated fadeInUp" : 'animated hide'}>*/}
{/*<li><p className="date">Q4 - 2018</p></li>*/}
{/*<li><p className="title">Solution Wallet</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*</ul>*/}
{/*<ul className={this.state.currentEvent === "l5" ? "l5 animated fadeInUp" : 'animated hide'}>*/}
{/*<li><p className="date">Q1 - 2019</p></li>*/}
{/*<li><p className="title">Operation Humanity</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*</ul>*/}
{/*<ul className={this.state.currentEvent === "l6" ? "l6 animated fadeInUp" : 'animated hide'}>*/}
{/*<li><p className="date">Q3 - 2019</p></li>*/}
{/*<li><p className="title">Water Solutions</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*</ul>*/}
{/*<ul className={this.state.currentEvent === "l7" ? "l7 animated fadeInUp" : 'animated hide'}>*/}
{/*<li><p className="date">Q4 - 2019</p></li>*/}
{/*<li><p className="title">Gaming</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*<li><p>Blah Blah blah</p></li>*/}
{/*</ul>*/}
{/*</div>*/}
{/*</div>*/}
{/*</div>*/}
{/*</div>*/}
</div>
)
}
}
// No props are used yet; kept so prop expectations can be declared later.
RoadMap.propTypes = {
};

export default RoadMap;
|
#!/bin/sh
# author: snikons

# Abort on the first failing command.
set -e

# Fixed: POSIX `name() {` function syntax -- the bash-only `function`
# keyword is a syntax error under /bin/sh implementations such as dash.
build() {
    echo "building..."
    # Redirect all further output of this subshell into the build log.
    exec > _build.log
    exec 2>&1
    ./build.sh
}

unit_tests() {
    echo "running unit tests..."
    bin/packet-analyzer-unit-tests
}

system_tests() {
    echo "running system tests..."
    for pcap_file in test/data/*.pcap; do
        echo "analyzing $pcap_file..."
        bin/packet-analyzer -f tcp-mb-device-info -p "$pcap_file"
    done
}

# Each phase runs in a subshell so build's `exec` redirection does not leak
# into the following phases.
(build)
echo ""
(unit_tests)
echo ""
(system_tests)
|
#!/usr/bin/env bash
YW=`echo "\033[33m"`
BL=`echo "\033[36m"`
RD=`echo "\033[01;31m"`
CM='\xE2\x9C\x94\033'
GN=`echo "\033[1;92m"`
CL=`echo "\033[m"`
while true; do
read -p "This will create a New Zigbee2MQTT LXC. Proceed(y/n)?" yn
case $yn in
[Yy]* ) break;;
[Nn]* ) exit;;
* ) echo "Please answer yes or no.";;
esac
done
clear
function header_info {
echo -e "${YW}
_______ _ ___ __ __ ____ _______ _______
|___ (_) | | |__ \| \/ |/ __ \__ __|__ __|
/ / _ __ _| |__ ___ ___ ) | \ / | | | | | | | |
/ / | |/ _ | _ \ / _ \/ _ \ / /| |\/| | | | | | | | |
/ /__| | (_| | |_) | __/ __// /_| | | | |__| | | | | |
/_____|_|\__, |____/ \___|\___|____|_| |_|\___\_\ |_| |_|
__/ |
|___/
${CL}"
}
header_info
show_menu(){
printf " ${YW} 1)${GN} Privileged ${CL}\n"
printf " ${YW} 2)${RD} Unprivileged (no device passthrough) ${CL}\n"
printf "Please choose a Install Method and hit enter or ${RD}x${CL} to exit."
read opt
}
option_picked(){
message1=${@:-"${CL}Error: No message passed"}
printf " ${YW}${message1}${CL}\n"
}
show_menu
# Fixed: `$opt` is quoted inside `[ ]`. Unquoted, an empty or multi-word
# answer made the test malformed ("unary operator expected") and silently
# skipped the whole menu.
while [ "$opt" != '' ]
do
  if [ "$opt" = '' ]; then
    exit;
  else
    case $opt in
      1) clear;
        header_info;
        option_picked "Using Privileged Install";
        IM=0
        break;
        ;;
      2) clear;
        header_info;
        option_picked "Using Unprivileged Install";
        IM=1
        break;
        ;;
      x) exit;
        ;;
      \n) exit;
        ;;
      *) clear;
        option_picked "Please choose a Install Method from the menu";
        show_menu;
        ;;
    esac
  fi
done
show_menu2(){
printf " ${YW} 1)${GN} Use Automatic Login ${CL}\n"
printf " ${YW} 2)${GN} Use Password (changeme) ${CL}\n"
printf "Please choose a Password Type and hit enter or ${RD}x${CL} to exit."
read opt
}
option_picked(){
message2=${@:-"${CL}Error: No message passed"}
printf " ${YW}${message1}${CL}\n"
printf " ${YW}${message2}${CL}\n"
}
show_menu2
# Fixed: `$opt` is quoted inside `[ ]` (empty/multi-word input previously
# broke the test expression and skipped the menu).
while [ "$opt" != '' ]
do
  if [ "$opt" = '' ]; then
    exit;
  else
    case $opt in
      1) clear;
        header_info;
        option_picked "Using Automatic Login";
        PW=" "
        break;
        ;;
      2) clear;
        header_info;
        option_picked "Using Password (changeme)";
        PW="-password changeme"
        break;
        ;;
      x) exit;
        ;;
      \n) exit;
        ;;
      *) clear;
        option_picked "Please choose a Password Type from the menu";
        show_menu2;
        ;;
    esac
  fi
done
show_menu3(){
printf " ${YW} 1)${GN} Automatic DHCP ${CL}\n"
printf " ${YW} 2)${GN} Manual DHCP ${CL}\n"
printf "Please choose a DHCP Type and hit enter or ${RD}x${CL} to exit."
read opt
}
option_picked(){
message3=${@:-"${CL}Error: No message passed"}
printf " ${YW}${message1}${CL}\n"
printf " ${YW}${message2}${CL}\n"
printf " ${YW}${message3}${CL}\n"
}
show_menu3
# Fixed: `$opt` is quoted inside `[ ]` (empty/multi-word input previously
# broke the test expression and skipped the menu).
while [ "$opt" != '' ]
do
  if [ "$opt" = '' ]; then
    exit;
  else
    case $opt in
      1) clear;
        header_info;
        option_picked "Using Automatic DHCP";
        DHCP=" "
        break;
        ;;
      2) clear;
        header_info;
        option_picked "Using Manual DHCP";
        DHCP="1"
        break;
        ;;
      x) exit;
        ;;
      \n) exit;
        ;;
      *) clear;
        option_picked "Please choose a DHCP Type from the menu";
        show_menu3;
        ;;
    esac
  fi
done
set -o errexit
set -o errtrace
set -o nounset
set -o pipefail
shopt -s expand_aliases
alias die='EXIT=$? LINE=$LINENO error_exit'
trap die ERR
trap cleanup EXIT
# Print an error banner (exit code @ failing line + reason), remove any
# partially-created container, then exit with the original code.
function error_exit() {
  trap - ERR
  local DEFAULT='Unknown failure occured.'
  local REASON="\e[97m${1:-$DEFAULT}\e[39m"
  local FLAG="\e[91m[ERROR] \e[93m$EXIT@$LINE"
  msg "$FLAG $REASON"
  [ ! -z ${CTID-} ] && cleanup_ctid
  exit $EXIT
}
# Print a yellow [WARNING] message.
function warn() {
  local REASON="\e[97m$1\e[39m"
  local FLAG="\e[93m[WARNING]\e[39m"
  msg "$FLAG $REASON"
}
# Print a cyan [INFO] message.
function info() {
  local REASON="$1"
  local FLAG="\e[36m[INFO]\e[39m"
  msg "$FLAG $REASON"
}
# Echo a line with escape sequences interpreted.
function msg() {
  local TEXT="$1"
  echo -e "$TEXT"
}
# Stop/destroy the container if it exists; otherwise free any storage that
# was already allocated for it.
function cleanup_ctid() {
  if $(pct status $CTID &>/dev/null); then
    if [ "$(pct status $CTID | awk '{print $2}')" == "running" ]; then
      pct stop $CTID
    fi
    pct destroy $CTID
  elif [ "$(pvesm list $STORAGE --vmid $CTID)" != "" ]; then
    pvesm free $ROOTFS
  fi
}
# Leave the temp working directory and delete it (runs on EXIT).
function cleanup() {
  popd >/dev/null
  rm -rf $TEMP_DIR
}
if [ "$IM" == "1" ]; then
FEATURES="nesting=1,keyctl=1"
else
FEATURES="nesting=1"
fi
TEMP_DIR=$(mktemp -d)
pushd $TEMP_DIR >/dev/null
export CTID=$(pvesh get /cluster/nextid)
export PCT_OSTYPE=debian
export PCT_OSVERSION=11
export PCT_DISK_SIZE=4
export PCT_OPTIONS="
-features $FEATURES
-hostname zigbee2mqtt
-net0 name=eth0,bridge=vmbr0,ip=dhcp
-onboot 1
-cores 2
-memory 1024
-unprivileged ${IM}
${PW}
"
bash -c "$(wget -qLO - https://github.com/docmcfreckles/Proxmox/raw/local/ct/create_lxc.sh)" || exit
STORAGE_TYPE=$(pvesm status -storage $(pct config $CTID | grep rootfs | awk -F ":" '{print $2}') | awk 'NR>1 {print $2}')
if [ "$STORAGE_TYPE" == "zfspool" ]; then
warn "Some addons may not work due to ZFS not supporting 'fallocate'."
fi
LXC_CONFIG=/etc/pve/lxc/${CTID}.conf
cat <<EOF >> $LXC_CONFIG
lxc.cgroup2.devices.allow: a
lxc.cap.drop:
lxc.cgroup2.devices.allow: c 188:* rwm
lxc.cgroup2.devices.allow: c 189:* rwm
lxc.mount.entry: /dev/serial/by-id dev/serial/by-id none bind,optional,create=dir
lxc.mount.entry: /dev/ttyUSB0 dev/ttyUSB0 none bind,optional,create=file
lxc.mount.entry: /dev/ttyACM0 dev/ttyACM0 none bind,optional,create=file
EOF
if [ "$DHCP" == "1" ]; then
  # Extract the container's MAC address from its net0 config line.
  # Fixed: the previous awk `substr($2, 31, length($3) 17)` used garbled
  # offsets (and referenced $3, which is empty) and printed the wrong text;
  # match the colon-separated MAC directly instead.
  MAC=$(pct config $CTID \
    | grep -i hwaddr \
    | grep -oiE '([0-9A-F]{2}:){5}[0-9A-F]{2}')
  echo -e "MAC Address ${BL}$MAC${CL}"
  dhcp_reservation(){
    printf "Please set DHCP reservation and press Enter."
    read
  }
  dhcp_reservation
fi
echo -en "${GN} Starting LXC Container... "
pct start $CTID
echo -e "${CM}${CL} \r"
alias lxc-cmd="lxc-attach -n $CTID --"
lxc-cmd bash -c "$(wget -qLO - https://github.com/docmcfreckles/Proxmox/raw/local/setup/zigbee2mqtt-install.sh)" || exit
IP=$(pct exec $CTID ip a s dev eth0 | sed -n '/inet / s/\// /p' | awk '{print $2}')
echo -e "${GN}Successfully created Zigbee2MQTT LXC to${CL} ${BL}$CTID${CL}. \n"
|
def calculate_sum(numbers):
    """Return the arithmetic sum of the values in ``numbers``.

    Equivalent to the built-in ``sum``: an empty iterable yields 0.
    """
    total = 0
    for value in numbers:
        total += value
    return total
|
package me.illuminator3.aar;
import org.bukkit.command.CommandExecutor;
public class Utils
{
    /**
     * Truncates {@code value} to {@code precision} decimal places without
     * rounding (truncation is towards zero, matching the previous
     * string-chopping behaviour for negative numbers).
     *
     * <p>Uses {@link java.math.BigDecimal} instead of chopping the output of
     * {@code Double.toString}, which threw for NaN/Infinity (no '.' in the
     * string), threw when {@code precision} exceeded the printed digits, and
     * produced wrong results for values rendered in scientific notation
     * such as {@code 1.0E10}.
     *
     * @param value     the value to truncate
     * @param precision number of decimal places to keep (must be &gt;= 0)
     * @return the truncated value; NaN and infinities are returned unchanged
     */
    public static double cut(double value, int precision)
    {
        if (Double.isNaN(value) || Double.isInfinite(value))
            return value;
        return java.math.BigDecimal.valueOf(value)
                .setScale(precision, java.math.RoundingMode.DOWN)
                .doubleValue();
    }

    /**
     * Builds a {@link CommandExecutor} that simply replies to the sender
     * with the given message lines and reports success.
     *
     * @param messages lines sent to the command sender on execution
     * @return a command executor that always returns {@code true}
     */
    public static CommandExecutor createMessageExecutor(String... messages)
    {
        return (sender, command, label, args) -> {
            sender.sendMessage(messages);
            return true;
        };
    }
}
|
<reponame>rovedit/Fort-Candle
#pragma once
#include <array>
#include <memory>
#include <string>
#include <glow/common/shared.hh>
#include <glow/fwd.hh>
#include <glow/std140.hh>
#include "../../fwd.hh"
#include "../../lights/Light.hh"
#include "../RenderStage.hh"
#include "../StageCamera.hh"
namespace glow
{
namespace pipeline
{
GLOW_SHARED(class, OpaqueStage);
/// Render stage that draws opaque scene geometry into an HDR color target
/// and a velocity target, consuming the results of the depth-pre, shadow
/// and AO stages it is constructed with.
class OpaqueStage : public RenderStage
{
private:
    // == Render Targets ==
    SharedTextureRectangle mAllocTargetHdr;      ///< HDR color output of this stage
    SharedTextureRectangle mAllocTargetVelocity; ///< per-pixel velocity output
    // == FBOs ==
    SharedFramebuffer mFboHdr;      ///< HDR + velocity attachments
    SharedFramebuffer mFboHdrOnly;  ///< HDR attachment only
    SharedFramebuffer mFboVelocity; ///< velocity attachment only
    // == Shaders ==
    SharedProgram mShaderVelocityInit; ///< initializes the velocity buffer
    SharedProgram mShaderEdgeOutline;  ///< edge outline pass
    // == Dependencies ==
    // Upstream stages whose outputs this stage samples.
    SharedDepthPreStage mStageDepthPre;
    SharedShadowStage mStageShadow;
    SharedAOStage mStageAo;
    // == Samplers ==
    SharedSampler mSamplerAo; ///< sampler used when reading the AO result
protected:
    /// Called when the stage is supposed to run
    void onExecuteStage(RenderContext const& ctx, RenderCallback& rc) override;
    /// Called when a new frame begins
    void onBeginNewFrame(RenderPipeline& pipeline) override;
public:
    /// Wires the stage to the upstream stages whose outputs it consumes.
    OpaqueStage(SharedDepthPreStage const& depthPreStage, SharedShadowStage const& shadowStage, SharedAOStage const& aoStage);
    /// HDR color target produced by this stage.
    SharedTextureRectangle const& getTargetHdr() const { return mAllocTargetHdr; }
    /// Velocity target produced by this stage.
    SharedTextureRectangle const& getTargetVelocity() const { return mAllocTargetVelocity; }
    void registerShader(RenderContext const& ctx, SharedProgram const& shader) const override;
    void uploadShaderData(RenderContext const& ctx, UsedProgram& shader) const override;
    StageType getType() const override { return StageType::Opaque; }
    std::string name() const override { return "Opaque"; }
};
}
}
|
const path = require('path')
const fs = require('fs')
const webpack = require('webpack')
const WebpackBar = require('webpackbar')
const ESLintPlugin = require('eslint-webpack-plugin')
const HTMLWebpackPlugin = require('html-webpack-plugin')
const MiniCssExtractPlugin = require('mini-css-extract-plugin')
const environment = require('./environment.js')
const { CleanWebpackPlugin } = require('clean-webpack-plugin')
const HtmlCriticalPlugin = require('html-critical-webpack-plugin')
// Plugin collections consumed by the individual webpack configs. Kept as
// three separate arrays because some plugins only apply to one build
// flavour (e.g. critical CSS is production-only 🚀).
const myPlugins = []
const prodPlugins = []
const devPlugins = []

// Scan the pages folder for *.ejs templates and keep just the base names,
// e.g. ['about', 'index', ...]; one HTML plugin is created per page below.
const sitePages = fs
  .readdirSync(environment.paths.pages)
  .filter((file) => path.extname(file).toLowerCase() === '.ejs')
  .map((file) => file.split('.').slice(0, -1).join('.'))

// One HTMLWebpackPlugin instance per discovered page template.
const pagePlugins = sitePages.map(
  (page) =>
    new HTMLWebpackPlugin({
      template: `${environment.paths.pages}/${page}.ejs`,
      filename: `${page}.html`,
      favicon: `${environment.paths.source}/images/favicon.ico`,
    })
)

// Build progress bar.
const progressBar = new WebpackBar()

// ESLint, run over the sources and the config folder, reporting as a table.
const eslintPlugin = new ESLintPlugin({
  files: ['.', 'src', 'config'],
  formatter: 'table',
})

// Extract CSS into css/[name].css instead of inlining it via JS.
const cssExtractPlugin = new MiniCssExtractPlugin({
  filename: 'css/[name].css',
  chunkFilename: '[name].css',
})

// Inline above-the-fold (critical) CSS into every emitted HTML page;
// dimensions cover a small phone and a desktop viewport.
const criticalCssPlugins = sitePages.map(
  (page) =>
    new HtmlCriticalPlugin({
      base: environment.paths.output,
      src: `${page}.html`,
      dest: `${page}.html`,
      inline: true,
      minify: true,
      extract: true,
      dimensions: [
        { height: 565, width: 375 },
        { height: 900, width: 1200 },
      ],
      penthouse: {
        blockJSRequests: false,
      },
    })
)

// Wipe the output folder before each build, but keep stats.json.
const cleanPlugin = new CleanWebpackPlugin({
  verbose: true,
  cleanOnceBeforeBuildPatterns: ['**/*', '!stats.json'],
})

// Expose underscore as the global `_` (used by the EJS templates).
const underscoreProvider = new webpack.ProvidePlugin({
  _: 'underscore',
})

myPlugins.push(
  progressBar,
  eslintPlugin,
  ...pagePlugins,
  cssExtractPlugin,
  cleanPlugin,
  underscoreProvider,
)
prodPlugins.push(...criticalCssPlugins)

module.exports = { myPlugins, prodPlugins, devPlugins }
|
@RestController
@RequestMapping("/items")
public class ItemsController {

    private final ItemsRepository itemsRepository;

    // Constructor injection (preferred over field @Autowired): the
    // dependency is explicit, final, and trivial to supply in unit tests.
    // With a single constructor, Spring injects it without any annotation.
    public ItemsController(ItemsRepository itemsRepository) {
        this.itemsRepository = itemsRepository;
    }

    /** GET /items — returns all persisted items. */
    @GetMapping
    public List<Item> list() {
        return itemsRepository.findAll();
    }

    /** POST /items — persists the posted item and returns the saved entity. */
    @PostMapping
    public Item create(@RequestBody Item item) {
        return itemsRepository.save(item);
    }
}
@Entity
public class Item {
    // JPA requires every entity to declare a primary key; without @Id the
    // persistence provider refuses to bootstrap the entity manager.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
    private String name;
    private String description;
    // getters and setters
}
/** Spring Data repository providing CRUD operations for {@code Item}. */
public interface ItemsRepository extends JpaRepository<Item, Long> {
}
|
# Sum the first and last elements of the sequence. Renamed the variables:
# the original bound ``list`` and ``sum``, shadowing the built-ins.
numbers = [3, 6, 9, 12, 15]
first_last_sum = numbers[0] + numbers[-1]
print("The sum is: ", first_last_sum)
|
/*tslint:disable:no-unused-variable */
import { Injectable, ReflectiveInjector } from '@angular/core';
import { async, inject, TestBed } from '@angular/core/testing';
import { AngularFireDatabase } from 'angularfire2/database';
import { Subject } from 'rxjs/Rx';
import { AfoListObservable } from './afo-list-observable';
import { AfoObjectObservable } from './afo-object-observable';
import { AngularFireOfflineDatabase } from './database';
import { LocalForageToken } from './localforage';
import { LocalUpdateService } from './local-update-service';
import { CacheItem, WriteCache } from './interfaces';
describe('Service: AngularFireOfflineDatabase', () => {
  let mockAngularFireDatabase: MockAngularFireDatabase;
  let mockLocalForageService: MockLocalForageService;
  // Fresh mock instances per spec so no cached state leaks between tests.
  beforeEach(() => {
    mockLocalForageService = new MockLocalForageService();
    mockAngularFireDatabase = new MockAngularFireDatabase();
    TestBed.configureTestingModule({
      providers: [
        AngularFireOfflineDatabase,
        LocalUpdateService,
        { provide: AngularFireDatabase, useValue: mockAngularFireDatabase },
        { provide: LocalForageToken, useValue: mockLocalForageService }
      ]
    });
  });
  beforeEach(async(() => {
    TestBed.compileComponents();
  }));
  it('should create the service', inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    expect(service).toBeTruthy();
  }));
  // --- list(): values arriving from the (mock) Firebase stream ---
  it('should return a list (1 - processing complete)', done => {
    inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      const key = '/slug-2';
      let newValue = [
        { val: () => { return 'xyz'; } }
      ];
      service.processing.current = false;
      service.list(key).subscribe(list => {
        expect(list[0].$value).toBe('xyz');
        done();
      });
      // Subscribing alone must not mark the cache entry as loaded.
      expect(service.listCache[key].loaded).toBe(false);
      mockAngularFireDatabase.update('list', newValue);
    })();
  });
  it('should return a list (2 - while processing)', () => {
    inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      const key = '/slug-2';
      let newValue = [
        { val: () => { return 'xyz'; } }
      ];
      service.list(key);
      mockAngularFireDatabase.update('list', newValue);
      // While processing, values accumulate in processing.listCache instead
      // of being published to subscribers.
      expect(service.processing.listCache[key][0].$value).toBe('xyz');
    })();
  });
  it('should not setup a list', inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    const key = '/slug-2';
    // Setup test - Set up list
    service.list(key, {});
    // If `setupObject` is called, then this will be false:
    expect(service.listCache[key].loaded).toBe(false);
    // Setting to true
    service.listCache[key].loaded = true;
    // Test
    service.list(key);
    // Will still be true if `setupObject` was not called
    expect(service.listCache[key].loaded).toBe(true);
  }));
  // --- object(): values arriving from the (mock) Firebase stream ---
  it('should return an object', async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    let newValue = { val: () => { return 'abc23-7'; } };
    service.object('/slug-2').subscribe(object => {
      expect(object.$value).toBe('abc23-7');
    });
    mockAngularFireDatabase.update('object', newValue);
  })));
  it('should return an object', async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    let newValue = { val: () => { return 'abc23-7'; } };
    service.processing.current = false;
    service.object('/slug-2').subscribe(object => {
      expect(object.$value).toBe('abc23-7');
    });
    mockAngularFireDatabase.update('object', newValue);
  })));
  it('should not setup an object', inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    const key = '/slug-2';
    // Setup test - Set up list
    service.object(key, {});
    // If `setupObject` is called, then this will be false:
    expect(service.objectCache[key].loaded).toBe(false);
    // Setting to true
    service.objectCache[key].loaded = true;
    // Test
    service.object(key);
    // Will still be true if `setupObject` was not called
    expect(service.objectCache[key].loaded).toBe(true);
  }));
  // --- values served from local (mock localForage) storage ---
  it('should return a locally stored object value (1 - with processing)',
    async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    const key = '/slug-2';
    mockLocalForageService.values[`read/object${key}`] = '293846488sxjfhslsl20201-4ghcjs';
    service.processing.current = true;
    service.object(key).subscribe(object => {
      expect(object.$value).toBe('293846488sxjfhslsl20201-4ghcjs');
      expect(object.$exists()).toEqual(true);
    });
  })));
  it('should return a locally stored object value (2 - not processing)',
    async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    const key = '/slug-2';
    service.processing.current = false;
    mockLocalForageService.values[`read/object${key}`] = '293846488sxjfhslsl20201-4ghcjs';
    service.object(key).subscribe(object => {
      expect(object.$value).toBe('293846488sxjfhslsl20201-4ghcjs');
      expect(object.$exists()).toEqual(true);
    });
  })));
  it('should not return a locally stored value if loaded', done => {
    inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      const key = '/slug-2';
      let returnedValue = false;
      service.processing.current = false;
      mockLocalForageService.values[`read/object${key}`] = '293846488sxjfhslsl20201-4ghcjs';
      service.object(key).subscribe(object => {
        // Expect this to not happen
        returnedValue = true;
      });
      // Fake loading
      service.objectCache[key].loaded = true;
      // Wait for result
      setTimeout(() => {
        expect(returnedValue).toBe(false);
        done();
      });
    })();
  });
  it('get local list (1) - should update value',
    async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    service.processing.current = false;
    const key = '/list-2';
    const listKeys = ['key-1', 'key-2', 'key-3'];
    // Prepare return values for localForage
    mockLocalForageService.values[`read/list${key}`] = listKeys;
    listKeys.forEach(listKey => {
      mockLocalForageService.values[`read/object${key}/${listKey}`] = '1';
    });
    // Run test
    service.list(key).subscribe(object => {
      expect(object[0].$value).toEqual('1');
      expect(object[1].$value).toEqual('1');
      expect(object[2].$value).toEqual('1');
      expect(object[2].$exists()).toEqual(true);
      expect(object[3]).toEqual(undefined);
    });
  })));
  it('get local list (2) - should not update value if loaded', done => {
    inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      service.processing.current = false;
      let returnedValue = false;
      const key = '/list-2';
      const listKeys = ['key-1', 'key-2', 'key-3'];
      // Prepare return values for localForage
      mockLocalForageService.values[`read/list${key}`] = listKeys;
      listKeys.forEach(listKey => {
        mockLocalForageService.values[`read/object${key}/${listKey}`] = '1';
      });
      // Run test
      service.list(key).subscribe(object => {
        // Expect this to not happen
        returnedValue = true;
      });
      // Fake loading
      service.listCache[key].loaded = true;
      // Wait for result
      setTimeout(() => {
        expect(returnedValue).toBe(false);
        done();
      });
    })();
  });
  // While processing.current is true, reads park their results in the
  // processing caches rather than publishing them.
  describe('Wait while processing', () => {
    it('1 - wait for a list', done => {
      inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
        service.processing.current = true;
        const key = '/list-2';
        const listKeys = ['key-1', 'key-2', 'key-3'];
        // Prepare return values for localForage
        mockLocalForageService.values[`read/list${key}`] = listKeys;
        listKeys.forEach(listKey => {
          mockLocalForageService.values[`read/object${key}/${listKey}`] = '1';
        });
        // Run test
        service.list(key);
        // Wait for results
        setTimeout(() => {
          const isDefined = service.processing.listCache[key] !== undefined;
          expect(isDefined).toBe(true);
          if (isDefined) { expect(service.processing.listCache[key].length).toBe(3); }
          done();
        });
      })();
    });
    it('2 - wait for an object', done => {
      inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
        service.processing.current = true;
        const key = '/object-2';
        // Prepare return values for localForage
        mockLocalForageService.values[`read/object${key}`] = 'object value';
        // Run test
        service.object(key);
        // Wait for results
        setTimeout(() => {
          const isDefined = service.processing.objectCache[key] !== undefined;
          expect(isDefined).toBe(true);
          if (isDefined) { expect(service.processing.objectCache[key].$value).toBe('object value'); }
          done();
        });
      })();
    });
  });
  // Replay of offline writes queued in localForage under the 'write' key.
  describe('Process writes', () => {
    it('1 - should remove a list', done => {
      inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
        const key = 'item-1';
        const cacheItem: CacheItem = {
          type: 'list',
          ref: key,
          method: 'remove',
          args: []
        };
        const writeCache: WriteCache = {
          lastId: 3,
          cache: {
            '3': cacheItem
          }
        };
        mockLocalForageService.resolves['write'](writeCache);
        service.listCache[key] = {
          loaded: false,
          offlineInit: false,
          sub: new MockAfoListObservable()
        };
        service.processing.current = true;
        setTimeout(() => {
          expect(mockAngularFireDatabase.listData$.history[0]).toBe('remove');
          done();
        });
      })();
    });
    it('2 - should do nothing if write cache is empty',
      async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      mockLocalForageService.resolves['write'](null);
      setTimeout(() => {
        expect(service.cacheIndex).toBe(0);
      });
    })));
    it('3 - should do nothing if returned write cached is empty',
      async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      const writeCache: WriteCache = {
        lastId: 1,
        cache: {}
      };
      expect(service.processing.current).toBe(true);
      // Run test
      mockLocalForageService.resolves['write'](writeCache);
      // Wait for result
      setTimeout(() => {
        expect(service.cacheIndex).toBe(1);
        expect(service.processing.current).toBe(false);
      });
    })));
    it('4 - should check if it should emulate a list', done => {
      inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
        const key = 'item-1';
        const cacheItem: CacheItem = {
          type: 'object',
          ref: key,
          method: 'set',
          args: []
        };
        const writeCache: WriteCache = {
          lastId: 3,
          cache: {
            '3': cacheItem
          }
        };
        mockLocalForageService.resolves['write'](writeCache);
        service.objectCache[key] = {
          loaded: false,
          offlineInit: false,
          sub: new MockAfoObjectObservable()
        };
        service.processing.current = true;
        setTimeout(() => {
          done();
        });
      })();
    });
    it('5 - should add valid items to emulateQue', done => {
      inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
        service.emulateQue['/items'] = [];
        const cacheItem1: CacheItem = {
          type: 'object',
          ref: 'items/item-1',
          method: 'set',
          args: ['value1']
        };
        const writeCache: WriteCache = {
          lastId: 2,
          cache: {
            '3': cacheItem1
          }
        };
        mockLocalForageService.resolves['write'](writeCache);
        service.processing.current = true;
        setTimeout(() => {
          done();
        });
      })();
    });
    it('6 - should add valid items to emulateQue and create a new que item when empty', done => {
      inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
        const cacheItem1: CacheItem = {
          type: 'object',
          ref: 'items/item-1',
          method: 'set',
          args: ['value1']
        };
        const writeCache: WriteCache = {
          lastId: 2,
          cache: {
            '3': cacheItem1
          }
        };
        mockLocalForageService.resolves['write'](writeCache);
        service.processing.current = true;
        setTimeout(() => {
          done();
        });
      })();
    });
    it('7 - should update the emulate List',
      async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      const writeCache: WriteCache = {
        lastId: 1,
        cache: {}
      };
      const cacheItem: CacheItem = {
        type: 'object',
        ref: 'items/item-1',
        method: 'set',
        args: ['value1']
      };
      service.listCache['items'] = {
        loaded: false,
        offlineInit: false,
        sub: new MockAfoObjectObservable()
      };
      service.emulateQue = {
        'random-key': [],
        'items': [
          cacheItem
        ]
      };
      mockLocalForageService.resolves['write'](writeCache);
      setTimeout(() => {
        expect(service.listCache['items'].sub.history[0].value).toBe('value1');
      });
    })));
    it('8 - should publish processed values',
      async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
      const testResults = {items: undefined, thing: undefined};
      // Setup list
      service.listCache['items'] = {
        loaded: false,
        offlineInit: false,
        sub: new MockAfoListObservable()
      };
      service.processing.listCache['items'] = ['item-1', 'item-2'];
      service.listCache['items'].sub.subscribe(x => testResults.items = x);
      // Setup object
      service.objectCache['thing'] = {
        loaded: false,
        offlineInit: false,
        sub: new MockAfoObjectObservable()
      };
      service.processing.objectCache['thing'] = {title: 'thing-1'};
      service.objectCache['thing'].sub.subscribe(x => testResults.thing = x);
      // Run test
      mockLocalForageService.resolves['write'](null);
      // Wait for results
      setTimeout(() => {
        expect(testResults.thing.title).toBe('thing-1');
        expect(testResults.items[0]).toBe('item-1');
      });
    })));
  });
  it('should return an unwrapped null value', async(inject([AngularFireOfflineDatabase], (service: AngularFireOfflineDatabase) => {
    let newValue = { val: () => { return null; } };
    service.object('/slug-2').subscribe(object => {
      expect(object.$value).toBe(null);
    });
    mockAngularFireDatabase.update('object', newValue);
  })));
});
// Canned API payload used by specs that need a realistic record shape.
export const MockApiData = [
  {
    dataUrl: 'https://example.com/slug-1',
    date: '',
    id: 1,
    stamp: 1437120051000,
    slug: 'slug-1',
    text: 'this is string of searchable text'
  }
];
@Injectable()
export class MockLocalForageService {
  /** Values returned immediately by getItem, keyed by storage key. */
  values = {};
  /** Deferred resolvers for keys that had no value at getItem time. */
  resolves = {};
  /**
   * Mimics localForage.getItem: resolves at once when the key is known,
   * otherwise parks the resolver so a test can trigger it later.
   */
  getItem(key) {
    return new Promise(resolve => {
      const stored = this.values[key];
      if (stored !== undefined) {
        resolve(stored); // value already present: resolve immediately
      } else {
        this.resolves[key] = resolve; // let the test resolve this later
      }
    });
  }
  /** Mimics localForage.setItem: stores the value and resolves with it. */
  setItem(key, value) {
    return new Promise(resolve => {
      this.values[key] = value;
      resolve(value);
    });
  }
}
@Injectable()
export class MockAngularFireDatabase extends AngularFireDatabase {
  // Lazily-created subjects standing in for the Firebase list/object streams.
  listData$: any;
  objectData$;
  constructor() {
    // null is acceptable: none of the real AngularFireDatabase machinery is
    // exercised by these tests.
    super(null);
    this.init();
  }
  // Hook for subclasses/spies; intentionally empty.
  init() { }
  // Push a new snapshot into the 'list' or 'object' stream.
  update(type, newValue) {
    this[`${type}Data$`].next(newValue);
  }
  list() {
    if (this.listData$ === undefined) {
      this.listData$ = new MockFirebaseListObservable();
    }
    return this.listData$;
  }
  object() {
    if (this.objectData$ === undefined) {
      this.objectData$ = new MockFirebaseObjectObservable();
    }
    return this.objectData$;
  }
}
// Minimal stand-in for a Firebase reference: just enough of the $ref shape
// (key, toString, database.ref) for the observables under test.
export const Ref = {
  $ref: {
    ref: {key: 'key-1'},
    push: undefined,
    resolve: undefined,
    toString: () => 'https://angularfire2-offline.firebaseio.com/key-1',
    database: {
      ref: () => {
        return {
          toString: () => 'https://angularfire2-offline.firebaseio.com/'
        };
      }
    }
  }
};
@Injectable()
export class MockFirebaseListObservable<T> extends Subject<T> {
  /** Names of mutating calls, recorded for later assertions. */
  history = [];
  $ref = Ref.$ref;
  /** Pretend to remove the list; only logs the call and resolves. */
  remove() {
    this.history.push('remove');
    return Promise.resolve();
  }
}
@Injectable()
export class MockFirebaseObjectObservable<T> extends Subject<T> {
  /** Names of mutating calls, recorded for later assertions. */
  history = [];
  $ref = Ref.$ref;
  /** Pretend to set the object; only logs the call and resolves. */
  set() {
    this.history.push('set');
    return Promise.resolve();
  }
}
export class MockAfoListObservable<T> extends AfoListObservable<T> {
  /** Chronological log of emulate() calls as { method, value } pairs. */
  history = [];
  constructor() {
    super(Ref, null);
  }
  /** Record the emulated mutation instead of applying it. */
  emulate(method, value) {
    this.history.push({ method, value });
  }
}
export class MockAfoObjectObservable<T> extends AfoObjectObservable<T> {
  /** Chronological log of emulate() calls as { method, value } pairs. */
  history = [];
  constructor() {
    super(Ref, null);
  }
  /** Record the emulated mutation instead of applying it. */
  emulate(method, value) {
    this.history.push({ method, value });
  }
}
|
<reponame>winton/a_b_server<gh_stars>1-10
# Load the shared gem helper (two directories up) and require the gems
# declared for the :console group.
require File.expand_path('../../gems', __FILE__)
ABServer::Gems.require(:console)
# Put the app root on the load path, then boot each subsystem in order.
$:.unshift File.dirname(__FILE__) + '/../'
require 'boot/application'
require 'boot/sinatra'
require 'boot/active_wrapper'
require 'boot/delayed_job'
require 'boot/model'
require 'boot/job'
|
<gh_stars>10-100
/*
* DSFML - The Simple and Fast Multimedia Library for D
*
* Copyright (c) 2013 - 2018 <NAME> (<EMAIL>)
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from the
* use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not claim
* that you wrote the original software. If you use this software in a product,
* an acknowledgment in the product documentation would be appreciated but is
* not required.
*
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source distribution
*
*
* DSFML is based on SFML (Copyright Laurent Gomila)
*/
#ifndef DSFML_DSTREAM_H
#define DSFML_DSTREAM_H
#include <DSFMLC/Config.h>
#include <SFML/System.hpp>
//Define an interface usable with D's C++ interop
// NOTE(review): the methods are virtual but not pure and carry no bodies in
// this translation unit — presumably the implementations are supplied from
// the D side through the interop vtable; confirm before adding '= 0' or
// C++ definitions, as either could break the D binding.
class DStream
{
public:
    virtual DLong read(void* data, DLong size);
    virtual DLong seek(DLong position);
    virtual DLong tell();
    virtual DLong getSize();
};
//Define a class based off sf::InputStream that encompasses a DStream
class sfmlStream:public sf::InputStream
{
private:
    // Wrapped D-side stream; null until supplied via the pointer constructor.
    // (Previously left uninitialized by the default constructor, so any call
    // before assignment dereferenced an indeterminate pointer.)
    DStream* myStream = nullptr;

public:
    sfmlStream()
    {
    }

    sfmlStream(DStream* stream)
    {
        myStream = stream;
    }

    /// Read up to \a size bytes into \a data; bytes read, or -1 on error
    /// (sf::InputStream's error convention) when no stream is attached.
    virtual sf::Int64 read(void* data, sf::Int64 size)
    {
        return myStream ? myStream->read(data, size) : -1;
    }

    /// Seek to \a position; new position, or -1 when no stream is attached.
    virtual sf::Int64 seek(sf::Int64 position)
    {
        return myStream ? myStream->seek(position) : -1;
    }

    /// Current read position, or -1 when no stream is attached.
    virtual sf::Int64 tell()
    {
        return myStream ? myStream->tell() : -1;
    }

    /// Total stream size in bytes, or -1 when no stream is attached.
    virtual sf::Int64 getSize()
    {
        return myStream ? myStream->getSize() : -1;
    }
};
#endif // DSFML_DSTREAM_H
|
#!/bin/bash
# Generate an ECDSA client key and a CA-signed client certificate in ./tls.
# Usage: ./gen_client.sh [client-name]   (name defaults to "0")
CLIENT_NAME=$1
if [[ -z "$CLIENT_NAME" ]]; then
  CLIENT_NAME='0'
fi
echo "client name is: $CLIENT_NAME"
# Fail early if the tls/ directory (holding ca.crt / ca.key) is missing,
# instead of scattering keys into the current directory.
cd tls || exit 1
# Client key: secp384r1 EC private key.
openssl ecparam \
  -genkey \
  -name secp384r1 \
  -out "client_$CLIENT_NAME.key"
# Client cert: CSR piped straight into a CA-signed x509, valid for 10 years.
openssl req \
  -new \
  -key "client_$CLIENT_NAME.key" \
  -subj "/O=YoMo/CN=YoMo Client" | \
openssl x509 \
  -req \
  -CA ca.crt \
  -CAkey ca.key \
  -CAserial ca.txt \
  -CAcreateserial \
  -days 3650 \
  -out "client_$CLIENT_NAME.crt"
|
(function() {
  'use strict';

  // Thin controller for the station "data groups" view; everything it
  // exposes is resolved by the router and injected as locals.
  angular
    .module('app.station')
    .controller('StationDataGroupsCtrl', StationDataGroupsCtrl);

  StationDataGroupsCtrl.$inject = ['_station', '_groupList', '_groups'];

  function StationDataGroupsCtrl(_station, _groupList, _groups) {
    var vm = this;

    // Expose the resolved data on the instance ("controller as" style).
    vm.station = _station;
    vm.groupList = _groupList;
    vm.groups = _groups;
  }
})();
|
package br.com.agateownz.foodsocial.modules.user.service;
import br.com.agateownz.foodsocial.modules.user.model.UserFollower;
import br.com.agateownz.foodsocial.modules.user.model.UserFollowerId;
import br.com.agateownz.foodsocial.modules.user.model.UserFollowing;
import br.com.agateownz.foodsocial.modules.user.model.UserFollowingId;
import br.com.agateownz.foodsocial.modules.user.repository.UserFollowerRepository;
import br.com.agateownz.foodsocial.modules.user.repository.UserFollowingRepository;
import br.com.agateownz.foodsocial.modules.user.repository.UserRepository;
import javax.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import static br.com.agateownz.foodsocial.modules.user.exceptions.UserExceptions.USER_NOT_FOUND;
@Service
public class UserFollowerService {
    @Autowired
    private UserRepository userRepository;
    @Autowired
    private UserFollowerRepository userFollowerRepository;
    @Autowired
    private UserFollowingRepository userFollowingRepository;

    /**
     * Makes {@code userId} follow {@code followerId}, persisting both sides
     * of the relationship — the source user's "following" entry and the
     * target user's "follower" entry — atomically in one transaction.
     *
     * <p>Throws {@code USER_NOT_FOUND} when either id does not exist.
     *
     * @param userId     id of the user who starts following
     * @param followerId id of the user being followed
     */
    @Transactional
    public void follow(Long userId, Long followerId) {
        var sourceUser = userRepository.findById(userId).orElseThrow(() -> USER_NOT_FOUND);
        var targetUser = userRepository.findById(followerId).orElseThrow(() -> USER_NOT_FOUND);
        var sourceUserFollowing = UserFollowing.builder()
            .id(new UserFollowingId(sourceUser, targetUser))
            .build();
        var targetUserFollower = UserFollower.builder()
            .id(new UserFollowerId(targetUser, sourceUser))
            .build();
        userFollowerRepository.save(targetUserFollower);
        userFollowingRepository.save(sourceUserFollowing);
    }
}
|
def construct_factor_graph(edges, fmap, factor_index, vid, var_map, fmp_index, feature_id, features, variables):
    """Append one binary factor to the factor graph.

    Adds a binary edge between the two variables in ``vid`` and fills one
    ``fmap`` slot per participating variable. ``edges`` and ``fmap`` are
    mutated in place; the advanced ``fmp_index`` is also returned, because
    rebinding the int parameter is invisible to the caller.

    NOTE(review): assumes ``vid`` holds exactly two variable ids and that
    ``variables[vid]`` (keyed by the pair itself) carries the feature set —
    confirm against the caller's data layout.
    """
    def binary_edge(edge_index, factor_index, var1_index, var2_index):
        # Implementation of binary_edge function is assumed to be provided elsewhere
        pass

    edges.append(binary_edge(len(edges), factor_index, var_map[vid[0]], var_map[vid[1]]))
    for id in vid:
        fmap[fmp_index]["vid"] = var_map[id]
        if features[feature_id]['parameterize'] == 1:
            fmap[fmp_index]["x"] = variables[vid]['feature_set'][feature_id][1]  # feature_value
            fmap[fmp_index]["theta"] = variables[vid]['feature_set'][feature_id][0]  # theta
        # Advance once per fmap slot consumed (one slot per factor variable).
        fmp_index += 1
    return fmp_index
|
<filename>title/arm9/source/graphics/graphics.cpp
/*-----------------------------------------------------------------
Copyright (C) 2015
<NAME>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
------------------------------------------------------------------*/
#include <nds.h>
#include <maxmod9.h>
#include "common/gl2d.h"
#include "bios_decompress_callback.h"
#include "FontGraphic.h"
#include "common/inifile.h"
#include "common/dsimenusettings.h"
#include "logo_rocketrobz.h"
#include "logo_rocketrobzbootstrap.h"
#include "font6x8.h"
#include "graphics.h"
#include "fontHandler.h"
#define CONSOLE_SCREEN_WIDTH 32
#define CONSOLE_SCREEN_HEIGHT 24
//extern int appName;
extern int screenmode;
extern bool fadeType;
int screenBrightness = 31;
bool renderingTop = true;
int mainBgTexID, subBgTexID;
glImage topBgImage[(256 / 16) * (256 / 16)];
glImage subBgImage[(256 / 16) * (256 / 16)];
void vramcpy_ui (void* dest, const void* src, int size)
{
u16* destination = (u16*)dest;
u16* source = (u16*)src;
while (size > 0) {
*destination++ = *source++;
size-=2;
}
}
void clearBrightness(void) {
	// Jump straight to full visibility and keep the vblank fade pinned there.
	screenBrightness = 0;
	fadeType = true;
}
// Ported from PAlib (obsolete)
void SetBrightness(u8 screen, s8 bright) {
	// MASTER_BRIGHT mode bits: 1<<14 brightens (towards white), 2<<14
	// darkens (towards black). A negative input selects darkening.
	u16 mode;
	if (bright >= 0) {
		mode = 1 << 14;
	} else {
		mode = 2 << 14;
		bright = -bright;
	}
	if (bright > 31) {
		bright = 31; // hardware maximum
	}
	// 0x0400006C is engine A's MASTER_BRIGHT; engine B is 0x1000 higher.
	*(u16*)(0x0400006C + (0x1000 * screen)) = bright + mode;
}
void vBlankHandler()
{
	// Step the fade one unit per frame: fadeType==true fades in (towards
	// brightness 0), otherwise fades out (towards 31), then clamp and
	// apply the same level to both screens.
	screenBrightness += fadeType ? -1 : 1;
	if (screenBrightness < 0) screenBrightness = 0;
	if (screenBrightness > 31) screenBrightness = 31;
	SetBrightness(0, screenBrightness);
	SetBrightness(1, screenBrightness);
}
void LoadBMP(void) {
	// Pick the splash bitmap matching the configured application branding.
	FILE* file;
	switch (ms().appName) {
	case 0:
	default:
		file = fopen("nitro:/graphics/TWiLightMenu.bmp", "rb");
		break;
	case 1:
		file = fopen("nitro:/graphics/SRLoader.bmp", "rb");
		break;
	case 2:
		file = fopen("nitro:/graphics/DSiMenuPP.bmp", "rb");
		break;
	}
	// The original dereferenced a NULL FILE* when the asset was missing;
	// skip the splash instead of crashing.
	if (!file) return;
	// Reads the low byte at 0x0E (start of the DIB header size) and adds the
	// 14-byte file header — this yields the pixel-data offset for the
	// standard headers these assets use.
	fseek(file, 0xe, SEEK_SET);
	u8 pixelStart = (u8)fgetc(file) + 0xe;
	fseek(file, pixelStart, SEEK_SET);
	// BMP rows are stored bottom-up; convert each pixel BGR555 -> RGB555
	// (swap the 5-bit red/blue fields) and set the alpha bit BIT(15)
	// required by the DS 16-bit bitmap background.
	for (int y=191; y>=0; y--) {
		u16 buffer[256];
		fread(buffer, 2, 0x100, file);
		u16* src = buffer;
		for (int i=0; i<256; i++) {
			u16 val = *(src++);
			BG_GFX[y*256+i] = ((val>>10)&0x1f) | ((val)&(0x1f<<5)) | (val&0x1f)<<10 | BIT(15);
		}
	}
	fclose(file);
}
void runGraphicIrq(void) {
	// Start with both screens fully faded (level 31, brighten mode) so the
	// vblank handler can fade them in.
	// NOTE(review): `&= BIT(15)` clears every bit except bit 15 of the
	// MASTER_BRIGHT register — presumably intended to reset the mode/level
	// fields before SetBrightness rewrites them; verify against the
	// register layout.
	*(u16*)(0x0400006C) |= BIT(14);
	*(u16*)(0x0400006C) &= BIT(15);
	SetBrightness(0, 31);
	SetBrightness(1, 31);
	// Drive the per-frame fade from the vblank interrupt.
	irqSet(IRQ_VBLANK, vBlankHandler);
	irqEnable(IRQ_VBLANK);
}
void loadTitleGraphics() {
	// Main engine: 16-bit bitmap BG3 for the splash image.
	// Sub engine: 256-color tiled BG0 for the logo.
	videoSetMode(MODE_3_2D | DISPLAY_BG3_ACTIVE);
	videoSetModeSub(MODE_0_2D | DISPLAY_BG0_ACTIVE);
	vramSetBankD(VRAM_D_MAIN_BG_0x06040000);
	vramSetBankC (VRAM_C_SUB_BG_0x06200000);
	// BG3 as an unscaled, unrotated 256x256 16-bit bitmap (identity affine
	// matrix: PA = PD = 1.0 in 8.8 fixed point).
	REG_BG3CNT = BG_MAP_BASE(0) | BG_BMP16_256x256;
	REG_BG3X = 0;
	REG_BG3Y = 0;
	REG_BG3PA = 1<<8;
	REG_BG3PB = 0;
	REG_BG3PC = 0;
	REG_BG3PD = 1<<8;
	REG_BG0CNT_SUB = BG_MAP_BASE(0) | BG_COLOR_256 | BG_TILE_BASE(2);
	BG_PALETTE[0]=0;
	BG_PALETTE[255]=0xffff;
	// Identity tile map: tile i in cell i, covering the whole sub screen.
	u16* bgMapSub = (u16*)SCREEN_BASE_BLOCK_SUB(0);
	for (int i = 0; i < CONSOLE_SCREEN_WIDTH*CONSOLE_SCREEN_HEIGHT; i++) {
		bgMapSub[i] = (u16)i;
	}
	// Holding UP/DOWN/LEFT at boot switches the app branding and persists it.
	bool appNameChanged = false;
	scanKeys();
	if (keysHeld() & KEY_UP) {
		ms().appName = 0;
		appNameChanged = true;
	} else if (keysHeld() & KEY_DOWN) {
		ms().appName = 1;
		appNameChanged = true;
	} else if (keysHeld() & KEY_LEFT) {
		ms().appName = 2;
		appNameChanged = true;
	}
	if (appNameChanged) {
		ms().saveSettings();
	}
	// Display TWiLightMenu++ logo
	LoadBMP();
	if (isDSiMode()) { // Show nds-bootstrap logo, if in DSi mode
		swiDecompressLZSSVram ((void*)logo_rocketrobzbootstrapTiles, (void*)CHAR_BASE_BLOCK_SUB(2), 0, &decompressBiosCallback);
		vramcpy_ui (&BG_PALETTE_SUB[0], logo_rocketrobzbootstrapPal, logo_rocketrobzbootstrapPalLen);
	} else {
		swiDecompressLZSSVram ((void*)logo_rocketrobzTiles, (void*)CHAR_BASE_BLOCK_SUB(2), 0, &decompressBiosCallback);
		vramcpy_ui (&BG_PALETTE_SUB[0], logo_rocketrobzPal, logo_rocketrobzPalLen);
	}
}
|
#!/bin/bash
msg "customizing ssh daemon configuration"
# Restrict the Docker PANDA user and root to the recvmssh entry point and
# disable all forwarding for them. The heredoc delimiter is unquoted on
# purpose: $DOCKER_USER and $PANDA_PATH expand now, at config-write time.
cat << EOF >> /etc/ssh/sshd_config
# Setup for Docker PANDA user.
Match User $DOCKER_USER
    X11Forwarding no
    AllowTcpForwarding no
    AllowStreamLocalForwarding no
    ForceCommand $PANDA_PATH/bin/recvmssh.sh
    #StrictHostKeyChecking no
# Setup for root.
Match User root
    X11Forwarding no
    AllowTcpForwarding no
    AllowStreamLocalForwarding no
    ForceCommand $PANDA_PATH/bin/recvmssh.sh root
    #StrictHostKeyChecking no
EOF
# Enable root login and PAM, whether the directives are commented out or not.
sed -E -i 's/^#?(PermitRootLogin|UsePAM).*/\1 yes/' /etc/ssh/sshd_config
msg "restarting ssh daemon"
/etc/init.d/ssh restart
|
<reponame>Sahanovskyi/Econsave<filename>domain/src/main/java/com/gw/domain/repository/Repository.java<gh_stars>1-10
package com.gw.domain.repository;
import com.gw.domain.model.DecisionItem;
import com.gw.domain.model.PrivatBank.PrivatBankClient;
import com.gw.domain.model.TransactionItem;
import java.util.List;
import io.reactivex.Observable;
/**
 * Interface that represents a Repository for getting {@link TransactionItem} related data.
 */
public interface Repository {
    /**
     * Get an {@link Observable} which will emit a List of {@link TransactionItem}.
     *
     * @param client provide PrivatBank client information.
     */
    Observable<List<TransactionItem>> getPrivatBankTransactionItems(PrivatBankClient client);

    /**
     * Get an {@link Observable} which will emit a List of {@link TransactionItem}
     * for the given phone number (presumably parsed from SMS messages — see the
     * implementation for the exact source).
     *
     * @param number phone number whose transactions should be loaded.
     */
    Observable<List<TransactionItem>> getSmsTransactionItems(String number);

    /** Get an {@link Observable} which will emit the List of {@link DecisionItem}. */
    Observable<List<DecisionItem>> getDecisions();
}
|
<reponame>muthukumaravel7/armnn
// Doxygen-generated navigation data for TestInputOutputLayerVisitor.cpp:
// each entry is [label, target anchor, children]. Do not edit by hand.
var _test_input_output_layer_visitor_8cpp =
[
  [ "BOOST_AUTO_TEST_CASE", "_test_input_output_layer_visitor_8cpp.xhtml#ad3d9cbf26cb5894fd6d9169dbe743417", null ],
  [ "BOOST_AUTO_TEST_CASE", "_test_input_output_layer_visitor_8cpp.xhtml#ac7ce83f024515592cffac13ae5220f1e", null ],
  [ "BOOST_AUTO_TEST_CASE", "_test_input_output_layer_visitor_8cpp.xhtml#ac28b0a4861e6eab3e7621a7ed4eb5f62", null ],
  [ "BOOST_AUTO_TEST_CASE", "_test_input_output_layer_visitor_8cpp.xhtml#a9a7475b081b431ffa9915aac51c2d338", null ]
];
|
/// <reference path="../common/models.ts" />
/// <reference path="../common/messaging.ts" />
/// <reference path="broker.ts"/>
/// <reference path="active-state.ts"/>
/// <reference path="backtest.ts"/>
/// <reference path="config.ts"/>
/// <reference path="fair-value.ts"/>
/// <reference path="interfaces.ts"/>
/// <reference path="market-filtration.ts"/>
/// <reference path="markettrades.ts"/>
/// <reference path="messages.ts"/>
/// <reference path="quote-sender.ts"/>
/// <reference path="quoter.ts"/>
/// <reference path="quoting-engine.ts"/>
/// <reference path="quoting-parameters.ts"/>
/// <reference path="safety.ts"/>
/// <reference path="statistics.ts"/>
/// <reference path="utils.ts"/>
/// <reference path="web.ts"/>
import _ = require("lodash");
import Q = require("q");
import path = require("path");
import express = require('express');
import util = require('util');
import moment = require("moment");
import fs = require("fs");
import bunyan = require("bunyan");
import request = require('request');
import http = require("http");
import socket_io = require('socket.io')
import HitBtc = require("./gateways/hitbtc");
import Coinbase = require("./gateways/coinbase");
import NullGw = require("./gateways/nullgw");
import OkCoin = require("./gateways/okcoin");
import Bitfinex = require("./gateways/bitfinex");
import Utils = require("./utils");
import Config = require("./config");
import Broker = require("./broker");
import QuoteSender = require("./quote-sender");
import MarketTrades = require("./markettrades");
import Messaging = require("../common/messaging");
import Models = require("../common/models");
import Interfaces = require("./interfaces");
import Quoter = require("./quoter");
import Safety = require("./safety");
import compression = require("compression");
import Persister = require("./persister");
import Active = require("./active-state");
import FairValue = require("./fair-value");
import Web = require("./web");
import Promises = require("./promises");
import QuotingParameters = require("./quoting-parameters");
import MarketFiltration = require("./market-filtration");
import PositionManagement = require("./position-management");
import Statistics = require("./statistics");
import Backtest = require("./backtest");
import QuotingEngine = require("./quoting-engine");
import Messages = require("./messages");
import log from "./logging";
import QuotingStyleRegistry = require("./quoting-styles/style-registry");
import MidMarket = require("./quoting-styles/mid-market");
import TopJoin = require("./quoting-styles/top-join");
import Depth = require("./quoting-styles/depth");
// URL of the external backtest server; falls back to localhost when the
// BACKTEST_SERVER_URL environment variable is not set.
const serverUrl = 'BACKTEST_SERVER_URL' in process.env ? process.env['BACKTEST_SERVER_URL'] : "http://localhost:5001";
const config = new Config.ConfigProvider();
// Replaced by runTradingSystem at startup with the real clean-up routine.
let exitingEvent : () => Promise<number> = () => new Promise(() => 0);
// Run the registered clean-up handler, but force-exit after 2s regardless
// so a hung clean-up cannot keep the process alive.
const performExit = () => {
Promises.timeout(2000, exitingEvent()).then(completed => {
mainLog.info("All exiting event handlers have fired, exiting application.");
process.exit();
}).catch(() => {
mainLog.warn("Did not complete clean-up tasks successfully, still shutting down.");
process.exit(1);
});
};
// Process-level error and signal handlers: any unhandled failure or SIGINT
// funnels through performExit() so clean-up runs before the process dies.
process.on("uncaughtException", err => {
mainLog.error(err, "Unhandled exception!");
performExit();
});
process.on("unhandledRejection", (reason, p) => {
mainLog.error(reason, "Unhandled promise rejection!", p);
performExit();
});
process.on("exit", (code) => {
mainLog.info("Exiting with code", code);
});
process.on("SIGINT", () => {
mainLog.info("Handling SIGINT");
performExit();
});
// Namespaced loggers for the main application and the messaging layer.
const mainLog = log("tribeca:main");
const messagingLog = log("tribeca:messaging");
// Parse a "BASE/QUOTE" string (e.g. "BTC/USD") into a CurrencyPair.
// Throws when the input is not exactly two slash-separated parts.
function ParseCurrencyPair(raw: string) : Models.CurrencyPair {
    const parts = raw.split("/");
    if (parts.length !== 2) {
        throw new Error("Invalid currency pair! Must be in the format of BASE/QUOTE, eg BTC/USD");
    }
    const [base, quote] = parts;
    return new Models.CurrencyPair(Models.Currency[base], Models.Currency[quote]);
}
// Currency pair this instance trades, e.g. BTC/USD.
const pair = ParseCurrencyPair(config.GetString("TradedPair"));
// Fallback state used when nothing has been persisted yet: quoting off,
// and a baseline set of quoting parameters.
const defaultActive : Models.SerializedQuotesActive = new Models.SerializedQuotesActive(false, new Date(1));
const defaultQuotingParameters : Models.QuotingParameters = new Models.QuotingParameters(.3, .05, Models.QuotingMode.Top,
Models.FairValueModel.BBO, 3, .8, false, Models.AutoPositionMode.Off, false, 2.5, 300, .095, 2*.095, .095, 3, .1);
// Build the wiring (gateway, persisters, publishers) for a backtest run
// driven by recorded market data instead of a live exchange.
const backTestSimulationSetup = (inputData : Array<Models.Market | Models.MarketTrade>, parameters : Backtest.BacktestParameters) : SimulationClasses => {
// Virtual clock spanning the recorded data's time range.
const timeProvider : Utils.ITimeProvider = new Backtest.BacktestTimeProvider(moment(_.first(inputData).time), moment(_.last(inputData).time));
const exchange = Models.Exchange.Null;
const gw = new Backtest.BacktestGateway(inputData, parameters.startingBasePosition, parameters.startingQuotePosition, <Backtest.BacktestTimeProvider>timeProvider);
const getExch = async (orderCache: Broker.OrderStateCache): Promise<Interfaces.CombinedGateway> => new Backtest.BacktestExchange(gw);
// Messaging and persistence are stubbed out: nothing is published or
// received, and persisters keep data in memory only.
const getPublisher = <T>(topic: string, persister?: Persister.ILoadAll<T>): Messaging.IPublish<T> => {
return new Messaging.NullPublisher<T>();
};
const getReceiver = <T>(topic: string) : Messaging.IReceive<T> => new Messaging.NullReceiver<T>();
const getPersister = <T>(collectionName: string) : Promise<Persister.ILoadAll<T>> => new Promise((cb) => cb(new Backtest.BacktestPersister<T>()));
const getRepository = <T>(defValue: T, collectionName: string) : Promise<Persister.ILoadLatest<T>> => new Promise(cb => cb(new Backtest.BacktestPersister<T>([defValue])));
// Backtests start quoting immediately with the supplied parameters.
const startingActive : Models.SerializedQuotesActive = new Models.SerializedQuotesActive(true, timeProvider.utcNow());
const startingParameters : Models.QuotingParameters = parameters.quotingParameters;
return {
exchange: exchange,
startingActive: startingActive,
startingParameters: startingParameters,
timeProvider: timeProvider,
getExch: getExch,
getReceiver: getReceiver,
getPersister: getPersister,
getRepository: getRepository,
getPublisher: getPublisher
};
};
// Build the wiring for live trading: a real-time clock, the configured
// exchange gateway, socket.io messaging to the admin web UI, and MongoDB
// persistence.
const liveTradingSetup = () : SimulationClasses => {
const timeProvider : Utils.ITimeProvider = new Utils.RealTimeProvider();
const app = express();
const http_server = http.createServer(app);
const io = socket_io(http_server);
// Optional HTTP basic auth for the admin UI; the literal string "NULL" in
// either credential disables authentication. (The password lookup was
// corrupted by a redaction placeholder; restored to mirror the username
// lookup above.)
const username = config.GetString("WebClientUsername");
const password = config.GetString("WebClientPassword");
if (username !== "NULL" && password !== "NULL") {
mainLog.info("Requiring authentication to web client");
const basicAuth = require('basic-auth-connect');
app.use(basicAuth((u, p) => u === username && p === password));
}
app.use(compression());
app.use(express.static(path.join(__dirname, "admin")));
const webport = config.GetNumber("WebClientListenPort");
http_server.listen(webport, () => mainLog.info('Listening to admins on *:', webport));
// Map the EXCHANGE config variable to the corresponding enum value.
const getExchange = (): Models.Exchange => {
const ex = config.GetString("EXCHANGE").toLowerCase();
switch (ex) {
case "hitbtc": return Models.Exchange.HitBtc;
case "coinbase": return Models.Exchange.Coinbase;
case "okcoin": return Models.Exchange.OkCoin;
case "null": return Models.Exchange.Null;
case "bitfinex": return Models.Exchange.Bitfinex;
default: throw new Error("unknown configuration env variable EXCHANGE " + ex);
}
};
const exchange = getExchange();
// Instantiate the gateway implementation for the selected exchange.
const getExch = (orderCache: Broker.OrderStateCache): Promise<Interfaces.CombinedGateway> => {
switch (exchange) {
case Models.Exchange.HitBtc: return HitBtc.createHitBtc(config, pair);
case Models.Exchange.Coinbase: return Coinbase.createCoinbase(config, orderCache, timeProvider, pair);
case Models.Exchange.OkCoin: return OkCoin.createOkCoin(config, pair);
case Models.Exchange.Null: return NullGw.createNullGateway(config, pair);
case Models.Exchange.Bitfinex: return Bitfinex.createBitfinex(timeProvider, config, pair);
default: throw new Error("no gateway provided for exchange " + exchange);
}
};
// Publishers go over socket.io; when a persister is supplied the topic is
// additionally exposed over HTTP for historical queries.
const getPublisher = <T>(topic: string, persister?: Persister.ILoadAll<T>): Messaging.IPublish<T> => {
const socketIoPublisher = new Messaging.Publisher<T>(topic, io, null, messagingLog.info.bind(messagingLog));
if (persister)
return new Web.StandaloneHttpPublisher<T>(socketIoPublisher, topic, app, persister);
else
return socketIoPublisher;
};
const getReceiver = <T>(topic: string) : Messaging.IReceive<T> =>
new Messaging.Receiver<T>(topic, io, messagingLog.info.bind(messagingLog));
const db = Persister.loadDb(config);
// Per-collection persisters/repositories backed by MongoDB.
const getPersister = async <T extends Persister.Persistable>(collectionName: string) : Promise<Persister.ILoadAll<T>> => {
const coll = (await (await db).collection(collectionName));
return new Persister.Persister<T>(timeProvider, coll, collectionName, exchange, pair);
};
const getRepository = async <T extends Persister.Persistable>(defValue: T, collectionName: string) : Promise<Persister.ILoadLatest<T>> =>
new Persister.RepositoryPersister<T>(await (await db).collection(collectionName), defValue, collectionName, exchange, pair);
return {
exchange: exchange,
startingActive: defaultActive,
startingParameters: defaultQuotingParameters,
timeProvider: timeProvider,
getExch: getExch,
getReceiver: getReceiver,
getPersister: getPersister,
getRepository: getRepository,
getPublisher: getPublisher
};
};
// Dependency bundle abstracting over live trading vs backtesting: both
// setup functions produce one of these and runTradingSystem consumes it.
interface SimulationClasses {
exchange: Models.Exchange;
startingActive : Models.SerializedQuotesActive;
startingParameters : Models.QuotingParameters;
timeProvider: Utils.ITimeProvider;
getExch(orderCache: Broker.OrderStateCache): Promise<Interfaces.CombinedGateway>;
getReceiver<T>(topic: string) : Messaging.IReceive<T>;
getPersister<T extends Persister.Persistable>(collectionName: string) : Promise<Persister.ILoadAll<T>>;
getRepository<T>(defValue: T, collectionName: string) : Promise<Persister.ILoadLatest<T>>;
getPublisher<T>(topic: string, persister?: Persister.ILoadAll<T>): Messaging.IPublish<T>;
}
// Assemble and start the full trading system from the supplied dependency
// bundle: load persisted state, construct the brokers and engines, wire up
// publishers/receivers, and install the shutdown handler.
const runTradingSystem = async (classes: SimulationClasses) : Promise<void> => {
// One persister per collection the system records.
const getPersister = classes.getPersister;
const orderPersister = await getPersister<Models.OrderStatusReport>("osr");
const tradesPersister = await getPersister<Models.Trade>("trades");
const fairValuePersister = await getPersister<Models.FairValue>("fv");
const mktTradePersister = await getPersister<Models.MarketTrade>("mt");
const positionPersister = await getPersister<Models.PositionReport>("pos");
const messagesPersister = await getPersister<Models.Message>("msg");
const rfvPersister = await getPersister<Models.RegularFairValue>("rfv");
const tbpPersister = await getPersister<Models.TargetBasePositionValue>("tbp");
const tsvPersister = await getPersister<Models.TradeSafety>("tsv");
const marketDataPersister = await getPersister<Models.Market>(Messaging.Topics.MarketData);
const activePersister = await classes.getRepository<Models.SerializedQuotesActive>(classes.startingActive, Messaging.Topics.ActiveChange);
const paramsPersister = await classes.getRepository<Models.QuotingParameters>(classes.startingParameters, Messaging.Topics.QuotingParametersChange);
const exchange = classes.exchange;
// When ShowAllOrders is unset or true, publish every order; otherwise only
// orders originating from the UI order ticket.
const shouldPublishAllOrders = !config.Has("ShowAllOrders") || config.GetBoolean("ShowAllOrders");
const ordersFilter = shouldPublishAllOrders ? {} : {source: {$gte: Models.OrderSource.OrderTicket}};
// Load previously persisted state in parallel.
const [
initOrders, initTrades, initMktTrades, initMsgs, initParams, initActive, initRfv] = await Promise.all([
orderPersister.loadAll(10000, ordersFilter),
tradesPersister.loadAll(10000),
mktTradePersister.loadAll(100),
messagesPersister.loadAll(50),
paramsPersister.loadLatest(),
activePersister.loadLatest(),
rfvPersister.loadAll(50)
])
// Fill any missing fields with the startup defaults.
_.defaults(initParams, defaultQuotingParameters);
_.defaults(initActive, defaultActive);
const orderCache = new Broker.OrderStateCache();
const timeProvider = classes.timeProvider;
const getPublisher = classes.getPublisher;
const gateway = await classes.getExch(orderCache);
// Advertise product/exchange details to connecting UI clients.
const advert = new Models.ProductAdvertisement(exchange, pair, config.GetString("TRIBECA_MODE"), gateway.base.minTickIncrement);
getPublisher(Messaging.Topics.ProductAdvertisement).registerSnapshot(() => [advert]).publish(advert);
// One publisher per UI topic; those given a persister also serve history.
const quotePublisher = getPublisher(Messaging.Topics.Quote);
const fvPublisher = getPublisher(Messaging.Topics.FairValue, fairValuePersister);
const marketDataPublisher = getPublisher(Messaging.Topics.MarketData, marketDataPersister);
const orderStatusPublisher = getPublisher(Messaging.Topics.OrderStatusReports, orderPersister);
const tradePublisher = getPublisher(Messaging.Topics.Trades, tradesPersister);
const activePublisher = getPublisher(Messaging.Topics.ActiveChange);
const quotingParametersPublisher = getPublisher(Messaging.Topics.QuotingParametersChange);
const marketTradePublisher = getPublisher(Messaging.Topics.MarketTrade, mktTradePersister);
const messagesPublisher = getPublisher(Messaging.Topics.Message, messagesPersister);
const quoteStatusPublisher = getPublisher(Messaging.Topics.QuoteStatus);
const targetBasePositionPublisher = getPublisher(Messaging.Topics.TargetBasePosition, tbpPersister);
const tradeSafetyPublisher = getPublisher(Messaging.Topics.TradeSafetyValue, tsvPersister);
const positionPublisher = getPublisher(Messaging.Topics.Position, positionPersister);
const connectivity = getPublisher(Messaging.Topics.ExchangeConnectivity);
const messages = new Messages.MessagesPubisher(timeProvider, messagesPersister, initMsgs, messagesPublisher);
messages.publish("start up");
// Receivers for commands coming from the UI.
const getReceiver = classes.getReceiver;
const activeReceiver = getReceiver<boolean>(Messaging.Topics.ActiveChange);
const quotingParametersReceiver = getReceiver<Models.QuotingParameters>(Messaging.Topics.QuotingParametersChange);
const submitOrderReceiver = getReceiver<Models.OrderRequestFromUI>(Messaging.Topics.SubmitNewOrder);
const cancelOrderReceiver = getReceiver<Models.OrderStatusReport>(Messaging.Topics.CancelOrder);
const cancelAllOrdersReceiver = getReceiver(Messaging.Topics.CancelAllOrders);
const broker = new Broker.ExchangeBroker(pair, gateway.md, gateway.base, gateway.oe, connectivity);
mainLog.info({
exchange: broker.exchange,
pair: broker.pair.toString(),
minTick: broker.minTickIncrement,
makeFee: broker.makeFee,
takeFee: broker.takeFee,
hasSelfTradePrevention: broker.hasSelfTradePrevention,
}, "using the following exchange details");
const orderBroker = new Broker.OrderBroker(timeProvider, broker, gateway.oe, orderPersister, tradesPersister, orderStatusPublisher,
tradePublisher, submitOrderReceiver, cancelOrderReceiver, cancelAllOrdersReceiver, messages, orderCache, initOrders, initTrades, shouldPublishAllOrders);
const marketDataBroker = new Broker.MarketDataBroker(timeProvider, gateway.md, marketDataPublisher, marketDataPersister, messages);
const positionBroker = new Broker.PositionBroker(timeProvider, broker, gateway.pg, positionPublisher, positionPersister, marketDataBroker);
const paramsRepo = new QuotingParameters.QuotingParametersRepository(quotingParametersPublisher, quotingParametersReceiver, initParams);
paramsRepo.NewParameters.on(() => paramsPersister.persist(paramsRepo.latest));
const safetyCalculator = new Safety.SafetyCalculator(timeProvider, paramsRepo, orderBroker, paramsRepo, tradeSafetyPublisher, tsvPersister);
// Resume quoting only if it was active less than 3 minutes ago.
const startQuoting = (moment(timeProvider.utcNow()).diff(moment(initActive.time), 'minutes') < 3 && initActive.active);
const active = new Active.ActiveRepository(startQuoting, broker, activePublisher, activeReceiver);
const quoter = new Quoter.Quoter(orderBroker, broker);
const filtration = new MarketFiltration.MarketFiltration(broker, new Utils.ImmediateActionScheduler(timeProvider), quoter, marketDataBroker);
const fvEngine = new FairValue.FairValueEngine(broker, timeProvider, filtration, paramsRepo, fvPublisher, fairValuePersister);
const ewma = new Statistics.ObservableEWMACalculator(timeProvider, fvEngine, initParams.quotingEwma);
// Seed the short/long EWMAs from the persisted regular fair values.
const rfvValues = _.map(initRfv, (r: Models.RegularFairValue) => r.value);
const shortEwma = new Statistics.EwmaStatisticCalculator(initParams.shortEwma);
shortEwma.initialize(rfvValues);
const longEwma = new Statistics.EwmaStatisticCalculator(initParams.longEwma);
longEwma.initialize(rfvValues);
// All selectable quoting styles.
const registry = new QuotingStyleRegistry.QuotingStyleRegistry([
new MidMarket.MidMarketQuoteStyle(),
new TopJoin.InverseJoinQuoteStyle(),
new TopJoin.InverseTopOfTheMarketQuoteStyle(),
new TopJoin.JoinQuoteStyle(),
new TopJoin.TopOfTheMarketQuoteStyle(),
new TopJoin.PingPongQuoteStyle(),
new Depth.DepthQuoteStyle()
]);
const positionMgr = new PositionManagement.PositionManager(broker, timeProvider, rfvPersister, fvEngine, initRfv, shortEwma, longEwma);
const tbp = new PositionManagement.TargetBasePositionManager(timeProvider, positionMgr, paramsRepo, positionBroker, targetBasePositionPublisher, tbpPersister);
const quotingEngine = new QuotingEngine.QuotingEngine(registry, timeProvider, filtration, fvEngine, paramsRepo, quotePublisher,
orderBroker, positionBroker, broker, ewma, tbp, safetyCalculator);
const quoteSender = new QuoteSender.QuoteSender(timeProvider, quotingEngine, quoteStatusPublisher, quoter, active, positionBroker, fvEngine, marketDataBroker, broker);
const marketTradeBroker = new MarketTrades.MarketTradeBroker(gateway.md, marketTradePublisher, marketDataBroker,
quotingEngine, broker, mktTradePersister, initMktTrades);
// In backtest mode, drive the recorded data through the system synchronously
// and POST the resulting trades/position back to the backtest server.
if (config.inBacktestMode) {
const t = Utils.date();
console.log("starting backtest");
try {
(<Backtest.BacktestExchange>gateway).run();
}
catch (err) {
console.error("exception while running backtest!", err.message, err.stack);
throw err;
}
const results = [paramsRepo.latest, positionBroker.latestReport, {
trades: orderBroker._trades.map(t => [t.time.valueOf(), t.price, t.quantity, t.side]),
volume: orderBroker._trades.reduce((p, c) => p + c.quantity, 0)
}];
console.log("sending back results, took: ", moment(Utils.date()).diff(t, "seconds"));
request({url: serverUrl+"/result",
method: 'POST',
json: results}, (err, resp, body) => { });
}
// On shutdown: persist the quoting-active flag, then cancel open orders.
exitingEvent = () => {
const a = new Models.SerializedQuotesActive(active.savedQuotingMode, timeProvider.utcNow());
mainLog.info("persisting active to", a.active);
activePersister.persist(a);
return orderBroker.cancelOpenOrders();
};
// event looped blocked timer
// Watchdog: warn when the event loop stalls noticeably longer than the
// 100ms sampling interval; unref'd so it never keeps the process alive.
let start = process.hrtime();
const interval = 100;
setInterval(() => {
const delta = process.hrtime(start);
const ms = (delta[0] * 1e9 + delta[1]) / 1e6;
const n = ms - interval;
if (n > 25)
mainLog.info(`Event looped blocked for ${Utils.roundUp(n, .001)}ms`);
start = process.hrtime();
}, interval).unref();
};
// Entry point: either loop over parameter sets fetched from the backtest
// server, or start a single live trading system.
const harness = async () : Promise<any> => {
if (config.inBacktestMode) {
console.log("enter backtest mode");
const getFromBacktestServer = (ep: string) : Promise<any> => {
return new Promise((resolve, reject) => {
request.get(serverUrl+"/"+ep, (err, resp, body) => {
if (err) reject(err);
else resolve(body);
});
});
};
const input = await getFromBacktestServer("inputData").then(body => {
// NOTE(review): eval() on the server response is unsafe if the backtest
// server is not fully trusted; JSON.parse would be safer — confirm the
// payload format before changing.
const inp : Array<Models.Market | Models.MarketTrade> = (typeof body ==="string") ? eval(body) : body;
// Revive serialized timestamps into Date objects.
for (let i = 0; i < inp.length; i++) {
const d = inp[i];
d.time = new Date(d.time);
}
return inp;
});
// Fetch the next parameter set; a string payload is the server's
// end-of-work sentinel, mapped to null.
const nextParameters = () : Promise<Backtest.BacktestParameters> => getFromBacktestServer("nextParameters").then(body => {
const p = (typeof body ==="string") ? <string|Backtest.BacktestParameters>JSON.parse(body) : body;
console.log("Recv'd parameters", util.inspect(p));
return (typeof p === "string") ? null : p;
});
while (true) {
const next = await nextParameters();
if (!next) break;
// NOTE(review): this call is not awaited, so successive backtest runs
// can overlap; confirm whether sequential execution was intended.
runTradingSystem(backTestSimulationSetup(input, next));
}
}
else {
return runTradingSystem(liveTradingSetup());
}
};
harness();
|
# ActiveModel attribute type representing a frozen list of restrictions.
# Each raw element is cast through RestrictionType.
class RestrictionListType < ActiveModel::Type::Value
  # Cast every element of +value+ to a restriction and freeze the result.
  def cast(value)
    value.map do |restriction|
      RestrictionType.new.cast(restriction)
    end.freeze
  end
end
|
#!/bin/bash
set -e
# Setup and start Sauce Connect for your TravisCI build
# This script requires your .travis.yml to include the following two private env variables:
# SAUCE_USERNAME
# SAUCE_ACCESS_KEY
# Follow the steps at https://saucelabs.com/opensource/travis to set that up.
#
# Curl and run this script as part of your .travis.yml before_script section:
# before_script:
# - curl https://gist.github.com/santiycr/5139565/raw/sauce_connect_setup.sh | bash
CONNECT_URL="https://d2nkw87yt5k0to.cloudfront.net/downloads/sc-latest-linux.tar.gz"
CONNECT_DIR="/tmp/sauce-connect-$RANDOM"
CONNECT_DOWNLOAD="sc-latest-linux.tar.gz"
# LOGS_DIR is expected to be exported by the surrounding CI environment.
CONNECT_LOG="$LOGS_DIR/sauce-connect"
CONNECT_STDOUT="$LOGS_DIR/sauce-connect.stdout"
CONNECT_STDERR="$LOGS_DIR/sauce-connect.stderr"
# Get Connect and start it
mkdir -p $CONNECT_DIR
cd $CONNECT_DIR
curl $CONNECT_URL -o $CONNECT_DOWNLOAD 2> /dev/null 1> /dev/null
mkdir sauce-connect
tar --extract --file=$CONNECT_DOWNLOAD --strip-components=1 --directory=sauce-connect > /dev/null
rm $CONNECT_DOWNLOAD
# NOTE(review): this reverses the access key string — presumably the env var
# stores the key reversed to keep the raw value out of config; confirm with
# the CI setup before changing.
SAUCE_ACCESS_KEY=`echo $SAUCE_ACCESS_KEY | rev`
ARGS=""
# Set tunnel-id only on Travis, to make local testing easier.
if [ ! -z "$TRAVIS_JOB_NUMBER" ]; then
ARGS="$ARGS --tunnel-identifier $TRAVIS_JOB_NUMBER"
fi
if [ ! -z "$BROWSER_PROVIDER_READY_FILE" ]; then
ARGS="$ARGS --readyfile $BROWSER_PROVIDER_READY_FILE"
fi
echo "Starting Sauce Connect in the background, logging into:"
echo " $CONNECT_LOG"
echo " $CONNECT_STDOUT"
echo " $CONNECT_STDERR"
# Launch the tunnel in the background; -v enables verbose logging.
sauce-connect/bin/sc -u $SAUCE_USERNAME -k $SAUCE_ACCESS_KEY -v $ARGS \
--logfile $CONNECT_LOG 2> $CONNECT_STDERR 1> $CONNECT_STDOUT &
|
#!/bin/sh
# Build and install the IPL documentation
# Assumes you're running this from the unidoc source directory
UBASE=$(realpath ../..)
# Install target defaults to ${UBASE}/doc unless htmldir is preset.
if [ -z "${htmldir}" ]; then
htmldir=${UBASE}/doc
fi
SBASE=${UBASE}/ipl
TBASE=${htmldir}/ipl-api
# IPL subdirectories to document, one UniDoc tree each.
DIRS="procs gprocs mprocs progs gprogs mprogs"
# SDIRS and LDIRS are comma-separated lists
# (This script doesn't use LDIRS)
SDIRS="${UBASE}/ipl/procs"
basetitle="Unicon IPL API"
# NOTE(review): cdir is saved but never used in this chunk — possibly used
# further down the file, or dead; confirm before removing.
cdir=$(pwd)
for dir in ${DIRS}; do
echo
echo "[Building IPL docs for ${dir}]"
echo
title="${basetitle} for ${dir}"
SD=${SBASE}/${dir}
TD=${TBASE}/${dir}
mkdir -p ${TD}
cd ${SD}
UniDoc --title=\"${title}\" --resolve \
--sourcePath=${SDIRS} --linkSrc --targetDir=${TD} *.icn
echo
echo
echo
echo
done
|
StartTest(function(t) {
    // Browser-only check: typing more characters than `maxlength` permits
    // should leave only the first 8 characters in the field.
    t.testBrowser(function (t) {
        document.body.innerHTML = '<input id="input" type="text" maxlength="8" />';
        var field = document.getElementById('input');

        var typeStep = {
            type : '1234567890',
            target : field
        };

        var verifyStep = function () {
            t.is(field.value, '12345678', "`maxlength` attribute was honored");
        };

        t.chain(typeStep, verifyStep);
    });
});
|
import { combineReducers } from 'redux';
import apiInfo from './apiInfo';
import loginInfo from './loginInfo.js';
import driversInfo from './driversInfo';
import ridersInfo from './ridersInfo';
import matchesInfo from './matchesInfo';
import matchesOtherDriverInfo from './matchesOtherDriverInfo';
import uploadInfo from './uploadInfo';
import inputFormsInfo from './inputFormsInfo';
// Root reducer combining every feature slice of the Redux store.
const rootReducer = combineReducers({
    apiInfo,
    loginInfo,
    driversInfo,
    ridersInfo,
    matchesInfo,
    matchesOtherDriverInfo,
    uploadInfo,
    inputFormsInfo,
});

export default rootReducer;
|
/* <gh_stars>1-10 */
/*
* Copyright (c) 2018 Intel Corporation
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <drivers/timer/system_timer.h>
#include <sys_clock.h>
#include <spinlock.h>
#include <arch/arm/cortex_m/cmsis.h>
/* Exception-return helper provided by the Cortex-M architecture port. */
void z_arm_exc_exit(void);

/* SysTick is a 24-bit down-counter. */
#define COUNTER_MAX 0x00ffffff
/* Out-of-band last_load value marking the counter as stopped for idle. */
#define TIMER_STOPPED 0xff000000
#define CYC_PER_TICK (sys_clock_hw_cycles_per_sec() \
		      / CONFIG_SYS_CLOCK_TICKS_PER_SEC)
#define MAX_TICKS ((COUNTER_MAX / CYC_PER_TICK) - 1)
#define MAX_CYCLES (MAX_TICKS * CYC_PER_TICK)

/* Minimum cycles in the future to try to program. Note that this is
 * NOT simply "enough cycles to get the counter read and reprogrammed
 * reliably" -- it becomes the minimum value of the LOAD register, and
 * thus reflects how much time we can reliably see expire between
 * calls to elapsed() to read the COUNTFLAG bit. So it needs to be
 * set to be larger than the maximum time the interrupt might be
 * masked. Choosing a fraction of a tick is probably a good enough
 * default, with an absolute minimum of 1k cyc.
 */
#define MIN_DELAY MAX(1024, (CYC_PER_TICK/16))

#define TICKLESS (IS_ENABLED(CONFIG_TICKLESS_KERNEL) && \
		  !IS_ENABLED(CONFIG_QEMU_TICKLESS_WORKAROUND))

/* VAL value above which we assume that a subsequent COUNTFLAG
 * overflow seen in CTRL is real and not an artifact of wraparound
 * timing.
 */
#define VAL_ABOUT_TO_WRAP 8

/* Guards all of the mutable driver state below. */
static struct k_spinlock lock;
/* Cycle length of the currently programmed countdown period. */
static u32_t last_load;
/* Total cycles accumulated the last time elapsed() was folded in. */
static u32_t cycle_count;
/* Cycles already reported to the kernel via z_clock_announce(). */
static u32_t announced_cycles;
/* Cycles lost to counter wraps since the last fold; updated from both
 * thread and ISR context, hence volatile.
 */
static volatile u32_t overflow_cyc;
/* Return the number of cycles elapsed since cycle_count was last updated,
 * folding any observed counter wraps into overflow_cyc. Thread-context
 * callers take the spinlock first (see z_clock_elapsed()).
 */
static u32_t elapsed(void)
{
	u32_t val, ctrl1, ctrl2;

	/* SysTick is infuriatingly racy. The counter wraps at zero
	 * automatically, setting a 1 in the COUNTFLAG bit of the CTRL
	 * register when it does. But reading the control register
	 * automatically resets that bit, so we need to save it for
	 * future calls. And ordering is critical and race-prone: if
	 * we read CTRL first, then it is possible for VAL to wrap
	 * after that read but before we read VAL and we'll miss the
	 * overflow. If we read VAL first, then it can wrap after we
	 * read it and we'll see an "extra" overflow in CTRL. And we
	 * want to handle multiple overflows, so we effectively must
	 * read CTRL first otherwise there will be no way to detect
	 * the double-overflow if called at the end of a cycle. There
	 * is no safe algorithm here, so we split the difference by
	 * reading CTRL twice, suppressing the second overflow bit if
	 * VAL was "about to overflow".
	 */
	ctrl1 = SysTick->CTRL;
	val = SysTick->VAL & COUNTER_MAX;
	ctrl2 = SysTick->CTRL;

	overflow_cyc += (ctrl1 & SysTick_CTRL_COUNTFLAG_Msk) ? last_load : 0;
	if (val > VAL_ABOUT_TO_WRAP) {
		int wrap = ctrl2 & SysTick_CTRL_COUNTFLAG_Msk;

		overflow_cyc += (wrap != 0) ? last_load : 0;
	}

	return (last_load - val) + overflow_cyc;
}
/* Callout out of platform assembly, not hooked via IRQ_CONNECT... */
/* SysTick interrupt: a full countdown period has elapsed; fold it into the
 * cycle count and announce any whole ticks to the kernel.
 */
void z_clock_isr(void *arg)
{
	ARG_UNUSED(arg);
	u32_t dticks;

	cycle_count += last_load;
	dticks = (cycle_count - announced_cycles) / CYC_PER_TICK;
	announced_cycles += dticks * CYC_PER_TICK;

	/* Reading CTRL clears the COUNTFLAG overflow bit (see the comment in
	 * elapsed()); the read value itself is discarded before zeroing.
	 */
	overflow_cyc = SysTick->CTRL; /* Reset overflow flag */
	overflow_cyc = 0U;

	z_clock_announce(TICKLESS ? dticks : 1);
	z_arm_exc_exit();
}
/* Start SysTick counting one kernel tick per countdown, clocked from the
 * processor clock, with its interrupt enabled.
 */
int z_clock_driver_init(struct device *device)
{
	NVIC_SetPriority(SysTick_IRQn, _IRQ_PRIO_OFFSET);
	last_load = CYC_PER_TICK - 1;
	overflow_cyc = 0U;
	SysTick->LOAD = last_load;
	SysTick->VAL = 0; /* resets timer to last_load */
	SysTick->CTRL |= (SysTick_CTRL_ENABLE_Msk |
			  SysTick_CTRL_TICKINT_Msk |
			  SysTick_CTRL_CLKSOURCE_Msk);
	return 0;
}
/* Program the next interrupt to fire after roughly 'ticks' ticks (tickless
 * mode), or stop the counter entirely for an indefinite idle period.
 */
void z_clock_set_timeout(s32_t ticks, bool idle)
{
	/* Fast CPUs and a 24 bit counter mean that even idle systems
	 * need to wake up multiple times per second. If the kernel
	 * allows us to miss tick announcements in idle, then shut off
	 * the counter. (Note: we can assume if idle==true that
	 * interrupts are already disabled)
	 */
	if (IS_ENABLED(CONFIG_TICKLESS_IDLE) && idle && ticks == K_FOREVER) {
		SysTick->CTRL &= ~SysTick_CTRL_ENABLE_Msk;
		last_load = TIMER_STOPPED;
		return;
	}

#if defined(CONFIG_TICKLESS_KERNEL) && !defined(CONFIG_QEMU_TICKLESS_WORKAROUND)
	u32_t delay;

	/* Clamp to what the 24-bit down-counter can represent. */
	ticks = MIN(MAX_TICKS, MAX(ticks - 1, 0));

	/* Desired delay in the future */
	delay = (ticks == 0) ? MIN_DELAY : ticks * CYC_PER_TICK;

	k_spinlock_key_t key = k_spin_lock(&lock);

	/* Fold in cycles spent since the last reprogram before computing
	 * the new period.
	 */
	cycle_count += elapsed();

	/* Round delay up to next tick boundary */
	delay = delay + (cycle_count - announced_cycles);
	delay = ((delay + CYC_PER_TICK - 1) / CYC_PER_TICK) * CYC_PER_TICK;
	last_load = delay - (cycle_count - announced_cycles);

	overflow_cyc = 0U;
	SysTick->LOAD = last_load - 1;
	SysTick->VAL = 0; /* resets timer to last_load */

	k_spin_unlock(&lock, key);
#endif
}
/* Whole ticks elapsed since the last announcement. Always 0 in ticked
 * mode, where every tick is announced directly from the ISR.
 */
u32_t z_clock_elapsed(void)
{
	if (!TICKLESS) {
		return 0;
	}

	k_spinlock_key_t key = k_spin_lock(&lock);
	u32_t cyc = elapsed() + cycle_count - announced_cycles;

	k_spin_unlock(&lock, key);
	return cyc / CYC_PER_TICK;
}
/* Current value of the kernel's free-running 32-bit cycle clock. */
u32_t z_timer_cycle_get_32(void)
{
	k_spinlock_key_t key = k_spin_lock(&lock);
	u32_t ret = elapsed() + cycle_count;

	k_spin_unlock(&lock, key);
	return ret;
}
/* Re-enable the counter on leaving idle, if it was stopped for K_FOREVER
 * in z_clock_set_timeout().
 */
void z_clock_idle_exit(void)
{
	if (last_load == TIMER_STOPPED) {
		SysTick->CTRL |= SysTick_CTRL_ENABLE_Msk;
	}
}
/* Stop the SysTick counter entirely. */
void sys_clock_disable(void)
{
	SysTick->CTRL &= ~SysTick_CTRL_ENABLE_Msk;
}
|
#!/bin/sh
# Run this script to generate the configure script and other files that will
# be included in the distribution. These files are not checked in because they
# are automatically generated.
set -e
# Check that we're being run from the right directory.
if test ! -f src/google/protobuf/stubs/common.h; then
cat >&2 << __EOF__
Could not find source code. Make sure you are running this script from the
root of the distribution tree.
__EOF__
exit 1
fi
# Check that gtest is present. Usually it is already there since the
# directory is set up as an SVN external.
# NOTE(review): googlecode.com downloads were discontinued; this URL likely
# no longer resolves — verify and point at a current mirror if needed.
if test ! -e gtest; then
echo "Google Test not present. Fetching gtest-1.7.0 from the web..."
curl -O https://googletest.googlecode.com/files/gtest-1.7.0.zip
unzip -q gtest-1.7.0.zip
rm gtest-1.7.0.zip
mv gtest-1.7.0 gtest
fi
set -ex
# Temporary hack: Must change C runtime library to "multi-threaded DLL",
# otherwise it will be set to "multi-threaded static" when MSVC upgrades
# the project file to MSVC 2005/2008. vladl of Google Test says gtest will
# probably change their default to match, then this will be unnecessary.
# One of these mappings converts the debug configuration and the other
# converts the release configuration. I don't know which is which.
sed -i -e 's/RuntimeLibrary="5"/RuntimeLibrary="3"/g;
s/RuntimeLibrary="4"/RuntimeLibrary="2"/g;' gtest/msvc/*.vcproj
# TODO(kenton): Remove the ",no-obsolete" part and fix the resulting warnings.
# Regenerate configure and Makefile.in files from the autotools inputs.
autoreconf -f -i -Wall,no-obsolete
rm -rf autom4te.cache config.h.in~
exit 0
|
// <reponame>jntakpe/bbl-msa<filename>notification-service/src/main/java/com/sopra/bbl/msa/notifications/client/ProfileClient.java
package com.sopra.bbl.msa.notifications.client;
import com.sopra.bbl.msa.notifications.dto.ProfileNotificationDTO;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
/**
 * Feign client for communicating with the service managing user profiles.
 *
 * @author jntakpe
 */
@FeignClient(name = "profile-service", fallback = ProfileClientFallback.class)
public interface ProfileClient {

    /**
     * Fetches the notification profile for the given login from the remote
     * profile-service ({@code GET profiles/{login}}).
     *
     * @param login user login identifying the profile
     * @return the profile data used for notifications
     */
    @RequestMapping(value = "profiles/{login}", method = RequestMethod.GET)
    ProfileNotificationDTO findProfileByLogin(@PathVariable("login") String login);
}
|
################################################################################
### Head: luaenv
##
# luaenv must already be installed at $LUAENV_ROOT for the eval below to work.
export LUAENV_ROOT="$HOME/.luaenv"
export PATH="$LUAENV_ROOT/bin:$PATH"
# Initialize luaenv shims and shell integration for this session.
eval "$(luaenv init -)"
##
### Tail: luaenv
################################################################################
|
#!/bin/bash
# Shut down the test01 experiment: run the shutdown CLIPS program through the
# syn runtime, kill its tmux session, and remove its scratch directory.
echo "Shutting down separate units!"
# $1 is the installation root containing the syn binary and machine configs.
rlwrap $1/syn -f2 $1/machines/test01/ShutdownMachine.clp
tmux kill-session -t test01_xu
#TODO: put in code to delete the frontend connection
rm -rf /tmp/machines/test01
|
import Debug from "debug";
import { Address, BalanceUpdate, CreateSwapPayload, LoanDetails, Status, Trade, Txid, WalletStatus } from "./models";
// Enable all debug namespaces; the functions below log under "wasmProxy".
Debug.enable("*");
const debug = Debug("wasmProxy");
// Determine the state of the named wallet: loaded (with its current
// address), existing but not loaded, or not created at all.
export async function walletStatus(name: string): Promise<WalletStatus> {
    const { wallet_status } = await import("./wallet");
    debug("walletStatus");
    const status = await wallet_status(name);
    if (!status.exists) {
        return { status: Status.None };
    }
    if (!status.loaded) {
        return { status: Status.NotLoaded };
    }
    const address = await getAddress(name);
    return { status: Status.Loaded, address };
}
// Fetch the current receive address of the named wallet.
export async function getAddress(name: string): Promise<Address> {
    const wallet = await import("./wallet");
    debug("getAddress");
    return wallet.get_address(name);
}
// Create a brand-new wallet protected by the given password.
export async function createWallet(name: string, password: string): Promise<void> {
const { create_new_wallet } = await import("./wallet");
debug("createWallet");
return create_new_wallet(name, password);
}
// Load (unlock) an existing wallet using its password.
export async function unlockWallet(name: string, password: string): Promise<void> {
const { load_existing_wallet } = await import("./wallet");
debug("unlockWallet");
return load_existing_wallet(name, password);
}
// Fetch the current balances of the named wallet.
export async function getBalances(name: string): Promise<BalanceUpdate> {
const { get_balances } = await import("./wallet");
debug("getBalances");
return get_balances(name);
}
// Build the swap payload for selling the given BTC amount.
export async function makeSellCreateSwapPayload(name: string, btc: string): Promise<CreateSwapPayload> {
const { make_sell_create_swap_payload } = await import("./wallet");
debug("makeSellCreateSwapPayload");
return make_sell_create_swap_payload(name, btc);
}
// Build the swap payload for buying with the given USDT amount.
export async function makeBuyCreateSwapPayload(name: string, usdt: string): Promise<CreateSwapPayload> {
const { make_buy_create_swap_payload } = await import("./wallet");
debug("makeBuyCreateSwapPayload");
return make_buy_create_swap_payload(name, usdt);
}
// Build a loan request payload backed by the given collateral amount.
export async function makeLoanRequestPayload(name: string, collateral: string): Promise<CreateSwapPayload> {
const { make_loan_request } = await import("./wallet");
debug("makeLoanRequestPayload");
return make_loan_request(name, collateral);
}
export async function signAndSendSwap(name: string, hex: string): Promise<Txid> {
const { sign_and_send_swap_transaction } = await import("./wallet");
debug("signAndSendSwap");
const tx = { inner: hex };
return sign_and_send_swap_transaction(name, tx);
}
export async function extractTrade(name: string, hex: string): Promise<Trade> {
const { extract_trade } = await import("./wallet");
debug("extractTrade");
const tx = { inner: hex };
return extract_trade(name, tx);
}
// TODO: Replace any with actual LoanResponse interface
export async function extractLoan(name: string, loanResponse: any): Promise<LoanDetails> {
const { extract_loan } = await import("./wallet");
debug("extractLoan");
return extract_loan(name, loanResponse);
}
export async function signLoan(name: string): Promise<string> {
const { sign_loan } = await import("./wallet");
debug("signLoan");
return (await sign_loan(name)).inner;
}
export async function withdrawAll(name: string, address: string): Promise<Txid> {
const { withdraw_everything_to } = await import("./wallet");
debug("withdrawAll");
return withdraw_everything_to(name, address);
}
export async function getOpenLoans(): Promise<LoanDetails[]> {
const { get_open_loans } = await import("./wallet");
debug("getOpenLoans");
return get_open_loans();
}
export async function repayLoan(name: string, txid: string): Promise<void> {
const { repay_loan } = await import("./wallet");
debug("repayLoan");
return repay_loan(name, txid);
}
export async function getPastTransactions(name: string): Promise<Txid[]> {
const { get_past_transactions } = await import("./wallet");
debug("getPastTransactions");
return get_past_transactions(name);
}
|
const isUnique = ( str ) => {
const strSet = new Set();
for ( let i = 0; i < str.length; i++ ) {
if ( strSet.has( str[ i ] ) ) return false;
strSet.add( str[ i ] );
}
return true;
};
console.log( isUnique( 'abcdhijklmnopqrstuv' ) ); // true
console.log( isUnique( 'abcdefga' ) ); // false
|
class MultiplicationTable {
// function to print table
def printTable(n: Int){
for (i <- 1 to 10){
println(n + " * " + i + " = " + n*i)
}
}
// main function
def main(args: Array[String]): Unit = {
println("Table of 6:")
printTable(6)
}
}
|
#!/bin/sh
set -e
UNSIGNED=$1
SIGNATURE=$2
ARCH=x86_64
ROOTDIR=dist
BUNDLE=${ROOTDIR}/Akik-Qt.app
TEMPDIR=signed.temp
OUTDIR=signed-app
if [ -z "$UNSIGNED" ]; then
echo "usage: $0 <unsigned app> <signature>"
exit 1
fi
if [ -z "$SIGNATURE" ]; then
echo "usage: $0 <unsigned app> <signature>"
exit 1
fi
rm -rf ${TEMPDIR} && mkdir -p ${TEMPDIR}
tar -C ${TEMPDIR} -xf ${UNSIGNED}
tar -C ${TEMPDIR} -xf ${SIGNATURE}
if [ -z "${PAGESTUFF}" ]; then
PAGESTUFF=${TEMPDIR}/pagestuff
fi
if [ -z "${CODESIGN_ALLOCATE}" ]; then
CODESIGN_ALLOCATE=${TEMPDIR}/codesign_allocate
fi
for i in `find ${TEMPDIR} -name "*.sign"`; do
SIZE=`stat -c %s ${i}`
TARGET_FILE=`echo ${i} | sed 's/\.sign$//'`
echo "Allocating space for the signature of size ${SIZE} in ${TARGET_FILE}"
${CODESIGN_ALLOCATE} -i ${TARGET_FILE} -a ${ARCH} ${SIZE} -o ${i}.tmp
OFFSET=`${PAGESTUFF} ${i}.tmp -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
if [ -z ${QUIET} ]; then
echo "Attaching signature at offset ${OFFSET}"
fi
dd if=$i of=${i}.tmp bs=1 seek=${OFFSET} count=${SIZE} 2>/dev/null
mv ${i}.tmp ${TARGET_FILE}
rm ${i}
echo "Success."
done
mv ${TEMPDIR}/${ROOTDIR} ${OUTDIR}
rm -rf ${TEMPDIR}
echo "Signed: ${OUTDIR}"
|
package kbasesearchengine.system;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.io.FilenameUtils;
import kbasesearchengine.main.LineLogger;
import kbasesearchengine.tools.Utils;
/** Flat file based storage for search transformation specifications and
* storage type / version -> search transformation type / version mappings.
*
* @see ObjectTypeParsingRulesFileParser
* @see TypeMappingParser
* @see TypeMapping
*
* @author <EMAIL>
*
*/
public class TypeFileStorage implements TypeStorage {
private static final String TYPE_STORAGE = "[TypeStorage]";
// as opposed to file types for mappings
private static final Set<String> ALLOWED_FILE_TYPES_FOR_TYPES =
new HashSet<>(Arrays.asList(".json", ".yaml"));
private final Map<String, ArrayList<ObjectTypeParsingRules>> searchTypes = new HashMap<>();
private final Map<CodeAndType, TypeMapping> storageTypes;
private Map<CodeAndType, TypeMapping> processTypesDir(
final Path typesDir,
final ObjectTypeParsingRulesFileParser searchSpecParser,
final FileLister fileLister,
final LineLogger logger)
throws IOException, TypeParseException {
final Map<String, Path> typeToFile = new HashMap<>();
final Map<CodeAndType, TypeMapping.Builder> storageTypes = new HashMap<>();
for (final Path file: fileLister.list(typesDir)) {
if (fileLister.isRegularFile(file) && isAllowedFileType(file)) {
final List<ObjectTypeParsingRules> types;
try (final InputStream is = fileLister.newInputStream(file)) {
types = searchSpecParser.parseStream(is, file.toString());
}
final String searchType = types.get(0).getGlobalObjectType().getType();
if (typeToFile.containsKey(searchType)) {
throw new TypeParseException(String.format(
"Multiple definitions for the same search type %s in files %s and %s",
searchType, file, typeToFile.get(searchType)));
}
typeToFile.put(searchType, file);
searchTypes.put(searchType, new ArrayList<>(types));
final CodeAndType cnt = new CodeAndType(types.get(0).getStorageObjectType());
if (!storageTypes.containsKey(cnt)) {
storageTypes.put(cnt, TypeMapping.getBuilder(cnt.storageCode, cnt.storageType)
.withDefaultSearchType(new SearchObjectType(searchType, types.size()))
.withNullableSourceInfo(file.toString()));
} else {
storageTypes.get(cnt)
.withDefaultSearchType(new SearchObjectType(searchType, types.size()));
}
logger.logInfo(String.format("%s Processed type tranformation file with storage " +
"code %s, storage type %s and search type %s: %s",
TYPE_STORAGE, cnt.storageCode, cnt.storageType, searchType, file));
} else {
logger.logInfo(TYPE_STORAGE + " Skipping file in type tranformation directory: " +
file);
}
}
final Map<CodeAndType, TypeMapping> ret = new HashMap<>();
storageTypes.keySet().stream().forEach(k -> ret.put(k, storageTypes.get(k).build()));
return ret;
}
private boolean isAllowedFileType(final Path file) {
final String path = file.toString();
for (final String allowedExtension: ALLOWED_FILE_TYPES_FOR_TYPES) {
if (path.endsWith(allowedExtension)) {
return true;
}
}
return false;
}
private static class CodeAndType {
private final String storageCode;
private final String storageType;
private CodeAndType(final TypeMapping type) {
this.storageCode = type.getStorageCode();
this.storageType = type.getStorageType();
}
private CodeAndType(final StorageObjectType type) {
this.storageCode = type.getStorageCode();
this.storageType = type.getType();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + storageCode.hashCode();
result = prime * result + storageType.hashCode();
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
CodeAndType other = (CodeAndType) obj;
if (!storageCode.equals(other.storageCode)) {
return false;
}
if (!storageType.equals(other.storageType)) {
return false;
}
return true;
}
}
// could make a simpler constructor with default args for the parsers and lister
/** Create a new type storage system.
* @param typesDir the directory in which to find transformation specifications.
* @param mappingsDir the directory in which to find type mappings.
* @param searchSpecParser the parser for transformation specifications.
* @param mappingParsers one or more parsers for type mappings. The map maps from file
* extension (e.g. "yaml") to mapper implementation.
* @param fileLister a file handler instance.
* @param logger a logger.
* @throws IOException if errors occur when reading a file.
* @throws TypeParseException if a file could not be parsed.
*/
public TypeFileStorage(
final Path typesDir,
final Path mappingsDir,
final ObjectTypeParsingRulesFileParser searchSpecParser,
final Map<String, TypeMappingParser> mappingParsers,
final FileLister fileLister,
final LineLogger logger)
throws IOException, TypeParseException {
Utils.nonNull(typesDir, "typesDir");
Utils.nonNull(mappingsDir, "mappingsDir");
Utils.nonNull(searchSpecParser, "searchSpecParser");
Utils.nonNull(mappingParsers, "mappingParsers");
Utils.nonNull(fileLister, "fileLister");
Utils.nonNull(logger, "logger");
storageTypes = processTypesDir(typesDir, searchSpecParser, fileLister, logger);
final Map<CodeAndType, TypeMapping> mappings = processMappingsDir(
mappingsDir, mappingParsers, fileLister, logger);
for (final CodeAndType cnt: mappings.keySet()) {
if (storageTypes.containsKey(cnt)) {
final String mappingSource = mappings.get(cnt).getSourceInfo().orNull();
logger.logInfo(String.format(
"%s Overriding type mapping for storage code %s and storage type %s " +
"from type transformation file with definition from type mapping file%s",
TYPE_STORAGE, cnt.storageCode, cnt.storageType,
mappingSource == null ? "" : " " + mappingSource));
} // ok to set up a mapping for a storage type not explicitly listed in a search
// type file, so we don't throw an exception here
storageTypes.put(cnt, mappings.get(cnt));
}
}
private Map<CodeAndType, TypeMapping> processMappingsDir(
final Path mappingsDir,
final Map<String, TypeMappingParser> parsers,
final FileLister fileLister,
final LineLogger logger)
throws IOException, TypeParseException {
final Map<CodeAndType, TypeMapping> ret = new HashMap<>();
for (final Path file : fileLister.list(mappingsDir)) {
if (fileLister.isRegularFile(file)) {
final String ext = FilenameUtils.getExtension(file.toString());
final TypeMappingParser parser = parsers.get(ext);
if (parser != null) {
final Set<TypeMapping> mappings;
try (final InputStream is = fileLister.newInputStream(file)) {
mappings = parser.parse(is, file.toString());
}
for (final TypeMapping map: mappings) {
final CodeAndType cnt = new CodeAndType(map);
if (ret.containsKey(cnt)) {
throw typeMappingCollisionException(map, ret.get(cnt));
}
final String source = map.getSourceInfo().orNull();
for (final SearchObjectType searchType: map.getSearchTypes()) {
if (!searchTypes.containsKey(searchType.getType())) {
throw new TypeParseException(String.format(
"The search type %s specified in source code/type %s/%s " +
"does not have an equivalent transform type.%s",
searchType.getType(), cnt.storageCode, cnt.storageType,
source == null ? "" : " File: " + source));
}
if (searchTypes.get(searchType.getType()).size() <
searchType.getVersion()) {
throw new TypeParseException(String.format(
"Version %s of search type %s specified in " +
"source code/type %s/%s does not exist.%s",
searchType.getVersion(), searchType.getType(),
cnt.storageCode, cnt.storageType,
source == null ? "" : " File: " + source));
}
}
ret.put(cnt, map);
}
final String source = mappings.iterator().next().getSourceInfo().orNull();
logger.logInfo(String.format(TYPE_STORAGE +
" Processed type mapping file with storage code %s and types %s.%s",
mappings.iterator().next().getStorageCode(),
String.join(", ", mappings.stream().map(m -> m.getStorageType())
.sorted().collect(Collectors.toList())),
source == null ? "" : " File: " + source));
} else {
logger.logInfo(TYPE_STORAGE + " Skipping file in type mapping directory: " +
file);
}
} else {
logger.logInfo(TYPE_STORAGE +
" Skipping entry in type mapping directory: " + file);
}
}
return ret;
}
private TypeParseException typeMappingCollisionException(
final TypeMapping map,
final TypeMapping priorMapping) {
final String source = map.getSourceInfo().orNull();
final String priorSource = priorMapping.getSourceInfo().orNull();
String exception = String.format("Type collision for type %s in storage %s.",
map.getStorageType(), map.getStorageCode());
final List<String> files = new LinkedList<>();
if (source != null) {
files.add(source);
}
if (priorSource != null) {
files.add(priorSource);
}
if (!files.isEmpty()) {
Collections.sort(files);
exception += " (" + String.join(", ", files) + ")";
}
return new TypeParseException(exception);
}
@Override
public Set<ObjectTypeParsingRules> listObjectTypeParsingRules() {
return searchTypes.values().stream().map(l -> l.get(l.size() - 1))
.collect(Collectors.toSet());
}
@Override
public ObjectTypeParsingRules getObjectTypeParsingRules(final SearchObjectType type)
throws NoSuchTypeException {
//TODO CODE seems like throwing an error here for the guid transform case is a late fail. The check should occur when the OTPRs are being built.
if (searchTypes.containsKey(type.getType())) {
final ArrayList<ObjectTypeParsingRules> vers = searchTypes.get(type.getType());
if (type.getVersion() > vers.size()) {
throw new NoSuchTypeException(String.format("No type %s_%s found",
type.getType(), type.getVersion()));
}
return vers.get(type.getVersion() - 1);
} else {
throw new NoSuchTypeException(String.format("No type %s_%s found",
type.getType(), type.getVersion()));
}
}
@Override
public Set<ObjectTypeParsingRules> listObjectTypeParsingRules(
final StorageObjectType storageObjectType) {
final TypeMapping mapping = storageTypes.get(new CodeAndType(storageObjectType));
if (mapping == null) {
return Collections.emptySet();
}
final Set<SearchObjectType> types = mapping.getSearchTypes(storageObjectType.getVersion());
final Set<ObjectTypeParsingRules> ret = new HashSet<>();
for (final SearchObjectType t: types) {
ret.add(searchTypes.get(t.getType()).get(t.getVersion() - 1));
}
return ret;
}
}
|
# this file can be used by developers to run all tests localy before travis
# it must be called from the root directory
# TODO not working
#python3 gelato/core/tests/test_calculus.py
python3 gelato/core/tests/test_derivatives.py
python3 gelato/core/tests/test_space.py
python3 gelato/core/tests/test_expr_1d.py
python3 gelato/core/tests/test_expr_2d.py
python3 gelato/core/tests/test_expr_3d.py
python3 gelato/fem/tests/test_kernel_1d.py
python3 gelato/fem/tests/test_kernel_2d.py
python3 gelato/fem/tests/test_kernel_3d.py
python3 gelato/fem/tests/test_assembly_1d.py
python3 gelato/fem/tests/test_assembly_2d.py
python3 gelato/fem/tests/test_assembly_3d.py
python3 gelato/fem/tests/test_pde_1d.py
python3 gelato/fem/tests/test_pde_2d.py
python3 gelato/fem/tests/test_pde_3d.py
|
#!/bin/bash
if [ -z "$CONFIG" ]
then
export CONFIG=Debug
fi
if [ -z $IOS_SDK_VERSION ]; then
IOS_SDK_VERSION="13.1"
fi
DEFAULT_OUTPUT=../../libs/ios$IOS_SDK_VERSION-$CONFIG
#--------------------------------------------------
#
# Canonicalize relative paths to absolute paths
#
if [ -z "$OUTPUT_DIR" ]
then
mkdir -p "$DEFAULT_OUTPUT" 2>/dev/null
OUTPUT_DIR="$DEFAULT_OUTPUT"
fi
pushd "$OUTPUT_DIR" > /dev/null
dir=$(pwd)
OUTPUT_DIR="$dir"
popd > /dev/null
DEVELOPER=`xcode-select -print-path`
LIB_ROOT=./generation
rm -r $LIB_ROOT
buildIOS()
{
ARCH=$1
OPENSSL_CONFIGURE_OPTIONS="no-whirlpool no-ui no-engine -fPIC"
if [[ "$ARCH" == "i386" || "$ARCH" == "x86_64" ]]; then
PLATFORM="iPhoneSimulator"
else
PLATFORM="iPhoneOS"
fi
export $PLATFORM
export CROSS_TOP="$DEVELOPER/Platforms/$PLATFORM.platform/Developer"
export CROSS_SDK="$PLATFORM$IOS_SDK_VERSION.sdk"
export BUILD_TOOLS="$DEVELOPER"
export CC="$BUILD_TOOLS/usr/bin/gcc -arch $ARCH"
mkdir -p "$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/$ARCH"
echo "Building $OPENSSL_VERSION for $PLATFORM $IOS_SDK_VERSION $ARCH"
if [[ "$ARCH" == "x86_64" ]]; then
./Configure darwin64-x86_64-cc $OPENSSL_CONFIGURE_OPTIONS
else
./Configure iphoneos-cross $OPENSSL_CONFIGURE_OPTIONS
fi
if [ $? -ne 0 ]; then
echo "Error executing:./Configure $ARCH $OPENSSL_CONFIGURE_OPTIONS"
exit 1
fi
sed -ie "s!^CFLAG=!CFLAG=-isysroot $CROSS_TOP/SDKs/$CROSS_SDK -miphoneos-version-min=$IOS_SDK_VERSION !" "Makefile"
make clean
make -j4
if [ $? -ne 0 ]; then
echo "Error executing make for platform:$ARCH"
exit 1
fi
mkdir -p "$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/$ARCH"
cp -v libcrypto.a "$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/$ARCH"
cp -v libssl.a "$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/$ARCH"
cp -v libcrypto.so.1.1 "$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/$ARCH/libcrypto.1.1.so"
cp -v libssl.so.1.1 "$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/$ARCH/libssl.1.1.so"
# copy header
mkdir -p "$OUTPUT_DIR/include-$ARCH/openssl"
cp -r -v "include/openssl" "$OUTPUT_DIR/include-$ARCH/"
}
buildIOS "arm64"
buildIOS "x86_64"
echo "Building iOS libraries"
lipo \
"$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/arm64/libcrypto.a" \
"$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/x86_64/libcrypto.a" \
-create -output "$OUTPUT_DIR/libcrypto.a"
lipo \
"$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/arm64/libssl.a" \
"$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/x86_64/libssl.a" \
-create -output "$OUTPUT_DIR/libssl.a"
make clean
#cp -r "$LIB_ROOT/ios$IOS_SDK_VERSION-$CONFIG/" "$OUTPUT_DIR"
echo
echo "Your ouputs are in " "$OUTPUT_DIR"
echo
|
#!/bin/bash
# mcfly - Shell history search
# https://github.com/cantino/mcfly
set -euo pipefail
wget -O /tmp/mcfly.tar.gz "$( \
curl -s 'https://api.github.com/repos/cantino/mcfly/releases/latest' \
| jq -r '.assets | .[] | .browser_download_url | select(endswith("x86_64-unknown-linux-gnu.tar.gz"))'\
)"
mkdir -p ${HOME}/.local/bin
cd ${HOME}/.local/bin && tar xvf /tmp/mcfly.tar.gz mcfly
|
package org.trenkmann.restsample.controller;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import lombok.SneakyThrows;
import org.springframework.hateoas.CollectionModel;
import org.springframework.hateoas.EntityModel;
import org.springframework.hateoas.server.RepresentationModelAssembler;
import org.springframework.stereotype.Component;
import org.trenkmann.restsample.model.ShopCart;
import org.trenkmann.restsample.model.dto.ShopOrderDTO;
/**
* @author <NAME>
*/
@Component
public class ShopCartResourceAssembler implements
RepresentationModelAssembler<ShopCart, EntityModel<ShopCart>> {
@SneakyThrows
@Override
public EntityModel<ShopCart> toModel(ShopCart cart) {
return new EntityModel<>(cart,
linkTo(methodOn(ShopcartController.class).getCartById(cart.getId())).withSelfRel(),
linkTo(methodOn(ShopcartController.class).getCartElementsByCartId(cart.getId()))
.withRel("elementInCart"),
linkTo(methodOn(ShopOrderController.class).newShopOrder(new ShopOrderDTO()))
.withRel("order"),
linkTo(methodOn(ShopcartController.class).getCarts()).withRel("carts"));
}
@Override
public CollectionModel<EntityModel<ShopCart>> toCollectionModel(
Iterable<? extends ShopCart> entities) {
List<EntityModel<ShopCart>> list = StreamSupport.stream(entities.spliterator(), false)
.map(this::toModel)
.collect(Collectors.toList());
return new CollectionModel<>(list,
linkTo(methodOn(ShopcartController.class).getCarts()).withSelfRel());
}
}
|
from rest_framework import serializers
class DurationField(serializers.IntegerField):
pass
|
<reponame>shirou/VSNowm
import * as vscode from "vscode";
import * as path from "path";
import { FileItem } from "../utils";
export class NoteTreeItem extends vscode.TreeItem {
public filePath: string;
public ctime: number;
public mtime: number;
public permissions?: vscode.FilePermission;
public size: number;
public iconPath = new vscode.ThemeIcon("file-text");
constructor(public readonly label: string, file: FileItem) {
super(label, vscode.TreeItemCollapsibleState.None);
this.label = label;
this.filePath = file.path;
this.ctime = file.ctime;
this.mtime = file.mtime;
this.size = file.size;
this.permissions = file.permissions;
this.command = {
title: label,
command: "vsnowm.openNote",
arguments: [vscode.Uri.file(this.filePath)],
};
this.contextValue = "note";
}
}
|
<gh_stars>10-100
import com.mittudev.ipc.Connection;
import com.mittudev.ipc.Message;
import com.mittudev.ipc.ConnectionCallback;
import java.lang.Thread;
class Callback implements ConnectionCallback{
public int recived = 0;
public void onMessage(Message msg){
System.out.println(new String(msg.getData()));
recived++;
}
}
public class Reader{
public static void main(String[] args) {
Callback cb = new Callback();
Connection conn = new Connection("ipcdemo", 1);
conn.setCallback(cb);
conn.startAutoDispatch();
while(cb.recived < 3){
try{
Thread.sleep(100);
}catch (Exception e){
e.printStackTrace();
}
}
conn.stopAutoDispatch();
conn.close();
conn.destroy();
}
}
|
package de.ids_mannheim.korap.web;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import de.ids_mannheim.korap.exceptions.KustvaktException;
import de.ids_mannheim.korap.exceptions.StatusCodes;
import javax.ws.rs.core.MultivaluedMap;
import java.net.URI;
/**
* @author hanl
* @date 10/12/2013
*/
// use for Piotr Ps. rest api connection
public class ClientsHandler {
private WebResource service;
public ClientsHandler (URI address) {
ClientConfig config = new DefaultClientConfig();
Client client = Client.create(config);
this.service = client.resource(address);
}
public String getResponse (String path, String key, Object value)
throws KustvaktException {
MultivaluedMap map = new MultivaluedMapImpl();
map.add(key, value);
try {
return service.path(path).queryParams(map).get(String.class);
}
catch (UniformInterfaceException e) {
throw new KustvaktException(StatusCodes.REQUEST_INVALID);
}
}
public String getResponse (MultivaluedMap map, String ... paths)
throws KustvaktException {
try {
WebResource resource = service;
for (String p : paths)
resource = resource.path(p);
resource = resource.queryParams(map);
return resource.get(String.class);
}
catch (UniformInterfaceException e) {
throw new KustvaktException(StatusCodes.REQUEST_INVALID);
}
}
}
|
#!/usr/bin/env bats
setup() {
[ ! -z $LC_ALL ] && export LC_ALL="en_US.UTF-8"
utils=$(cd $BATS_TEST_DIRNAME/..; pwd)/utils
tmpdir=$(mktemp -d testXXXXXX)
cat << EOF > $tmpdir/test1.json
{"utts": {"uttid1": [{"feat": "aaa.ark:123", "text": "あ い"}]}}
EOF
cat << EOF > $tmpdir/test2.json
{"utts": {"uttid2": [{"feat": "aaa.ark:456", "text": "か き"}]}}
EOF
cat << EOF > $tmpdir/valid
{
"utts": {
"uttid1": [
{
"feat": "aaa.ark:123",
"text": "あ い"
}
],
"uttid2": [
{
"feat": "aaa.ark:456",
"text": "か き"
}
]
}
}
EOF
}
teardown() {
rm -rf $tmpdir
}
@test "concatjson.py" {
python $utils/concatjson.py $tmpdir/*.json > $tmpdir/out.json
jsondiff $tmpdir/out.json $tmpdir/valid
}
|
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { Provider } from 'react-redux';
import Home from './containers/Home';
import registerServiceWorker from './registerServiceWorker';
import configureStore from './store';
import './styles/css/fonts/stylesheet.css';
import './styles/css/reset.css';
const root = document.getElementById('root');
if (root) {
ReactDOM.render(
<Provider store={configureStore()}>
<Home />
</Provider>,
root
);
}
registerServiceWorker();
|
<reponame>tactilenews/100eyes
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe RequestRow::RequestRow, type: :component do
subject { render_inline(described_class.new(**params)) }
let(:the_request) { create(:request) }
let(:params) { { request: the_request } }
it { should have_css('.RequestRow') }
end
|
import tkinter as tk
import tkinter.font as tkFont
import tkinter.ttk as ttk
import pymysql.cursors
connection = pymysql.connect(
host='milogert.com',
user='milo',
passwd='<PASSWORD>',
db='personal',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor
)
def calculate(*args):
try:
value = float(feet.get())
meters.set((0.3048 * value * 10000.0 + 0.5)/10000.0)
except ValueError:
pass
def setupTable(*args):
try:
with connection.cursor() as cursor:
# Read a single record
sql = "SELECT * FROM `ddns__credentials`"
cursor.execute(sql)
result = cursor.fetchall()
return result
except ValueError:
# TODO
pass
# def select_cmd(selected):
# print('Selected items:', selected)
## Display the ui. ###########################################################
# root = Tk()
# root.title("Feet to Meters")
# mainframe = ttk.Frame(root, padding="3 3 12 12")
# mainframe.grid(column=0, row=0, sticky=(N, W, E, S))
# mainframe.columnconfigure(0, weight=1)
# mainframe.rowconfigure(0, weight=1)
# feet = StringVar()
# meters = StringVar()
# feet_entry = ttk.Entry(mainframe, width=7, textvariable=feet)
# feet_entry.grid(column=2, row=1, sticky=(W, E))
# ttk.Label(mainframe, textvariable=meters).grid(column=2, row=2, sticky=(W, E))
# ttk.Button(mainframe, text="Calculate", command=calculate).grid(column=3, row=3, sticky=W)
# ttk.Label(mainframe, text="feet").grid(column=3, row=1, sticky=W)
# ttk.Label(mainframe, text="is equivalent to").grid(column=1, row=2, sticky=E)
# ttk.Label(mainframe, text="meters").grid(column=3, row=2, sticky=W)
# for child in mainframe.winfo_children(): child.grid_configure(padx=5, pady=5)
# feet_entry.focus()
# root.bind('<Return>', calculate)
headers = ('subdomain', 'ip', 'provider', 'response')
entries = setupTable()
class MultiColumnListbox(object):
"""use a ttk.TreeView as a multicolumn ListBox"""
def __init__(self):
self.tree = None
self._setup_widgets()
self._build_tree()
def _setup_widgets(self):
# Set up the container.
self.container = ttk.Frame()
self.container.pack(fill='both', expand=True)
# create a treeview with dual scrollbars
self.tree = ttk.Treeview(columns=headers, show="headings")
vsb = ttk.Scrollbar(orient="vertical", command=self.tree.yview)
hsb = ttk.Scrollbar(orient="horizontal", command=self.tree.xview)
self.tree.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
self.tree.bind('<1>', self.select_cmd)
self.tree.grid(column=0, row=0, rowspan=9, sticky='nsew', in_=self.container)
vsb.grid(column=1, row=0, rowspan=9, sticky='ns', in_=self.container)
hsb.grid(column=0, row=10, sticky='ew', in_=self.container)
self.aBtnAdd = ttk.Button()
self.aBtnAdd.configure(text="Add", command=self.addDialog)
self.aBtnAdd.grid(column=2, row=0, padx=10, pady=10, in_=self.container)
self.aBtnEdit = ttk.Button()
self.aBtnEdit.configure(text="Edit")
self.aBtnEdit.grid(column=2, row=1, padx=10, pady=10, in_=self.container)
self.aBtnDelete = ttk.Button()
self.aBtnDelete.configure(text="Delete")
self.aBtnDelete.grid(column=2, row=2, padx=10, pady=10, in_=self.container)
self.aBtnUpdate = ttk.Button()
self.aBtnUpdate.configure(text="Update")
self.aBtnUpdate.grid(column=2, row=11, padx=10, pady=10, in_=self.container)
self.container.grid_columnconfigure(0, weight=1)
self.container.grid_rowconfigure(0, weight=1)
def _build_tree(self):
for col in headers:
self.tree.heading(
col,
text=col.title(),
command=lambda c=col: sortby(self.tree, c, 0)
)
# adjust the column's width to the header string
self.tree.column(col, width=tkFont.Font().measure(col.title()))
for item in entries:
ins = normalizeData(item)
self.tree.insert('', 'end', values=ins)
# adjust column's width if necessary to fit each value
for ix, val in enumerate(ins):
col_w = tkFont.Font().measure(val)
if self.tree.column(headers[ix], width=None) < col_w:
self.tree.column(headers[ix], width=col_w)
def select_cmd(self, event):
print(self.tree.focus())
print(self.tree.selection())
def sortby(tree, col, descending):
"""sort tree contents when a column header is clicked on"""
# grab values to sort
data = [(tree.set(child, col), child) \
for child in tree.get_children('')]
# if the data to be sorted is numeric change to float
#data = change_numeric(data)
# now sort the data in place
data.sort(reverse=descending)
for ix, item in enumerate(data):
tree.move(item[1], '', ix)
# switch the heading so it will sort in the opposite direction
tree.heading(col, command=lambda col=col: sortby(tree, col, \
int(not descending)))
def normalizeData(data):
ret = []
for item in headers:
ret.append(data[item])
return ret
if __name__ == '__main__':
root = tk.Tk()
root.title("Multicolumn Treeview/Listbox")
listbox = MultiColumnListbox()
root.mainloop()
# Close the database connection once the program is done running.
connection.close()
|
<gh_stars>0
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); /* eslint no-param-reassign:0 */
exports.default = swaggerDiff;
var _deepDiff = require('deep-diff');
var _deepDiff2 = _interopRequireDefault(_deepDiff);
var _semver = require('semver');
var _semver2 = _interopRequireDefault(_semver);
var _getConfig = require('./getConfig');
var _getConfig2 = _interopRequireDefault(_getConfig);
var _prepareSpec = require('./prepareSpec');
var _prepareSpec2 = _interopRequireDefault(_prepareSpec);
var _applyRules = require('./applyRules');
var _applyRules2 = _interopRequireDefault(_applyRules);
var _postProcessDiff = require('./postProcessDiff');
var _postProcessDiff2 = _interopRequireDefault(_postProcessDiff);
var _rules = require('../rules');
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* @param {string|object} oldSpec - The file path of the old Swagger spec; or a Swagger object.
* @param {string|object} newSpec - The file path of the new Swagger spec; or a Swagger object.
* @param {string|object} config - The file path of the config file or the config file
* @return {Promise}
* Promise returns the following obejct
* {
* errors: {Array>Diff>}
* warnings: {Array>Diff>}
* infos: {Array>Diff>}
* unmatchDiffs: {Array<RawDiff>}
* }
*/
function swaggerDiff(oldSpec, newSpec, config) {
var debug = require('debug')('swagger-diff:workflow');
debug('start');
// Resolve `config` (a file path or an inline object) into a config object.
config = (0, _getConfig2.default)(config);
// Prepare both specs in parallel (load/normalize), then diff them.
return Promise.all([(0, _prepareSpec2.default)(oldSpec), (0, _prepareSpec2.default)(newSpec)]).then(function (_ref) {
var _ref2 = _slicedToArray(_ref, 2);
// NOTE(review): _ref2 aliases the resolved spec objects (shallow copy) —
// mutations below (e.g. nulling info.version) affect the prepared specs.
var prepOldSpec = _ref2[0];
var prepNewSpec = _ref2[1];
debug('specs perpared');
var versionDiff = void 0;
// When both specs declare valid semver versions, classify the bump
// ('major'/'minor'/'patch'/… or 'unchanged'); otherwise skip comparison.
if (prepOldSpec.info && prepNewSpec.info) {
var oldVersion = prepOldSpec.info.version;
var newVersion = prepNewSpec.info.version;
if (!_semver2.default.valid(oldVersion) || !_semver2.default.valid(newVersion)) {
debug('one swagger file version is not semver compliant => ignore version comparison');
} else {
versionDiff = _semver2.default.diff(oldVersion, newVersion);
if (versionDiff === null) {
versionDiff = 'unchanged';
}
}
// Null out the version fields so they never show up as spurious diffs.
prepOldSpec.info.version = null;
prepNewSpec.info.version = null;
}
debug('versionDiff', versionDiff);
// Structural diff of the two prepared spec objects.
var rawDiffs = (0, _deepDiff2.default)(prepOldSpec, prepNewSpec);
debug('rawDiffs', rawDiffs);
//console.dir(rawDiffs, {'maxArrayLength': null});
// Pick the rule set matching the spec flavour (OpenAPI 3 vs Swagger 2).
var defaultRules;
if(oldSpec.openapi) {
console.log('Comparing Open API 3 specs');
defaultRules= 'defaultOAS3'
} else {
console.log('Comparing Swagger specs');
defaultRules= 'default'
}
var _rules2 = _interopRequireDefault(_rules[defaultRules]);
// Classify raw diffs into breaking/smooth/info changes per the rule set.
var changes = (0, _applyRules2.default)(rawDiffs, _rules2.default.break, _rules2.default.smooth, _rules2.default.info);
debug('changes', changes);
// Optionally post-process using the semver classification computed above.
var diffs = config.skipDiffPostProcessing ? changes : (0, _postProcessDiff2.default)(changes, versionDiff, config);
debug('diffs', diffs);
//console.dir(diffs, {'maxArrayLength': null});
return diffs;
});
}
// CommonJS interop: expose the default export as the module itself.
module.exports = exports['default'];
|
/**
 * Checks that the input value's length lies within the inclusive range
 * [minLength, maxLength].
 * @param {Object} options - The options for validation.
 * @param {string} options.value - The input value to be validated.
 * @param {number} options.minLength - The minimum length allowed (inclusive).
 * @param {number} options.maxLength - The maximum length allowed (inclusive).
 * @returns {boolean} - True when the length is within bounds, false otherwise.
 */
function validator({ value, minLength, maxLength }: { value: string, minLength: number, maxLength: number }): boolean {
    const { length } = value;
    return !(length < minLength || length > maxLength);
}
/** Wraps the two superhero form inputs and validates them on demand. */
class SuperheroForm {
    heroNameEl: HTMLInputElement;
    descriptionEl: HTMLInputElement;

    constructor(heroNameEl: HTMLInputElement, descriptionEl: HTMLInputElement) {
        this.heroNameEl = heroNameEl;
        this.descriptionEl = descriptionEl;
    }

    /**
     * Reads and validates the hero name and description fields.
     * Alerts and returns undefined (void) on the first invalid field;
     * otherwise returns the collected values.
     * @returns {Object | void} - { name, description } when valid, void otherwise.
     */
    getInputs(): { name: string, description: string } | void {
        const name: string = this.heroNameEl.value;
        if (!validator({ value: name, minLength: 3, maxLength: 20 })) {
            alert("Hero name must be greater than 3 and less than 20 characters");
            return;
        }
        const desc: string = this.descriptionEl.value;
        if (!validator({ value: desc, minLength: 5, maxLength: 100 })) {
            alert("Hero Description must be greater than 5 and less than 100 characters");
            return;
        }
        return { name, description: desc };
    }
}
// Example usage: wire the form to the DOM inputs and validate once at load.
const heroNameInput = document.getElementById('heroName') as HTMLInputElement;
const descriptionInput = document.getElementById('description') as HTMLInputElement;
const superheroForm = new SuperheroForm(heroNameInput, descriptionInput);
// getInputs() returns undefined (void) when either field fails validation.
const inputs = superheroForm.getInputs();
if (inputs) {
    console.log("Valid inputs:", inputs);
} else {
    console.log("Invalid inputs");
}
|
class Student:
    """Tracks per-subject grades for a single student."""

    def __init__(self, student_id):
        """Create a student with no grades recorded yet.

        Args:
            student_id: Opaque identifier for the student.
        """
        self.student_id = student_id
        # Maps subject name -> numeric grade.
        self.grades = {}

    def set_grade(self, subject, grade):
        """Record (or overwrite) the grade for ``subject``."""
        self.grades[subject] = grade

    def get_grade(self, subject):
        """Return the grade for ``subject``.

        Raises:
            KeyError: If no grade was recorded for ``subject``.
        """
        return self.grades[subject]

    def get_average_grade(self):
        """Return the mean of all recorded grades.

        Returns 0.0 when no grades exist, instead of raising
        ZeroDivisionError as the previous implementation did.
        """
        if not self.grades:
            return 0.0
        return sum(self.grades.values()) / len(self.grades)
|
#!/bin/bash
# Bootstrap script for the example-cluster container: sets up password-less
# SSH, links paasta-tools scripts, and starts the core daemons.
/setup-ssh.sh
# Replace any existing SSH state with a fresh self-authorized keypair so the
# cluster nodes can SSH to each other without prompts.
rm -rf /root/.ssh/*
cp /work/example_cluster/ssh_config /root/.ssh/config
ssh-keygen -f /root/.ssh/id_rsa -N ''
cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys
chmod 700 /root/.ssh
chmod 600 /root/.ssh/*
/usr/sbin/sshd
# This is a hack because we're not creating a real package which would create symlinks for the .py scripts
while read link; do echo $link|sed -e 's|opt/venvs/paasta-tools/|/venv/|'| sed -e 's/\ usr/\ \/usr/'| xargs ln -s; done < /work/debian/paasta-tools.links
/usr/sbin/rsyslogd
cron
# Start a single-node mesos master backed by the test zookeeper.
mesos-master --zk=zk://zookeeper:2181/mesos-testcluster --registry=in_memory --quorum=1 --authenticate --authenticate_slaves --credentials=/etc/mesos-secrets --hostname=$(hostname) &
# NOTE(review): this first invocation blocks until paasta-deployd exits, after
# which the loop below restarts it forever — the standalone call looks
# redundant with the loop body; confirm it is intentional.
paasta-deployd &> /var/log/paasta-deployd.log
while true; do
paasta-deployd &> /var/log/paasta-deployd.log
echo "paasta-deployd exited, restarting in 5s..."
sleep 5
done
|
import { Configuration } from '../config/configuration';
import createRemoteJWKSet from 'jose/jwks/remote';
import { NextFunction, Request, Response } from 'express';
import jwtVerify, { GetKeyFunction, JWSHeaderParameters } from 'jose/jwt/verify';
import { constants } from 'http2';
import { FlattenedJWSInput, JWTVerifyResult } from 'jose/webcrypto/types';
import logger from '../components/logger';
import { CONFIG_ELEMENT } from '../config/config-element';
// Application configuration, loaded once at module init; JWKS_URI points at
// the identity provider's JSON Web Key Set endpoint used to verify JWTs.
const configuration = new Configuration();
const jwksUri = configuration.get(CONFIG_ELEMENT.JWKS_URI);
// Build the remote JWK set resolver once at module load; jose caches and
// refreshes the key set internally, so rebuilding it per request is wasteful.
const remoteJwks = createRemoteJWKSet(new URL(jwksUri));

/**
 * Key resolver passed to jwtVerify. jose's createRemoteJWKSet returns a
 * GetKeyFunction directly — it selects the key matching the token header's
 * `kid`/`alg` itself. (The previous `jwks.getSigningKey(header.kid)` call is
 * the jwks-rsa API, which does not exist on jose's remote JWK set and threw
 * a TypeError at runtime.)
 */
const getKey: GetKeyFunction<JWSHeaderParameters, FlattenedJWSInput> = async (header, token) => {
  return remoteJwks(header, token);
};
/**
 * Express middleware that authenticates requests via a Bearer JWT.
 * Rejects with 401 when the token is missing or fails verification;
 * on success exposes the verified claims as `req.user` and calls next().
 */
export const jwtVerificationMiddleware = async (req: Request, res: Response, next: NextFunction) => {
  // Pull the bearer token out of the Authorization header.
  const bearerToken = req.headers.authorization?.replace('Bearer ', '');
  if (!bearerToken) {
    return res.status(constants.HTTP_STATUS_UNAUTHORIZED).json({ error: 'Missing or invalid token' });
  }
  try {
    // Verify signature and claims with the remote JWKS key resolver.
    const verifiedToken: JWTVerifyResult = await jwtVerify(bearerToken, getKey, { algorithms: ['RS256'] });
    // Make the decoded payload available to downstream handlers.
    req.user = verifiedToken.payload;
    next();
  } catch (error) {
    logger.error('JWT verification failed', error);
    return res.status(constants.HTTP_STATUS_UNAUTHORIZED).json({ error: 'Invalid token' });
  }
};
|
// Month names indexed 1..12; slot 0 is deliberately unused so a numeric
// month maps straight to its name without an off-by-one adjustment.
const months = [
  null,
  "january",
  "february",
  "march",
  "april",
  "may",
  "june",
  "july",
  "august",
  "september",
  "october",
  "november",
  "december",
];

/**
 * Splits an "M-YYYY" date string into URL-parameter parts.
 * e.g. "3-2021" -> { year: "2021", month: "march" }.
 */
export function convertDateToParams(original: string) {
  const [rawMonth, year] = original.split("-");
  return {
    year,
    month: months[Number(rawMonth)],
  };
}
|
def find_longest_palindrome_substring(s):
    """Return the longest palindromic substring of ``s``.

    Ties are broken in favour of the earliest-starting palindrome, matching
    the original brute-force behaviour. Runs in O(n^2) time and O(1) extra
    space using expansion around each palindrome center, instead of the
    previous O(n^3) check of every substring.
    """

    def _expand(left, right):
        # Grow outward while characters match; return (start, length) of the
        # maximal palindrome centered at (left, right).
        while left >= 0 and right < len(s) and s[left] == s[right]:
            left -= 1
            right += 1
        return left + 1, right - left - 1

    best_start, best_length = 0, 0
    for center in range(len(s)):
        # Odd-length center at `center`, even-length center between
        # `center` and `center + 1`. Strictly-greater comparison keeps the
        # earliest-starting palindrome among equal lengths.
        for start, length in (_expand(center, center), _expand(center, center + 1)):
            if length > best_length:
                best_start, best_length = start, length
    return s[best_start:best_start + best_length]
def is_palindrome(string):
    """Return True iff ``string`` reads the same forwards and backwards.

    The explicit if/else over a reversed copy is collapsed into the
    idiomatic direct comparison; the empty string is a palindrome.
    """
    return string == string[::-1]
|
#!/bin/bash
################################################################################
# ____ ____ ____ ____ ____
# ||G |||i |||t |||t |||y ||
# ||__|||__|||__|||__|||__||
# |/__\|/__\|/__\|/__\|/__\|
#
# Gitty -- A GitHub client in Bash
#
# Copyright (c) 2014-20 Roberto Reale
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
################################################################################
export GITTY_LIB_BASE=.

# Source every bash library module under lib/ (regular, non-hidden files
# whose shebang points at bash). Paths are quoted so entries containing
# whitespace no longer break `source`.
for m in $(find "$GITTY_LIB_BASE/lib/" \( -type f -a -not -name ".*" \) -exec grep -l "^#\!.*/bash" {} \;)
do
    source "$m"
done

# are we being sourced?
(return 0 2>/dev/null) && sourced=1 || sourced=0

# When executed directly (not sourced) with at least a module and a method
# argument, dispatch to the corresponding gitty::<module>::<method> function.
if [[ $sourced -eq 0 && $# -ge 2 ]]
then
    module=$1 ; shift
    method=$1 ; shift
    "gitty::${module}::${method}" "$@"
fi

# Local variables:
# mode: shell-script
# sh-basic-offset: 4
# sh-indent-comment: t
# indent-tabs-mode: nil
# End:
# ex: ts=4 sw=4 et filetype=sh
|
#!/usr/bin/env bash
# Creates a local Docker registry inside minikube and port-forwards to it.
set -ex
# NOTE: the port is hardcoded, for kubectl
# doesn't currently support templating.
ME=$(basename $0)
DIR=$(dirname "$(readlink -f $0)")
echo $DIR
USAGE_EXAMPLE="$ME --docker-user mario --user-email mario.mario@bros.com"
REGISTRY="localhost:5000"
# Print the help text.
# NOTE(review): the help advertises `-e | --docker-email`, but the getopt
# spec below accepts `--user-email` — confirm which name is intended.
function usage {
BOLD=$(tput bold)
RESET=$(tput sgr0)
cat <<-END
${BOLD}NAME${RESET}
$ME -- create a local registry.
${BOLD}SYNOPSIS${RESET}
$ME [-h | --help] [-u | --docker-user USER] [-e | --docker-email] [-r | --registry host:port]
${BOLD}OPTIONS${RESET}
-e | --docker-email
User's Docker email.
-u | --docker-user
The Docker user.
-r | --registry
Registry address (default localhost:5000).
-h | --help
Print this help message.
${BOLD}EXAMPLES${RESET}
$USAGE_EXAMPLE
END
}
# Normalize short/long options with getopt, then consume them.
TEMP=`getopt -o h,u:,e:,r: --long help,docker-user:,user-email:,registry: \
-n "$ME" -- "$@"`
eval set -- "$TEMP"
while true; do
case "$1" in
-u | --docker-user )
DOCKER_USER="$2"
shift 2
;;
-e | --user-email )
USER_EMAIL="$2"
shift 2
;;
-r | --registry )
REGISTRY="$2"
shift 2
;;
-h | --help )
shift
usage
exit 0
;;
-- )
shift
break
;;
* )
break
;;
esac
done
# Prompt (30s timeout) for any value not supplied on the command line.
[[ -z $DOCKER_USER ]] && {
read -t 30 -p "Please supply a Docker user: " DOCKER_USER
}
[[ -z $USER_EMAIL ]] && {
read -t 30 -p "Please supply user's email: " USER_EMAIL
}
read -s -p "Please type the Docker password: " DOCKER_PASS
echo "Docker login"
docker login --username "${DOCKER_USER}" --password "$DOCKER_PASS"
# NOTE(review): `--format {{.MinikubeStatus}}` is unquoted — consider quoting
# to guard against shell globbing.
MINIKUBE_STATUS=$(minikube status --format {{.MinikubeStatus}})
if [[ $MINIKUBE_STATUS != "Running" ]]
then
echo "Starting minikube"
minikube start --vm-driver=xhyve
fi
# delete previous pods
echo "Removing previous registry"
# NOTE:
# currently kubectl returns before completely removing a resource
kubectl delete replicationcontroller --force --ignore-not-found=true --namespace=kube-system kube-registry-v0
kubectl delete pod --force --ignore-not-found=true --namespace=kube-system kube-registry-proxy
kubectl delete service --force --ignore-not-found=true --namespace=kube-system kube-registry
kubectl delete secret --force --ignore-not-found=true regsecret
echo "Registry Docker configuration"
cat ~/.docker/config.json
echo "Starting minikube"
minikube start \
--vm-driver xhyve \
--insecure-registry "$REGISTRY"
# start talking to the docker daemon inside the minikube VM
eval $(minikube docker-env)
# create a secret
echo "Creating secret"
kubectl create secret docker-registry regsecret \
--docker-server "${REGISTRY}"/registry \
--docker-username "$DOCKER_USER" \
--docker-password "$DOCKER_PASS" \
--docker-email "$USER_EMAIL"
echo $(kubectl get secret regsecret --output=yaml | grep -Po "cfg: \K(.+)") > $DIR/secret64
kubectl get pods --namespace kube-system
echo "Creating registry"
kubectl create -f $DIR/local-registry.yaml
sleep 15
kubectl get pods --namespace kube-system
# Wait until the registry pod reports phase Running.
POD=$(kubectl get pod -n kube-system | grep kube-registry-v0 | awk '{print $1;}')
while true
do
POD_STATUS=$(kubectl get pod --namespace kube-system "$POD" --template={{.status.phase}})
[[ $POD_STATUS == "Running" ]] && break
sleep 5
done
echo "Starting registry"
# Strip everything up to the last ':' to get the port number.
PORT=${REGISTRY##*:}
nohup kubectl port-forward --namespace kube-system "$POD" $PORT:$PORT &>/dev/null &
#kubectl port-forward --namespace kube-system "$POD" $PORT:$PORT
# NOTE: kubectl fails to tunnel the first request
# NOTE(review): `2>&1 >/dev/null` sends stderr to the terminal, not to
# /dev/null — if the intent was to silence everything, the order should be
# `>/dev/null 2>&1`.
curl -s http://"${REGISTRY}"/v2/_catalog 2>&1 >/dev/null
echo "Done"
|
'use strict';
var servicesModule = require('./_index.js');
/**
 * @ngInject
 * Angular $resource factory for the tweets REST endpoints, scoped by user
 * and wall: /api/tweets/:userId/:wallId/:tweetId. Exposes the default
 * $resource actions plus custom query (GET list), save (POST) and
 * update (PUT) actions.
 */
function TweetsService($q, $http, $resource, AppSettings, AuthService) {
// var url = AppSettings.oauthProxyUrl.split('/')[0] + '/' + AppSettings.oauthProxyUrl.split('/')[1] + '/' + AppSettings.oauthProxyUrl.split('/')[2];
// return $resource(url + '/:user/:app/:id', {
return $resource('/api/tweets/:userId/:wallId/:tweetId', {
userId: '@userId',
wallId: '@wallId',
tweetId: '@tweetId'
}, {
// GET an array of tweets for a user's wall.
query: {
method: 'GET',
isArray: true,
params: {
userId: '@userId',
wallId: '@wallId',
}
},
// POST a new tweet to a wall.
save: {
method: 'POST',
params: {
userId: '@userId',
wallId: '@wallId',
}
},
// PUT an update to an existing tweet.
update: {
method: 'PUT',
params: {
userId: '@userId',
wallId: '@wallId',
tweetId: '@tweetId'
}
}
});
}
// Register with explicit dependency annotations (minification-safe).
servicesModule.service('TweetsService', ['$q', '$http', '$resource', 'AppSettings', 'AuthService', TweetsService]);
|
package collections
import (
"fmt"
"math"
"sort"
"strings"
)
// ListInterface is a growable list of arbitrary values with a pluggable
// ordering; it satisfies sort.Interface through Len/Less/Swap.
type ListInterface struct {
	// Collection holds the stored elements.
	Collection []interface{}
	// SortFunc is the comparator used by Less; installed by SortL/SortH.
	SortFunc func(i, j int) bool
}
// Shift removes and returns the first element of the list.
// An empty list yields the empty string "" (not nil) — preserved for
// compatibility with existing callers.
func (list *ListInterface) Shift() interface{} {
	if len(list.Collection) == 0 {
		return ""
	}
	head := list.Collection[0]
	list.Collection = list.Collection[1:]
	return head
}
// Pop removes and returns the last element of the list.
// An empty list yields the empty string "" (not nil) — preserved for
// compatibility with existing callers.
func (list *ListInterface) Pop() interface{} {
	count := len(list.Collection)
	if count == 0 {
		return ""
	}
	last := list.Collection[count-1]
	list.Collection = list.Collection[:count-1]
	return last
}
// String renders the list by prefixing every element with "," — note the
// result therefore starts with a leading comma; this quirk is preserved
// byte-for-byte for backward compatibility with existing output.
func (list *ListInterface) String() string {
	var sb strings.Builder
	for _, elem := range list.Collection {
		sb.WriteString(",")
		sb.WriteString(fmt.Sprint(elem))
	}
	return sb.String()
}
// Append adds a single value to the end of the list.
// (Exactly one element is appended despite the plural parameter name.)
func (list *ListInterface) Append(elems interface{}) {
	list.Collection = append(list.Collection, elems)
}
// Len reports the number of stored elements (part of sort.Interface).
func (list *ListInterface) Len() int {
	return len(list.Collection)
}
// Less delegates to the pluggable comparator (part of sort.Interface).
// NOTE(review): panics if SortFunc is nil — SortL/SortH install one before
// sorting, but a direct sort.Sort(list) without it will crash.
func (list *ListInterface) Less(i, j int) bool {
	return list.SortFunc(i, j)
}
// Swap exchanges the elements at i and j (part of sort.Interface), using
// Go's idiomatic parallel assignment instead of a temporary variable.
func (list *ListInterface) Swap(i, j int) {
	list.Collection[i], list.Collection[j] = list.Collection[j], list.Collection[i]
}
// SortL sorts the list ascending by each element's fmt.Sprint rendering
// (plain lexicographic string order).
func (list *ListInterface) SortL() {
	list.SortFunc = func(i, j int) bool {
		return fmt.Sprint(list.Collection[i]) < fmt.Sprint(list.Collection[j])
	}
	sort.Sort(list)
}
// Join concatenates the fmt.Sprint rendering of every element, separated by
// `fix`. The previous implementation tested the accumulated text against ""
// to decide whether to insert a separator, which silently dropped leading
// elements that rendered as the empty string; building the parts first and
// delegating to strings.Join preserves every element.
func (list *ListInterface) Join(fix string) string {
	parts := make([]string, 0, len(list.Collection))
	for _, value := range list.Collection {
		parts = append(parts, fmt.Sprint(value))
	}
	return strings.Join(parts, fix)
}
// SortH sorts the list in descending order using a custom byte-wise
// comparison: elements are rendered with fmt.Sprint, compared first by their
// leading byte, then byte-by-byte with the shorter string padded with zero
// bytes.
// NOTE(review): a[0]/b[0] panics if an element renders to the empty string —
// confirm callers never store such values. Comparison also operates on raw
// bytes, so multi-byte (UTF-8) characters compare by encoding, not rune.
func (list *ListInterface) SortH() {
	list.SortFunc = func(i, j int) bool {
		a := fmt.Sprint(list.Collection[i])
		b := fmt.Sprint(list.Collection[j])
		if a[0] > b[0] {
			return true
		} else if a[0] == b[0] {
			// Walk both strings up to the longer length, treating missing
			// positions as byte 0 (so shorter strings sort after equal
			// prefixes in this descending order).
			le := int(math.Max(float64(len(a)), float64(len(b))))
			for oo := 0; oo < le; oo++ {
				var aa byte = 0
				if oo > len(a)-1 {
					aa = 0
				} else {
					aa = a[oo]
				}
				var bb byte = 0
				if oo > len(b)-1 {
					bb = 0
				} else {
					bb = b[oo]
				}
				if aa > bb {
					return true
				} else if aa < bb {
					return false
				}
			}
			return false
		} else {
			return false
		}
	}
	sort.Sort(list)
}
|
/**
 * Sums all numeric arguments.
 * Rewritten from the legacy `arguments` object to rest parameters with
 * `reduce` — same variadic call signature, clearer intent.
 * @param {...number} nums - The values to add together.
 * @returns {number} The total; 0 when called with no arguments.
 */
function sum(...nums) {
  return nums.reduce((total, n) => total + n, 0);
}
console.log(sum(1, 2, 3, 4));
// 10
|
/**
 * Holds sizing and orientation metadata for a camera capture: the cover
 * (full) image, the preview image, and the device/display orientations.
 */
public class ImageParameters {
    // Dimensions of the cover (full-size) image.
    public int mCoverHeight, mCoverWidth;
    // Dimensions of the preview image.
    public int mPreviewHeight, mPreviewWidth;

    public boolean mIsPortrait;

    public int mDisplayOrientation;
    public int mLayoutOrientation;

    /**
     * Restores orientation state from a Parcel. The read order must match
     * the corresponding write order: one byte (portrait flag) followed by
     * two ints (display and layout orientation).
     */
    public ImageParameters(Parcel in) {
        final byte portraitFlag = in.readByte();
        mIsPortrait = portraitFlag == 1;
        mDisplayOrientation = in.readInt();
        mLayoutOrientation = in.readInt();
    }

    public void setCoverDimensions(int height, int width) {
        this.mCoverHeight = height;
        this.mCoverWidth = width;
    }

    public void setPreviewDimensions(int height, int width) {
        this.mPreviewHeight = height;
        this.mPreviewWidth = width;
    }

    /** @return cover area in pixels (height times width). */
    public int calculateCoverArea() {
        return mCoverWidth * mCoverHeight;
    }

    /** @return preview area in pixels (height times width). */
    public int calculatePreviewArea() {
        return mPreviewWidth * mPreviewHeight;
    }

    public boolean isCoverPortrait() {
        return mIsPortrait;
    }

    /** Assumes the preview shares the cover image's orientation. */
    public boolean isPreviewPortrait() {
        return mIsPortrait;
    }
}
|
<filename>core/src/mindustry/world/CachedTile.java
package mindustry.world;
import mindustry.entities.type.TileEntity;
import mindustry.game.Team;
import mindustry.world.modules.*;
/**
 * A tile which does not trigger change events and whose entity types are cached.
 * Prevents garbage when loading previews.
 */
public class CachedTile extends Tile{

    public CachedTile(){
        super(0, 0);
    }

    @Override
    public Team getTeam(){
        return Team.get(getTeamID());
    }

    @Override
    protected void preChanged(){
        //this basically overrides the old tile code and doesn't remove from proximity
        team = 0;
    }

    @Override
    protected void changed(){
        // Rebuild the cached entity for the current block without firing
        // world-change events.
        entity = null;

        Block block = block();

        if(block.hasEntity()){
            TileEntity n = block.newEntity();
            // NOTE(review): ConsumeModule is constructed with `entity`, which
            // was just set to null — confirm this is intentional.
            n.cons = new ConsumeModule(entity);
            n.tile = this;
            n.block = block;
            // Only allocate the modules the block actually uses.
            if(block.hasItems) n.items = new ItemModule();
            if(block.hasLiquids) n.liquids = new LiquidModule();
            if(block.hasPower) n.power = new PowerModule();
            entity = n;
        }
    }
}
|
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Convert a Pix2Pix model from .air to .om format for Ascend 310 inference.
#   $1 - path to the input .air model
#   $2 - path for the output .om model
air_path=${1}
om_path=${2}

atc --model="${air_path}" \
    --output="${om_path}" \
    --framework=1 \
    --soc_version=Ascend310

# Remove ATC build artifacts; -f / -rf tolerate their absence so cleanup
# never fails the script when atc produced no intermediates.
rm -f fusion_result.json
rm -rf kernel_meta/
|
<filename>app/src/main/java/com/example/myshoppingmall/ProductImagesAdapter.java
package com.example.myshoppingmall;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import androidx.annotation.NonNull;
import androidx.viewpager.widget.PagerAdapter;
import com.bumptech.glide.Glide;
import com.bumptech.glide.request.RequestOptions;
import java.util.List;
/**
 * PagerAdapter that displays a product's image URLs in a ViewPager,
 * loading each page's image with Glide.
 */
public class ProductImagesAdapter extends PagerAdapter {

    private List<String> productImages;

    public ProductImagesAdapter(List<String> productImages) {
        this.productImages = productImages;
    }

    @NonNull
    @Override
    public Object instantiateItem(@NonNull ViewGroup container, int position) {
        // Create a fresh ImageView per page and let Glide populate it,
        // showing the banner placeholder while loading.
        ImageView imageView = new ImageView(container.getContext());
        RequestOptions options = new RequestOptions().placeholder(R.mipmap.banner_placeholder);
        Glide.with(container.getContext())
                .load(productImages.get(position))
                .apply(options)
                .into(imageView);
        container.addView(imageView, 0);
        return imageView;
    }

    @Override
    public void destroyItem(@NonNull ViewGroup container, int position, @NonNull Object object) {
        container.removeView((ImageView) object);
    }

    @Override
    public int getCount() {
        return productImages.size();
    }

    @Override
    public boolean isViewFromObject(@NonNull View view, @NonNull Object object) {
        // Each page's key object IS its ImageView, so identity suffices.
        return view == object;
    }
}
|
<filename>sources/Engine/Modules/Graphics/GraphicsSystem/EnvironmentRenderingSystem.cpp
#include "precompiled.h"
#pragma hdrstop
#include "EnvironmentRenderingSystem.h"
#include <utility>
#include "Modules/ECS/ECS.h"
EnvironmentComponent::EnvironmentComponent() = default;

// Stores the material used to render the environment (handle taken by move).
void EnvironmentComponent::setEnvironmentMaterial(ResourceHandle<GLMaterial> material)
{
  m_environmentMaterial = std::move(material);
}

// Returns the raw material pointer; the component keeps the owning handle.
GLMaterial* EnvironmentComponent::getEnvironmentMaterial() const
{
  return m_environmentMaterial.get();
}

// Exposes the serializable binding parameters (the material's resource id).
EnvironmentComponent::BindingParameters EnvironmentComponent::getBindingParameters() const
{
  return EnvironmentComponent::BindingParameters{
    .materialResourceName = m_environmentMaterial.getResourceId()
  };
}
// Binder that attaches an EnvironmentComponent to a game object, resolving
// the environment material by resource name through the resources manager.
EnvironmentComponentBinder::EnvironmentComponentBinder(const ComponentBindingParameters& componentParameters,
  std::shared_ptr<ResourcesManager> resourcesManager)
  : m_bindingParameters(componentParameters),
    m_resourcesManager(std::move(resourcesManager))
{
}

void EnvironmentComponentBinder::bindToObject(GameObject& gameObject)
{
  auto& environmentComponent = *gameObject.addComponent<EnvironmentComponent>().get();
  // Look up the material declared in the binding parameters and install it.
  ResourceHandle<GLMaterial> materialInstance =
    m_resourcesManager->getResource<GLMaterial>(m_bindingParameters.materialResourceName);
  environmentComponent.setEnvironmentMaterial(materialInstance);
}
// Rendering system responsible for drawing the environment mesh each frame.
EnvironmentRenderingSystem::EnvironmentRenderingSystem(
  std::shared_ptr<GLGraphicsContext> graphicsContext,
  std::shared_ptr<GraphicsScene> graphicsScene,
  ResourceHandle<Mesh> environmentMesh)
  : RenderingSystem(std::move(graphicsContext), std::move(graphicsScene)),
    m_environmentMesh(std::move(environmentMesh))
{
}

EnvironmentRenderingSystem::~EnvironmentRenderingSystem() = default;

// No event subscriptions or per-frame bookkeeping are needed; these
// lifecycle hooks are intentionally empty.
void EnvironmentRenderingSystem::configure()
{
}

void EnvironmentRenderingSystem::unconfigure()
{
}

void EnvironmentRenderingSystem::update(float delta)
{
  ARG_UNUSED(delta);
}
// Renders the environment: looks up the game object carrying an
// EnvironmentComponent, records frame statistics, and schedules a render
// task for sub-mesh 0 of the environment mesh with the component's material.
void EnvironmentRenderingSystem::render()
{
  // NOTE(review): assumes at least one object with an EnvironmentComponent
  // exists — begin() on an empty view may be invalid before the isAlive()
  // check below; confirm allWith() yields a safe sentinel when empty.
  GameObject environmentObject = getGameWorld()->allWith<EnvironmentComponent>().begin().getGameObject();

  if (!environmentObject.isAlive()) {
    SW_ASSERT(false);
    return;
  }

  GLMaterial* material = environmentObject.getComponent<EnvironmentComponent>()->getEnvironmentMaterial();

  auto& frameStats = m_graphicsScene->getFrameStats();
  frameStats.increaseSubMeshesCount(1);
  // Indexed triangles: three indices per primitive, hence the division by 3.
  frameStats.increasePrimitivesCount(m_environmentMesh->getSubMeshIndicesCount(0) / 3);

  m_graphicsContext->scheduleRenderTask(RenderTask{
    .material = material,
    .mesh = m_environmentMesh.get(),
    .subMeshIndex = 0,
  });
}
|
<reponame>Cristianasg/system<filename>application/assets/js/pages.js
// Registry of page-specific behaviors; intentionally empty here — entries
// are presumably added elsewhere at runtime (TODO confirm).
var pages = {
}
|
<filename>app/src/main/java/com/tanmay/biisit/myMusic/MyMusicFragment.java<gh_stars>1-10
package com.tanmay.biisit.myMusic;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.ServiceConnection;
import android.database.Cursor;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.MediaController;
import android.widget.Spinner;
import android.widget.TextView;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import com.tanmay.biisit.CustomMediaController;
import com.tanmay.biisit.MediaPlayerService;
import com.tanmay.biisit.R;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.tanmay.biisit.MediaPlayerService.ACTION_PAUSE;
import static com.tanmay.biisit.MediaPlayerService.ACTION_PLAY;
import static com.tanmay.biisit.MediaPlayerService.ACTION_REDRAW;
import static com.tanmay.biisit.MediaPlayerService.ACTION_STOP;
import static com.tanmay.biisit.MediaPlayerService.BROADCAST_CLIENT_ID_KEY;
import static com.tanmay.biisit.MediaPlayerService.BROADCAST_CLIENT_ITEM_POS_KEY;
import static com.tanmay.biisit.MediaPlayerService.BROADCAST_MEDIA_URI_KEY;
import static com.tanmay.biisit.MediaPlayerService.BROADCAST_RESUMED_ITEM_POS_KEY;
import static com.tanmay.biisit.MediaPlayerService.BROADCAST_SEEK_POSITION_KEY;
import static com.tanmay.biisit.MediaPlayerService.SERVICE_ACTION_PAUSE;
import static com.tanmay.biisit.MediaPlayerService.SERVICE_ACTION_RESUME;
import static com.tanmay.biisit.MediaPlayerService.SERVICE_ACTION_SEEK;
import static com.tanmay.biisit.MediaPlayerService.SERVICE_ACTION_START_PLAY;
/**
* A fragment representing a list of Items.
* <p/>
*/
public class MyMusicFragment extends Fragment
implements LoaderManager.LoaderCallbacks<Cursor>,
MyMusicRecyclerViewAdapter.OnListFragmentInteractionListener,
MediaController.MediaPlayerControl {
// Identifies this fragment as a client in MediaPlayerService broadcasts.
public static final int MY_MUSIC_FRAGMENT_CLIENT_ID = 101;
private static final String LOG_TAG = MyMusicFragment.class.getSimpleName();
// Loader ids: all local tracks vs. favourites only.
private static final int CURSOR_LOADER_ID_ALL = 1;
private static final int CURSOR_LOADER_ID_FAV = 2;
// Saved-instance-state bundle keys.
private static final String SPINNER_SELECTED_KEY = "SPINNER_SELECTED_KEY";
private static final String CURRENT_URI_KEY = "CURRENT_URI_KEY";
private static final String FAV_ID_KEY = "FAV_ID_KEY";
// True while bound to MediaPlayerService via mServiceConn.
private boolean mServiceBound = false;
private RecyclerView mRecyclerView;
// URI of the track currently loaded in the player, or null when idle.
private Uri mCurrentUri = null;
private MyMusicRecyclerViewAdapter mRecyclerViewAdapter = null;
private MyMusicFragmentReceiver mMusicFragmentReceiver = new MyMusicFragmentReceiver();
private CustomMediaController mController;
private ServiceConnection mServiceConn;
// MediaPlayer owned by MediaPlayerService; non-null only while bound.
private MediaPlayer mServiceMediaPlayer = null;
private int mLastSelectedPos = -1;
// True when the track list is filtered to favourites.
private boolean mOnlyFav = false;
// Firebase database references for the signed-in user's favourites.
private DatabaseReference mRootRef;
private DatabaseReference mUserInfoReference;
private DatabaseReference mSpecificUserDataReference;
private static final String USER_INFO_KEY = "user_info";
// Ids of the user's favourite tracks, or null when unknown/signed out.
private List<Integer> mFavouriteIds = null;
private Spinner mSpinner;
private int mSpinnerSelectedPos = -1;
private ValueEventListener mUserValueEventListener;
private boolean mIsLoggedIn = false;
private String mUserId;
private FirebaseAuth.AuthStateListener mAuthListener;
private boolean mIsPlaying = false;
// Shown when the list has no items to display.
private TextView mEmptyView;
/**
 * Mandatory empty constructor for the fragment manager to instantiate the
 * fragment (e.g. upon screen orientation changes).
 */
public MyMusicFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.i(LOG_TAG, "onCreate: Fragment created");
    super.onCreate(savedInstanceState);
    registerBroadcastReceivers();
    // Connection used to grab the service's MediaPlayer once bound.
    mServiceConn = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            mServiceBound = true;
            mServiceMediaPlayer = (((MediaPlayerService.ServiceBinder)service).getMediaPlayer());
            Log.i(LOG_TAG, "onServiceConnected: Service bound");
            mController.show();
        }
        @Override
        public void onServiceDisconnected(ComponentName name) {
            // Simply end play if service dies
            Log.i(LOG_TAG, "onServiceDisconnected: Service unbound");
            stopPlayAndUnbind();
        }
    };
    setHasOptionsMenu(true);
    // setRetainInstance(true);
    startServiceIfDown();
}
/** Starts MediaPlayerService unless its static running flag says it is already up. */
private void startServiceIfDown(){
    if (MediaPlayerService.sIsRunning) {
        return;
    }
    getActivity().startService(new Intent(getActivity(), MediaPlayerService.class));
}
/**
 * Tears down playback state: unbinds from the service if bound, clears the
 * current track and player references, hides the controller, and resets
 * playback UI via playbackStopped().
 */
private void stopPlayAndUnbind(){
    if (mServiceBound) {
        getActivity().unbindService(mServiceConn);
        mServiceBound = false;
    }
    mCurrentUri = null;
    mServiceMediaPlayer = null;
    mController.actuallyHide();
    playbackStopped();
}
@Override
public void onDestroy() {
    Log.i(LOG_TAG, "onDestroy: Fragment destroyed");
    super.onDestroy();
    if (mController != null)
        mController.actuallyHide();
    unregisterBroadcastReceivers();
    // Unbinding may throw if the connection was never actually registered;
    // treat that as "already unbound".
    try {
        if (mServiceBound)
            getActivity().unbindService(mServiceConn);
    } catch (IllegalArgumentException i){
        Log.d(LOG_TAG, "onDestroy: Was actually not bound");
    }
    mServiceBound = false;
    // Detach Firebase listeners to avoid leaking this fragment.
    if (mSpecificUserDataReference != null)
        mSpecificUserDataReference.removeEventListener(mUserValueEventListener);
    FirebaseAuth.getInstance().removeAuthStateListener(mAuthListener);
}
@Override
public void onSaveInstanceState(Bundle outState) {
    // Log.i(LOG_TAG, "onSaveInstanceState: saving state");
    super.onSaveInstanceState(outState);
    // Persist spinner selection, current track URI and favourite ids so
    // they survive configuration changes.
    outState.putInt(SPINNER_SELECTED_KEY, mSpinnerSelectedPos);
    // outState.putInt(SELECTED_POS_KEY, mLastSelectedPos);
    outState.putParcelable(CURRENT_URI_KEY, mCurrentUri);
    // outState.putBoolean(IS_PLAYING_KEY, mIsPlaying);
    outState.putIntegerArrayList(FAV_ID_KEY, (ArrayList<Integer>) mFavouriteIds);
}
/** Registers for every playback-state broadcast emitted by MediaPlayerService. */
private void registerBroadcastReceivers() {
    IntentFilter filter = new IntentFilter();
    filter.addAction(ACTION_PLAY);
    filter.addAction(ACTION_PAUSE);
    filter.addAction(ACTION_STOP);
    filter.addAction(ACTION_REDRAW);
    getActivity().registerReceiver(mMusicFragmentReceiver, filter);
}
/** Counterpart to registerBroadcastReceivers(); called from onDestroy(). */
private void unregisterBroadcastReceivers() {
    getActivity().unregisterReceiver(mMusicFragmentReceiver);
}
/**
 * Inflates the fragment UI, wires the toolbar/drawer, media controller and
 * Firebase listeners, then restores any saved state and syncs the playback
 * flags with MediaPlayerService's static state.
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
                         Bundle savedInstanceState) {
    Log.i(LOG_TAG, "onCreateView");
    View view = inflater.inflate(R.layout.fragment_mymusic, container, false);
    // Set the adapter
    mRecyclerView = (RecyclerView) view.findViewById(R.id.recyclerView);
    if (mRecyclerView != null) {
        Context context = view.getContext();
        // if (mColumnCount <= 1) {
        mRecyclerView.setLayoutManager(new LinearLayoutManager(context));
        // } else {
        //     mRecyclerView.setLayoutManager(new GridLayoutManager(context, mColumnCount));
        // }
        // mRecyclerView.setAdapter(new MyMusicRecyclerViewAdapter(getActivity(), this, null, false));
    }
    mEmptyView = (TextView) view.findViewById(R.id.empty_view);
    // Toolbar + navigation drawer wiring.
    Toolbar toolbar = (Toolbar) view.findViewById(R.id.toolbar_mm);
    ((AppCompatActivity) getActivity()).setSupportActionBar(toolbar);
    DrawerLayout drawer = (DrawerLayout) getActivity().findViewById(R.id.drawer_layout);
    ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(
            getActivity(), drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
    //noinspection deprecation
    drawer.setDrawerListener(toggle);
    toggle.syncState();
    // Media controller anchored over the track list.
    mController = new CustomMediaController(getActivity(), true);
    mController.setMediaPlayer(MyMusicFragment.this);
    mController.setAnchorView(view.findViewById(R.id.recyclerView));
    mController.setEnabled(true);
    mRootRef = FirebaseDatabase.getInstance().getReference();
    mUserInfoReference = mRootRef.child(USER_INFO_KEY);
    // Fires whenever the user's favourites change; only acts in
    // favourites-only mode, restarting the loader when the set changed.
    mUserValueEventListener = new ValueEventListener() {
        @Override
        public void onDataChange(DataSnapshot dataSnapshot) {
            if (mOnlyFav){
                // Log.i(LOG_TAG, "onDataChange: UserData updated/listener's first call");
                List<Integer> newFavouriteIds = new ArrayList<>();
                for (DataSnapshot i : dataSnapshot.getChildren()){
                    newFavouriteIds.add(Integer.valueOf(i.getKey()));
                }
                if (newFavouriteIds.isEmpty())
                    showEmptyView(R.string.my_music_no_fav_emptyview_text);
                else if (mFavouriteIds == null || !(mFavouriteIds.containsAll(newFavouriteIds) && newFavouriteIds.containsAll(mFavouriteIds))) {
                    mFavouriteIds = newFavouriteIds;
                    // if (mOnlyFav || mFavouriteIds == null){
                    // Log.i(LOG_TAG, "onDataChange: fav actually changed, so will restart loader");
                    restartCursorLoader();
                    // }
                }
                else {
                    initCursorLoader();
                }
            }
        }
        @Override
        public void onCancelled(DatabaseError databaseError) {
        }
    };
    // Tracks sign-in state: attaches/detaches the favourites listener and
    // refreshes the list accordingly.
    mAuthListener = new FirebaseAuth.AuthStateListener() {
        @Override
        public void onAuthStateChanged(@NonNull FirebaseAuth firebaseAuth) {
            FirebaseUser user = firebaseAuth.getCurrentUser();
            if (user != null) {
                // User is signed in
                mUserId = user.getUid();
                // Log.d(LOG_TAG, "onAuthStateChanged:signed_in:" + mUserId);
                mIsLoggedIn = true;
                mSpecificUserDataReference = mUserInfoReference.child(mUserId);
                respondToSpinnerValueChanage();
            } else {
                // User is signed out
                // Log.d(LOG_TAG, "onAuthStateChanged:signed_out");
                if (mSpecificUserDataReference != null) {
                    mSpecificUserDataReference.removeEventListener(mUserValueEventListener);
                    mSpecificUserDataReference = null;
                }
                mIsLoggedIn = false;
                mFavouriteIds = null;
                if (mOnlyFav){
                    showEmptyView(R.string.my_music_no_fav_emptyview_text);
                }
            }
        }
    };
    FirebaseAuth.getInstance().addAuthStateListener(mAuthListener);
    // Restore UI state after a configuration change.
    if (savedInstanceState != null){
        // Log.i(LOG_TAG, "onCreateView: Restoring from saved state");
        mSpinnerSelectedPos = savedInstanceState.getInt(SPINNER_SELECTED_KEY);
        // mLastSelectedPos = savedInstanceState.getInt(SELECTED_POS_KEY);
        mCurrentUri = savedInstanceState.getParcelable(CURRENT_URI_KEY);
        // mIsPlaying = savedInstanceState.getBoolean(IS_PLAYING_KEY);
        mFavouriteIds = savedInstanceState.getIntegerArrayList(FAV_ID_KEY);
        // respondToSpinnerValueChanage();
    }
    // Adopt the service's playback state only if this fragment is the
    // service's current client.
    if (MediaPlayerService.sCurrentClient == MY_MUSIC_FRAGMENT_CLIENT_ID) {
        mLastSelectedPos = MediaPlayerService.sCurrentClientItemPos;
        mIsPlaying = MediaPlayerService.sIsPlaying;
    }
    else {
        mLastSelectedPos = -1;
        mIsPlaying = false;
    }
    return view;
}
// Restarts whichever cursor loader matches the current spinner mode so the
// list re-queries MediaStore (used when the favourites set actually changed).
private void restartCursorLoader(){
if (mOnlyFav)
getActivity().getSupportLoaderManager().restartLoader(CURSOR_LOADER_ID_FAV, null, MyMusicFragment.this);
else
getActivity().getSupportLoaderManager().restartLoader(CURSOR_LOADER_ID_ALL, null, MyMusicFragment.this);
}
// Lazily creates (or reuses) the loader for the current spinner mode.
// Unlike restartCursorLoader(), an already-loaded cursor is reused as-is.
private void initCursorLoader(){
if (mOnlyFav)
getActivity().getSupportLoaderManager().initLoader(CURSOR_LOADER_ID_FAV, null, MyMusicFragment.this);
else
getActivity().getSupportLoaderManager().initLoader(CURSOR_LOADER_ID_ALL, null, MyMusicFragment.this);
}
// Reacts to the All/Favourites spinner changing (note: method name keeps its
// original typo because other call sites outside this view depend on it).
// Logged-in + favourites: attach the Firebase value listener, which drives the
// loader via onDataChange. Logged-in + all: detach the listener and load all.
// Logged-out: all-songs mode still loads; favourites mode shows the empty view.
private void respondToSpinnerValueChanage(){
// if (mCurrentUri != null)
// mController.show();
// sendServiceBroadcast(SERVICE_ACTION_STOP);
if (mIsLoggedIn) {
if (mOnlyFav) {
// Log.i(LOG_TAG, "respondToSpinnerValueChanage: Adding permanent listener");
if (mSpecificUserDataReference != null)
mSpecificUserDataReference.addValueEventListener(mUserValueEventListener);
else
showEmptyView(R.string.my_music_no_fav_emptyview_text);
}
else {
if (mSpecificUserDataReference != null)
mSpecificUserDataReference.removeEventListener(mUserValueEventListener);
// Log.i(LOG_TAG, "respondToSpinnerValueChanage: Adding one time listener");
// if (mSpecificUserDataReference != null)
// mSpecificUserDataReference.addListenerForSingleValueEvent(mUserValueEventListener);
initCursorLoader();
}
}
else if (! mOnlyFav){
initCursorLoader();
}
else
showEmptyView(R.string.my_music_no_fav_emptyview_text);
}
/**
 * Inflates the fragment menu and wires the All/Favourites spinner.
 * Selecting an item flips {@code mOnlyFav} and re-runs
 * {@link #respondToSpinnerValueChanage()}.
 */
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    Log.i(LOG_TAG, "onCreateOptionsMenu");
    super.onCreateOptionsMenu(menu, inflater);
    inflater.inflate(R.menu.mymusic_fragment_menu, menu);
    mSpinner = (Spinner) (menu.findItem(R.id.favourites_spinner_menu_item).getActionView());
    ArrayAdapter<CharSequence> spinnerAdapter = ArrayAdapter.createFromResource(getActivity(),
            R.array.favourites_spinner_choices,
            R.layout.custom_spinner_item);
    spinnerAdapter.setDropDownViewResource(R.layout.custom_spinner_dropdown_item);
    if (mSpinner != null) {
        mSpinner.setAdapter(spinnerAdapter);
        mSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> adapterView, View view, int position, long id) {
                mSpinnerSelectedPos = position;
                // Position 1 is the "Favourites" entry of the spinner array.
                mOnlyFav = (position == 1);
                respondToSpinnerValueChanage();
            }
            @Override
            public void onNothingSelected(AdapterView<?> adapterView) {
            }
        });
        // Bug fix: restore the saved selection only when the spinner exists.
        // Previously this ran unconditionally and NPE'd when the action view
        // was missing.
        if (mSpinnerSelectedPos != -1) {
            mSpinner.setSelection(mSpinnerSelectedPos);
        }
    }
    else
        Log.e(LOG_TAG, "Didn't get the spinner");
}
// Swaps in an empty (null-cursor) adapter and shows the empty-state text.
// @param message string resource id for the empty-state message
private void showEmptyView(int message) {
if (mRecyclerView != null) {
mRecyclerViewAdapter = new MyMusicRecyclerViewAdapter(getActivity(), this, null, false);
mRecyclerView.setAdapter(mRecyclerViewAdapter);
mEmptyView.setText(message);
mEmptyView.setVisibility(View.VISIBLE);
}
}
// Convenience overload using the generic "no music" message.
private void showEmptyView(){
// TODO set empty view visible, and remove the below
showEmptyView(R.string.my_music_emptyview_text);
}
/**
 * Builds the MediaStore cursor loader for the requested loader id.
 * CURSOR_LOADER_ID_FAV restricts the query to the ids in {@code mFavouriteIds};
 * CURSOR_LOADER_ID_ALL loads every music track on external storage.
 *
 * @return the loader, or null for an unknown id / an empty favourites call
 */
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
    Log.i(LOG_TAG, "onCreateLoader");
    switch (id) {
        case CURSOR_LOADER_ID_FAV:
            if (mFavouriteIds == null || mFavouriteIds.isEmpty()){
                Log.e(LOG_TAG, "onCreateLoader: Bad call to loader create!!!!!!!!!!!!!!!!");
                return null;
            }
            // TextUtils.join accepts the list directly; the previous
            // Arrays.toString(...).split(...) round-trip produced the exact
            // same "id1, id2, ..." string with far more work.
            String whereStr = " _ID in (" + TextUtils.join(", ", mFavouriteIds) + ")";
            return new CursorLoader(getActivity(), android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, null, whereStr + " AND " + MediaStore.Audio.Media.IS_MUSIC + " <> 0 ", null, null);
        case CURSOR_LOADER_ID_ALL:
            return new CursorLoader(getActivity(), android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, null, MediaStore.Audio.Media.IS_MUSIC + " <> 0 ", null, null);
        default:
            return null;
    }
}
// Installs the loaded cursor into the recycler adapter, or shows the proper
// empty view when the cursor has no rows. If playback was already in progress
// for this client, re-highlights the playing row after the adapter swap.
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
Log.i(LOG_TAG, "onLoadFinished: got cursor of size " + data.getCount());
if (mRecyclerView != null) {
if (!data.moveToFirst()){
if (mOnlyFav)
showEmptyView(R.string.my_music_no_fav_emptyview_text);
else
showEmptyView();
return;
}
mRecyclerViewAdapter = new MyMusicRecyclerViewAdapter(getActivity(), this, data, mOnlyFav);
mEmptyView.setVisibility(View.GONE);
mRecyclerView.setAdapter(mRecyclerViewAdapter);
if (mIsPlaying && mLastSelectedPos != -1){
playbackStarted(mLastSelectedPos);
}
}
else {
Log.w(LOG_TAG, "onLoadFinished: not setting adapter as recycler view was not set");
showEmptyView();
}
}
// NOTE(review): intentionally empty, but the adapter still holds the old
// Cursor here — confirm whether it should be swapped to null on reset.
@Override
public void onLoaderReset(Loader<Cursor> loader) {
}
// Broadcast with no item position (-1 = not applicable).
private void sendServiceBroadcast(String action){
sendServiceBroadcast(action, -1);
}
// Broadcast with an item position but no seek offset (-1 = not applicable).
private void sendServiceBroadcast(String action, int selectionPosition){
sendServiceBroadcast(action, selectionPosition, -1);
}
// Sends a command broadcast to MediaPlayerService, stamped with this
// fragment's client id, the tapped list position, the current media Uri and
// an optional resume/seek position in ms (-1 when unused).
private void sendServiceBroadcast(String action, int selectionPosition, int resumePosition) {
Log.i(LOG_TAG, "sendServiceBroadcast: " + action);
Intent intent = new Intent();
// Intent intent = new Intent(getActivity(), MediaPlayerService.class.getCanonicalName());
intent.setAction(action);
intent.putExtra(BROADCAST_CLIENT_ID_KEY, MY_MUSIC_FRAGMENT_CLIENT_ID);
intent.putExtra(BROADCAST_CLIENT_ITEM_POS_KEY, selectionPosition);
intent.putExtra(BROADCAST_MEDIA_URI_KEY, mCurrentUri);
intent.putExtra(BROADCAST_SEEK_POSITION_KEY, resumePosition);
getActivity().sendBroadcast(intent);
}
// List-row play/pause callback. Decides between four cases based on whether
// the tapped track is the one this client is already playing (sameAsCurrent)
// and whether the tap requests start or stop, then sends the matching
// service command (START_PLAY / PAUSE / RESUME).
@SuppressWarnings("all")
@Override
public void onListFragmentInteraction(Uri mediaUri, boolean toStart, int position) {
mLastSelectedPos = position;
mIsPlaying = toStart;
boolean sameAsCurrent;
// Only compare Uris when this fragment is the service's active client;
// another client's current Uri is irrelevant here.
if (MediaPlayerService.sCurrentClient == MY_MUSIC_FRAGMENT_CLIENT_ID) {
sameAsCurrent = mediaUri.equals(mCurrentUri);
}
else
sameAsCurrent = false;
if (!sameAsCurrent && !toStart ) {
// Stopping new track
Log.e(LOG_TAG, "onListFragmentInteraction: IMPOSSIBLE!!!");
}else if (!sameAsCurrent && toStart){
// Starting new track
// mLastPlayedUri = mCurrentUri;
startServiceIfDown();
mCurrentUri = mediaUri;
sendServiceBroadcast(SERVICE_ACTION_START_PLAY, position);
}
else if (sameAsCurrent && ! toStart){
// Stopping current track
sendServiceBroadcast(SERVICE_ACTION_PAUSE);
// mController.show();
}
else if (sameAsCurrent && toStart){
// Starting current track
sendServiceBroadcast(SERVICE_ACTION_RESUME);
// mController.show();
}
// if (toStart){
// if (mediaUri.equals(mLastPlayedUri)) {
// sendServiceBroadcast(SERVICE_ACTION_RESUME, position);
// }
// else {
// mLastPlayedUri = mCurrentUri;
// mCurrentUri = mediaUri;
// sendServiceBroadcast(SERVICE_ACTION_START_PLAY, position);
// }
// }
// else {
// mLastPlayedUri = mediaUri;
// sendServiceBroadcast(SERVICE_ACTION_PAUSE, position);
// }
// String action = toStart? "Start" : "Stop";
// Toast.makeText(getActivity(), mediaUri + " is to be " + action + "ed", Toast.LENGTH_SHORT).show();
}
// Clears the playing-row highlight and records the stopped state.
private void playbackStopped() {
// Log.i(LOG_TAG, "playbackStopped");
mRecyclerViewAdapter.deselectCurrentItem();
mIsPlaying = false;
}
// Highlights the row at pos, records the playing state and syncs
// mCurrentUri with the adapter's Uri for that row (if it differs).
private void playbackStarted(int pos) {
// Log.i(LOG_TAG, "playbackStarted: at " + pos);
mRecyclerViewAdapter.selectItem(pos);
mIsPlaying = true;
Uri newUri = mRecyclerViewAdapter.getUriAtPos(pos);
if (newUri != null && !newUri.equals(mCurrentUri))
mCurrentUri = newUri;
}
// MediaController.MediaPlayerControl: resume playback locally and tell the
// service to resume as well.
@Override
public void start() {
if (mServiceMediaPlayer != null)
mServiceMediaPlayer.start();
playbackStarted(mLastSelectedPos);
sendServiceBroadcast(SERVICE_ACTION_RESUME);
}
// MediaController.MediaPlayerControl: pause playback locally and notify the
// service.
@Override
public void pause() {
if (mServiceMediaPlayer != null)
mServiceMediaPlayer.pause();
playbackStopped();
// int newPos = mServiceMediaPlayer.getCurrentPosition();
sendServiceBroadcast(SERVICE_ACTION_PAUSE);
}
/** @return the current track's duration, or 0 when no service player is bound. */
@Override
public int getDuration() {
    return (mServiceMediaPlayer == null) ? 0 : mServiceMediaPlayer.getDuration();
}
/** @return the current playback position, or 0 when no service player is bound. */
@Override
public int getCurrentPosition() {
    return (mServiceMediaPlayer == null) ? 0 : mServiceMediaPlayer.getCurrentPosition();
}
// Seeks the bound player (if any) and forwards the seek to the service.
@Override
public void seekTo(int pos) {
if (mServiceMediaPlayer != null)
mServiceMediaPlayer.seekTo(pos);
sendServiceBroadcast(SERVICE_ACTION_SEEK, -1, pos);
}
// True only when a service player is bound and actively playing.
@Override
public boolean isPlaying() {
return mServiceMediaPlayer != null && mServiceMediaPlayer.isPlaying();
}
// Local files don't buffer; always report 0%.
@Override
public int getBufferPercentage() {
return 0;
}
// Local playback can always be paused.
@Override
public boolean canPause() {
return true;
}
// Local playback supports backward seeking.
@Override
public boolean canSeekBackward() {
return true;
}
// Local playback supports forward seeking.
@Override
public boolean canSeekForward() {
return true;
}
/** @return the player's audio session id, or -1 when no service player is bound. */
@Override
public int getAudioSessionId() {
    return (mServiceMediaPlayer == null) ? -1 : mServiceMediaPlayer.getAudioSessionId();
}
/**
 * Receives playback-state broadcasts from MediaPlayerService and mirrors
 * them in the UI (row highlight, controller, service binding).
 */
public class MyMusicFragmentReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        Bundle extras = intent.getExtras();
        // Bug fix: guard against a null extras bundle (previously NPE'd) and
        // ignore broadcasts addressed to a different client.
        if (extras == null || extras.getInt(BROADCAST_CLIENT_ID_KEY, -1) != MY_MUSIC_FRAGMENT_CLIENT_ID)
            return;
        String action = intent.getAction();
        Log.i(LOG_TAG, "onReceive: " + action);
        // Constant-first equals() also tolerates a null action (previously NPE'd).
        if (ACTION_PLAY.equals(action)) {
            int itemPosToSelect = extras.getInt(BROADCAST_RESUMED_ITEM_POS_KEY);
            playbackStarted(itemPosToSelect);
            refreshOrBind();
        }
        else if (ACTION_PAUSE.equals(action)) {
            playbackStopped();
            refreshOrBind();
        }
        else if (ACTION_STOP.equals(action)) {
            stopPlayAndUnbind();
        }
        else if (ACTION_REDRAW.equals(action)) {
            refreshOrBind();
        }
    }
    // When bound: show the controller and re-sync the selected row / playing
    // flag with the service's view of this client. When unbound: bind first;
    // the sync happens on a later broadcast.
    private void refreshOrBind(){
        if (mServiceBound) {
            mController.show();
            if (MediaPlayerService.sCurrentClient == MY_MUSIC_FRAGMENT_CLIENT_ID && mLastSelectedPos != MediaPlayerService.sCurrentClientItemPos) {
                mLastSelectedPos = MediaPlayerService.sCurrentClientItemPos;
                if (mIsPlaying)
                    playbackStopped();
                mIsPlaying = MediaPlayerService.sIsPlaying;
                if (mIsPlaying)
                    playbackStarted(mLastSelectedPos);
            }
        }
        else
            getActivity().bindService(new Intent(getActivity(), MediaPlayerService.class), mServiceConn, Context.BIND_AUTO_CREATE);
    }
}
}
|
import * as React from 'react';
import * as renderer from 'react-test-renderer';
import LoadBookPanel, {Props} from '../../src/renderer/components/LoadBookPanel';
import {createAppState, createBookState} from "../utils/testUtils";
// Snapshot tests for <LoadBookPanel/>. Only the default-props case is active;
// the xit cases are disabled pending investigation (they fail even after
// snapshot regeneration — see TODOs below).
describe('<LoadBookPanel/>', () => {
it('renders correctly', () => {
const tree = renderer
.create(createLoadBookPanel())
.toJSON();
expect(tree).toMatchSnapshot();
});
// TODO: investigate, this test is broken even after updating snapshot
xit('renders when isLoadingBook', () => {
const tree = renderer
.create(createLoadBookPanel({book: createBookState({isLoadingBook: true})}))
.toJSON();
expect(tree).toMatchSnapshot();
});
// TODO: investigate, this test is broken even after updating snapshot
xit('renders with only fileName available', () => {
const tree = renderer
.create(createLoadBookPanel({book: createBookState({fileName: "someFileName"})}))
.toJSON();
expect(tree).toMatchSnapshot();
});
// TODO: investigate, this test is broken even after updating snapshot
xit('renders with only book metadata available', () => {
const tree = renderer
.create(createLoadBookPanel({
book: createBookState({
bookWithMeta: {
metadata: {creator: "creator", title: "title"},
chapters: {}
}
})
}))
.toJSON();
expect(tree).toMatchSnapshot();
});
// TODO: investigate, this test is broken even after updating snapshot
xit('renders with both fileName and book metadata available', () => {
const tree = renderer
.create(createLoadBookPanel({
book: createBookState({
bookWithMeta: {
metadata: {creator: "creator", title: "title"},
chapters: {}
}, fileName: "someFileName"
})
}))
.toJSON();
expect(tree).toMatchSnapshot();
});
// Builds the panel with jest-mocked callbacks and default state; individual
// tests override only the slices they care about.
function createLoadBookPanel(overrides: Partial<Props> = {}) {
const props: Props = {
setFileName: jest.fn(),
setBookContent: jest.fn(),
notifyLoadingBook: jest.fn(),
setDrawerOpen: jest.fn(),
book: createBookState(),
app: createAppState(),
...overrides
};
return <LoadBookPanel {...props}/>;
}
});
|
-- Users whose last name starts with 'B' and whose age is 18-25 inclusive
-- (BETWEEN includes both endpoints).
SELECT *
FROM users
WHERE last_name LIKE 'B%'
AND age BETWEEN 18 AND 25;
|
class Table:
    """A minimal in-memory table: ordered column names plus dict-per-row records."""

    def __init__(self, columns, rows):
        # columns: ordered list of column names
        # rows: list of dicts keyed by column name
        self.columns = columns
        self.rows = rows

    def get_column_values(self, column_name):
        """Yield the value of ``column_name`` for every row, in row order.

        Raises:
            KeyError: if ``column_name`` is not declared in ``self.columns``.
        """
        # A membership test replaces the original manual index scan: the
        # computed index was never used, since rows are keyed by name.
        if column_name not in self.columns:
            raise KeyError("Column '{}' not found".format(column_name))
        for row in self.rows:
            yield row[column_name]
# Example usage
columns = ['Name', 'Age', 'City']
rows = [
{'Name': 'Alice', 'Age': 25, 'City': 'New York'},
{'Name': 'Bob', 'Age': 30, 'City': 'San Francisco'},
{'Name': 'Charlie', 'Age': 28, 'City': 'Chicago'}
]
table = Table(columns, rows)
# Prints Alice, Bob, Charlie — one name per line, in row order.
for name in table.get_column_values('Name'):
print(name)
|
#!/bin/sh
# Regenerate the autotools build system in dependency order
# (aclocal -> autoheader -> autoconf -> automake); each step only runs if
# the previous one succeeded. The autom4te cache is removed afterwards so
# stale state never leaks into the next regeneration.
aclocal && \
autoheader && \
autoconf && \
automake --foreign --force-missing --add-missing --copy
rm -rf autom4te.cache
|
const MongoClient = require('mongodb').MongoClient;
const uri = "<MongoDB connection string>";
const client = new MongoClient(uri, { useNewUrlParser: true });

// Connect, print every document in test.devices, then release the client.
client.connect(err => {
  // Bug fix: the connect error was silently ignored before.
  if (err) {
    console.error('Failed to connect:', err);
    return;
  }
  const collection = client.db("test").collection("devices");
  collection.find({}).toArray(function (err, result) {
    // Bug fix: `throw err` used to skip client.close() and leak the
    // connection; now the client is closed on both paths.
    if (err) {
      console.error('Query failed:', err);
    } else {
      console.log(result);
    }
    client.close();
  });
});
|
# Re-run ghcid's "test" target (restart + clear screen) whenever
# testing/test.norg changes.
ls testing/test.norg | entr -r -c ghcid -T "test"
|
#!/bin/bash
# Container paths of the GraphQL documents used to clear and re-populate the
# API test data; the numeric prefixes encode the required application order.
clear_gql_file_path="/api-data/00-clear-api-data.gql"
populate_general_gql_file_path="/api-data/01-populate-api-data-general.gql"
populate_openshift_gql_file_path="/api-data/02-populate-api-data-openshift.gql"
populate_kubernetes_gql_file_path="/api-data/03-populate-api-data-kubernetes.gql"
populate_controller_gql_file_path="/api-data/04-populate-api-data-controller.gql"
populate_controller_os_gql_file_path="/api-data/05-populate-api-data-controller-os.gql"
# Sends the GraphQL document in $1 to the api service as an admin.
# A fresh JWT is minted per call; the document is escaped and collapsed into
# a single-line JSON string before being POSTed. Returns wget's exit status.
send_graphql_query() {
local file_path=${1}
API_ADMIN_JWT_TOKEN=$(/home/create_jwt.py)
bearer="Authorization: bearer $API_ADMIN_JWT_TOKEN"
# GraphQL query on single line with \\n for newlines and escaped quotes
data=$(cat $file_path | sed 's/"/\\"/g' | sed 's/\\n/\\\\n/g' | awk -F'\n' '{if(NR == 1) {printf $0} else {printf "\\n"$0}}')
# Create a correct json string
json="{\"query\": \"$data\"}"
wget --header "Content-Type: application/json" --header "$bearer" api:3000/graphql --post-data "$json" --content-on-error -O -
}
# Polls the six gql files every 2 seconds (md5sum-based change detection).
# When ANY file changed, the data is cleared and each populate file is
# re-sent; a file's stored checksum is only advanced on success, so a failed
# send is retried on the next loop iteration.
watch_apidatafolder() {
chsum_clear_prev=""
chsum_populate_general_prev=""
chsum_populate_openshift_prev=""
chsum_populate_kubernetes_prev=""
chsum_populate_controller_prev=""
chsum_populate_controller_os_prev=""
while [[ true ]]
do
chsum_clear_curr=`md5sum $clear_gql_file_path`
chsum_populate_general_curr=`md5sum $populate_general_gql_file_path`
chsum_populate_openshift_curr=`md5sum $populate_openshift_gql_file_path`
chsum_populate_kubernetes_curr=`md5sum $populate_kubernetes_gql_file_path`
chsum_populate_controller_curr=`md5sum $populate_controller_gql_file_path`
chsum_populate_controller_os_curr=`md5sum $populate_controller_os_gql_file_path`
if
[[ $chsum_clear_prev != $chsum_clear_curr ]] ||
[[ $chsum_populate_general_prev != $chsum_populate_general_curr ]] ||
[[ $chsum_populate_openshift_prev != $chsum_populate_openshift_curr ]] ||
[[ $chsum_populate_kubernetes_prev != $chsum_populate_kubernetes_curr ]] ||
[[ $chsum_populate_controller_prev != $chsum_populate_controller_curr ]] ||
[[ $chsum_populate_controller_os_prev != $chsum_populate_controller_os_curr ]];
then
echo "******* Found changes in gql files in /api-data/, clearing and re-populating"
if
send_graphql_query $clear_gql_file_path;
then
chsum_clear_prev=$chsum_clear_curr
else
echo '**** ERROR while clearing, will try again.'
fi
if
send_graphql_query $populate_general_gql_file_path;
then
chsum_populate_general_prev=$chsum_populate_general_curr
else
echo "**** ERROR while re-populating $populate_general_gql_file_path, will try again."
fi
if
send_graphql_query $populate_openshift_gql_file_path;
then
chsum_populate_openshift_prev=$chsum_populate_openshift_curr
else
echo "**** ERROR while re-populating $populate_openshift_gql_file_path, will try again."
fi
if
send_graphql_query $populate_kubernetes_gql_file_path;
then
chsum_populate_kubernetes_prev=$chsum_populate_kubernetes_curr
else
echo "**** ERROR while re-populating $populate_kubernetes_gql_file_path, will try again."
fi
if
send_graphql_query $populate_controller_gql_file_path;
then
chsum_populate_controller_prev=$chsum_populate_controller_curr
else
echo "**** ERROR while re-populating $populate_controller_gql_file_path, will try again."
fi
if
send_graphql_query $populate_controller_os_gql_file_path;
then
chsum_populate_controller_os_prev=$chsum_populate_controller_os_curr
else
echo "**** ERROR while re-populating $populate_controller_os_gql_file_path, will try again."
fi
fi
sleep 2
done
}
# Blocks forever; this watcher is the script's entry point.
watch_apidatafolder
|
# Append the flag enabling the z-entropy loss term to the comma-separated
# hyper-parameter string (HPARAMS_STR is assumed to be set by the caller).
HPARAMS_STR+="z_entropy_loss=true,"
|
<filename>metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilderTest.java
package com.linkedin.metadata.search.elasticsearch.indexbuilder;
import com.google.common.collect.ImmutableMap;
import com.linkedin.metadata.TestEntitySpecBuilder;
import java.util.Map;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
 * Verifies the Elasticsearch mappings generated by {@link MappingsBuilder}
 * for the test entity spec: one field group per annotation type (KEYWORD,
 * TEXT, TEXT_PARTIAL, URN, URN_PARTIAL) plus score fields.
 */
public class MappingsBuilderTest {
@Test
public void testMappingsBuilder() {
Map<String, Object> result = MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec());
assertEquals(result.size(), 1);
Map<String, Object> properties = (Map<String, Object>) result.get("properties");
assertEquals(properties.size(), 14);
assertEquals(properties.get("urn"), ImmutableMap.of("type", "keyword"));
assertTrue(properties.containsKey("browsePaths"));
// KEYWORD
Map<String, Object> keyPart3Field = (Map<String, Object>) properties.get("keyPart3");
assertEquals(keyPart3Field.get("type"), "keyword");
assertEquals(keyPart3Field.get("normalizer"), "keyword_normalizer");
Map<String, Object> keyPart3FieldSubfields = (Map<String, Object>) keyPart3Field.get("fields");
assertEquals(keyPart3FieldSubfields.size(), 1);
assertTrue(keyPart3FieldSubfields.containsKey("keyword"));
Map<String, Object> customPropertiesField = (Map<String, Object>) properties.get("customProperties");
assertEquals(customPropertiesField.get("type"), "keyword");
assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer");
Map<String, Object> customPropertiesFieldSubfields = (Map<String, Object>) customPropertiesField.get("fields");
assertEquals(customPropertiesFieldSubfields.size(), 1);
assertTrue(customPropertiesFieldSubfields.containsKey("keyword"));
// TEXT
// NOTE(review): the TEXT sections below assert "keyword"-typed mappings
// with a delimited subfield — confirm this matches MappingsBuilder's
// intended TEXT handling rather than a copy of the KEYWORD branch.
Map<String, Object> nestedArrayStringField = (Map<String, Object>) properties.get("nestedArrayStringField");
assertEquals(nestedArrayStringField.get("type"), "keyword");
assertEquals(nestedArrayStringField.get("normalizer"), "keyword_normalizer");
Map<String, Object> nestedArrayStringFieldSubfields = (Map<String, Object>) nestedArrayStringField.get("fields");
assertEquals(nestedArrayStringFieldSubfields.size(), 2);
assertTrue(nestedArrayStringFieldSubfields.containsKey("delimited"));
assertTrue(nestedArrayStringFieldSubfields.containsKey("keyword"));
Map<String, Object> nestedArrayArrayField = (Map<String, Object>) properties.get("nestedArrayArrayField");
assertEquals(nestedArrayArrayField.get("type"), "keyword");
assertEquals(nestedArrayArrayField.get("normalizer"), "keyword_normalizer");
Map<String, Object> nestedArrayArrayFieldSubfields = (Map<String, Object>) nestedArrayArrayField.get("fields");
assertEquals(nestedArrayArrayFieldSubfields.size(), 2);
assertTrue(nestedArrayArrayFieldSubfields.containsKey("delimited"));
assertTrue(nestedArrayArrayFieldSubfields.containsKey("keyword"));
// TEXT with addToFilters
Map<String, Object> textField = (Map<String, Object>) properties.get("textFieldOverride");
assertEquals(textField.get("type"), "keyword");
assertEquals(textField.get("normalizer"), "keyword_normalizer");
Map<String, Object> textFieldSubfields = (Map<String, Object>) textField.get("fields");
assertEquals(textFieldSubfields.size(), 2);
assertTrue(textFieldSubfields.containsKey("delimited"));
assertTrue(textFieldSubfields.containsKey("keyword"));
// TEXT_PARTIAL
Map<String, Object> textArrayField = (Map<String, Object>) properties.get("textArrayField");
assertEquals(textArrayField.get("type"), "keyword");
assertEquals(textArrayField.get("normalizer"), "keyword_normalizer");
Map<String, Object> textArrayFieldSubfields = (Map<String, Object>) textArrayField.get("fields");
assertEquals(textArrayFieldSubfields.size(), 3);
assertTrue(textArrayFieldSubfields.containsKey("delimited"));
assertTrue(textArrayFieldSubfields.containsKey("ngram"));
assertTrue(textArrayFieldSubfields.containsKey("keyword"));
// URN
Map<String, Object> foreignKey = (Map<String, Object>) properties.get("foreignKey");
assertEquals(foreignKey.get("type"), "text");
assertEquals(foreignKey.get("analyzer"), "urn_component");
Map<String, Object> foreignKeySubfields = (Map<String, Object>) foreignKey.get("fields");
assertEquals(foreignKeySubfields.size(), 1);
assertTrue(foreignKeySubfields.containsKey("keyword"));
// URN_PARTIAL
Map<String, Object> nestedForeignKey = (Map<String, Object>) properties.get("nestedForeignKey");
assertEquals(nestedForeignKey.get("type"), "text");
assertEquals(nestedForeignKey.get("analyzer"), "urn_component");
Map<String, Object> nestedForeignKeySubfields = (Map<String, Object>) nestedForeignKey.get("fields");
assertEquals(nestedForeignKeySubfields.size(), 2);
assertTrue(nestedForeignKeySubfields.containsKey("keyword"));
assertTrue(nestedForeignKeySubfields.containsKey("ngram"));
// Scores
Map<String, Object> feature1 = (Map<String, Object>) properties.get("feature1");
assertEquals(feature1.get("type"), "double");
Map<String, Object> feature2 = (Map<String, Object>) properties.get("feature2");
assertEquals(feature2.get("type"), "double");
}
}
|
// Bug fix: `React` was referenced (React.FC, JSX) without being imported,
// which only compiles when React is available as a global; sibling files in
// this project import it explicitly.
import * as React from "react";
import styled from "styled-components";

import type { ContainerProps } from "./Container.types";

/** Unstyled markup: a centered, overflow-hidden wrapper div. */
const ContainerMarkup: React.FC<ContainerProps> = (props) => (
  <div className={`overflow-hidden mx-auto wpmax-100 ${props.className}`}>
    {props.children}
  </div>
);

/** Fixed-width container: 1280px when `isWide`, 960px otherwise. */
const Container: React.FC<ContainerProps> = styled(ContainerMarkup)`
  width: ${(props) => (props.isWide ? "1280px" : "960px")};
`;

export default Container;
|
<reponame>leongaban/redux-saga-exchange<gh_stars>1-10
import * as R from 'ramda';
import { IDependencies } from 'shared/types/app';
import { call, put, takeLatest, select, all, fork } from 'redux-saga/effects';
import { stopSubmit } from 'redux-form';
import { delay } from 'redux-saga';
import getErrorMsg,
{
isApiError, isPasswordError, isTokenInvalidError, isEmailAlreadyConfirmedError,
getApiError, isUserNotFoundError, getNicknameError,
} from 'shared/helpers/getErrorMsg';
import { ILoginInfo, ILoginCredentials } from 'shared/types/models';
import { sessionExpirationLimit } from 'shared/constants';
import { actions as userServiceActions } from 'services/user';
import { actions as notificationService } from 'services/notification';
import * as actions from '../actions';
import * as selectors from '../data/selectors';
import * as reduxFormEntries from '../data/reduxFormEntries';
import { DUPLICATE_EMAIL_ERROR_CODE } from '../../constants';
import * as NS from '../../namespace';
import validateSaga from './validateSagas';
const {
registrationFormEntry, loginFormEntry,
passwordRecoveryFormEntry, changePasswordFormEntry, twoFactorFormEntry,
} = reduxFormEntries;
/**
 * Builds the root auth saga: routes every auth action type to its worker
 * saga and forks the form-validation saga alongside them.
 */
function getSaga(deps: IDependencies) {
  const loginType: NS.ILogin['type'] = 'AUTH:LOGIN';
  const logoutType: NS.ILogout['type'] = 'AUTH:LOGOUT';
  const registerType: NS.IRegister['type'] = 'AUTH:REGISTER';
  const resetPasswordType: NS.IResetPassword['type'] = 'AUTH:RESET_PASSWORD';
  const changePasswordType: NS.IChangePassword['type'] = 'AUTH:CHANGE_PASSWORD';
  const startTimer: NS.IStartTimer['type'] = 'AUTH:START_TIMER';
  const confirmEmailType: NS.IConfirmEmail['type'] = 'AUTH:CONFIRM_EMAIL';
  const resendConfirmationEmailType: NS.IResendConfirmationEmail['type'] = 'AUTH:RESEND_CONFIRMATION_EMAIL';
  const sendTwoFactorVerificationDataType: NS.ISendTwoFactorVerificationData['type'] = 'AUTH:SEND_TWO_FACTOR_DATA';
  return function* saga() {
    yield all([
      takeLatest(loginType, executeLogin, deps),
      takeLatest(logoutType, executeLogout, deps),
      takeLatest(registerType, executeRegister, deps),
      takeLatest(resetPasswordType, executeResetPassword, deps),
      takeLatest(changePasswordType, executeChangePassword, deps),
      takeLatest(startTimer, executeStartTimer, deps),
      takeLatest(confirmEmailType, executeConfirmEmail, deps),
      takeLatest(resendConfirmationEmailType, executeResendConfirmationEmail, deps),
      takeLatest(sendTwoFactorVerificationDataType, executeSendTwoFactorVerificationData, deps),
      // Bug fix: `fork` must be passed to `all` as an effect descriptor.
      // The previous `yield fork(...)` ran the fork eagerly and placed the
      // resulting Task object (not an effect) into the array.
      fork(validateSaga, deps),
    ]);
  };
}
// Worker for AUTH:LOGIN. Admin-panel logins restore the session and load the
// admin profile; regular logins may require a second factor, in which case
// the 2FA info is stored instead of completing the login. API errors are
// mapped onto the login form (email field for unknown users, _error
// otherwise) and a loginFail action is always dispatched on failure.
function* executeLogin({ api }: IDependencies, { payload }: NS.ILogin) {
try {
const { email, password, remember, isAdminPanel } = payload;
const userCredentials: ILoginCredentials = { email, password, isPersistance: remember };
if (isAdminPanel) {
yield call(api.auth.loginToAdminPanel, userCredentials);
const userId = yield call(api.auth.restoreAdminSession);
const user = yield call(api.users.loadUserProfile, userId);
yield put(userServiceActions.adminLogin(user));
} else {
const data: ILoginInfo = yield call(api.auth.login, userCredentials);
if (data.secondFactorRequired) {
if (data.provider === 'Email') {
yield put(notificationService.setNotification({
kind: 'info',
text: 'Email with verification code has been sent',
}));
}
yield put(actions.setTwoFactorInfo({ isRequired: true, provider: data.provider }));
} else {
yield put(userServiceActions.login());
}
}
yield put(actions.loginSuccess());
} catch (error) {
const { name, fieldNames } = loginFormEntry;
if (isApiError(error)) {
const getError = getApiError(error);
if (getError(isUserNotFoundError)) {
yield put(stopSubmit(name, {
[fieldNames.email]: getError(isUserNotFoundError),
}));
} else {
yield put(stopSubmit(name, {
_error: getError(R.T),
}));
}
} else {
yield put(stopSubmit(name, {
_error: getErrorMsg(error),
}));
}
yield put(actions.loginFail(getErrorMsg(error)));
}
}
// Worker for AUTH:LOGOUT: calls the matching API logout (admin panel vs.
// regular) and mirrors it in the user service state.
// NOTE(review): `sockets` is destructured but unused here — confirm it can
// be dropped from the signature.
function* executeLogout({ api, sockets }: IDependencies, { payload: isAdminPanel }: NS.ILogout) {
try {
if (isAdminPanel) {
yield call(api.auth.logoutFromAdminPanel);
yield put(userServiceActions.adminLogout());
} else {
yield call(api.auth.logout);
yield put(userServiceActions.logout());
}
yield put(actions.logoutSuccess());
} catch (error) {
yield put(actions.logoutFail(getErrorMsg(error)));
}
}
// Worker for AUTH:CONFIRM_EMAIL: confirms the token with the API and shows a
// success notification. An invalid-token API error additionally flips the
// isTokenInvalid flag so the UI can show the dedicated state.
function* executeConfirmEmail({ api }: IDependencies, { payload }: NS.IConfirmEmail) {
try {
yield call(api.auth.confirmEmail, payload);
yield put(actions.confirmEmailSuccess());
yield put(notificationService.setNotification({
kind: 'info',
text: 'Confirmation was successful',
}));
} catch (error) {
if (isApiError(error)) {
const getError = getApiError(error);
if (getError(isTokenInvalidError)) {
yield put(actions.setIsTokenInvalid(true));
}
yield put(actions.confirmEmailFail(getError(R.T)));
yield put(notificationService.setNotification({
kind: 'error',
text: getError(isTokenInvalidError) || getError(R.T),
}));
} else {
yield put(actions.confirmEmailFail(getErrorMsg(error)));
}
}
}
// Worker for AUTH:REGISTER: strips the UTM query string from the credentials
// before the API call and maps API errors onto the registration form fields
// (password, nickname, and a duplicate-email _error).
function* executeRegister({ api }: IDependencies, { payload }: NS.IRegister) {
try {
const credentials = R.omit(['queryStringForUtm'], payload);
yield call(api.auth.register, credentials, payload.queryStringForUtm, payload.captcha);
yield put(actions.registerSuccess());
} catch (error) {
const { name, fieldNames } = registrationFormEntry;
if (isApiError(error)) {
const getError = getApiError(error);
yield put(stopSubmit(name, {
[fieldNames.password]: getError(isPasswordError),
['_error']: getError((code) => code === 'duplicate_email')
? DUPLICATE_EMAIL_ERROR_CODE
: undefined,
[fieldNames.nickname]: getNicknameError(error, payload.nickname),
}));
} else {
yield put(stopSubmit(name, {
_error: getErrorMsg(error),
}));
}
yield put(actions.registerFail(getErrorMsg(error)));
}
}
// Worker for AUTH:CHANGE_PASSWORD. The 1-second delay lets the success
// notification render before changePasswordSuccess triggers any navigation.
function* executeChangePassword({ api }: IDependencies, { payload }: NS.IChangePassword) {
try {
yield call(api.auth.changePassword, payload);
yield put(notificationService.setNotification({
kind: 'info',
text: 'Password has been successfully changed',
}));
yield delay(1000);
yield put(actions.changePasswordSuccess());
} catch (error) {
const { name } = changePasswordFormEntry;
if (isApiError(error)) {
const getError = getApiError(error);
yield put(stopSubmit(name, {
_error: getError(R.T),
}));
} else {
yield put(stopSubmit(name, {
_error: getErrorMsg(error),
}));
}
yield put(actions.changePasswordFail(getErrorMsg(error)));
}
}
// Worker for AUTH:RESET_PASSWORD: requests a reset email for the given
// address and reports errors through the recovery form's _error.
function* executeResetPassword({ api }: IDependencies, { payload }: NS.IResetPassword) {
try {
yield call(api.auth.resetPassword, payload.email);
yield put(actions.resetPasswordSuccess());
yield put(notificationService.setNotification({
kind: 'info',
text: 'Email with reset link has been sent',
}));
} catch (error) {
const { name } = passwordRecoveryFormEntry;
if (isApiError(error)) {
const getError = getApiError(error);
yield put(stopSubmit(name, {
_error: getError(R.T),
}));
} else {
yield put(stopSubmit(name, {
_error: getErrorMsg(error),
}));
}
yield put(actions.resetPasswordFail(getErrorMsg(error)));
}
}
// Worker for AUTH:START_TIMER: once per second, recomputes the remaining
// session time from wall-clock elapsed time (not by decrementing, so missed
// ticks don't drift) and stops the timer when it reaches zero.
function* executeStartTimer() {
const startDate = new Date();
while (yield select(selectors.selectIsTimerStarted)) {
const timerValue = yield select(selectors.selectTimerValue);
if (timerValue > 0) {
const currentDate = new Date();
const secondsPassedFromTimerStart = Math.round((currentDate.getTime() - startDate.getTime()) / 1000);
const remainingTime = sessionExpirationLimit - secondsPassedFromTimerStart;
yield put(actions.setTimerValue(remainingTime > 0 ? remainingTime : 0));
yield call(delay, 1000);
} else {
yield put(actions.stopTimer());
}
}
}
// Worker for AUTH:RESEND_CONFIRMATION_EMAIL: requests a new confirmation
// email; an already-confirmed address surfaces as an error notification.
function* executeResendConfirmationEmail({ api }: IDependencies, { payload }: NS.IResendConfirmationEmail) {
try {
yield call(api.auth.resendConfirmationEmail, payload);
yield put(actions.resendConfirmationEmailSuccess());
yield put(notificationService.setNotification({
kind: 'info',
text: 'Email with confirmation link has been sent',
}));
} catch (error) {
if (isApiError(error)) {
const getError = getApiError(error);
if (getError(isEmailAlreadyConfirmedError)) {
yield put(notificationService.setNotification({
kind: 'error',
text: 'User email already confirmed',
}));
}
yield put(actions.resendConfirmationEmailFail(getError(R.T)));
} else {
yield put(actions.resendConfirmationEmailFail(getErrorMsg(error)));
}
}
}
// Worker for AUTH:SEND_TWO_FACTOR_DATA: verifies the 2FA code and completes
// the login on success; on failure the form is unblocked and the error is
// both stored and shown as a notification.
function* executeSendTwoFactorVerificationData({ api }: IDependencies, { payload }: NS.ISendTwoFactorVerificationData) {
try {
yield call(api.auth.twoFactorVerify, payload);
yield put(actions.sendTwoFactorDataSuccess());
yield put(userServiceActions.login());
} catch (error) {
const { name } = twoFactorFormEntry;
const errorText = isApiError(error) ? getApiError(error)(R.T) : getErrorMsg(error);
yield put(stopSubmit(name));
yield put(actions.sendTwoFactorDataFail(errorText));
yield put(notificationService.setNotification({
kind: 'error',
text: errorText,
}));
}
}
export default getSaga;
|
<filename>open-sphere-plugins/arcgis/src/main/java/io/opensphere/arcgis2/esri/EsriPictureFillSymbol.java
package io.opensphere.arcgis2.esri;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.annotate.JsonProperty;
/**
 * Jackson binding for an ESRI picture fill symbol, used to symbolize polygon
 * geometries. The <code>type</code> property for this symbol is
 * <code>esriPFS</code>.
 *
 * <p>In addition to the picture-marker fields inherited from
 * {@link EsriPictureMarkerSymbol} (base64 <code>imageData</code> and a
 * relative <code>url</code> resolvable against the map-layer or feature-layer
 * image resource), a picture fill carries an outline and independent x/y
 * scale factors.</p>
 */
@JsonAutoDetect(JsonMethod.NONE)
public class EsriPictureFillSymbol extends EsriPictureMarkerSymbol
{
    /** The line symbol drawn around the filled polygon. */
    @JsonProperty("outline")
    private EsriSimpleLineSymbol myOutlineSymbol;

    /** Horizontal scale factor applied to the fill picture. */
    @JsonProperty("xscale")
    private int myHorizontalScale;

    /** Vertical scale factor applied to the fill picture. */
    @JsonProperty("yscale")
    private int myVerticalScale;

    /**
     * Gets the outline.
     *
     * @return the outline line symbol, or <code>null</code> if none was set
     */
    public EsriSimpleLineSymbol getOutline()
    {
        return myOutlineSymbol;
    }

    /**
     * Sets the outline.
     *
     * @param outline the new outline line symbol
     */
    public void setOutline(EsriSimpleLineSymbol outline)
    {
        myOutlineSymbol = outline;
    }

    /**
     * Gets the x scale.
     *
     * @return the horizontal scale factor
     */
    public int getXScale()
    {
        return myHorizontalScale;
    }

    /**
     * Sets the x scale.
     *
     * @param xScale the new horizontal scale factor
     */
    public void setXScale(int xScale)
    {
        myHorizontalScale = xScale;
    }

    /**
     * Gets the y scale.
     *
     * @return the vertical scale factor
     */
    public int getYScale()
    {
        return myVerticalScale;
    }

    /**
     * Sets the y scale.
     *
     * @param yScale the new vertical scale factor
     */
    public void setYScale(int yScale)
    {
        myVerticalScale = yScale;
    }
}
|
<reponame>nabeelkhan/Oracle-DBA-Life<filename>Scripts_extra/index_partitioned.sql
-- index_partitioned.sql
-- SQL*Plus report: every partition of every partitioned index in the
-- database (excluding the core dictionary schemas), with its storage
-- settings and the optimizer statistics captured at LAST_ANALYZED.
set echo off
set feedback off
set linesize 512
prompt
prompt All Partitioned Indexes in Database
prompt
-- Suppress repeated owner/table/index values so each group prints once,
-- with a blank line between index owners.
break on INDEX_OWNER skip 1 on TABLE_NAME on INDEX_NAME
SELECT IP.INDEX_OWNER, IX.TABLE_OWNER || '.' || IX.TABLE_NAME "OWNER.TABLE", IP.INDEX_NAME, IP.PARTITION_NAME,
IP.PARTITION_POSITION, IP.BLEVEL, IP.CLUSTERING_FACTOR, IP.DISTINCT_KEYS,
IP.NUM_ROWS, IP.PCT_FREE, IP.INI_TRANS, IP.MAX_TRANS, IP.INITIAL_EXTENT,
IP.NEXT_EXTENT, IP.MIN_EXTENT, IP.MAX_EXTENT, IP.PCT_INCREASE, IP.STATUS,
IP.LEAF_BLOCKS, IP.AVG_LEAF_BLOCKS_PER_KEY, IP.AVG_DATA_BLOCKS_PER_KEY,
IP.SAMPLE_SIZE, IP.LAST_ANALYZED
FROM DBA_INDEXES IX,
DBA_IND_PARTITIONS IP
-- Filter out internal schemas on both the index and the underlying table,
-- and keep only indexes flagged as partitioned in DBA_INDEXES.
WHERE IX.OWNER NOT IN ('SYS','SYSTEM','OUTLN','DBSNMP')
AND IX.TABLE_OWNER NOT IN ('SYS','SYSTEM','OUTLN','DBSNMP')
AND IX.PARTITIONED = 'YES'
AND IP.INDEX_OWNER NOT IN ('SYS','SYSTEM','OUTLN','DBSNMP')
-- Join each index to its partition rows by owner + index name.
AND IP.INDEX_OWNER = IX.OWNER
AND IP.INDEX_NAME = IX.INDEX_NAME
ORDER BY 1, 2, 3;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.