| repo_name (stringlengths 6–101) | path (stringlengths 4–300) | text (stringlengths 7–1.31M) |
|---|---|---|
Zalexanninev15/VitNX
|
docs/search--/s_5035.js
|
<reponame>Zalexanninev15/VitNX
search_result['5035']=["topic_0000000000001310.html","DockContentDragFilter Constructor",""];
|
gremi64/RSS_DOWNLOADER
|
src/main/java/fr/rss/download/api/service/impl/TvShowServiceImpl.java
|
<filename>src/main/java/fr/rss/download/api/service/impl/TvShowServiceImpl.java
package fr.rss.download.api.service.impl;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.PostConstruct;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.ApplicationHome;
import org.springframework.stereotype.Service;
import fr.rss.download.api.constantes.LANGUE;
import fr.rss.download.api.constantes.QUALITE;
import fr.rss.download.api.model.EpisodeHebergeur;
import fr.rss.download.api.model.Hebergeur;
import fr.rss.download.api.model.zt.tvshow.TVShow;
import fr.rss.download.api.model.zt.tvshow.TVShowSeason;
import fr.rss.download.api.service.ITvShowService;
import fr.rss.download.api.utils.XMLTools;
@Service
public class TvShowServiceImpl implements ITvShowService {
private static final Logger log = LoggerFactory.getLogger(TvShowServiceImpl.class);
private static final String ZT_LINK = "https://ww2.zone-telechargement1.com";
List<TVShow> tvShowList;
String tvShowListFile;
public TvShowServiceImpl(@Value("${zt.tvshow.filename}") String fileName) {
tvShowList = new ArrayList<>();
modifierFichierDestination(fileName);
File f = new File(tvShowListFile);
if (!f.exists()) {
log.debug("Le fichier des séries '{}' n'existe pas encore => CREATION", tvShowListFile);
creerSerieFichier();
}
}
/**
* Parses a ZT page and collects the links for each host (hébergeur)
*
* @param link URL of the ZT page to parse
* @return list of hosts with their episode links
*/
@Override
public List<Hebergeur> parseZtTvShowLink(String link) {
List<Hebergeur> listFournisseurs = new ArrayList<>();
try {
Document document = Jsoup.connect(link)
.userAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36")
.get();
Elements divClassOnPage = document.select("div.postinfo");
Elements informations = divClassOnPage.select("b");
listFournisseurs = new ArrayList<>();
Hebergeur fournisseur = new Hebergeur();
for (Element information : informations) {
// If the "b" element contains a div,
// it is the host (provider) name
if (!information.select("div").isEmpty()) {
fournisseur = new Hebergeur();
listFournisseurs.add(fournisseur);
fournisseur.setName(information.text());
} else if (!information.select("a[href]").isEmpty()) {
EpisodeHebergeur episode = new EpisodeHebergeur();
episode.setName(information.text());
episode.setUrl(information.select("a[href]").attr("href").replaceAll("(\\r|\\n)", ""));
fournisseur.getEpisodes().add(episode);
}
}
log.debug(listFournisseurs.toString());
} catch (IOException e) {
throw new RuntimeException(e);
}
return listFournisseurs;
}
/**
* Initializes the TV show file if it does not exist.
* WARNING: this method erases the whole content of the file if it already exists.
*
* @return the list of TV shows written to the file
*/
@Override
public List<TVShow> creerSerieFichier() {
try {
// TODO: add the TV shows you want here
TVShow tvShow = new TVShow();
String gameOfThrones = "Game of Thrones";
tvShow = new TVShow(gameOfThrones, QUALITE.HD720P.name(), LANGUE.VOSTFR.name());
tvShow.addTvShowSeason(
new TVShowSeason("6", ZT_LINK + "/15860-telecharger-game-of-thrones-saison-6-hd-720p-vostfr.html"));
tvShow.addTvShowSeason(
new TVShowSeason("7", ZT_LINK + "/26987-game-of-thrones-saison-7-vostfr-hd-720p-streaming.html"));
tvShowList.add(tvShow);
tvShow = new TVShow(gameOfThrones, QUALITE.HDTV.name(), LANGUE.VOSTFR.name());
tvShow.addTvShowSeason(
new TVShowSeason("6", ZT_LINK + "/15859-game-of-thrones-saison-6-vostfr-hdtv-streaming.html"));
tvShow.addTvShowSeason(
new TVShowSeason("7", ZT_LINK + "/26988-game-of-thrones-saison-7-vostfr-hdtv-streaming.html"));
tvShowList.add(tvShow);
String fearTheWalkingDead = "Fear The Walking Dead";
tvShow = new TVShow(fearTheWalkingDead, QUALITE.HDTV.name(), LANGUE.VOSTFR.name());
tvShow.addTvShowSeason(new TVShowSeason("3", ZT_LINK + "/25432-fear-the-walking-dead-saison-3-hdtv-vostfr.html"));
tvShowList.add(tvShow);
tvShow = new TVShow(fearTheWalkingDead, QUALITE.HDTV.name(), LANGUE.FRENCH.name());
tvShow.addTvShowSeason(new TVShowSeason("3", ZT_LINK + "/25619-fear-the-walking-dead-saison-3-hdtv-french.html"));
tvShowList.add(tvShow);
tvShow = new TVShow(fearTheWalkingDead, QUALITE.HD720P.name(), LANGUE.VOSTFR.name());
tvShow.addTvShowSeason(
new TVShowSeason("3", ZT_LINK + "/25431-fear-the-walking-dead-saison-3-hd-720p-vostfr.html"));
tvShowList.add(tvShow);
tvShow = new TVShow(fearTheWalkingDead, QUALITE.HD720P.name(), LANGUE.FRENCH.name());
tvShow.addTvShowSeason(
new TVShowSeason("3", ZT_LINK + "/25615-fear-the-walking-dead-saison-3-hd-720p-french.html"));
tvShowList.add(tvShow);
log.debug(tvShowList.toString());
//Encode
XMLTools.encodeToFile(tvShowList, tvShowListFile);
log.info("Fichier des series créé sans erreur : {}", tvShowListFile);
} catch (Exception e) {
e.printStackTrace();
log.error("Erreur lors de l'ajout d'une série TV dans le fichier : " + tvShowListFile, e);
}
return tvShowList;
}
/**
* Adds a TV show and one of its season links to the file.
*
* @param tvShowName name of the TV show
* @param qualite video quality
* @param langue language
* @param seasonNumber season number
* @param seasonLink link to the season page
* @return the updated list of TV shows
*/
@Override
public List<TVShow> ajouterSerieFichier(String tvShowName, QUALITE qualite, LANGUE langue, String seasonNumber, String seasonLink) {
// Refresh this.tvShowList from the file
recupererTvShowList();
try {
boolean foundInList = false;
TVShow newTvShow = new TVShow(tvShowName, qualite.name(), langue.name());
// Does the show already exist in the list?
for (TVShow tvShow : tvShowList) {
// If the show is found (matching name, quality and language), leave the loop;
// names are compared case-insensitively and without whitespace
if (tvShowName.toUpperCase().replaceAll("\\s", "").equals(tvShow.getName().toUpperCase().replaceAll("\\s", ""))
&& qualite.equals(QUALITE.valueOf(tvShow.getQualite().toUpperCase()))
&& langue.equals(LANGUE.valueOf(tvShow.getLangue().toUpperCase()))) {
newTvShow = tvShow;
foundInList = true;
log.debug("Serie {} déjà présente dans la liste", tvShowName);
break;
}
}
// If it is already in the list, check whether the season is already present; if so, remove it (so it ends up being updated)
if (foundInList) {
// If the season is already present
if (newTvShow.getTvShowSeason().stream().anyMatch(ls -> ls.getSaison().equals(seasonNumber))) {
for (TVShowSeason tvShowSeason : newTvShow.getTvShowSeason()) {
if (tvShowSeason.getSaison().equals(seasonNumber)) {
// Remove the season if it already exists
log.debug("Serie {} : suppression de la saison {}", tvShowName, seasonNumber);
newTvShow.getTvShowSeason().remove(tvShowSeason);
break;
}
}
}
}
// Add the new season
log.debug("Serie {} : ajout de la saison {}", tvShowName, seasonNumber);
newTvShow.addTvShowSeason(new TVShowSeason(seasonNumber, seasonLink));
// If the show was not already in the list, add it
if (!foundInList) {
tvShowList.add(newTvShow);
}
log.debug(tvShowList.toString());
//Encode
XMLTools.encodeToFile(tvShowList, tvShowListFile);
} catch (Exception e) {
e.printStackTrace();
log.error("Erreur lors de l'ajout d'une série TV dans le fichier : " + tvShowListFile, e);
}
return tvShowList;
}
/**
* Refreshes the TV show list from the file.
*
* @return the list of TV shows read from the file
*/
@SuppressWarnings("unchecked")
@PostConstruct
@Override
public List<TVShow> recupererTvShowList() {
try {
// Decode
tvShowList = (ArrayList<TVShow>) XMLTools.decodeFromFile(tvShowListFile);
log.debug(tvShowList.toString());
} catch (Exception e) {
e.printStackTrace();
log.error("Erreur lors de la récuperation des séries TV via le fichier : " + tvShowListFile, e);
}
return tvShowList;
}
/**
* Changes the destination file (read/write) for the TV show list.
*
* @param fileName file name, resolved relative to the application directory
* @return the resolved path of the TV show list file
*/
@Override
public String modifierFichierDestination(String fileName) {
ApplicationHome home = new ApplicationHome(this.getClass());
File jarDir = home.getDir();
File file = new File(jarDir, fileName);
tvShowListFile = file.toString();
return tvShowListFile;
}
public void addTvShowList(TVShow tvShow) {
if (tvShowList == null) {
tvShowList = new ArrayList<>();
}
tvShowList.add(tvShow);
}
@Override
public String toString() {
StringBuilder str = new StringBuilder();
str.append("TVShowList :").append("\n");
str.append("\t").append("TVShow = " + tvShowList);
return str.toString();
}
}
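As an aside, a minimal, self-contained sketch of the same Jsoup selection pattern used in parseZtTvShowLink above. The class name and the URL are placeholders; only the `div.postinfo` / `b` / `a[href]` structure is taken from the method, and a jsoup dependency on the classpath is assumed.

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

public class PostInfoScrapeSketch {
    public static void main(String[] args) throws Exception {
        // Fetch the page with an explicit user agent, as the service does
        Document doc = Jsoup.connect("https://example.com/some-tv-show-page.html")
                .userAgent("Mozilla/5.0")
                .get();
        // Walk the <b> elements inside div.postinfo: a nested <div> marks a host name,
        // an <a href> marks an episode link belonging to the current host
        for (Element b : doc.select("div.postinfo b")) {
            if (!b.select("div").isEmpty()) {
                System.out.println("Host: " + b.text());
            } else if (!b.select("a[href]").isEmpty()) {
                System.out.println("  Episode: " + b.text() + " -> " + b.select("a[href]").attr("href"));
            }
        }
    }
}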
|
lucamolteni/droolsjbpm-tools
|
drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/debug/actions/DeleteLogAction.java
|
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.eclipse.debug.actions;
import org.drools.eclipse.DroolsEclipsePlugin;
import org.drools.eclipse.DroolsPluginImages;
import org.drools.eclipse.debug.AuditView;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.swt.custom.BusyIndicator;
/**
* Action to clear the log.
*/
public class DeleteLogAction extends Action {
private AuditView view;
public DeleteLogAction(AuditView view) {
super(null, IAction.AS_PUSH_BUTTON);
this.view = view;
setToolTipText("Clear Log");
setImageDescriptor(DroolsPluginImages.getImageDescriptor(DroolsPluginImages.DELETE_LOG));
setDisabledImageDescriptor(DroolsPluginImages.getImageDescriptor(DroolsPluginImages.DELETE_LOG_DISABLED));
setId(DroolsEclipsePlugin.getUniqueIdentifier() + ".ClearLogAction");
}
public void run() {
if (!view.isAvailable()) {
return;
}
view.deleteLog();
BusyIndicator.showWhile(view.getViewer().getControl().getDisplay(), new Runnable() {
public void run() {
view.getViewer().refresh();
}
});
}
}
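For context (not part of the original file), an action like this is typically contributed to its view's toolbar. The sketch below uses standard Eclipse workbench API; the helper class and method names are hypothetical, and it assumes the Drools Eclipse plug-in classes are on the classpath.

import org.drools.eclipse.debug.AuditView;
import org.drools.eclipse.debug.actions.DeleteLogAction;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.IViewSite;

public final class DeleteLogActionWiringSketch {
    // Contribute the action to the view's toolbar (typically called from createPartControl()).
    static void contribute(IViewSite site, AuditView view) {
        IActionBars actionBars = site.getActionBars();
        IToolBarManager toolbar = actionBars.getToolBarManager();
        toolbar.add(new DeleteLogAction(view));
        actionBars.updateActionBars();
    }
}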
|
DavideCorradiDev/houzi-game-engine
|
source/houal/src/hou/al/al_object_handle.cpp
|
<filename>source/houal/src/hou/al/al_object_handle.cpp
// Houzi Game Engine
// Copyright (c) 2018 <NAME>
// Licensed under the MIT license.
#include "hou/al/al_object_handle.hpp"
#include "hou/al/al_missing_context_error.hpp"
#include <utility>
namespace hou
{
namespace al
{
object_handle::object_handle(ALuint name) noexcept
: non_copyable()
, m_name(name)
{}
object_handle::object_handle(object_handle&& other) noexcept
: m_name(std::move(other.m_name))
{
other.m_name = 0u;
}
object_handle::~object_handle()
{}
ALuint object_handle::get_name() const noexcept
{
return m_name;
}
device_owned_object_handle::device_owned_object_handle(ALuint name)
: object_handle(name)
, m_owning_device_uid(0u)
{
HOU_AL_CHECK_CONTEXT_EXISTENCE();
m_owning_device_uid = al::context::get_current()->get_device_uid();
}
device::uid_type device_owned_object_handle::get_owning_device_uid() const
noexcept
{
return m_owning_device_uid;
}
context_owned_object_handle::context_owned_object_handle(ALuint name)
: object_handle(name)
, m_owning_context_uid(0u)
{
HOU_AL_CHECK_CONTEXT_EXISTENCE();
m_owning_context_uid = al::context::get_current()->get_uid();
}
context::uid_type context_owned_object_handle::get_owning_context_uid() const
noexcept
{
return m_owning_context_uid;
}
} // namespace al
} // namespace hou
|
accosine/odysseus
|
packages/iliad/src/components/FrontMatter/RecipeFrontMatter.js
|
import React, { Fragment } from 'react';
import ChipInput from './ChipInput';
import FrontMatterTextfield from './FrontMatterTextfield';
export default props => (
<Fragment>
<ChipInput
id="ingredients"
onChange={ingredients => props.onChange({ ingredients })}
chipData={props.ingredients}
/>
<FrontMatterTextfield
id="instructions"
multiline
rows={2}
rowsMax={5}
{...props}
/>
<FrontMatterTextfield id="preptime" inputType="number" {...props} />
<FrontMatterTextfield id="cooktime" inputType="number" {...props} />
<FrontMatterTextfield id="recipeyield" {...props} />
<FrontMatterTextfield id="servingsize" {...props} />
<FrontMatterTextfield id="calories" inputType="number" {...props} />
<FrontMatterTextfield id="fatcontent" inputType="number" {...props} />
</Fragment>
);
|
sarang-apps/darshan_browser
|
ios/chrome/browser/signin/feature_flags.h
|
<filename>ios/chrome/browser/signin/feature_flags.h
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_CHROME_BROWSER_SIGNIN_FEATURE_FLAGS_H_
#define IOS_CHROME_BROWSER_SIGNIN_FEATURE_FLAGS_H_
#include "base/feature_list.h"
// Feature flag to enable NSURLSession for GAIAAuthFetcherIOS.
extern const base::Feature kUseNSURLSessionForGaiaSigninRequests;
#endif // IOS_CHROME_BROWSER_SIGNIN_FEATURE_FLAGS_H_
|
focusunsink/study_python
|
np/8_assure_quality_with_testing/19_to_20_unit_test.py
|
<filename>np/8_assure_quality_with_testing/19_to_20_unit_test.py
# -*- coding:utf-8 -*-
"""
Project : numpy
File Name : 19_to_20
Author : Focus
Date : 8/23/2021 9:49 AM
Keywords :
Abstract :
Param :
Usage : py 19_to_20
Reference :
"""
import numpy as np
import unittest
# import matplotlib.pyplot as plt
# import sys
def factorial(n):
if n == 0:
return 1
if n < 0:
raise ValueError("Unexpected negative value")
return np.arange(1, n + 1).cumprod()
class FactorialTest(unittest.TestCase):
def test_factorial(self):
self.assertEqual(6, factorial(3)[-1])
np.testing.assert_equal(np.array([1, 2, 6]), factorial(3))
def test_zero(self):
self.assertEqual(1, factorial(0))
def test_negative(self):
self.assertRaises(ValueError, factorial, -10)
unittest.main()
|
exadel-inc/etoolbox-query-kit
|
core/src/main/java/com/exadel/etoolbox/querykit/core/servlets/EditorCellServlet.java
|
<gh_stars>0
/*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.exadel.etoolbox.querykit.core.servlets;
import com.adobe.granite.ui.components.ds.DataSource;
import com.adobe.granite.ui.components.ds.EmptyDataSource;
import com.adobe.granite.ui.components.ds.SimpleDataSource;
import com.adobe.granite.ui.components.ds.ValueMapResource;
import com.exadel.etoolbox.querykit.core.utils.Constants;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ValueMap;
import org.apache.sling.api.servlets.HttpConstants;
import org.apache.sling.api.servlets.SlingSafeMethodsServlet;
import org.apache.sling.api.wrappers.ValueMapDecorator;
import org.osgi.service.component.annotations.Component;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
/**
* Serves requests for rendering query results cell editor UI
*/
@Component(
service = Servlet.class,
property = {
"sling.servlet.resourceTypes=/apps/etoolbox-query-kit/services/edit-cell",
"sling.servlet.methods=" + HttpConstants.METHOD_GET
}
)
public class EditorCellServlet extends SlingSafeMethodsServlet {
/**
* Processes HTTP {@code GET} requests
* @param request {@code SlingHttpServletRequest} object
* @param response {@code SlingHttpServletResponse} object
* @throws IOException If the request processing failed
*/
@Override
protected void doGet(SlingHttpServletRequest request, SlingHttpServletResponse response) throws ServletException, IOException {
ResourceResolver resourceResolver = request.getResourceResolver();
String path = request.getParameter("data-path");
String property = request.getParameter("data-name");
String type = request.getParameter("data-type");
Resource resource = resourceResolver.getResource(path);
if (resource == null) {
request.setAttribute(DataSource.class.getName(), EmptyDataSource.instance());
return;
}
List<Resource> resources = new ArrayList<>();
String value = resource.getValueMap().get(property, String.class);
ValueMap vm = new ValueMapDecorator(new LinkedHashMap<>());
vm.put(Constants.PROPERTY_NAME, "./" + property);
vm.put(Constants.PROPERTY_VALUE, value);
vm.put("fieldLabel", property);
resources.add(new ValueMapResource(resourceResolver, "", type, vm));
vm = new ValueMapDecorator(new LinkedHashMap<>());
vm.put(Constants.PROPERTY_NAME, Constants.PROPERTY_PATH);
vm.put(Constants.PROPERTY_VALUE, path);
vm.put("granite:hidden", true);
resources.add(new ValueMapResource(resourceResolver, "", "granite/ui/components/coral/foundation/form/textfield", vm));
DataSource dataSource = new SimpleDataSource(resources.iterator());
request.setAttribute(DataSource.class.getName(), dataSource);
}
}
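For illustration only, a plain-Java sketch of a GET request carrying the three parameters doGet reads above. The host, port, endpoint path, and parameter values are placeholders; in a real instance the servlet is reached through a resource whose sling:resourceType matches the registration, so treat the URL purely as an assumption.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class EditCellRequestSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder author instance and content path
        String query = "data-path=" + URLEncoder.encode("/content/site/page/jcr:content", StandardCharsets.UTF_8)
                + "&data-name=" + URLEncoder.encode("jcr:title", StandardCharsets.UTF_8)
                + "&data-type=" + URLEncoder.encode("granite/ui/components/coral/foundation/form/textfield", StandardCharsets.UTF_8);
        URL url = new URL("http://localhost:4502/apps/etoolbox-query-kit/services/edit-cell.html?" + query);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            reader.lines().forEach(System.out::println);
        }
    }
}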
|
linminglu/Fgame
|
game/scene/event/listener/common/rank_value_changed.go
|
<reponame>linminglu/Fgame<filename>game/scene/event/listener/common/rank_value_changed.go
package common
import (
"fgame/fgame/core/event"
"fgame/fgame/game/battle/battle"
battleeventtypes "fgame/fgame/game/battle/event/types"
gameevent "fgame/fgame/game/event"
"fgame/fgame/game/scene/scene"
)
// Rank value changed
func rankValueChanged(target event.EventTarget, data event.EventData) (err error) {
p, ok := target.(scene.Player)
if !ok {
return
}
s := p.GetScene()
if s == nil {
return
}
eventData, ok := data.(*battle.BattlePlayerActivityRankDataChangedEventData)
if !ok {
return
}
rankType := eventData.GetRankType()
rankData := eventData.GetRankData()
// Update the scene activity data
s.UpdatePlayer(rankType, p.GetId(), p.GetName(), rankData.GetRankValue(rankType))
return nil
}
func init() {
gameevent.AddEventListener(battleeventtypes.EventTypeBattlePlayerActivityRankDataChanged, event.EventListenerFunc(rankValueChanged))
}
|
Andreas237/AndroidPolicyAutomation
|
ExtractedJars/Shopkick_com.shopkick.app/javafiles/androidx/work/OneTimeWorkRequest$Builder.java
|
<gh_stars>1-10
// Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) annotate safe
package androidx.work;
import androidx.work.impl.model.WorkSpec;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
// Referenced classes of package androidx.work:
// OneTimeWorkRequest, OverwritingInputMerger, Constraints, WorkRequest
public static final class OneTimeWorkRequest$Builder extends WorkRequest.Builder
{
OneTimeWorkRequest buildInternal()
{
if(mBackoffCriteriaSet && android.os.Build.VERSION.SDK_INT >= 23 && mWorkSpec.constraints.requiresDeviceIdle())
//* 0 0:aload_0
//* 1 1:getfield #41 <Field boolean mBackoffCriteriaSet>
//* 2 4:ifeq 41
//* 3 7:getstatic #47 <Field int android.os.Build$VERSION.SDK_INT>
//* 4 10:bipush 23
//* 5 12:icmplt 41
//* 6 15:aload_0
//* 7 16:getfield #17 <Field WorkSpec mWorkSpec>
//* 8 19:getfield #51 <Field Constraints WorkSpec.constraints>
//* 9 22:invokevirtual #57 <Method boolean Constraints.requiresDeviceIdle()>
//* 10 25:ifne 31
//* 11 28:goto 41
throw new IllegalArgumentException("Cannot set backoff criteria on an idle mode job");
// 12 31:new #59 <Class IllegalArgumentException>
// 13 34:dup
// 14 35:ldc1 #61 <String "Cannot set backoff criteria on an idle mode job">
// 15 37:invokespecial #64 <Method void IllegalArgumentException(String)>
// 16 40:athrow
else
return new OneTimeWorkRequest(this);
// 17 41:new #7 <Class OneTimeWorkRequest>
// 18 44:dup
// 19 45:aload_0
// 20 46:invokespecial #67 <Method void OneTimeWorkRequest(OneTimeWorkRequest$Builder)>
// 21 49:areturn
}
volatile WorkRequest buildInternal()
{
return ((WorkRequest) (buildInternal()));
// 0 0:aload_0
// 1 1:invokevirtual #71 <Method OneTimeWorkRequest buildInternal()>
// 2 4:areturn
}
OneTimeWorkRequest$Builder getThis()
{
return this;
// 0 0:aload_0
// 1 1:areturn
}
volatile WorkRequest.Builder getThis()
{
return ((WorkRequest.Builder) (getThis()));
// 0 0:aload_0
// 1 1:invokevirtual #76 <Method OneTimeWorkRequest$Builder getThis()>
// 2 4:areturn
}
public OneTimeWorkRequest$Builder setInitialDelay(long l, TimeUnit timeunit)
{
mWorkSpec.initialDelay = timeunit.toMillis(l);
// 0 0:aload_0
// 1 1:getfield #17 <Field WorkSpec mWorkSpec>
// 2 4:aload_3
// 3 5:lload_1
// 4 6:invokevirtual #84 <Method long TimeUnit.toMillis(long)>
// 5 9:putfield #88 <Field long WorkSpec.initialDelay>
return this;
// 6 12:aload_0
// 7 13:areturn
}
public OneTimeWorkRequest$Builder setInitialDelay(Duration duration)
{
mWorkSpec.initialDelay = duration.toMillis();
// 0 0:aload_0
// 1 1:getfield #17 <Field WorkSpec mWorkSpec>
// 2 4:aload_1
// 3 5:invokevirtual #97 <Method long Duration.toMillis()>
// 4 8:putfield #88 <Field long WorkSpec.initialDelay>
return this;
// 5 11:aload_0
// 6 12:areturn
}
public OneTimeWorkRequest$Builder setInputMerger(Class class1)
{
mWorkSpec.inputMergerClassName = class1.getName();
// 0 0:aload_0
// 1 1:getfield #17 <Field WorkSpec mWorkSpec>
// 2 4:aload_1
// 3 5:invokevirtual #25 <Method String Class.getName()>
// 4 8:putfield #31 <Field String WorkSpec.inputMergerClassName>
return this;
// 5 11:aload_0
// 6 12:areturn
}
public OneTimeWorkRequest$Builder(Class class1)
{
super(class1);
// 0 0:aload_0
// 1 1:aload_1
// 2 2:invokespecial #13 <Method void WorkRequest$Builder(Class)>
mWorkSpec.inputMergerClassName = ((Class) (androidx/work/OverwritingInputMerger)).getName();
// 3 5:aload_0
// 4 6:getfield #17 <Field WorkSpec mWorkSpec>
// 5 9:ldc1 #19 <Class OverwritingInputMerger>
// 6 11:invokevirtual #25 <Method String Class.getName()>
// 7 14:putfield #31 <Field String WorkSpec.inputMergerClassName>
// 8 17:return
}
}
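The decompiled builder above corresponds to AndroidX WorkManager's OneTimeWorkRequest.Builder. A typical usage sketch follows; UploadWorker is a hypothetical androidx.work.Worker subclass and is not part of this file.

import android.content.Context;
import androidx.work.OneTimeWorkRequest;
import androidx.work.WorkManager;
import java.util.concurrent.TimeUnit;

public class WorkRequestSketch {
    // Sketch only: UploadWorker is an assumed Worker subclass.
    static void scheduleUpload(Context context) {
        OneTimeWorkRequest request = new OneTimeWorkRequest.Builder(UploadWorker.class)
                .setInitialDelay(10, TimeUnit.MINUTES) // maps to WorkSpec.initialDelay in the decompiled code
                .build();
        WorkManager.getInstance(context).enqueue(request);
    }
}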
|
Rominitch/MouCaLab
|
Sources/MouCaCore/source/LoaderManager.cpp
|
#include "Dependencies.h"
#include "LibCore/include/CoreFile.h"
#include "LibRT/include/RTAnimationBones.h"
#include "LibRT/include/RTImage.h"
#include "LibRT/include/RTShaderFile.h"
#include "LibRT/include/RTMesh.h"
#include "LibMedia/include/AnimationLoader.h"
#include "LibMedia/include/ImageLoader.h"
#include "LibMedia/include/MeshLoader.h"
#include "MouCaCore/include/LoaderManager.h"
namespace MouCaCore
{
// Debug class: enable log + sync finished after timeout
//#define LOADING_DEBUG
void LoaderManager::SynchonizeData::initialize(const uint32_t countThreadReady)
{
MOUCA_PRE_CONDITION(countThreadReady < 32); //DEV Issue: we don't want more than 32 threads
std::unique_lock<std::mutex> lock(_waitSync);
_maskThread = (1 << countThreadReady) - 1;
// All is ready by default
_countThreadReady = _maskThread;
#ifdef LOADING_DEBUG
std::bitset<8> mask(_maskThread);
std::stringstream ss;
ss << "Initialize SyncData: " << countThreadReady << " with mask " << mask << std::endl;
BT_PRINT_MESSAGE(ss.str());
#endif
MOUCA_POST_CONDITION(_maskThread > 0);
}
void LoaderManager::SynchonizeData::synchronize()
{
std::unique_lock<std::mutex> lock(_waitSync);
#ifdef LOADING_DEBUG
// Wait with timeout to debug
_wait.wait_for(lock, std::chrono::milliseconds(10000),
[this]
{
std::bitset<8> mask(_countThreadReady);
std::stringstream ss;
ss << " Synchronize: " << mask << " " << ((_countThreadReady & _maskThread) == _maskThread ? "SYNC" : "WAIT") << std::endl;
BT_PRINT_MESSAGE(ss.str());
return (_countThreadReady & _maskThread) == _maskThread;
});
#else
// Wait locker
_wait.wait(lock, [this] { return (_countThreadReady & _maskThread) == _maskThread; });
#endif
}
void LoaderManager::SynchonizeData::threadWorking(const size_t idThread)
{
std::unique_lock<std::mutex> lock(_waitSync);
_countThreadReady &= ~(1 << idThread);
#ifdef LOADING_DEBUG
std::bitset<8> mask(_countThreadReady);
std::stringstream ss;
ss << " Working: " << idThread << " with mask " << mask << std::endl;
BT_PRINT_MESSAGE(ss.str());
#endif
}
void LoaderManager::SynchonizeData::threadReady(const size_t idThread)
{
{
std::unique_lock<std::mutex> lock(_waitSync);
#ifdef LOADING_DEBUG
const uint32_t countThreadReady = _countThreadReady | (1 << idThread);
if (countThreadReady != _countThreadReady)
{
std::bitset<8> mask(countThreadReady);
std::stringstream ss;
ss << " Done: " << idThread << " with mask " << mask << std::endl;
BT_PRINT_MESSAGE(ss.str());
}
#endif
_countThreadReady |= (1 << idThread);
}
_wait.notify_all();
}
void LoadingQueue::initialize(LoaderManager* manager, const size_t iD)
{
_manager = manager;
_iD = iD;
}
void LoadingQueue::release()
{
}
void LoadingQueue::demandToFinish()
{
// Demand end of loop
_run = false;
// Wake up (if case of lock !)
_waitJob.notify_all();
}
void LoadingQueue::addJob(const LoadingItems& items)
{
MOUCA_PRE_CONDITION(_run);
// Register job
{
std::unique_lock<std::mutex> locker(_jobMutex);
for( const auto& item : items )
{
_resources.push_back( item );
}
// Reorder
std::sort(_resources.begin(), _resources.end());
// Take job in direct
if(_resources.front()._state == LoadingItem::Direct)
_manager->getSynchronizeDirect().threadWorking(_iD);
// Take job in deferred
if(_resources.back()._state == LoadingItem::Deferred)
_manager->getSynchronizeDeferred().threadWorking(_iD);
}
// Signal job
_waitJob.notify_all();
}
void LoadingQueue::doAction( LoadingItem& item )
{
// Todo: read resource + prepare ?
RT::ShaderFile* file = dynamic_cast<RT::ShaderFile*>(item._resource.get());
if( file != nullptr )
{
MOUCA_ASSERT( !file->getFilename().empty() );
MOUCA_ASSERT( std::filesystem::exists( file->getFilename() ) );
file->open(L"rb");
return;
}
RT::ImageImport* image = dynamic_cast<RT::ImageImport*>(item._resource.get());
if( image != nullptr )
{
MOUCA_ASSERT( !image->getFilename().empty() );
MOUCA_ASSERT( std::filesystem::exists( image->getFilename() ) );
Media::ImageLoader loader;
image->setImage(loader.openImage(image->getFilename()));
return;
}
RT::MeshImport* importer = dynamic_cast<RT::MeshImport*>(item._resource.get());
if( importer != nullptr )
{
Media::MeshLoader loader;
loader.createMesh( *importer );
return;
}
RT::AnimationImporter* animation = dynamic_cast<RT::AnimationImporter*>(item._resource.get());
if( animation != nullptr )
{
Media::AnimationLoader loader;
loader.createAnimation(*animation);
return;
}
MOUCA_THROW_ERROR( "MouCaCore", "InvalidLoader" );
}
void LoadingQueue::run()
{
while(_run)
{
// Check resource state
if( _resources.empty() )
{
_state = Waiting;
// Signal no job !
{
std::unique_lock<std::mutex> locker( _waitJobMutex );
if( _resources.empty() )
{
_manager->getSynchronizeDirect().threadReady( _iD );
_manager->getSynchronizeDeferred().threadReady( _iD );
}
}
// Wait quasi-passive
std::unique_lock<std::mutex> locker( _waitJobMutex );
_waitJob.wait_for( locker, std::chrono::milliseconds( 240 ), [this]{ return !_resources.empty(); } );
_state = Running;
}
else
{
LoadingItem item;
// Take job
{
std::unique_lock<std::mutex> locker( _jobMutex );
item = std::move(_resources.front());
_resources.pop_front();
}
try
{
doAction( item );
}
catch(...)
{
// When resource can't be read/extract !
}
// Signal no direct job left
{
std::unique_lock<std::mutex> locker( _jobMutex );
if( !_resources.empty() && _resources.front()._state != LoadingItem::Direct )
{
_manager->getSynchronizeDirect().threadReady( _iD );
}
}
}
}
_state = Stop;
// Missing synchronize or strong shutdown !
MOUCA_POST_CONDITION(_resources.empty());
}
void LoaderManager::initialize(const uint32_t nbQueues)
{
MOUCA_PRE_CONDITION(_queues.empty()); // DEV Issue: call initialize() both time.
MOUCA_PRE_CONDITION(nbQueues > 0); // DEV Issue: Need minimum of queue !
// Allocate synchronizer
_syncDirect.initialize( nbQueues );
_syncDeferred.initialize( nbQueues );
// Create queue
_queues.resize(nbQueues);
// Launch thread
size_t id=0;
for(auto& queue : _queues)
{
queue.initialize(this, id);
queue.start();
++id;
}
// Wait all thread ready to make job
// Avoid dead lock when thread don't receive wait signal.
synchronize();
MOUCA_POST_CONDITION(!_queues.empty()); /// Operation Failed ?
}
void LoaderManager::release()
{
MOUCA_PRE_CONDITION(!_queues.empty()); // DEV Issue: call release before initialize().
// Send order
for(auto& queue : _queues)
{
queue.demandToFinish();
}
// Wait all
for(auto& queue : _queues)
{
queue.join();
queue.release();
}
// Remove all
_queues.clear();
MOUCA_POST_CONDITION(_queues.empty()); /// Operation Failed ?
}
void LoaderManager::loadResources(LoadingItems& items)
{
MOUCA_PRE_CONDITION(!_queues.empty()); // DEV Issue: Missing call initialize() !
MOUCA_PRE_CONDITION(!items.empty()); // DEV Issue: No job ?
// Sort queue to have priority job first !
std::sort(items.begin(), items.end());
// Algo 1: Job for all
// We add to each queue an item until we finish
auto itQueue = _queues.begin();
while(!items.empty())
{
LoadingItems local;
// Transfer to local
local.push_back(std::move(items.front()));
items.pop_front();
itQueue->addJob( local );
++itQueue;
// Restart thread
if(itQueue == _queues.end())
itQueue = _queues.begin();
}
// Wait "direct" resource before leave
_syncDirect.synchronize();
}
void LoaderManager::synchronize()
{
// Wait all
_syncDeferred.synchronize();
}
}
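To make the masking scheme in SynchonizeData easier to follow, here is a minimal re-expression of the same idea (one ready bit per worker, wait until all bits are set). It is an illustrative sketch in Java, not the engine's API.

// Illustrative sketch of the per-thread ready-bit synchronization used above.
public class ReadyMask {
    private final int mask;  // value when all threads are ready
    private int ready;       // current ready bits

    public ReadyMask(int threadCount) {
        if (threadCount >= 32) throw new IllegalArgumentException("at most 31 threads");
        mask = (1 << threadCount) - 1;
        ready = mask;        // everything starts ready, as in initialize()
    }

    public synchronized void working(int id) {   // mirrors threadWorking()
        ready &= ~(1 << id);
    }

    public synchronized void done(int id) {      // mirrors threadReady()
        ready |= (1 << id);
        notifyAll();
    }

    public synchronized void awaitAll() throws InterruptedException {  // mirrors synchronize()
        while ((ready & mask) != mask) {
            wait();
        }
    }
}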
|
amazon-research/long-short-term-transformer
|
src/rekognition_online_action_detection/models/lstr.py
|
<gh_stars>10-100
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import torch
import torch.nn as nn
from . import transformer as tr
from .models import META_ARCHITECTURES as registry
from .feature_head import build_feature_head
class LSTR(nn.Module):
def __init__(self, cfg):
super(LSTR, self).__init__()
# Build long feature heads
self.long_memory_num_samples = cfg.MODEL.LSTR.LONG_MEMORY_NUM_SAMPLES
self.long_enabled = self.long_memory_num_samples > 0
if self.long_enabled:
self.feature_head_long = build_feature_head(cfg)
# Build work feature head
self.work_memory_num_samples = cfg.MODEL.LSTR.WORK_MEMORY_NUM_SAMPLES
self.work_enabled = self.work_memory_num_samples > 0
if self.work_enabled:
self.feature_head_work = build_feature_head(cfg)
self.d_model = self.feature_head_work.d_model
self.num_heads = cfg.MODEL.LSTR.NUM_HEADS
self.dim_feedforward = cfg.MODEL.LSTR.DIM_FEEDFORWARD
self.dropout = cfg.MODEL.LSTR.DROPOUT
self.activation = cfg.MODEL.LSTR.ACTIVATION
self.num_classes = cfg.DATA.NUM_CLASSES
# Build position encoding
self.pos_encoding = tr.PositionalEncoding(self.d_model, self.dropout)
# Build LSTR encoder
if self.long_enabled:
self.enc_queries = nn.ModuleList()
self.enc_modules = nn.ModuleList()
for param in cfg.MODEL.LSTR.ENC_MODULE:
if param[0] != -1:
self.enc_queries.append(nn.Embedding(param[0], self.d_model))
enc_layer = tr.TransformerDecoderLayer(
self.d_model, self.num_heads, self.dim_feedforward,
self.dropout, self.activation)
self.enc_modules.append(tr.TransformerDecoder(
enc_layer, param[1], tr.layer_norm(self.d_model, param[2])))
else:
self.enc_queries.append(None)
enc_layer = tr.TransformerEncoderLayer(
self.d_model, self.num_heads, self.dim_feedforward,
self.dropout, self.activation)
self.enc_modules.append(tr.TransformerEncoder(
enc_layer, param[1], tr.layer_norm(self.d_model, param[2])))
else:
self.register_parameter('enc_queries', None)
self.register_parameter('enc_modules', None)
# Build LSTR decoder
if self.long_enabled:
param = cfg.MODEL.LSTR.DEC_MODULE
dec_layer = tr.TransformerDecoderLayer(
self.d_model, self.num_heads, self.dim_feedforward,
self.dropout, self.activation)
self.dec_modules = tr.TransformerDecoder(
dec_layer, param[1], tr.layer_norm(self.d_model, param[2]))
else:
param = cfg.MODEL.LSTR.DEC_MODULE
dec_layer = tr.TransformerEncoderLayer(
self.d_model, self.num_heads, self.dim_feedforward,
self.dropout, self.activation)
self.dec_modules = tr.TransformerEncoder(
dec_layer, param[1], tr.layer_norm(self.d_model, param[2]))
# Build classifier
self.classifier = nn.Linear(self.d_model, self.num_classes)
def forward(self, visual_inputs, motion_inputs, memory_key_padding_mask=None):
if self.long_enabled:
# Compute long memories
long_memories = self.pos_encoding(self.feature_head_long(
visual_inputs[:, :self.long_memory_num_samples],
motion_inputs[:, :self.long_memory_num_samples],
).transpose(0, 1))
if len(self.enc_modules) > 0:
enc_queries = [
enc_query.weight.unsqueeze(1).repeat(1, long_memories.shape[1], 1)
if enc_query is not None else None
for enc_query in self.enc_queries
]
# Encode long memories
if enc_queries[0] is not None:
long_memories = self.enc_modules[0](enc_queries[0], long_memories,
memory_key_padding_mask=memory_key_padding_mask)
else:
long_memories = self.enc_modules[0](long_memories)
for enc_query, enc_module in zip(enc_queries[1:], self.enc_modules[1:]):
if enc_query is not None:
long_memories = enc_module(enc_query, long_memories)
else:
long_memories = enc_module(long_memories)
# Concatenate memories
if self.long_enabled:
memory = long_memories
if self.work_enabled:
# Compute work memories
work_memories = self.pos_encoding(self.feature_head_work(
visual_inputs[:, self.long_memory_num_samples:],
motion_inputs[:, self.long_memory_num_samples:],
).transpose(0, 1), padding=self.long_memory_num_samples)
# Build mask
mask = tr.generate_square_subsequent_mask(
work_memories.shape[0])
mask = mask.to(work_memories.device)
# Compute output
if self.long_enabled:
output = self.dec_modules(
work_memories,
memory=memory,
tgt_mask=mask,
)
else:
output = self.dec_modules(
work_memories,
src_mask=mask,
)
# Compute classification score
score = self.classifier(output)
return score.transpose(0, 1)
@registry.register('LSTR')
class LSTRStream(LSTR):
def __init__(self, cfg):
super(LSTRStream, self).__init__(cfg)
############################
# Cache for stream inference
############################
self.long_memories_cache = None
self.compressed_long_memories_cache = None
def stream_inference(self,
long_visual_inputs,
long_motion_inputs,
work_visual_inputs,
work_motion_inputs,
memory_key_padding_mask=None):
assert self.long_enabled, 'Long-term memory cannot be empty for stream inference'
assert len(self.enc_modules) > 0, 'LSTR encoder cannot be disabled for stream inference'
if (long_visual_inputs is not None) and (long_motion_inputs is not None):
# Compute long memories
long_memories = self.feature_head_long(
long_visual_inputs,
long_motion_inputs,
).transpose(0, 1)
if self.long_memories_cache is None:
self.long_memories_cache = long_memories
else:
self.long_memories_cache = torch.cat((
self.long_memories_cache[1:], long_memories
))
long_memories = self.long_memories_cache
pos = self.pos_encoding.pe[:self.long_memory_num_samples, :]
enc_queries = [
enc_query.weight.unsqueeze(1).repeat(1, long_memories.shape[1], 1)
if enc_query is not None else None
for enc_query in self.enc_queries
]
# Encode long memories
long_memories = self.enc_modules[0].stream_inference(enc_queries[0], long_memories, pos,
memory_key_padding_mask=memory_key_padding_mask)
self.compressed_long_memories_cache = long_memories
for enc_query, enc_module in zip(enc_queries[1:], self.enc_modules[1:]):
if enc_query is not None:
long_memories = enc_module(enc_query, long_memories)
else:
long_memories = enc_module(long_memories)
else:
long_memories = self.compressed_long_memories_cache
enc_queries = [
enc_query.weight.unsqueeze(1).repeat(1, long_memories.shape[1], 1)
if enc_query is not None else None
for enc_query in self.enc_queries
]
# Encode long memories
for enc_query, enc_module in zip(enc_queries[1:], self.enc_modules[1:]):
if enc_query is not None:
long_memories = enc_module(enc_query, long_memories)
else:
long_memories = enc_module(long_memories)
# Concatenate memories
if self.long_enabled:
memory = long_memories
if self.work_enabled:
# Compute work memories
work_memories = self.pos_encoding(self.feature_head_work(
work_visual_inputs,
work_motion_inputs,
).transpose(0, 1), padding=self.long_memory_num_samples)
# Build mask
mask = tr.generate_square_subsequent_mask(
work_memories.shape[0])
mask = mask.to(work_memories.device)
# Compute output
if self.long_enabled:
output = self.dec_modules(
work_memories,
memory=memory,
tgt_mask=mask,
)
else:
output = self.dec_modules(
work_memories,
src_mask=mask,
)
# Compute classification score
score = self.classifier(output)
return score.transpose(0, 1)
|
ctggroup/BayesMap
|
src/options.hpp
|
#ifndef options_hpp
#define options_hpp
#include <iostream>
#include <sstream>
#include <fstream>
#include <stdio.h>
#include <cstring>
#include <string>
#include <limits.h>
#include <boost/format.hpp>
#include "gadgets.hpp"
#include <Eigen/Eigen>
#include "common.h"
using namespace std;
using namespace boost;
const unsigned Megabase = 1e6;
class Options {
public:
static MatrixXd parseVarianceComponents(const std::string &arg);
unsigned chainLength;
unsigned burnin;
unsigned seed;
unsigned numThread = 0; // Default to tbb::flow::unlimited
int numThreadSpawned = 0; // Default to 0, let TBB do its thing
size_t decompressionNodeConcurrency = 0;
size_t decompressionTokens = 40;
size_t analysisNodeConcurrency = 0;
size_t analysisTokens = 20;
unsigned preprocessChunks = 1;
unsigned thin; // save every this th sampled value in MCMC
Eigen::MatrixXd S; //variance components
unsigned int numGroups;
string groupFile;
string failureFile;
string bayesW_version;
string quad_points;
string fixedFile;
unsigned int fixedEffectNumber;
string title;
AnalysisType analysisType = AnalysisType::Unknown;
string phenotypeFile;
string dataFile;
InputType inputType = InputType::Unknown;
string mcmcSampleFile;
string optionFile;
bool compress = false;
PreprocessDataType preprocessDataType = PreprocessDataType::Dense;
string iterLogFile;
bool iterLog = false;
string colLogFile;
bool colLog =false;
bool useMarkerCache = false;
double v0E = 0.0001;
double s02E = 0.0001;
double v0G = 0.0001;
double s02G = 0.0001;
Options(){
chainLength = 10000;
burnin = 5000;
seed = static_cast<unsigned int>(std::time(0));
numThread = 0;
numThreadSpawned = 0;
decompressionNodeConcurrency = 0;
decompressionTokens = 40;
analysisNodeConcurrency = 0;
analysisTokens = 20;
preprocessChunks = 1;
thin = 5;
S.resize(1, 3);
S(0,0) = 0.01;
S(0,1) = 0.001;
S(0,2) = 0.0001;
title = "brr";
analysisType = AnalysisType::Unknown;
dataFile = "";
inputType = InputType::Unknown;
fixedFile = "";
phenotypeFile = "";
mcmcSampleFile = "bayesOutput.csv";
optionFile = "";
numGroups =2;
preprocessDataType = PreprocessDataType::Dense;
bayesW_version = "marginal";
fixedEffectNumber = 0;
}
void inputOptions(const int argc, const char* argv[]);
private:
void readFile(const string &file);
void makeTitle(void);
void seedEngine(void);
};
#endif /* options_hpp */
|
aoeai/aoeai-spin-accelerator
|
src/main/java/com/aoeai/spin/accelerator/generate/bean/config/PoConfig.java
|
<gh_stars>1-10
package com.aoeai.spin.accelerator.generate.bean.config;
import lombok.Data;
/**
* Configuration for PO (database-mapped persistent object) classes
* @author aoe
* @date 2020/8/24
*/
@Data
public class PoConfig extends JavaConfig {
/**
* Table-name prefixes to strip when generating Java files (comma-separated); empty by default
*/
private String tablePrefixFilter = "";
}
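A small sketch of how the comma-separated tablePrefixFilter described above could be applied when cleaning a table name before code generation. The helper and sample values are hypothetical; only the field's documented meaning is taken from the class.

public class TablePrefixFilterSketch {
    // Strip the first matching prefix from the comma-separated filter, if any.
    static String stripPrefix(String tableName, String tablePrefixFilter) {
        if (tablePrefixFilter == null || tablePrefixFilter.isEmpty()) {
            return tableName;
        }
        for (String prefix : tablePrefixFilter.split(",")) {
            if (!prefix.isEmpty() && tableName.startsWith(prefix)) {
                return tableName.substring(prefix.length());
            }
        }
        return tableName;
    }

    public static void main(String[] args) {
        System.out.println(stripPrefix("t_user_account", "t_,sys_")); // prints user_account
    }
}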
|
robertknight/lms
|
tests/functional/conftest.py
|
import contextlib
import pytest
from webtest import TestApp
from lms import db
from lms.app import create_app
from tests.conftest import SESSION, TEST_SETTINGS, get_test_database_url
TEST_SETTINGS["sqlalchemy.url"] = get_test_database_url(
default="postgresql://postgres@localhost:5433/lms_functests"
)
@pytest.fixture(autouse=True)
def clean_database(db_engine):
"""Delete any data added by the previous test."""
tables = reversed(db.BASE.metadata.sorted_tables)
with contextlib.closing(db_engine.connect()) as conn:
transaction = conn.begin() # pylint:disable=no-member
tnames = ", ".join('"' + t.name + '"' for t in tables)
conn.execute("TRUNCATE {};".format(tnames)) # pylint:disable=no-member
transaction.commit()
@pytest.fixture(scope="session")
def pyramid_app():
return create_app(None, **TEST_SETTINGS)
@pytest.fixture
def app(pyramid_app, db_engine):
db.init(db_engine)
return TestApp(pyramid_app)
@pytest.fixture
def db_session(db_engine):
"""Get a standalone database session for preparing database state."""
conn = db_engine.connect()
session = SESSION(bind=conn)
yield session
session.close()
|
Samyakk123/ThreatExchange
|
python-threatexchange/threatexchange/stopncii/tests/test_api.py
|
import pytest
from threatexchange.stopncii.api import (
StopNCIIAPI,
StopNCIICaseStatus,
StopNCIICSPFeedbackValue,
StopNCIIEndpoint,
StopNCIIHashRecord,
StopNCIISignalType,
)
PAGE_TOKEN = (
    "<KEY>"
    "<KEY>"
)
def mock_get_impl(endpoint: str, **json):
assert endpoint == StopNCIIEndpoint.FetchHashes
if json.get("nextPageToken") != PAGE_TOKEN:
return {
"count": 2,
"nextSetTimestamp": 1625175071,
"nextPageToken": PAGE_TOKEN,
"hasMoreRecords": True,
"hashRecords": [
{
"lastModtimestamp": 1625167804,
"hashValue": "2afc4a5c09628a7961c14d436493bba66b89b831453baa1d556ba385554daa82",
"hashStatus": "Received",
"caseNumbers": {
"27664732-76e1-4a17-8099-455798e67022": "Received",
"a2696edf-1237-4eb8-a9c1-cd8ee6c055e5": "Received",
"9fbb73d7-c177-4ed2-b4e8-0050b72decd0": "Received",
"bd2bff7c-8160-4615-8dce-38c2f066d1ac": "Received",
},
"signalType": "ImagePDQ",
},
{
"lastModtimestamp": 1625167844,
"hashValue": "79e07de27d7295339435d63cd31cf35a7bfa29eb2885008500a588a5ea3ae75a",
"hashStatus": "Received",
"caseNumbers": {"bd2bff7c-8160-4615-8dce-38c2f066d1ac": "Received"},
"signalType": "ImagePDQ",
},
],
}
return {
"count": 1,
"nextSetTimestamp": 1625167824,
"nextPageToken": PAGE_TOKEN[:-1] + "4",
"hasMoreRecords": False,
"hashRecords": [
{
"lastModtimestamp": 1625175071,
"hashValue": "9def0b7dafa86a1c90f2abd78e79ceb25ec3d1a4b3d4bc7a4354baf7717ea038",
"hashStatus": "Active",
"caseNumbers": {"cc592711-7068-442f-b5d2-24d50d389751": "Active"},
"signalType": "ImagePDQ",
"CSPFeedbacks": [
{
"source": "Facebook",
"feedbackValue": "Blocked",
"tags": ["Nude", "Objectionable"],
}
],
},
],
}
@pytest.fixture
def api(monkeypatch: pytest.MonkeyPatch):
api = StopNCIIAPI("", "")
monkeypatch.setattr(api, "_get", mock_get_impl)
return api
def assert_first_record(record: StopNCIIHashRecord) -> None:
assert record.lastModtimestamp == 1625167804
assert record.signalType == StopNCIISignalType.ImagePDQ
assert (
record.hashValue
== "2afc4a5c09628a7961c14d436493bba66b89b831453baa1d556ba385554daa82"
)
assert record.hashStatus == StopNCIICaseStatus.Received
assert len(record.caseNumbers) == 4
assert all(c == StopNCIICaseStatus.Received for c in record.caseNumbers.values())
assert len(record.CSPFeedbacks) == 0
def assert_second_record(record: StopNCIIHashRecord) -> None:
assert record.lastModtimestamp == 1625167844
assert record.signalType == StopNCIISignalType.ImagePDQ
assert (
record.hashValue
== "79e07de27d7295339435d63cd31cf35a7bfa29eb2885008500a588a5ea3ae75a"
)
assert record.hashStatus == StopNCIICaseStatus.Received
assert len(record.caseNumbers) == 1
assert record.caseNumbers == {
"bd2bff7c-8160-4615-8dce-38c2f066d1ac": StopNCIICaseStatus.Received
}
assert len(record.CSPFeedbacks) == 0
def assert_third_record(record: StopNCIIHashRecord) -> None:
assert record.lastModtimestamp == 1625175071
assert record.signalType == StopNCIISignalType.ImagePDQ
assert (
record.hashValue
== "9def0b7dafa86a1c90f2abd78e79ceb25ec3d1a4b3d4bc7a4354baf7717ea038"
)
assert record.hashStatus == StopNCIICaseStatus.Active
assert len(record.caseNumbers) == 1
assert record.caseNumbers == {
"cc592711-7068-442f-b5d2-24d50d389751": StopNCIICaseStatus.Active
}
assert len(record.CSPFeedbacks) == 1
feedback = record.CSPFeedbacks[0]
assert feedback.source == "Facebook"
assert feedback.feedbackValue == StopNCIICSPFeedbackValue.Blocked
assert feedback.tags == {"Nude", "Objectionable"}
def test_mocked_get_hashes(api: StopNCIIAPI):
result = api.fetch_hashes()
assert result.count == 2
assert result.nextSetTimestamp == 1625175071
assert result.hasMoreRecords is True
assert result.nextPageToken == PAGE_TOKEN
assert len(result.hashRecords) == 2
one, two = result.hashRecords
assert_first_record(one)
assert_second_record(two)
second_result = api.fetch_hashes(next_page=result.nextPageToken)
assert second_result.count == 1
assert second_result.nextSetTimestamp == 1625167824
assert second_result.hasMoreRecords is False
assert second_result.nextPageToken != PAGE_TOKEN
assert len(second_result.hashRecords) == 1
assert_third_record(second_result.hashRecords[0])
def test_mocked_get_hashes_iter(api: StopNCIIAPI):
it = api.fetch_hashes_iter()
as_list = list(it)
assert len(as_list) == 2
all_updates = [
record for result in api.fetch_hashes_iter() for record in result.hashRecords
]
assert len(all_updates) == 3
one, two, three = all_updates
assert_first_record(one)
assert_second_record(two)
assert_third_record(three)
|
FTSRG-MDSD/gsoaarchitect-MDSD-2012
|
hu.bme.mit.inf.gs.dsl.edit/src/soamodel/provider/ServiceComponentItemProvider.java
|
<reponame>FTSRG-MDSD/gsoaarchitect-MDSD-2012
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package soamodel.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
import soamodel.ServiceComponent;
import soamodel.SoamodelFactory;
import soamodel.SoamodelPackage;
/**
* This is the item provider adapter for a {@link soamodel.ServiceComponent} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class ServiceComponentItemProvider
extends ItemProviderAdapter
implements
IEditingDomainItemProvider,
IStructuredItemContentProvider,
ITreeItemContentProvider,
IItemLabelProvider,
IItemPropertySource {
/**
* This constructs an instance from a factory and a notifier.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ServiceComponentItemProvider(AdapterFactory adapterFactory) {
super(adapterFactory);
}
/**
* This returns the property descriptors for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public List getPropertyDescriptors(Object object) {
if (itemPropertyDescriptors == null) {
super.getPropertyDescriptors(object);
addNamePropertyDescriptor(object);
addNamespacePropertyDescriptor(object);
addConnectsToPropertyDescriptor(object);
addURIPropertyDescriptor(object);
addPersistsPropertyDescriptor(object);
}
return itemPropertyDescriptors;
}
/**
* This adds a property descriptor for the Name feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addNamePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServiceComponent_name_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServiceComponent_name_feature", "_UI_ServiceComponent_type"),
SoamodelPackage.Literals.SERVICE_COMPONENT__NAME,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Namespace feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addNamespacePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServiceComponent_namespace_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServiceComponent_namespace_feature", "_UI_ServiceComponent_type"),
SoamodelPackage.Literals.SERVICE_COMPONENT__NAMESPACE,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Connects To feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addConnectsToPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServiceComponent_connectsTo_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServiceComponent_connectsTo_feature", "_UI_ServiceComponent_type"),
SoamodelPackage.Literals.SERVICE_COMPONENT__CONNECTS_TO,
true,
false,
true,
null,
null,
null));
}
/**
* This adds a property descriptor for the URI feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addURIPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServiceComponent_URI_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServiceComponent_URI_feature", "_UI_ServiceComponent_type"),
SoamodelPackage.Literals.SERVICE_COMPONENT__URI,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Persists feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addPersistsPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServiceComponent_persists_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServiceComponent_persists_feature", "_UI_ServiceComponent_type"),
SoamodelPackage.Literals.SERVICE_COMPONENT__PERSISTS,
true,
false,
true,
null,
null,
null));
}
/**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
* {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public Collection getChildrenFeatures(Object object) {
if (childrenFeatures == null) {
super.getChildrenFeatures(object);
childrenFeatures.add(SoamodelPackage.Literals.SERVICE_COMPONENT__METHODS);
}
return childrenFeatures;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected EStructuralFeature getChildFeature(Object object, Object child) {
// Check the type of the specified child object and return the proper feature to use for
// adding (see {@link AddCommand}) it as a child.
return super.getChildFeature(object, child);
}
/**
* This returns the label text for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getText(Object object) {
String label = ((ServiceComponent)object).getName();
return label == null || label.length() == 0 ?
getString("_UI_ServiceComponent_type") :
getString("_UI_ServiceComponent_type") + " " + label;
}
/**
* This handles model notifications by calling {@link #updateChildren} to update any cached
* children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void notifyChanged(Notification notification) {
updateChildren(notification);
switch (notification.getFeatureID(ServiceComponent.class)) {
case SoamodelPackage.SERVICE_COMPONENT__NAME:
case SoamodelPackage.SERVICE_COMPONENT__NAMESPACE:
case SoamodelPackage.SERVICE_COMPONENT__URI:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
return;
case SoamodelPackage.SERVICE_COMPONENT__METHODS:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
return;
}
super.notifyChanged(notification);
}
/**
* This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
* that can be created under this object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void collectNewChildDescriptors(Collection newChildDescriptors, Object object) {
super.collectNewChildDescriptors(newChildDescriptors, object);
newChildDescriptors.add
(createChildParameter
(SoamodelPackage.Literals.SERVICE_COMPONENT__METHODS,
SoamodelFactory.eINSTANCE.createServiceMethod()));
newChildDescriptors.add
(createChildParameter
(SoamodelPackage.Literals.SERVICE_COMPONENT__METHODS,
SoamodelFactory.eINSTANCE.createRestMethod()));
}
/**
* Return the resource locator for this item provider's resources.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ResourceLocator getResourceLocator() {
return SoaModelEditPlugin.INSTANCE;
}
}
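Item providers like this one are normally consumed through an adapter factory. The following is a compile-level sketch of wiring a composed factory into a JFace viewer using standard EMF.Edit API; the class and method names of the sketch itself are hypothetical.

import org.eclipse.emf.edit.provider.ComposedAdapterFactory;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryContentProvider;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider;
import org.eclipse.jface.viewers.TreeViewer;

public class SoaModelViewerSketch {
    // Sketch: attach registered item providers (such as the one above) to a TreeViewer.
    static void configure(TreeViewer viewer) {
        ComposedAdapterFactory adapterFactory =
                new ComposedAdapterFactory(ComposedAdapterFactory.Descriptor.Registry.INSTANCE);
        viewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
        viewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));
    }
}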
|
brauliolledo/aspectjs
|
packages/memo/dist/esm2015/memo/packages/memo/dist/src/marshalling/marshallers/marshaller.js
|
<filename>packages/memo/dist/esm2015/memo/packages/memo/dist/src/marshalling/marshallers/marshaller.js
/**
* @public
*/
class MemoMarshaller {
}
export { MemoMarshaller };
//# sourceMappingURL=marshaller.js.map
|
DrBretto/drb-random-bits
|
src/main/java/com/drbretto/drb/CreeperHeartItem.java
|
package com.drbretto.drb;
import net.minecraft.block.BlockState;
import net.minecraft.entity.EntityType;
import net.minecraft.entity.TntEntity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.Hand;
import net.minecraft.util.TypedActionResult;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
public class CreeperHeartItem extends Item {
public CreeperHeartItem(Settings settings) {
super(settings);
}
@Override
public TypedActionResult<ItemStack> use(World world, PlayerEntity user, Hand hand) {
System.out.println("Creeper Heart Used!");
return super.use(world, user, hand);
}
}
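For context, a Fabric-style registration sketch for an item like this. The mod id, item path, and initializer class are assumptions, and the exact Registry/Identifier/ItemGroup classes depend on the Minecraft and mapping version the mod targets.

import net.fabricmc.api.ModInitializer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemGroup;
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;

// Sketch only: "drb" and "creeper_heart" are assumed identifiers.
public class DrbModSketch implements ModInitializer {
    public static final Item CREEPER_HEART =
            new CreeperHeartItem(new Item.Settings().group(ItemGroup.MISC));

    @Override
    public void onInitialize() {
        Registry.register(Registry.ITEM, new Identifier("drb", "creeper_heart"), CREEPER_HEART);
    }
}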
|
NyBatis/NyBatisCore
|
src/test/java/org/nybatis/core/log/NLoggerTest.java
|
package org.nybatis.core.log;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.nio.BufferOverflowException;
import org.nybatis.core.conf.Const;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
public class NLoggerTest {
private static final Logger logger = LoggerFactory.getLogger( NLoggerTest.class );
@Test
public void basicTest() {
NLogger.debug( "start !!" );
try {
exceptionRaiser();
} catch( Exception e ) {
NLogger.error( e );
// System.out.println( "-------------------------");
NLogger.error( "error({})\n{}", e.getMessage(), e );
logger.error( e.getMessage(), e );
}
NLogger.info( "Merong\nMerong : {}", "Nayasis" );
NLogger.warn( null );
NLogger.trace( "trace what ??" );
NLogger.info( "trace what ?? : {}", "Nayasis" );
}
private void exceptionRaiser() throws Exception {
throw new Exception( "Merong", new BufferOverflowException() );
}
@Test
public void multiLineTest() {
String format = "사랑하는\n나의\n어머니";
NLogger.debug( format );
logger.debug( format );
}
@Test
public void specificLogger() {
NLogger.getLogger( "specific.test.log" ).debug( "사랑하는\n나의\n어머니" );
}
@Test
public void specificCaller() {
NLogger.debug( "merong" );
NLogger.setCallderDepth( 0 ).debug( "merong" );
NLogger.setCallderDepth( 1 ).debug( "merong" );
}
@Test
public void loglevelTest() {
NLogger.loadConfiguration( Const.path.getRoot() + "/config/log/logback-subclass.xml" );
PrintStream srcOut = System.out;
OutputStream testOut = new ByteArrayOutputStream();
PrintStream printStream = new PrintStream( testOut );
System.setOut( printStream );
try {
logger.debug( "merong by logback" );
NLogger.debug( "merong by NLogger" );
new SubClass().test();
} finally {
System.setOut( srcOut );
printStream.close();
}
String consoleOutput = testOut.toString();
// do not print in SubClass log
Assert.assertEquals( consoleOutput.split( "\n" ).length, 2 );
}
}
|
syc7446/pddlstream
|
pddlstream/algorithms/scheduling/plan_streams.py
|
<filename>pddlstream/algorithms/scheduling/plan_streams.py
from __future__ import print_function
import copy
from collections import defaultdict
from pddlstream.algorithms.downward import get_problem, task_from_domain_problem, get_cost_scale, \
scale_cost, fd_from_fact, make_domain, make_predicate, evaluation_from_fd, plan_preimage, fact_from_fd, conditions_hold
from pddlstream.algorithms.instantiate_task import instantiate_task, sas_from_instantiated
from pddlstream.algorithms.scheduling.add_optimizers import add_optimizer_effects, \
using_optimizers, recover_simultaneous
from pddlstream.algorithms.scheduling.apply_fluents import convert_fluent_streams
from pddlstream.algorithms.scheduling.negative import recover_negative_axioms, convert_negative
from pddlstream.algorithms.scheduling.postprocess import postprocess_stream_plan
from pddlstream.algorithms.scheduling.recover_axioms import recover_axioms_plans
from pddlstream.algorithms.scheduling.recover_functions import compute_function_plan
from pddlstream.algorithms.scheduling.recover_streams import get_achieving_streams, extract_stream_plan, \
evaluations_from_stream_plan
from pddlstream.algorithms.scheduling.stream_action import add_stream_actions
from pddlstream.algorithms.scheduling.utils import partition_results, \
add_unsatisfiable_to_goal, get_instance_facts
from pddlstream.algorithms.search import solve_from_task
from pddlstream.algorithms.algorithm import UNIVERSAL_TO_CONDITIONAL
from pddlstream.language.constants import And, Not, get_prefix, EQ, Action
from pddlstream.language.conversion import obj_from_pddl_plan, evaluation_from_fact, \
fact_from_evaluation, transform_plan_args, transform_action_args, pddl_from_object, obj_from_pddl
from pddlstream.language.external import Result
from pddlstream.language.exogenous import get_fluent_domain
from pddlstream.language.function import Function
from pddlstream.language.stream import StreamResult
from pddlstream.language.optimizer import UNSATISFIABLE
from pddlstream.language.statistics import compute_plan_effort
from pddlstream.language.temporal import SimplifiedDomain, solve_tfd
from pddlstream.language.write_pddl import get_problem_pddl
from pddlstream.utils import Verbose, INF, flatten
def add_stream_efforts(node_from_atom, instantiated, effort_weight, **kwargs):
cost_from_action = {action: action.cost for action in instantiated.actions}
if effort_weight is None:
return cost_from_action
# TODO: make effort just a multiplier (or relative) to avoid worrying about the scale
#efforts = [] # TODO: regularize & normalize across the problem?
for instance in instantiated.actions:
# TODO: prune stream actions here?
# TODO: round each effort individually to penalize multiple streams
facts = get_instance_facts(instance, node_from_atom)
#effort = COMBINE_OP([0] + [node_from_atom[fact].effort for fact in facts])
stream_plan = []
extract_stream_plan(node_from_atom, facts, stream_plan)
if effort_weight is not None:
effort = compute_plan_effort(stream_plan, **kwargs)
instance.cost += scale_cost(effort_weight*effort)
#efforts.append(effort)
#print(min(efforts), efforts)
return cost_from_action
##################################################
def rename_instantiated_actions(instantiated):
# TODO: rename SAS instead?
actions = instantiated.actions[:]
renamed_actions = []
action_from_name = {}
for i, action in enumerate(actions):
renamed_actions.append(copy.copy(action))
renamed_name = 'a{}'.format(i)
renamed_actions[-1].name = '({})'.format(renamed_name)
action_from_name[renamed_name] = action # Change reachable_action_params?
instantiated.actions[:] = renamed_actions
return action_from_name
def pddl_from_instance(instance):
action = instance.action
args = [instance.var_mapping[p.name]
for p in action.parameters[:action.num_external_parameters]]
return Action(action.name, args)
##################################################
def get_plan_cost(action_plan, cost_from_action):
if action_plan is None:
return INF
#return sum([0.] + [instance.cost for instance in action_plan])
scaled_cost = sum([0.] + [cost_from_action[instance] for instance in action_plan])
return scaled_cost / get_cost_scale()
def instantiate_optimizer_axioms(instantiated, domain, results):
# Needed for instantiating axioms before adding stream action effects
# Otherwise, FastDownward will prune these unreachable axioms
# TODO: compute this first and then apply the eager actions
stream_init = {fd_from_fact(result.stream_fact)
for result in results if isinstance(result, StreamResult)}
evaluations = list(map(evaluation_from_fd, stream_init | instantiated.atoms))
temp_domain = make_domain(predicates=[make_predicate(UNSATISFIABLE, [])],
axioms=[ax for ax in domain.axioms if ax.name == UNSATISFIABLE])
temp_problem = get_problem(evaluations, Not((UNSATISFIABLE,)), temp_domain)
# TODO: UNSATISFIABLE might be in atoms making the goal always infeasible
with Verbose():
# TODO: the FastDownward instantiation will prune static preconditions
new_instantiated = instantiate_task(task_from_domain_problem(temp_domain, temp_problem),
check_infeasible=False, prune_static=False)
assert new_instantiated is not None
instantiated.axioms.extend(new_instantiated.axioms)
instantiated.atoms.update(new_instantiated.atoms)
##################################################
def recover_stream_plan(evaluations, current_plan, opt_evaluations, goal_expression, domain, node_from_atom,
action_plan, axiom_plans, negative, replan_step):
# Universally quantified conditions are converted into negative axioms
# Existentially quantified conditions are made additional preconditions
# Universally quantified effects are instantiated by doing the cartesian product of types (slow)
# Added effects cancel out removed effects
# TODO: node_from_atom is a subset of opt_evaluations (only missing functions)
real_task = task_from_domain_problem(domain, get_problem(evaluations, goal_expression, domain))
opt_task = task_from_domain_problem(domain, get_problem(opt_evaluations, goal_expression, domain))
negative_from_name = {external.blocked_predicate: external for external in negative if external.is_negated()}
real_states, combined_plan = recover_negative_axioms(
real_task, opt_task, axiom_plans, action_plan, negative_from_name)
function_plan = compute_function_plan(opt_evaluations, action_plan)
# TODO: record the supporting facts
full_preimage = plan_preimage(combined_plan, [])
stream_preimage = set(full_preimage) - real_states[0]
negative_preimage = set(filter(lambda a: a.predicate in negative_from_name, stream_preimage))
function_plan.update(convert_negative(negative_preimage, negative_from_name, full_preimage, real_states))
positive_preimage = stream_preimage - negative_preimage
steps_from_fact = {fact_from_fd(l): full_preimage[l] for l in positive_preimage if not l.negated}
target_facts = {fact for fact in steps_from_fact.keys() if get_prefix(fact) != EQ}
#stream_plan = reschedule_stream_plan(evaluations, target_facts, domain, stream_results)
# visualize_constraints(map(fact_from_fd, target_facts))
# TODO: get_steps_from_stream
stream_plan = []
step_from_stream = {}
for result in current_plan:
# TODO: actually compute when these are needed + dependencies
step_from_stream[result] = 0
if isinstance(result.external, Function) or (result.external in negative):
function_plan.add(result) # Prevents these results from being pruned
else:
stream_plan.append(result)
curr_evaluations = evaluations_from_stream_plan(evaluations, stream_plan, max_effort=None)
extraction_facts = target_facts - set(map(fact_from_evaluation, curr_evaluations))
step_from_fact = {fact: min(steps_from_fact[fact]) for fact in extraction_facts}
extract_stream_plan(node_from_atom, extraction_facts, stream_plan,
step_from_fact, step_from_stream)
stream_plan = postprocess_stream_plan(evaluations, domain, stream_plan, target_facts)
eager_plan = []
actions_from_step = {}
for result in (stream_plan + list(function_plan)):
if (result.opt_index != 0) or (step_from_stream.get(result, 0) < replan_step):
eager_plan.append(result)
else:
actions_from_step.setdefault(step_from_stream[result], []).append(result.get_action())
eager_plan = convert_fluent_streams(eager_plan, real_states, action_plan, steps_from_fact, node_from_atom)
# print(action_plan)
# # TODO: propagate this forward in the future
# start_from_stream = {}
# for result in eager_plan:
# stuff = list(map(fd_from_fact, get_fluent_domain(result)))
# index = len(real_states)
# for i, state in enumerate(real_states):
# if conditions_hold(state, stuff):
# start_from_stream[result] = i
# index = i
# break
# #else:
# #start_from_stream[result] = len(real_states)
# print(index, result)
# TODO: some sort of obj side-effect bug that requires obj_from_pddl to be applied last (likely due to fluent streams)
#action_plan = transform_plan_args(map(pddl_from_instance, action_instances), obj_from_pddl)
for step, action in enumerate(action_plan):
actions_from_step.setdefault(step, []).append(transform_action_args(
pddl_from_instance(action), obj_from_pddl))
action_plan = list(flatten(actions_from_step[step] for step in sorted(actions_from_step)))
return eager_plan, action_plan
##################################################
def solve_optimistic_temporal(domain, stream_domain, applied_results, all_results,
opt_evaluations, node_from_atom, goal_expression,
effort_weight, debug=False, **kwargs):
# TODO: assert that the unused parameters are off
assert domain is stream_domain
#assert len(applied_results) == len(all_results)
problem = get_problem(opt_evaluations, goal_expression, domain)
with Verbose():
instantiated = instantiate_task(task_from_domain_problem(domain, problem))
if instantiated is None:
return instantiated, None, None, INF
problem = get_problem_pddl(opt_evaluations, goal_expression, domain.pddl)
pddl_plan, makespan = solve_tfd(domain.pddl, problem, debug=debug)
if pddl_plan is None:
return instantiated, None, pddl_plan, makespan
instance_from_action_args = defaultdict(list)
for instance in instantiated.actions:
tokens = instance.name.strip('()').split(' ')
name, args = tokens[0], tuple(tokens[1:])
instance_from_action_args[name, args].append(instance)
#instance.action, instance.var_mapping
action_instances = []
for action in pddl_plan:
instances = instance_from_action_args[action.name, action.args]
assert len(instances) == 1 # TODO: support 2 <= case
action_instances.append(instances[0])
plan = obj_from_pddl_plan(pddl_plan)
return instantiated, action_instances, plan, makespan
def solve_optimistic_sequential(domain, stream_domain, applied_results, all_results,
opt_evaluations, node_from_atom, goal_expression,
effort_weight, debug=False, **kwargs):
problem = get_problem(opt_evaluations, goal_expression, stream_domain) # begin_metric
with Verbose():
instantiated = instantiate_task(task_from_domain_problem(stream_domain, problem))
if instantiated is None:
return instantiated, None, INF
cost_from_action = add_stream_efforts(node_from_atom, instantiated, effort_weight)
if using_optimizers(applied_results):
add_optimizer_effects(instantiated, node_from_atom)
# TODO: reachieve=False when using optimizers or should add applied facts
instantiate_optimizer_axioms(instantiated, domain, all_results)
action_from_name = rename_instantiated_actions(instantiated)
with Verbose(debug):
sas_task = sas_from_instantiated(instantiated)
sas_task.metric = True
# TODO: apply renaming to hierarchy as well
# solve_from_task | serialized_solve_from_task | abstrips_solve_from_task | abstrips_solve_from_task_sequential
renamed_plan, _ = solve_from_task(sas_task, debug=debug, **kwargs)
if renamed_plan is None:
return instantiated, None, INF
action_instances = [action_from_name[name] for name, _ in renamed_plan]
cost = get_plan_cost(action_instances, cost_from_action)
return instantiated, action_instances, cost
def plan_streams(evaluations, goal_expression, domain, all_results, negative, effort_weight, max_effort,
simultaneous=False, reachieve=True, replan_actions=set(), **kwargs):
# TODO: alternatively could translate with stream actions on real opt_state and just discard them
# TODO: only consider axioms that have stream conditions?
#reachieve = reachieve and not using_optimizers(all_results)
applied_results, deferred_results = partition_results(
evaluations, all_results, apply_now=lambda r: not (simultaneous or r.external.info.simultaneous))
stream_domain, deferred_from_name = add_stream_actions(domain, deferred_results)
if reachieve and not using_optimizers(all_results):
achieved_results = {n.result for n in evaluations.values() if isinstance(n.result, Result)}
init_evaluations = {e for e, n in evaluations.items() if n.result not in achieved_results}
applied_results = achieved_results | set(applied_results)
evaluations = init_evaluations # For clarity
# TODO: could iteratively increase max_effort
node_from_atom = get_achieving_streams(evaluations, applied_results, # TODO: apply to all_results?
max_effort=max_effort)
opt_evaluations = {evaluation_from_fact(f): n.result for f, n in node_from_atom.items()}
if UNIVERSAL_TO_CONDITIONAL or using_optimizers(all_results):
goal_expression = add_unsatisfiable_to_goal(stream_domain, goal_expression)
optimistic_fn = solve_optimistic_temporal if isinstance(stream_domain, SimplifiedDomain) \
else solve_optimistic_sequential
instantiated, action_instances, cost = optimistic_fn(
domain, stream_domain, applied_results, all_results, opt_evaluations,
node_from_atom, goal_expression, effort_weight, **kwargs)
if action_instances is None:
return None, None, cost
axiom_plans = recover_axioms_plans(instantiated, action_instances)
# TODO: extract out the minimum set of conditional effects that are actually required
#simplify_conditional_effects(instantiated.task, action_instances)
stream_plan, action_instances = recover_simultaneous(
applied_results, negative, deferred_from_name, action_instances)
action_plan = transform_plan_args(map(pddl_from_instance, action_instances), obj_from_pddl)
replan_step = min([step+1 for step, action in enumerate(action_plan)
if action.name in replan_actions] or [len(action_plan)])
stream_plan, action_plan = recover_stream_plan(evaluations, stream_plan, opt_evaluations, goal_expression, stream_domain,
node_from_atom, action_instances, axiom_plans, negative, replan_step)
return stream_plan, action_plan, cost
|
hiowenluke/noapi
|
src/index.js
|
<reponame>hiowenluke/noapi<filename>src/index.js
const caller = require('caller');
const config = require('./config');
const data = require('./data');
const biz = require('./biz');
const server = require('./server');
const fn = (...args) => {
config.init(caller(), args);
data.init();
biz.init();
server.start();
};
module.exports = fn;
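// Usage sketch (hypothetical entry file; noapi resolves paths from the caller
// via caller(), so it is meant to be invoked from your service's entry module):
//   const noapi = require('noapi');
//   noapi(); // init config, data and biz, then start the server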
|
Chpark/itomp
|
itomp_cio_planner/include/itomp_cio_planner/contact/contact_util.h
|
<gh_stars>10-100
#ifndef CONTACT_UTIL_H_
#define CONTACT_UTIL_H_
#include <itomp_cio_planner/common.h>
#include <itomp_cio_planner/contact/contact_variables.h>
#include <itomp_cio_planner/model/itomp_planning_group.h>
#include <rbdl/Model.h>
namespace itomp_cio_planner
{
double getContactActiveValue(unsigned int contact, unsigned int contact_point,
const std::vector<ContactVariables>& contact_variables);
} // namespace itomp_cio_planner
#endif /* CONTACT_UTIL_H_ */
|
jychoi0918/neconico
|
src/main/java/com/neconico/neconico/dto/store/StoreInquireInfoDto.java
|
package com.neconico.neconico.dto.store;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.apache.ibatis.type.Alias;
@Getter @Setter
@NoArgsConstructor
@Alias("storeInquireInfoDto")
public class StoreInquireInfoDto {
private Long userId;
private String storeName;
private String storeImgPath;
private String accountId;
}
|
alipay/antchain-openapi-prod-sdk
|
baasdt/java/src/main/java/com/antgroup/antchain/openapi/baasdt/models/BatchqueryIpApprovalwithupdateRequest.java
|
// This file is auto-generated, don't edit it. Thanks.
package com.antgroup.antchain.openapi.baasdt.models;
import com.aliyun.tea.*;
public class BatchqueryIpApprovalwithupdateRequest extends TeaModel {
// Authorization token under OAuth mode
@NameInMap("auth_token")
public String authToken;
@NameInMap("product_instance_id")
public String productInstanceId;
// Base fields
@NameInMap("base_request")
@Validation(required = true)
public BaseRequestInfo baseRequest;
// IP name
@NameInMap("ip_name")
public String ipName;
// IP id
@NameInMap("ip_id")
public String ipId;
// IP approval status: 0 pending, 1 approved, 2 rejected
@NameInMap("approval_status")
public Long approvalStatus;
// Pagination
@NameInMap("page_size")
@Validation(required = true)
public Long pageSize;
// Pagination
@NameInMap("page_index")
@Validation(required = true)
public Long pageIndex;
// IP type
@NameInMap("ip_type")
public String ipType;
// Target audience
@NameInMap("audience_group")
public String audienceGroup;
// Start time
@NameInMap("create_begin_time")
@Validation(required = true, pattern = "\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})")
public String createBeginTime;
// End time
@NameInMap("create_end_time")
@Validation(required = true, pattern = "\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})")
public String createEndTime;
// On-chain tenant id of the product
@NameInMap("account_id")
public String accountId;
// Whether to sort by creation time in descending order
//
@NameInMap("is_create_time_sort_desc")
@Validation(required = true)
public Boolean isCreateTimeSortDesc;
// Channel name
@NameInMap("channel_name")
public String channelName;
public static BatchqueryIpApprovalwithupdateRequest build(java.util.Map<String, ?> map) throws Exception {
BatchqueryIpApprovalwithupdateRequest self = new BatchqueryIpApprovalwithupdateRequest();
return TeaModel.build(map, self);
}
public BatchqueryIpApprovalwithupdateRequest setAuthToken(String authToken) {
this.authToken = authToken;
return this;
}
public String getAuthToken() {
return this.authToken;
}
public BatchqueryIpApprovalwithupdateRequest setProductInstanceId(String productInstanceId) {
this.productInstanceId = productInstanceId;
return this;
}
public String getProductInstanceId() {
return this.productInstanceId;
}
public BatchqueryIpApprovalwithupdateRequest setBaseRequest(BaseRequestInfo baseRequest) {
this.baseRequest = baseRequest;
return this;
}
public BaseRequestInfo getBaseRequest() {
return this.baseRequest;
}
public BatchqueryIpApprovalwithupdateRequest setIpName(String ipName) {
this.ipName = ipName;
return this;
}
public String getIpName() {
return this.ipName;
}
public BatchqueryIpApprovalwithupdateRequest setIpId(String ipId) {
this.ipId = ipId;
return this;
}
public String getIpId() {
return this.ipId;
}
public BatchqueryIpApprovalwithupdateRequest setApprovalStatus(Long approvalStatus) {
this.approvalStatus = approvalStatus;
return this;
}
public Long getApprovalStatus() {
return this.approvalStatus;
}
public BatchqueryIpApprovalwithupdateRequest setPageSize(Long pageSize) {
this.pageSize = pageSize;
return this;
}
public Long getPageSize() {
return this.pageSize;
}
public BatchqueryIpApprovalwithupdateRequest setPageIndex(Long pageIndex) {
this.pageIndex = pageIndex;
return this;
}
public Long getPageIndex() {
return this.pageIndex;
}
public BatchqueryIpApprovalwithupdateRequest setIpType(String ipType) {
this.ipType = ipType;
return this;
}
public String getIpType() {
return this.ipType;
}
public BatchqueryIpApprovalwithupdateRequest setAudienceGroup(String audienceGroup) {
this.audienceGroup = audienceGroup;
return this;
}
public String getAudienceGroup() {
return this.audienceGroup;
}
public BatchqueryIpApprovalwithupdateRequest setCreateBeginTime(String createBeginTime) {
this.createBeginTime = createBeginTime;
return this;
}
public String getCreateBeginTime() {
return this.createBeginTime;
}
public BatchqueryIpApprovalwithupdateRequest setCreateEndTime(String createEndTime) {
this.createEndTime = createEndTime;
return this;
}
public String getCreateEndTime() {
return this.createEndTime;
}
public BatchqueryIpApprovalwithupdateRequest setAccountId(String accountId) {
this.accountId = accountId;
return this;
}
public String getAccountId() {
return this.accountId;
}
public BatchqueryIpApprovalwithupdateRequest setIsCreateTimeSortDesc(Boolean isCreateTimeSortDesc) {
this.isCreateTimeSortDesc = isCreateTimeSortDesc;
return this;
}
public Boolean getIsCreateTimeSortDesc() {
return this.isCreateTimeSortDesc;
}
public BatchqueryIpApprovalwithupdateRequest setChannelName(String channelName) {
this.channelName = channelName;
return this;
}
public String getChannelName() {
return this.channelName;
}
}
|
dan-blanchard/incubator-storm
|
storm-client/src/jvm/org/apache/storm/executor/ExecutorTransfer.java
|
<gh_stars>1-10
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.executor;
import com.google.common.annotations.VisibleForTesting;
import com.lmax.disruptor.EventHandler;
import org.apache.storm.Config;
import org.apache.storm.daemon.worker.WorkerState;
import org.apache.storm.serialization.KryoTupleSerializer;
import org.apache.storm.tuple.AddressedTuple;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.utils.DisruptorQueue;
import org.apache.storm.utils.MutableObject;
import org.apache.storm.utils.ObjectReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.Callable;
public class ExecutorTransfer implements EventHandler, Callable {
private static final Logger LOG = LoggerFactory.getLogger(ExecutorTransfer.class);
private final WorkerState workerData;
private final DisruptorQueue batchTransferQueue;
private final Map<String, Object> topoConf;
private final KryoTupleSerializer serializer;
private final MutableObject cachedEmit;
private final boolean isDebug;
public ExecutorTransfer(WorkerState workerData, DisruptorQueue batchTransferQueue, Map<String, Object> topoConf) {
this.workerData = workerData;
this.batchTransferQueue = batchTransferQueue;
this.topoConf = topoConf;
this.serializer = new KryoTupleSerializer(topoConf, workerData.getWorkerTopologyContext());
this.cachedEmit = new MutableObject(new ArrayList<>());
this.isDebug = ObjectReader.getBoolean(topoConf.get(Config.TOPOLOGY_DEBUG), false);
}
public void transfer(int task, Tuple tuple) {
AddressedTuple val = new AddressedTuple(task, tuple);
if (isDebug) {
LOG.info("TRANSFERRING tuple {}", val);
}
batchTransferQueue.publish(val);
}
@VisibleForTesting
public DisruptorQueue getBatchTransferQueue() {
return this.batchTransferQueue;
}
@Override
public Object call() throws Exception {
batchTransferQueue.consumeBatchWhenAvailable(this);
return 0L;
}
public String getName() {
return batchTransferQueue.getName();
}
@Override
public void onEvent(Object event, long sequence, boolean endOfBatch) throws Exception {
ArrayList cachedEvents = (ArrayList) cachedEmit.getObject();
cachedEvents.add(event);
if (endOfBatch) {
workerData.transfer(serializer, cachedEvents);
cachedEmit.setObject(new ArrayList<>());
}
}
}
|
jpkmiller/Three-Easy-Pieces
|
30_CV/pc-header.h
|
#ifndef __pc_header_h__
#define __pc_header_h__
#define MAX_THREADS (100) // maximum number of producers/consumers
int producers = 1; // number of producers
int consumers = 1; // number of consumers
int *buffer; // the buffer itself: malloc in main()
int max; // size of the producer/consumer buffer
int use_ptr = 0; // tracks where next consume should come from
int fill_ptr = 0; // tracks where next produce should go to
int num_full = 0; // counts how many entries are full
int loops; // number of items that each producer produces
#define EMPTY (-2) // buffer slot has nothing in it
#define END_OF_STREAM (-1) // consumer who grabs this should exit
#endif // __pc_header_h__
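/* A minimal sketch (not part of the original header) of the fill/get helpers
 * that typically operate on these globals in the producer/consumer solution;
 * the function names are illustrative:
 *
 *   void do_fill(int value) {
 *       buffer[fill_ptr] = value;         // produce into the next free slot
 *       fill_ptr = (fill_ptr + 1) % max;  // advance circularly
 *       num_full++;
 *   }
 *
 *   int do_get(void) {
 *       int tmp = buffer[use_ptr];        // consume the oldest filled slot
 *       use_ptr = (use_ptr + 1) % max;    // advance circularly
 *       num_full--;
 *       return tmp;
 *   }
 */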
|
lack/kubernetes
|
test/e2e/storage/vsphere/bootstrap.go
|
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package vsphere
import (
"context"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/kubernetes/test/e2e/framework"
"sync"
)
var once sync.Once
var waiting = make(chan bool)
var f *framework.Framework
// Bootstrap takes care of initializing necessary test context for vSphere tests
func Bootstrap(fw *framework.Framework) {
done := make(chan bool)
f = fw
go func() {
once.Do(bootstrapOnce)
<-waiting
done <- true
}()
<-done
}
func bootstrapOnce() {
// 1. Read vSphere conf and get VSphere instances
vsphereInstances, err := GetVSphereInstances()
if err != nil {
framework.Failf("Failed to bootstrap vSphere with error: %v", err)
}
// 2. Get all nodes
nodeList, err := f.ClientSet.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{})
if err != nil {
framework.Failf("Failed to get nodes: %v", err)
}
TestContext = Context{NodeMapper: &NodeMapper{}, VSphereInstances: vsphereInstances}
// 3. Get Node to VSphere mapping
err = TestContext.NodeMapper.GenerateNodeMap(vsphereInstances, *nodeList)
if err != nil {
framework.Failf("Failed to bootstrap vSphere with error: %v", err)
}
// 4. Generate Zone to Datastore mapping
err = TestContext.NodeMapper.GenerateZoneToDatastoreMap()
if err != nil {
framework.Failf("Failed to generate zone to datastore mapping with error: %v", err)
}
close(waiting)
}
|
tannerjfco/hedera-services
|
hedera-node/src/test/java/com/hedera/services/ledger/HederaLedgerLiveTest.java
|
package com.hedera.services.ledger;
/*-
*
* Hedera Services Node
*
* Copyright (C) 2018 - 2020 Hedera Hashgraph, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import com.hedera.services.config.MockGlobalDynamicProps;
import com.hedera.services.exceptions.InconsistentAdjustmentsException;
import com.hedera.services.ledger.accounts.BackingTokenRels;
import com.hedera.services.ledger.accounts.HashMapBackingAccounts;
import com.hedera.services.ledger.accounts.HashMapBackingTokenRels;
import com.hedera.services.ledger.accounts.HederaAccountCustomizer;
import com.hedera.services.ledger.properties.AccountProperty;
import com.hedera.services.ledger.properties.ChangeSummaryManager;
import com.hedera.services.ledger.properties.TokenRelProperty;
import com.hedera.services.state.merkle.MerkleAccount;
import com.hedera.services.state.merkle.MerkleEntityId;
import com.hedera.services.state.merkle.MerkleToken;
import com.hedera.services.state.merkle.MerkleTokenRelStatus;
import com.hedera.services.store.tokens.HederaTokenStore;
import com.hedera.test.factories.scenarios.TxnHandlingScenario;
import com.hedera.test.mocks.TestContextValidator;
import com.hedera.test.utils.TxnUtils;
import com.hederahashgraph.api.proto.java.AccountAmount;
import com.hederahashgraph.api.proto.java.AccountID;
import com.hederahashgraph.api.proto.java.Timestamp;
import com.hederahashgraph.api.proto.java.TokenCreateTransactionBody;
import com.hederahashgraph.api.proto.java.TokenID;
import com.hederahashgraph.api.proto.java.TokenTransferList;
import com.swirlds.fcmap.FCMap;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.util.List;
import static com.hedera.test.utils.IdUtils.asAccount;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.BDDMockito.verify;
@RunWith(JUnitPlatform.class)
public class HederaLedgerLiveTest extends BaseHederaLedgerTest {
long thisSecond = 1_234_567L;
@BeforeEach
void setup() {
commonSetup();
accountsLedger = new TransactionalLedger<>(
AccountProperty.class,
() -> new MerkleAccount(),
new HashMapBackingAccounts(),
new ChangeSummaryManager<>());
FCMap<MerkleEntityId, MerkleToken> tokens = new FCMap<>();
tokenRelsLedger = new TransactionalLedger<>(
TokenRelProperty.class,
() -> new MerkleTokenRelStatus(),
new HashMapBackingTokenRels(),
new ChangeSummaryManager<>());
tokenRelsLedger.setKeyToString(BackingTokenRels::readableTokenRel);
tokenStore = new HederaTokenStore(
ids,
TestContextValidator.TEST_VALIDATOR,
new MockGlobalDynamicProps(),
() -> tokens,
tokenRelsLedger);
subject = new HederaLedger(tokenStore, ids, creator, historian, accountsLedger);
}
@Test
public void throwsOnCommittingInconsistentAdjustments() {
// when:
subject.begin();
subject.adjustBalance(genesis, -1L);
// then:
assertThrows(InconsistentAdjustmentsException.class, () -> subject.commit());
}
@Test
public void resetsNetTransfersAfterCommit() {
// when:
subject.begin();
AccountID a = subject.create(genesis, 1_000L, new HederaAccountCustomizer().memo("a"));
subject.commit();
// and:
subject.begin();
AccountID b = subject.create(genesis, 2_000L, new HederaAccountCustomizer().memo("b"));
// then:
assertEquals(2L, subject.netTransfersInTxn().getAccountAmountsList().size());
}
@Test
public void doesntIncludeZeroAdjustsInNetTransfers() {
// when:
subject.begin();
AccountID a = subject.create(genesis, 1_000L, new HederaAccountCustomizer().memo("a"));
subject.delete(a, genesis);
// then:
assertEquals(0L, subject.netTransfersInTxn().getAccountAmountsList().size());
}
@Test
public void doesntAllowDestructionOfRealCurrency() {
// when:
subject.begin();
AccountID a = subject.create(genesis, 1_000L, new HederaAccountCustomizer().memo("a"));
subject.destroy(a);
// then:
assertThrows(InconsistentAdjustmentsException.class, () -> subject.commit());
}
@Test
public void allowsDestructionOfEphemeralCurrency() {
// when:
subject.begin();
AccountID a = asAccount("1.2.3");
subject.spawn(a, 1_000L, new HederaAccountCustomizer().memo("a"));
subject.destroy(a);
subject.commit();
// then:
assertFalse(subject.exists(a));
assertEquals(GENESIS_BALANCE, subject.getBalance(genesis));
}
@Test
public void recordsCreationOfAccountDeletedInSameTxn() {
// when:
subject.begin();
AccountID a = subject.create(genesis, 1_000L, new HederaAccountCustomizer().memo("a"));
subject.delete(a, genesis);
int numNetTransfers = subject.netTransfersInTxn().getAccountAmountsCount();
subject.commit();
// then:
assertEquals(0, numNetTransfers);
assertTrue(subject.exists(a));
assertEquals(GENESIS_BALANCE, subject.getBalance(genesis));
}
@Test
public void addsRecordsBeforeCommitting() {
// when:
subject.begin();
AccountID a = subject.create(genesis, 1_000L, new HederaAccountCustomizer().memo("a"));
subject.commit();
// then:
verify(historian).addNewRecords();
}
@Test
public void resetsNetTransfersAfterRollback() {
// when:
subject.begin();
AccountID a = subject.create(genesis, 1_000L, new HederaAccountCustomizer().memo("a"));
subject.rollback();
// and:
subject.begin();
AccountID b = subject.create(genesis, 2_000L, new HederaAccountCustomizer().memo("b"));
// then:
assertEquals(2L, subject.netTransfersInTxn().getAccountAmountsList().size());
}
@Test
public void returnsNetTransfersInBalancedTxn() {
setup();
// and:
TokenID tA, tB;
// when:
subject.begin();
AccountID a = subject.create(genesis, 1_000L, new HederaAccountCustomizer().memo("a"));
AccountID b = subject.create(genesis, 2_000L, new HederaAccountCustomizer().memo("b"));
AccountID c = subject.create(genesis, 3_000L, new HederaAccountCustomizer().memo("c"));
AccountID d = subject.create(genesis, 4_000L, new HederaAccountCustomizer().memo("d"));
// and:
var rA = tokenStore.createProvisionally(stdWith("MINE", "MINE", a), a, thisSecond);
tA = rA.getCreated().get();
tokenStore.commitCreation();
var rB = tokenStore.createProvisionally(stdWith("YOURS", "YOURS", b), b, thisSecond);
tB = rB.getCreated().get();
tokenStore.commitCreation();
// and:
tokenStore.associate(a, List.of(tA, tB));
tokenStore.associate(b, List.of(tA, tB));
tokenStore.associate(c, List.of(tA, tB));
tokenStore.associate(d, List.of(tA, tB));
// and:
subject.doTransfer(d, a, 1_000L);
subject.delete(d, b);
subject.adjustBalance(c, 1_000L);
subject.adjustBalance(genesis, -1_000L);
subject.doTransfers(TxnUtils.withAdjustments(a, -500L, b, 250L, c, 250L));
// and:
subject.adjustTokenBalance(a, tA, +10_000);
subject.adjustTokenBalance(a, tA, -5_000);
subject.adjustTokenBalance(a, tB, +1);
subject.adjustTokenBalance(a, tB, -1);
subject.adjustTokenBalance(b, tB, +10_000);
subject.adjustTokenBalance(c, tB, +50);
subject.adjustTokenBalance(c, tB, +50);
subject.adjustTokenBalance(c, tB, -50);
subject.adjustTokenBalance(c, tA, +5000);
subject.freeze(a, tB);
subject.adjustTokenBalance(a, tB, +1_000_000);
accountsLedger.changeSetSoFar();
// then:
assertThat(
subject.netTransfersInTxn().getAccountAmountsList(),
containsInAnyOrder(
AccountAmount.newBuilder().setAccountID(a).setAmount(1_500L).build(),
AccountAmount.newBuilder().setAccountID(b).setAmount(5_250L).build(),
AccountAmount.newBuilder().setAccountID(c).setAmount(4_250L).build(),
AccountAmount.newBuilder().setAccountID(genesis).setAmount(-11_000L).build()));
// and:
assertThat(subject.netTokenTransfersInTxn(),
contains(
construct(tA, aa(a, +5_000), aa(c, +5_000)),
construct(tB, aa(b, +10_000), aa(c, +50))
));
}
@Test
public void recognizesPendingCreates() {
setup();
// when:
subject.begin();
AccountID a = subject.create(genesis, 1L, new HederaAccountCustomizer().memo("a"));
// then:
assertTrue(subject.isPendingCreation(a));
assertFalse(subject.isPendingCreation(genesis));
}
private TokenCreateTransactionBody stdWith(String symbol, String tokenName, AccountID account) {
var key = TxnHandlingScenario.COMPLEX_KEY_ACCOUNT_KT.asKey();
return TokenCreateTransactionBody.newBuilder()
.setAdminKey(key)
.setFreezeKey(TxnHandlingScenario.COMPLEX_KEY_ACCOUNT_KT.asKey())
.setSymbol(symbol)
.setName(tokenName)
.setInitialSupply(0)
.setTreasury(account)
.setExpiry(Timestamp.newBuilder().setSeconds(2 * thisSecond))
.setDecimals(0)
.setFreezeDefault(false)
.build();
}
private TokenTransferList construct(TokenID token, AccountAmount... xfers) {
return TokenTransferList.newBuilder()
.setToken(token)
.addAllTransfers(List.of(xfers))
.build();
}
}
|
metux/chromium-deb
|
third_party/WebKit/Source/platform/graphics/compositing/ContentLayerClientImplTest.cpp
|
<filename>third_party/WebKit/Source/platform/graphics/compositing/ContentLayerClientImplTest.cpp<gh_stars>0
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "platform/graphics/compositing/ContentLayerClientImpl.h"
#include "cc/layers/picture_layer.h"
#include "platform/graphics/paint/PaintArtifact.h"
#include "platform/testing/FakeDisplayItemClient.h"
#include "platform/testing/RuntimeEnabledFeaturesTestHelpers.h"
#include "platform/wtf/dtoa/utils.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace blink {
static const IntRect kDefaultLayerBounds(-9999, -7777, 18888, 16666);
class ContentLayerClientImplTest : public ::testing::Test,
private ScopedSlimmingPaintV2ForTest {
protected:
ContentLayerClientImplTest() : ScopedSlimmingPaintV2ForTest(true) {}
static PropertyTreeState DefaultPropertyTreeState() {
return PropertyTreeState(TransformPaintPropertyNode::Root(),
ClipPaintPropertyNode::Root(),
EffectPaintPropertyNode::Root());
}
static PaintChunk Chunk(
int type,
int raster_invalidation_count = 0,
PaintChunk::Cacheable cacheable = PaintChunk::kCacheable) {
DEFINE_STATIC_LOCAL(FakeDisplayItemClient, fake_client, ());
fake_client.ClearIsJustCreated();
// The enum arithmetic and magic numbers are used to produce different values
// of paint chunk and raster invalidation properties.
PaintChunk::Id id(fake_client, static_cast<DisplayItem::Type>(
DisplayItem::kDrawingFirst + type));
PaintChunk chunk(0, 0, id, PaintChunkProperties(DefaultPropertyTreeState()),
cacheable);
chunk.bounds =
FloatRect(type * 110, type * 220, type * 220 + 200, type * 110 + 200);
for (int i = 0; i < raster_invalidation_count; ++i) {
chunk.raster_invalidation_rects.push_back(FloatRect(
type * 11, type * 22, type * 22 + 100 + i, type * 11 + 100 + i));
RasterInvalidationInfo info;
info.client = &id.client;
info.reason = static_cast<PaintInvalidationReason>(
static_cast<int>(PaintInvalidationReason::kFull) + type + i);
chunk.raster_invalidation_tracking.push_back(info);
}
return chunk;
}
static const Vector<RasterInvalidationInfo> TrackedRasterInvalidations(
const ContentLayerClientImpl& c) {
return c.TrackedRasterInvalidations();
}
static IntRect ChunkRectToLayer(const FloatRect& rect,
const IntPoint& layer_offset) {
FloatRect r = rect;
r.MoveBy(-layer_offset);
return EnclosingIntRect(r);
}
static void ExpectDisplayItemInvalidations(
const Vector<RasterInvalidationInfo>& invalidations,
size_t index,
const PaintChunk& chunk,
const IntPoint& layer_offset = kDefaultLayerBounds.Location()) {
for (size_t i = 0; i < chunk.raster_invalidation_rects.size(); ++i) {
SCOPED_TRACE(index + i);
const auto& info = invalidations[index + i];
EXPECT_EQ(
ChunkRectToLayer(chunk.raster_invalidation_rects[i], layer_offset),
info.rect);
EXPECT_EQ(&chunk.id.client, info.client);
EXPECT_EQ(chunk.raster_invalidation_tracking[i].reason, info.reason);
}
}
static void ExpectChunkInvalidation(
const Vector<RasterInvalidationInfo>& invalidations,
size_t index,
const PaintChunk& chunk,
PaintInvalidationReason reason,
const IntPoint& layer_offset = kDefaultLayerBounds.Location()) {
SCOPED_TRACE(index);
const auto& info = invalidations[index];
EXPECT_EQ(ChunkRectToLayer(chunk.bounds, layer_offset), info.rect);
EXPECT_EQ(&chunk.id.client, info.client);
EXPECT_EQ(reason, info.reason);
}
};
#define CHUNKS(name, ...) \
PaintChunk name##_array[] = {__VA_ARGS__}; \
Vector<const PaintChunk*> name; \
for (size_t i = 0; i < ARRAY_SIZE(name##_array); ++i) \
name.push_back(&name##_array[i]);
TEST_F(ContentLayerClientImplTest, LayerBounds) {
ContentLayerClientImpl c;
c.SetTracksRasterInvalidations(true);
CHUNKS(chunks, Chunk(0));
auto cc_layer =
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, chunks,
DefaultPropertyTreeState(), false);
ASSERT_TRUE(cc_layer);
EXPECT_EQ(gfx::Rect(kDefaultLayerBounds.Size()), c.PaintableRegion());
EXPECT_EQ(gfx::Size(kDefaultLayerBounds.Size()), cc_layer->bounds());
// No raster invalidations needed for a new layer.
EXPECT_TRUE(TrackedRasterInvalidations(c).IsEmpty());
auto cc_layer1 = c.UpdateCcPictureLayer(
PaintArtifact(),
IntRect(kDefaultLayerBounds.Location(), IntSize(1234, 2345)), chunks,
DefaultPropertyTreeState(), false);
EXPECT_EQ(cc_layer, cc_layer1);
EXPECT_EQ(gfx::Rect(0, 0, 1234, 2345), c.PaintableRegion());
EXPECT_EQ(gfx::Size(1234, 2345), cc_layer->bounds());
// No raster invalidations needed if layer origin doesn't change.
EXPECT_TRUE(TrackedRasterInvalidations(c).IsEmpty());
auto cc_layer2 =
c.UpdateCcPictureLayer(PaintArtifact(), IntRect(-555, -666, 777, 888),
chunks, DefaultPropertyTreeState(), false);
EXPECT_EQ(cc_layer, cc_layer2);
EXPECT_EQ(gfx::Rect(0, 0, 777, 888), c.PaintableRegion());
EXPECT_EQ(gfx::Size(777, 888), cc_layer->bounds());
// Invalidate the whole layer on layer origin change.
const auto& invalidations = TrackedRasterInvalidations(c);
ASSERT_EQ(1u, invalidations.size());
EXPECT_EQ(IntRect(0, 0, 777, 888), invalidations[0].rect);
EXPECT_EQ(PaintInvalidationReason::kFullLayer, invalidations[0].reason);
}
TEST_F(ContentLayerClientImplTest, RasterInvalidationReorderChunks) {
ContentLayerClientImpl c;
CHUNKS(chunks, Chunk(0), Chunk(1), Chunk(2));
c.SetTracksRasterInvalidations(true);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, chunks,
DefaultPropertyTreeState(), false);
EXPECT_TRUE(TrackedRasterInvalidations(c).IsEmpty());
// Swap chunk 1 and 2. All chunks have their own local raster invalidations.
CHUNKS(new_chunks, Chunk(0, 2), Chunk(2, 4), Chunk(1, 3));
new_chunks_array[1].bounds = FloatRect(11, 22, 33, 44);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, new_chunks,
DefaultPropertyTreeState(), false);
const auto& invalidations = TrackedRasterInvalidations(c);
ASSERT_EQ(5u, invalidations.size());
// The first chunk should always match because otherwise we won't reuse the
// ContentLayerClientImpl (which is keyed by the first chunk's id).
// For matched chunk, we issue raster invalidations if any found by
// PaintController.
ExpectDisplayItemInvalidations(invalidations, 0, *new_chunks[0]);
// Invalidated new chunk 1's old (as chunks[2]) and new (as new_chunks[1])
// bounds.
ExpectChunkInvalidation(invalidations, 2, *chunks[2],
PaintInvalidationReason::kChunkReordered);
ExpectChunkInvalidation(invalidations, 3, *new_chunks[1],
PaintInvalidationReason::kChunkReordered);
// Invalidated new chunk 2's new bounds. Didn't invalidate old bounds because
// it's the same as the new bounds.
ExpectChunkInvalidation(invalidations, 4, *new_chunks[2],
PaintInvalidationReason::kChunkReordered);
}
TEST_F(ContentLayerClientImplTest, RasterInvalidationAppearAndDisappear) {
ContentLayerClientImpl c;
CHUNKS(chunks, Chunk(0), Chunk(1), Chunk(2));
c.SetTracksRasterInvalidations(true);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, chunks,
DefaultPropertyTreeState(), false);
EXPECT_TRUE(TrackedRasterInvalidations(c).IsEmpty());
// Chunk 1 and 2 disappeared, 3 and 4 appeared. All chunks have their own
// local raster invalidations.
CHUNKS(new_chunks, Chunk(0, 2), Chunk(3, 3), Chunk(4, 3));
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, new_chunks,
DefaultPropertyTreeState(), false);
const auto& invalidations = TrackedRasterInvalidations(c);
ASSERT_EQ(6u, invalidations.size());
ExpectDisplayItemInvalidations(invalidations, 0, *new_chunks[0]);
ExpectChunkInvalidation(invalidations, 2, *new_chunks[1],
PaintInvalidationReason::kAppeared);
ExpectChunkInvalidation(invalidations, 3, *new_chunks[2],
PaintInvalidationReason::kAppeared);
ExpectChunkInvalidation(invalidations, 4, *chunks[1],
PaintInvalidationReason::kDisappeared);
ExpectChunkInvalidation(invalidations, 5, *chunks[2],
PaintInvalidationReason::kDisappeared);
}
TEST_F(ContentLayerClientImplTest, RasterInvalidationAppearAtEnd) {
ContentLayerClientImpl c;
CHUNKS(chunks, Chunk(0));
c.SetTracksRasterInvalidations(true);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, chunks,
DefaultPropertyTreeState(), false);
EXPECT_TRUE(TrackedRasterInvalidations(c).IsEmpty());
CHUNKS(new_chunks, Chunk(0, 2), Chunk(1, 3), Chunk(2, 3));
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, new_chunks,
DefaultPropertyTreeState(), false);
const auto& invalidations = TrackedRasterInvalidations(c);
ASSERT_EQ(4u, invalidations.size());
ExpectDisplayItemInvalidations(invalidations, 0, *new_chunks[0]);
ExpectChunkInvalidation(invalidations, 2, *new_chunks[1],
PaintInvalidationReason::kAppeared);
ExpectChunkInvalidation(invalidations, 3, *new_chunks[2],
PaintInvalidationReason::kAppeared);
}
TEST_F(ContentLayerClientImplTest, RasterInvalidationUncacheableChunks) {
ContentLayerClientImpl c;
CHUNKS(chunks, Chunk(0), Chunk(1, 0, PaintChunk::kUncacheable), Chunk(2));
c.SetTracksRasterInvalidations(true);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, chunks,
DefaultPropertyTreeState(), false);
EXPECT_TRUE(TrackedRasterInvalidations(c).IsEmpty());
CHUNKS(new_chunks, Chunk(0, 2), Chunk(2, 3),
Chunk(1, 3, PaintChunk::kUncacheable));
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, new_chunks,
DefaultPropertyTreeState(), false);
const auto& invalidations = TrackedRasterInvalidations(c);
ASSERT_EQ(5u, invalidations.size());
ExpectDisplayItemInvalidations(invalidations, 0, *new_chunks[0]);
ExpectChunkInvalidation(invalidations, 2, *new_chunks[1],
PaintInvalidationReason::kChunkReordered);
ExpectChunkInvalidation(invalidations, 3, *new_chunks[2],
PaintInvalidationReason::kChunkUncacheable);
ExpectChunkInvalidation(invalidations, 4, *chunks[1],
PaintInvalidationReason::kChunkUncacheable);
}
TEST_F(ContentLayerClientImplTest, RasterInvalidationPaintPropertyChange) {
ContentLayerClientImpl c;
CHUNKS(chunks, Chunk(0), Chunk(1), Chunk(2));
FloatRoundedRect clip_rect(-100000, -100000, 200000, 200000);
RefPtr<ClipPaintPropertyNode> clip0 = ClipPaintPropertyNode::Create(
ClipPaintPropertyNode::Root(), TransformPaintPropertyNode::Root(),
clip_rect);
RefPtr<ClipPaintPropertyNode> clip2 = ClipPaintPropertyNode::Create(
clip0, TransformPaintPropertyNode::Root(), clip_rect);
PropertyTreeState layer_state(TransformPaintPropertyNode::Root(), clip0.Get(),
EffectPaintPropertyNode::Root());
chunks_array[0].properties = PaintChunkProperties(layer_state);
chunks_array[1].properties = PaintChunkProperties(layer_state);
chunks_array[2].properties = PaintChunkProperties(
PropertyTreeState(TransformPaintPropertyNode::Root(), clip2.Get(),
EffectPaintPropertyNode::Root()));
c.SetTracksRasterInvalidations(true);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, chunks,
layer_state, false);
EXPECT_TRUE(TrackedRasterInvalidations(c).IsEmpty());
// Change both clip0 and clip2.
CHUNKS(new_chunks, Chunk(0), Chunk(1), Chunk(2));
FloatRoundedRect new_clip_rect(-200000, -200000, 400000, 400000);
clip0->Update(clip0->Parent(), clip0->LocalTransformSpace(), new_clip_rect);
clip2->Update(clip2->Parent(), clip2->LocalTransformSpace(), new_clip_rect);
new_chunks_array[0].properties = chunks[0]->properties;
new_chunks_array[1].properties = chunks[1]->properties;
new_chunks_array[2].properties = chunks[2]->properties;
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, new_chunks,
layer_state, false);
const auto& invalidations = TrackedRasterInvalidations(c);
ASSERT_EQ(1u, invalidations.size());
// Property change in the layer state should not trigger raster invalidation.
// |clip2| change should trigger raster invalidation.
ExpectChunkInvalidation(invalidations, 0, *new_chunks[2],
PaintInvalidationReason::kPaintProperty);
c.SetTracksRasterInvalidations(false);
clip2->ClearChangedToRoot();
// Change chunk1's properties to use a different property tree state.
CHUNKS(new_chunks1, Chunk(0), Chunk(1), Chunk(2));
new_chunks1_array[0].properties = chunks[0]->properties;
new_chunks1_array[1].properties = chunks[2]->properties;
new_chunks1_array[2].properties = chunks[2]->properties;
c.SetTracksRasterInvalidations(true);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, new_chunks1,
layer_state, false);
const auto& invalidations1 = TrackedRasterInvalidations(c);
ASSERT_EQ(1u, invalidations1.size());
ExpectChunkInvalidation(invalidations1, 0, *new_chunks1[1],
PaintInvalidationReason::kPaintProperty);
c.SetTracksRasterInvalidations(false);
// Change of layer state invalidates the whole layer.
c.SetTracksRasterInvalidations(true);
c.UpdateCcPictureLayer(PaintArtifact(), kDefaultLayerBounds, new_chunks1,
DefaultPropertyTreeState(), false);
const auto& invalidations2 = TrackedRasterInvalidations(c);
ASSERT_EQ(1u, invalidations2.size());
EXPECT_EQ(PaintInvalidationReason::kFullLayer, invalidations2[0].reason);
}
} // namespace blink
|
Thiq/scripts
|
node_modules/quests2/types.js
|
<reponame>Thiq/scripts<filename>node_modules/quests2/types.js
const QuestType = {
BREAK: 0,
PLACE: 1,
CRAFT: 2,
FISH: 3,
KILL: 4,
BREED: 5,
COLLECT: 6,
SMELT: 7,
LOCATE: 8,
0: 'BREAK',
1: 'PLACE',
2: 'CRAFT',
3: 'FISH',
4: 'KILL',
5: 'BREED',
6: 'COLLECT',
7: 'SMELT',
8: 'LOCATE'
}
module.exports = QuestType;
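// Usage sketch: the map is bidirectional, so lookups work in both directions
// (the require path below is an assumption based on this file's location):
//   const QuestType = require('quests2/types');
//   QuestType.KILL; // -> 4
//   QuestType[4];   // -> 'KILL'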
|
willdunklin/kwiver
|
vital/plugin_loader/plugin_manager.h
|
// This file is part of KWIVER, and is distributed under the
// OSI-approved BSD 3-Clause License. See top-level LICENSE file or
// https://github.com/Kitware/kwiver/blob/master/LICENSE for details.
/// \file
/// \brief Interface for plugin manager.
#ifndef KWIVER_VITAL_PLUGIN_MANAGER_H
#define KWIVER_VITAL_PLUGIN_MANAGER_H
#include <vital/plugin_loader/vital_vpm_export.h>
#include <vital/plugin_loader/plugin_loader.h>
#include <vital/plugin_loader/plugin_factory.h>
#include <vital/exceptions/plugin.h>
#include <vital/logger/logger.h>
#include <vital/util/demangle.h>
#include <vital/bitflags.h>
#include <vital/noncopyable.h>
#include <memory>
#include <sstream>
namespace kwiver {
namespace vital {
// ----------------------------------------------------------------------------
/// @brief Vital plugin manager.
///
/// This class is the main plugin manager for all kwiver components.
///
/// Behaves as a decorator for plugin_loader
class VITAL_VPM_EXPORT plugin_manager
: private kwiver::vital::noncopyable
{
public:
typedef std::string module_t; // module name type
enum class plugin_type
{
PROCESSES = 0x0001,
ALGORITHMS = 0x0002,
APPLETS = 0x0004,
EXPLORER = 0x0008,
OTHERS = 0x0020,
LEGACY = 0x0040,
DEFAULT = 0x00f7,
ALL = 0xffff
};
KWIVER_DECLARE_BITFLAGS( plugin_types, plugin_type );
static plugin_manager& instance(); // singleton interface
/// @brief Load all reachable plugins.
///
/// This method loads all plugins that can be discovered on the
/// currently active search path. This method is called after all
/// search paths have been added with the add_search_path() method.
///
/// The first call to this method will load all known
/// plugins. Subsequent calls will not load anything. If the plugins
/// need to be reloaded, call the reload_plugins() method. If an
/// additional directory list must be scanned after plugins are
/// loaded, call load_plugins() with a list of directories to add
/// more plugins to the manager.
///
/// @throws plugin_already_exists - if a duplicate plugin is detected
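///
/// Example (a minimal usage sketch; the search path value is only an
/// illustration):
/// \code
/// auto& vpm = kwiver::vital::plugin_manager::instance();
/// vpm.add_search_path( "/opt/kwiver/plugins" ); // optional extra directory
/// vpm.load_all_plugins();
/// \endcode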
void load_all_plugins( plugin_types types = plugin_type::DEFAULT );
/// @brief Load plugins from list of directories.
///
/// Load plugins from the specified list of directories. The
/// directories are scanned immediately and all recognized plugins
/// are loaded.
///
/// @param dirpath List of directories to search.
///
/// @throws plugin_already_exists - if a duplicate plugin is detected
void load_plugins( path_list_t const& dirpath );
/// @brief Add additional directories to search for plugins in.
///
/// This method adds the specified directory list to the end of the
/// internal path used when loading plugins. This method can be
/// called multiple times to add multiple sets of directories. Each
/// directory is separated from the next by the standard system path
/// separator character.
///
/// Single directories can be added with this method.
///
/// Call the load_plugins() method to load plugins after you have
/// added all additional directories.
///
/// Directory paths that don't exist will simply be ignored.
///
/// \param dirpath Path to the directories to add to the plugin search path.
void add_search_path(path_t const& dirpath);
/// @brief Add additional directories to search for plugins in.
///
/// This method adds the specified directory list to the end of the
/// internal path used when loading plugins. This method can be
/// called multiple times to add multiple sets of directories.
///
/// Call the load_plugins() method to load plugins after you have
/// added all additional directories.
///
/// Directory paths that don't exist will simply be ignored.
///
/// \param dirpath Path to the directories to add to the plugin search path.
void add_search_path( path_list_t const& dirpath );
/// @brief Add factory to manager.
///
/// This method adds the specified plugin factory to the plugin
/// manager. This method is usually called from the plugin
/// registration function in the loadable module to self-register all
/// plugins in a module.
///
/// The plugin_manager takes ownership of the factory object supplied
/// and deletes it when the program terminates. Therefore the factory
/// object must be allocated from the heap and never allocated on the
/// stack.
///
/// Plugin factory objects are grouped under the interface type name,
/// so all factories that create the same interface are together.
///
/// @param fact Plugin factory object to register
///
/// @return A pointer is returned to the added factory so attributes
/// can be added to the factory.
///
/// Example:
/// \code
/// void add_factories( plugin_loader* pm )
/// {
/// plugin_factory_handle_t fact = pm->add_factory( new foo_factory() );
/// fact->add_attribute( "file-type", "xml mit" );
/// }
/// \endcode
plugin_factory_handle_t add_factory( plugin_factory* fact );
/// @brief Get list of factories for interface type.
///
/// This method returns a list of pointer to factory methods that
/// create objects of the desired interface type.
///
/// @param type_name Type name of the interface required
///
/// @return Vector of factories. (vector may be empty)
plugin_factory_vector_t const& get_factories( std::string const& type_name );
/// @brief Get list of factories for interface type.
///
/// This method returns a list of pointer to factory methods that
/// create objects of the desired interface type.
///
/// @tparam T Type of the interface required
///
/// @return Vector of factories. (vector may be empty)
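///
/// Example (sketch; `my_interface` stands in for any interface type that a
/// loaded plugin registers):
/// \code
/// auto const& factories =
///   kwiver::vital::plugin_manager::instance().get_factories< my_interface >();
/// \endcode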
template <class T>
plugin_factory_vector_t const& get_factories()
{
return get_factories (typeid( T ).name() );
}
/// @brief Reload all plugins.
///
/// The current list of factories is deleted, all currently open
/// files are closed, and storage released. The module loading
/// process is performed using the current state of this manager.
///
/// This effectively resets the singleton.
void reload_plugins();
/// @brief Has the module been loaded
///
/// This method reports if the specified module has been loaded.
///
/// @return \b true if module has been loaded. \b false otherwise.
bool is_module_loaded( module_t const& name) const;
/// @brief Mark module as loaded.
///
/// This method adds the specified module name to the list of loaded
/// modules. The presence of a module name can be determined with the
/// is_module_loaded() method.
///
/// @param name Module to mark as loaded.
void mark_module_as_loaded( module_t const& name );
/// @brief Add path from environment variable name.
///
/// This method adds the path from the environment variable to the end
/// of the current search path.
///
/// @param env_var Name of environment variable.
void add_path_from_environment( std::string env_var);
protected:
plugin_loader* get_loader();
/// @brief Get list of files loaded.
///
/// This method returns the list of shared object file names that
/// successfully loaded.
///
/// @return List of file names.
std::vector< std::string > file_list();
/// @brief Get map of known plugins.
///
/// Get the map of all known registered plugins.
///
/// @return Map of plugins
plugin_map_t const& plugin_map();
/// @brief Get list of loaded modules
///
/// This call returns a map of loaded modules with the files they
/// were defined in.
///
/// @return Map of loaded modules.
std::map< std::string, std::string > const& module_map() const;
/// @brief Get plugin manager search path
///
/// This method returns the search path used to load algorithms.
///
/// @return vector of paths that are searched
path_list_t const& search_path() const;
plugin_manager();
~plugin_manager();
private:
/// @brief Get logger handle.
///
/// This method returns the handle for the plugin manager
/// logger. This logger can be used by the plugin module to log
/// diagnostics during the factory creation process.
///
/// @return Handle to plugin_manager logger
logger_handle_t logger();
class priv;
const std::unique_ptr< priv > m_priv;
static plugin_manager* s_instance;
}; // end class plugin_manager
// ----------------------------------------------------------------------------
/// \brief Typed implementation factory.
///
/// This struct implements a typed implementation factory. It uses the
/// \ref plugin_manager to create an instance of a class that
/// creates a specific variant of the interface type.
///
/// The list of factories that create variants for the specified
/// interface type is queried from the \ref plugin_manager. This list
/// is searched for an entry that has the desired value in the
/// specified factory attribute.
///
/// This struct is intended as a base class with derived structs
/// specifying the desired attribute name, as in \ref
/// implementation_factory_by_name.
///
/// \tparam I Interface type that is created
template <typename I>
class implementation_factory
{
public:
/// @brief CTOR
///
/// This constructor creates an implementation factory that uses a
/// specific attribute to choose the factory object. The attribute name
/// supplied in this call is used as the key field. The create() method
/// selects the factory which has a specific value in this field.
///
/// @param attr Name of attribute to use as key field.
implementation_factory( std::string const& attr)
: m_attr( attr)
{ }
/// @brief Find object factory based on attribute value.
///
/// @param value Attribute value string.
///
/// @return Address of the factory object for the templated type with
/// the specified attribute value.
///
/// @throws kwiver::vital::plugin_factory_not_found
plugin_factory_handle_t find_factory( const std::string& value )
{
// Get singleton plugin manager
kwiver::vital::plugin_manager& pm = kwiver::vital::plugin_manager::instance();
auto fact_list = pm.get_factories( typeid( I ).name() );
// Scan fact_list for CONCRETE_TYPE
for( kwiver::vital::plugin_factory_handle_t a_fact : fact_list )
{
std::string attr_val;
if ( a_fact->get_attribute( m_attr, attr_val ) && ( attr_val == value ) )
{
return a_fact;
}
} // end foreach
std::stringstream str;
str << "Could not find factory where attr \"" << m_attr << "\" is \"" << value
<< "\" for interface type \"" << demangle( typeid(I).name() )
<< "\"";
VITAL_THROW( kwiver::vital::plugin_factory_not_found, str.str() );
}
/// @brief Create object based on attribute value.
///
/// The list of factories which create the interface type I is
/// scanned for an entry which contains the supplied value in the
/// attribute field. When one is found, that factory is used to
/// create a new object. An exception is thrown if the attribute
/// field is not present or no factory has the requested value.
///
/// @param value Attribute value.
///
/// @return Pointer to new object of type I.
///
/// @throws kwiver::vital::plugin_factory_not_found
I* create( const std::string& value )
{
plugin_factory_handle_t a_fact = this->find_factory( value );
return a_fact->create_object<I>();
}
private:
// member data
std::string m_attr; // Name of the attribute
};
// ----------------------------------------------------------------------------
/// @brief Implementation factory that uses name attribute.
///
/// This struct provides a common implementation for creating objects
/// of a specific type based on the "name" attribute.
///
/// Example usage:
/// \code
/// // create name for factory to create specific interface object.
/// typedef kwiver::vital::implementation_factory_by_name< sprokit::process_instrumentation > instrumentation_factory;
///
/// // instantiate factory class when needed.
/// instrumentation_factory ifact;
/// auto instr = ifact.create( provider );
/// \endcode
///
/// \throws plugin_factory_not_found
template <typename T>
class implementation_factory_by_name
: public implementation_factory< T >
{
public:
implementation_factory_by_name()
: implementation_factory<T>( kwiver::vital::plugin_factory::PLUGIN_NAME )
{ }
};
KWIVER_DECLARE_OPERATORS_FOR_BITFLAGS( plugin_manager::plugin_types )
} } // end namespace
#endif // KWIVER_VITAL_PLUGIN_MANAGER_H
|
JesperKrogh/nbgallery
|
app/mailers/subscription_mailer.rb
|
<reponame>JesperKrogh/nbgallery
# Send mail to users for notebook actions
class SubscriptionMailer < ApplicationMailer
# Daily subscription email
def daily_subscription_email(user_id, url)
@user_id = user_id
@url = url
mail(to: User.find(@user_id).email,
subject: "NBGallery Subscriptions - #{Time.now.strftime('%A, %B %d, %Y')}") do |format|
format.html {render 'daily_subscription_email'}
format.text {render 'daily_subscription_email'}
end
end
end
|
MrBellamonte/MT-VAEs-TDA
|
scripts/ssc/models/TopoAE_ext/config_libraries/euler_configs/swissroll.py
|
<filename>scripts/ssc/models/TopoAE_ext/config_libraries/euler_configs/swissroll.py<gh_stars>0
import itertools
import numpy as np
from scripts.ssc.models.TopoAE_ext.config_libraries.euler_configs.euler_wc_offline_configs.swissroll_nonoise import \
(
SWISSROLL_NONOISE3288, SWISSROLL_NONOISE_all, SWISSROLL_NONOISE_h1, SWISSROLL_NONOISE_h2)
from src.datasets.datasets import SwissRoll
from src.evaluation.config import ConfigEval
from src.models.WitnessComplexAE.config import ConfigGrid_WCAE
from src.models.autoencoder.autoencoders import Autoencoder_MLP_topoae
seeds = [36, 3851, 2570, 4304, 1935, 7954, 5095, 5310, 1577, 3288]
seeds_h1 = [36, 3851, 2570, 4304, 1935]
seeds_h2 = [7954, 5095, 5310, 1577, 3288]
bs = [64,128,256,512]
bs_all = len(seeds)*bs
bs_all_h1 = len(seeds_h1)*bs
bs_all_h2 = len(seeds_h2)*bs
seeds_all = np.repeat(seeds,4)
seeds_h1_all = np.repeat(seeds_h1,4)
seeds_h2_all = np.repeat(seeds_h2,4)
swissroll_h1 = [ConfigGrid_WCAE(
learning_rate=[1/10, 1/100, 1/1000],
batch_size=[int(bs)],
n_epochs=[1000],
weight_decay=[1e-6],
early_stopping=[50],
rec_loss_weight=[1],
top_loss_weight=[int(i) for i in np.logspace(9, 13, num=5, base=2.0)],
match_edges=['push_active'],
k=[1,2,3,4,5,6],
r_max=[10],
model_class=[Autoencoder_MLP_topoae],
model_kwargs={
'input_dim' : [3],
'latent_dim' : [2],
'size_hidden_layers': [[32, 32]]
},
dataset=[SwissRoll()],
sampling_kwargs={
'n_samples': [2560]
},
eval=[ConfigEval(
active=True,
evaluate_on='test',
eval_manifold=True,
save_eval_latent=True,
save_train_latent=True,
online_visualization=False,
k_min=15,
k_max=45,
k_step=15,
)],
uid=[''],
toposig_kwargs=[dict()],
method_args=dict(n_jobs=[1], normalize=[True], mu_push=[1,1.05,1.1,1.15,1.2,1.25], online_wc=[True], wc_offline = [dict_wc]),
experiment_dir='/cluster/scratch/schsimo/output/WCAE_swissroll_nonoise_FINAL',
seed=int(seed),
device='cpu',
num_threads=1,
verbose=False,
) for seed, bs, dict_wc in zip(seeds_h1_all, bs_all_h1, SWISSROLL_NONOISE_h1)]
swissroll_h22 = [ConfigGrid_WCAE(
learning_rate=[1/10, 1/100, 1/1000],
batch_size=[int(bs)],
n_epochs=[1000],
weight_decay=[1e-6],
early_stopping=[50],
rec_loss_weight=[1],
top_loss_weight=[int(i) for i in np.logspace(9, 13, num=5, base=2.0)],
match_edges=['push_active'],
k=[1,2,3,4,5,6],
r_max=[10],
model_class=[Autoencoder_MLP_topoae],
model_kwargs={
'input_dim' : [3],
'latent_dim' : [2],
'size_hidden_layers': [[32, 32]]
},
dataset=[SwissRoll()],
sampling_kwargs={
'n_samples': [2560]
},
eval=[ConfigEval(
active=True,
evaluate_on='test',
eval_manifold=True,
save_eval_latent=True,
save_train_latent=True,
online_visualization=False,
k_min=15,
k_max=45,
k_step=15,
)],
uid=[''],
toposig_kwargs=[dict()],
method_args=dict(n_jobs=[1], normalize=[True], mu_push=[1,1.05,1.1,1.15,1.2,1.25], online_wc=[True], wc_offline = [dict_wc]),
experiment_dir='/cluster/scratch/schsimo/output/WCAE_swissroll_nonoise_FINAL',
seed=int(seed),
device='cpu',
num_threads=1,
verbose=False,
) for seed, bs, dict_wc in zip(seeds_h2_all, bs_all_h2, SWISSROLL_NONOISE_h2)]
swissroll_h22_list = list(itertools.chain(*[config_grid.configs_from_grid() for config_grid in swissroll_h22]))
swissroll_h11_list = list(itertools.chain(*[config_grid.configs_from_grid() for config_grid in swissroll_h1]))
swissroll_h1122_list = swissroll_h11_list + swissroll_h22_list
|
windyziheng/ExCamera
|
libexcamera/src/main/java/com/serenegiant/usb/config/base/UVCAutoConfig.java
|
<reponame>windyziheng/ExCamera
package com.serenegiant.usb.config.base;
public abstract class UVCAutoConfig extends UVCConfig {
protected boolean def;
protected boolean isAuto;
private boolean isInit = false;
protected UVCAutoConfig(String tag) {
super(tag);
}
public boolean getDef() {
return def;
}
public void setDef(boolean def) {
this.def = def;
if (!isInit) {
isAuto = def;
isInit = true;
}
}
public boolean isAuto() {
return isAuto;
}
public void setAuto(boolean auto) {
isAuto = auto;
}
@Override
public String toString() {
return "Config <" + tag + "> : flag = " + flag + " , isEnable = " + isEnable + " , isAuto = " + isAuto;
}
}
|
CurieBSP/zephyr
|
boards/quark_se_crb/pinmux.c
|
<filename>boards/quark_se_crb/pinmux.c
/* pinmux.c - general pinmux operation */
/*
* Copyright (c) 2015-2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <nanokernel.h>
#include <device.h>
#include <init.h>
#include <pinmux.h>
#include <sys_io.h>
#include "pinmux/pinmux.h"
#include "quark_se_pinmux_common.h"
#define PINMUX_SELECT_OFFSET 0x30
#define PINMUX_SELECT_REGISTER(base, reg_offset) \
(base + PINMUX_SELECT_OFFSET + (reg_offset << 2))
/*
* A little deciphering of what is going on here:
*
* Each pinmux register represents a bank of 16 pins, 2 bits per pin for a total
* of four possible settings per pin.
*
* The first argument to the macro is the name of the uint32_t array that is being used
* to contain the bit patterns for all the configuration registers. The pin
* number divided by 16 selects the correct register bank based on the pin
* number.
*
* The pin number % 16 * 2 selects the position within the register bank for the
* bits controlling the pin.
*
* All but the lower two bits of the config values are masked off to ensure
* that we don't inadvertently affect other pins in the register bank.
*/
#define PIN_CONFIG(A, _pin, _func) \
(A[((_pin) / 16)] |= ((0x3 & (_func)) << (((_pin) % 16) * 2)))
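/*
 * Worked example (illustrative only, not an additional configuration): for
 * pin 42 and mode B, PIN_CONFIG(mux_config, 42, PINMUX_FUNC_B) selects
 * register bank 42 / 16 = 2 and bit offset (42 % 16) * 2 = 20, so the two
 * low bits of PINMUX_FUNC_B are OR'ed into mux_config[2] at bit position 20.
 */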
/*
* This is the full pinmap that we have available on the board for configuration
* including the ball position and the various modes that can be set. In the
* _pinmux_defaults we do not spend any time setting values that are using mode
* A as the hardware brings up all devices by default in mode A.
*/
/* pin, ball, mode A, mode B, mode C */
/* 0 F02, gpio_0, ain_0, spi_s_cs */
/* 1 G04, gpio_1, ain_1, spi_s_miso */
/* 2 H05, gpio_2, ain_2, spi_s_sck */
/* 3 J06, gpio_3, ain_3, spi_s_mosi */
/* 4 K06, gpio_4, ain_4, NA */
/* 5 L06, gpio_5, ain_5, NA */
/* 6 H04, gpio_6, ain_6, NA */
/* 7 G03, gpio_7, ain_7, NA */
/* 8 L05, gpio_ss_0, ain_8, uart1_cts */
/* 9 M05, gpio_ss_1, ain_9, uart1_rts */
/* 10 K05, gpio_ss_2, ain_10 */ /* AD0 */
/* 11 G01, gpio_ss_3, ain_11 */ /* AD1 */
/* 12 J04, gpio_ss_4, ain_12 */ /* AD2 */
/* 13 G02, gpio_ss_5, ain_13 */ /* AD3 */
/* 14 F01, gpio_ss_6, ain_14 */ /* AD4 */
/* 15 J05, gpio_ss_7, ain_15 */ /* AD5 */
/* 16 L04, gpio_ss_8, ain_16, uart1_txd */ /* IO1 */
/* 17 M04, gpio_ss_9, ain_17, uart1_rxd */ /* IO0 */
/* 18 K04, uart0_rx, ain_18, NA */
/* 19 B02, uart0_tx, gpio_31, NA */
/* 20 C01, i2c0_scl, NA, NA */
/* 21 C02, i2c0_sda, NA, NA */
/* 22 D01, i2c1_scl, NA, NA */
/* 23 D02, i2c1_sda, NA, NA */
/* 24 E01, i2c0_ss_sda, NA, NA */
/* 25 E02, i2c0_ss_scl, NA, NA */
/* 26 B03, i2c1_ss_sda, NA, NA */
/* 27 A03, i2c1_ss_scl, NA, NA */
/* 28 C03, spi0_ss_miso, NA, NA */
/* 29 E03, spi0_ss_mosi, NA, NA */
/* 30 D03, spi0_ss_sck, NA, NA */
/* 31 D04, spi0_ss_cs0, NA, NA */
/* 32 C04, spi0_ss_cs1, NA, NA */
/* 33 B04, spi0_ss_cs2, gpio_29, NA */
/* 34 A04, spi0_ss_cs3, gpio_30, NA */
/* 35 B05, spi1_ss_miso, NA, NA */
/* 36 C05, spi1_ss_mosi, NA, NA */
/* 37 D05, spi1_ss_sck, NA, NA */
/* 38 E05, spi1_ss_cs0, NA, NA */
/* 39 E04, spi1_ss_cs1, NA, NA */
/* 40 A06, spi1_ss_cs2, uart0_cts, NA */
/* 41 B06, spi1_ss_cs3, uart0_rts, NA */
/* 42 C06, gpio_8, spi1_m_sck, NA */ /* IO13 */
/* 43 D06, gpio_9, spi1_m_miso, NA */ /* IO12 */
/* 44 E06, gpio_10, spi1_m_mosi, NA */ /* IO11 */
/* 45 D07, gpio_11, spi1_m_cs0, NA */ /* IO10 */
/* 46 C07, gpio_12, spi1_m_cs1, NA */
/* 47 B07, gpio_13, spi1_m_cs2, NA */
/* 48 A07, gpio_14, spi1_m_cs3, NA */
/* 49 B08, gpio_15, i2s_rxd, NA */ /* IO5 */
/* 50 A08, gpio_16, i2s_rscki, NA */ /* IO8 */
/* 51 B09, gpio_17, i2s_rws, NA */ /* IO3 */
/* 52 A09, gpio_18, i2s_tsck, NA */ /* IO2 */
/* 53 C09, gpio_19, i2s_twsi, NA */ /* IO4 */
/* 54 D09, gpio_20, i2s_txd, NA */ /* IO7 */
/* 55 D08, gpio_21, spi0_m_sck, NA */
/* 56 E07, gpio_22, spi0_m_miso, NA */
/* 57 E09, gpio_23, spi0_m_mosi, NA */
/* 58 E08, gpio_24, spi0_m_cs0, NA */
/* 59 A10, gpio_25, spi0_m_cs1, NA */
/* 60 B10, gpio_26, spi0_m_cs2, NA */
/* 61 C10, gpio_27, spi0_m_cs3, NA */
/* 62 D10, gpio_28, NA, NA */
/* 63 E10, gpio_ss_10, pwm_0, NA */ /* IO3 */
/* 64 D11, gpio_ss_11, pwm_1, NA */ /* IO5 */
/* 65 C11, gpio_ss_12, pwm_2, NA */ /* IO6 */
/* 66 B11, gpio_ss_13, pwm_3, NA */ /* IO9 */
/* 67 D12, gpio_ss_14, clkout_32khz, NA */
/* 68 C12, gpio_ss_15, clkout_16mhz, NA */
static uint32_t mux_config[PINMUX_MAX_REGISTERS] = { 0, 0, 0, 0, 0};
struct pinmux_config board_pmux = {
.base_address = CONFIG_PINMUX_BASE,
};
int pinmux_initialize(struct device *port)
{
int i=0;
quark_se_pinmux_initialize_common(port, mux_config);
PIN_CONFIG(mux_config, 10, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 42, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 43, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 44, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 46, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 47, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 55, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 56, PINMUX_FUNC_B);
PIN_CONFIG(mux_config, 57, PINMUX_FUNC_B);
for (i = 0; i < PINMUX_MAX_REGISTERS; i++) {
sys_write32(mux_config[i], PINMUX_SELECT_REGISTER(board_pmux.base_address, i));
}
return DEV_OK;
}
DEVICE_INIT(pmux, /* config name */
PINMUX_NAME, /* driver name */
&pinmux_initialize, /* init function */
NULL,
&board_pmux, /* config options*/
SECONDARY,
CONFIG_KERNEL_INIT_PRIORITY_DEFAULT);
|
BristicWang95/azure-iot-sdk-java
|
provisioning/provisioning-service-client/src/main/java/com/microsoft/azure/sdk/iot/provisioning/service/configs/CustomAllocationDefinition.java
|
<filename>provisioning/provisioning-service-client/src/main/java/com/microsoft/azure/sdk/iot/provisioning/service/configs/CustomAllocationDefinition.java
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
package com.microsoft.azure.sdk.iot.provisioning.service.configs;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
public class CustomAllocationDefinition implements Serializable
{
// the webhook url for allocation requests
private static final String WEBHOOK_URL_TAG = "webhookUrl";
@Expose
@SerializedName(WEBHOOK_URL_TAG)
@Getter
@Setter
private String webhookUrl;
// the API version of the provisioning service types (such as IndividualEnrollment) sent in the custom allocation request.
private static final String API_VERSION_TAG = "apiVersion";
@Expose
@SerializedName(API_VERSION_TAG)
@Getter
@Setter
private String apiVersion;
}
|
Gundul42/minishell
|
src/ms_execute.c
|
<reponame>Gundul42/minishell
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ms_execute.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: dmylonas <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2021/10/27 10:57:00 by graja #+# #+# */
/* Updated: 2021/11/30 15:35:34 by graja ### ########.fr */
/* */
/* ************************************************************************** */
#include "../header/minishell.h"
char **get_argv(t_split *data, char *name)
{
int i;
int max;
char **argv;
max = 0;
while (data->tokens[max] != NULL)
max++;
max++;
argv = ft_calloc(max + 1, sizeof(char *));
if (!argv)
return (NULL);
argv[0] = ft_strdup(name);
i = 1;
while (data->tokens[i] != NULL)
{
if (ft_strlen(data->tokens[i]))
argv[i] = ft_strdup(data->tokens[i]);
else
argv[i] = ft_calloc(1, sizeof(char));
i++;
}
argv[i] = NULL;
return (argv);
}
static
char *ms_is_path(t_list **head)
{
char *path;
path = ms_getenv(*head, "PATH");
if (path)
return (path);
else
return ("./");
}
static
void ms_run_prog(t_list **head, t_split *data)
{
char *name;
int status;
pid_t pid;
pid = 0;
status = 0;
name = NULL;
name = ms_file_exists(data->tokens[0], ms_is_path(head), 0);
if (name)
{
pid = fork();
if (!pid)
pipe_exec(name, head, data);
}
else
ms_c_error(head, data->tokens[0], ": command not found", 127);
waitpid(pid, &status, 0);
if (status && name)
ms_print_error(head, NULL, errno);
else if (name)
ms_print_error(head, NULL, 0);
close_one_pipe(data);
free(name);
}
static
int ms_builtin(t_split *data, t_list **head, t_list **lsthead)
{
int len;
int blt;
len = ft_strlen(data->tokens[0]);
blt = chk_builtin(data, len);
if (blt)
return (built_exec(data, head, len, lsthead));
else if (!blt && len)
ms_run_prog(head, data);
return (0);
}
int ms_execute(t_list **head, t_list **lsthead)
{
t_split *content;
int err;
err = 0;
signal_for_blocking_cmd();
if (!head || !lsthead)
return (0);
if (ft_lstsize(*lsthead) > 1)
err = err | init_pipes(lsthead);
while (*lsthead && err >= 0)
{
content = (t_split *)((*lsthead)->content);
err = ms_redirect(head, content);
if (!err)
err = err | ms_builtin(content, head, lsthead);
err = err | ms_close_redir(content);
ms_delfirst_entry(lsthead);
}
return (err);
}
|
minux/go.sparc64
|
src/cmd/compile/internal/ssa/nilcheck.go
|
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
// TODO: return value from newobject/newarray is non-nil.
// nilcheckelim eliminates unnecessary nil checks.
func nilcheckelim(f *Func) {
// A nil check is redundant if the same nil check was successful in a
// dominating block. The efficacy of this pass depends heavily on the
// efficacy of the cse pass.
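// For example (illustrative user code, not part of this pass): in
//
//	_ = *p // implicit nil check of p
//	_ = *p // a second implicit nil check of the same pointer
//
// the second check is dominated by the first successful check and is
// therefore redundant and can be removed.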
idom := dominators(f)
domTree := make([][]*Block, f.NumBlocks())
// Create a block ID -> [dominees] mapping
for _, b := range f.Blocks {
if dom := idom[b.ID]; dom != nil {
domTree[dom.ID] = append(domTree[dom.ID], b)
}
}
// TODO: Eliminate more nil checks.
// We can recursively remove any chain of fixed offset calculations,
// i.e. struct fields and array elements, even with non-constant
// indices: x is non-nil iff x.a.b[i].c is.
type walkState int
const (
Work walkState = iota // clear nil check if we should and traverse to dominees regardless
RecPtr // record the pointer as being nil checked
ClearPtr
)
type bp struct {
block *Block // block, or nil in RecPtr/ClearPtr state
ptr *Value // if non-nil, ptr that is to be set/cleared in RecPtr/ClearPtr state
op walkState
}
work := make([]bp, 0, 256)
work = append(work, bp{block: f.Entry})
// map from value ID to bool indicating if value is known to be non-nil
// in the current dominator path being walked. This slice is updated by
// walkStates to maintain the known non-nil values.
nonNilValues := make([]bool, f.NumValues())
// make an initial pass identifying any non-nil values
for _, b := range f.Blocks {
// a value resulting from taking the address of a
// value, or a value constructed from an offset of a
// non-nil ptr (OpAddPtr) implies it is non-nil
for _, v := range b.Values {
if v.Op == OpAddr || v.Op == OpAddPtr {
nonNilValues[v.ID] = true
} else if v.Op == OpPhi {
// phis whose arguments are all non-nil
// are non-nil
argsNonNil := true
for _, a := range v.Args {
if !nonNilValues[a.ID] {
argsNonNil = false
}
}
if argsNonNil {
nonNilValues[v.ID] = true
}
}
}
}
// perform a depth first walk of the dominee tree
for len(work) > 0 {
node := work[len(work)-1]
work = work[:len(work)-1]
switch node.op {
case Work:
checked := checkedptr(node.block) // ptr being checked for nil/non-nil
nonnil := nonnilptr(node.block) // ptr that is non-nil due to this block's pred
if checked != nil {
// already have a nilcheck in the dominator path, or this block is a success
// block for the same value it is checking
if nonNilValues[checked.ID] || checked == nonnil {
// Eliminate the nil check.
// The deadcode pass will remove vestigial values,
// and the fuse pass will join this block with its successor.
// Logging in the style of the former compiler -- and omit line 1,
// which is usually in generated code.
if f.Config.Debug_checknil() && int(node.block.Control.Line) > 1 {
f.Config.Warnl(int(node.block.Control.Line), "removed nil check")
}
switch node.block.Kind {
case BlockIf:
node.block.Kind = BlockFirst
node.block.Control = nil
case BlockCheck:
node.block.Kind = BlockPlain
node.block.Control = nil
default:
f.Fatalf("bad block kind in nilcheck %s", node.block.Kind)
}
}
}
if nonnil != nil && !nonNilValues[nonnil.ID] {
// this is a new nilcheck so add a ClearPtr node to clear the
// ptr from the map of nil checks once we traverse
// back up the tree
work = append(work, bp{op: ClearPtr, ptr: nonnil})
}
// add all dominated blocks to the work list
for _, w := range domTree[node.block.ID] {
work = append(work, bp{block: w})
}
if nonnil != nil && !nonNilValues[nonnil.ID] {
work = append(work, bp{op: RecPtr, ptr: nonnil})
}
case RecPtr:
nonNilValues[node.ptr.ID] = true
continue
case ClearPtr:
nonNilValues[node.ptr.ID] = false
continue
}
}
}
// checkedptr returns the Value, if any,
// that is used in a nil check in b's Control op.
func checkedptr(b *Block) *Value {
if b.Kind == BlockCheck {
return b.Control.Args[0]
}
if b.Kind == BlockIf && b.Control.Op == OpIsNonNil {
return b.Control.Args[0]
}
return nil
}
// nonnilptr returns the Value, if any,
// that is non-nil due to b being the successor block
// of an OpIsNonNil or OpNilCheck block for the value and having a single
// predecessor.
func nonnilptr(b *Block) *Value {
if len(b.Preds) == 1 {
bp := b.Preds[0]
if bp.Kind == BlockCheck {
return bp.Control.Args[0]
}
if bp.Kind == BlockIf && bp.Control.Op == OpIsNonNil && bp.Succs[0] == b {
return bp.Control.Args[0]
}
}
return nil
}
|
kentfrazier/Exhibitionist
|
Examples/pandas/handlers.py
|
<filename>Examples/pandas/handlers.py<gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import print_function
import codecs
import os
import logging
from exhibitionist.toolbox import ( getLogger, http_handler,
JSONRequestHandler, StaticFileHandler,
HTTPError)
context = None # eliminate tooling "symbol not found"
logger = getLogger(__name__)
from tornado.template import Template
@http_handler(r'/pandas/df/{{objid}}$', view_name="dfView")
class GetDataFrameView(JSONRequestHandler):
def prepare(self):
tmpl_file = os.path.join(self.get_template_path(),"jqgrid_view.html")
if not(os.path.isdir(self.get_template_path())):
self.set_status(500)
self.finish("Template path does not exist")
return
with codecs.open(tmpl_file) as f:
self.tmpl = Template(f.read())
def get(self, objid):
import pandas as pd
# by default the object is placed in self.object
if not isinstance(context.object, pd.DataFrame):
raise (HTTPError(500, "Object exists, but is not a dataframe"))
base = "http://{host}/pandas".format(host=self.request.host)
body = self.tmpl.generate(api_url=base,
objid=objid,
static_url=self.static_url)
self.write(body)
@http_handler(r'/pandas/(?P<noun>columns|rows)/{{objid}}$')
class jqGridPandasAjax(JSONRequestHandler):
def get(self, objid, noun):
import math
import pandas as pd
# logger.info(self.request.arguments)
def listify(o):
if not isinstance(o, (list, tuple)):
o = [o, ]
return list(o)
df = context.object # we set @http_handler(obj_attr='the_object')
if not isinstance(df, pd.DataFrame):
raise (HTTPError(500, "Object exists, but is not a dataframe"))
if len(df.columns) == 0:
cidx_nlevels = 0
else:
cidx_nlevels = 1 if not hasattr(df.columns, "levels") else len(df.columns[0])
if len(df.index) == 0:
ridx_nlevels = 0
else:
ridx_nlevels = 1 if not hasattr(df.index, "levels") else len(df.index[0])
if noun == "columns":
def mk_col(index, name, width=80, cssClass="", formatter=None, **kwds):
d = dict(index=index, name=name,
width=width, cssClass=cssClass, formatter=formatter)
d.update(kwds)
return d
if (cidx_nlevels == 0):
raise (HTTPError(500, "no columns"))
# fake multirow header for pandas Column MultiIndex as multiple lines of text
# the name field contains a list of string (possibly singleton)
# one per level of column index
if cidx_nlevels == 1:
cols = [[""] * cidx_nlevels] * ridx_nlevels + map(lambda x: [unicode(x)], list(df.columns))
else:
cols = [[""] * cidx_nlevels] * ridx_nlevels + map(lambda x: map(unicode, x), list(df.columns))
columns = [mk_col(i, name, cssClass="", is_index=i < ridx_nlevels)
for i, name in enumerate(cols)]
payload = dict(columns=columns)
self.write_json(payload)
elif noun == "rows":
# the returned json schema is forced by jqGrid
rows = int(self.get_argument("rows")) # rows per page
page = int(self.get_argument("page")) # page number requested
offset = ((page - 1) * rows)
count = rows
payload = dict(total=int(math.ceil(len(df) / float(rows))), # total number of pages (float division so ceil rounds up)
page=page, # current page number
records=len(df)) # total rows in dataframe
if offset < 0 or count < 0 or offset >= len(df):
# empty response, probably shouldn't happen, try to recover
payload.update(dict(rows=[]))
logger.warn("Bad request: offset:%s count:%s" % (offset, count))
else:
count = min(count, len(df) - offset) # num rows to return
# all data gets converted to string, circumvent
# the dtypes trap. json can't serialize int64, NaNs, etc
payload.update(dict(rows=[{i: unicode(data) for i, data in
enumerate(listify(df.index[i]) + list(df.irow(i).tolist()))}
for i in range(offset, offset + count)]))
# logger.info(payload)
self.write_json(payload)
|
vberetti/tatami
|
src/main/webapp/js/app/views/vNavbar.js
|
(function(Backbone, _, Tatami){
var Navbar = Backbone.Marionette.Layout.extend({
initialize: function(){
this.$el.find('[name="search"]').typeahead(new Tatami.Search());
$(".deleteicon").hide();
var navbar = this;
$("#searchinput").keyup(function(event) {
navbar.displayHideDeleteIcon();
});
},
events: {
'click .editTatam': 'editTatam',
'submit #searchform' : 'search',
'click .deleteicon' : 'clear'
},
displayHideDeleteIcon: function(){
var input = $('[name="search"]').val();
if(input.length > 0){
$(".deleteicon").show();
} else {
$(".deleteicon").hide();
}
},
displaySearch: function(input){
$('[name="search"]').val(input);
this.displayHideDeleteIcon();
},
editTatam: function(){
Tatami.app.trigger('edit:show');
},
search: function(event){
event.preventDefault();
var input = $('[name="search"]').val();
if(input.indexOf("#") == 0){
Backbone.history.navigate('tags/' + input.substring(1, input.length), true);
} else if(input.indexOf("@") == 0){
Backbone.history.navigate('users/' + input.substring(1, input.length), true);
} else {
Backbone.history.navigate('search/' + input, true);
}
},
clear: function(event){
Backbone.history.navigate('', true);
}
});
Tatami.Views.Navbar = Navbar;
})(Backbone, _, Tatami);
|
SayanGhoshBDA/code-backup
|
java_backup/my java/tapabrata/iisscc/maxWordLine.java
|
<reponame>SayanGhoshBDA/code-backup
package iisscc;
import java.io.*;
class maxWordLine
{//declaration of class
public static int wordCount(String text)
{//defining function
int i,wc,ln;
char ch=' ';//declaring variables
text=text.trim();
ln=text.length();
wc=0;
for(i=0;i<ln;i++)
{
ch=text.charAt(i);
if(ch==' ')
{
wc++;
}
}
return wc+1;
}
public static void main(String args[])throws IOException
{//defining main method
BufferedReader br=new BufferedReader(new InputStreamReader(System.in));
String lines,tmpword,sent;
int ln,lnc,i,j,c,maxw,wc;//declaring variables
char ch= ' ';
System.out.print("\nEnter lines of text :");
lines=br.readLine();
lines=lines.trim();
ln=lines.length();
lnc=0;
for(i=0;i<ln;i++)
{
ch=lines.charAt(i);
if(ch==',' || ch=='!' || ch=='.')
lnc++;
}
String text[]=new String[lnc];//string array
c=0;
sent="";
for(i=0;i<ln;i++)
{
ch=lines.charAt(i);
if(ch==',' || ch=='!' || ch=='.')
{
text[c++]=sent;
sent="";
}
else
{
sent=sent+ch;
}
}
maxw=0;
sent="";
for(i=0;i<c;i++)
{
if(wordCount(text[i])>maxw)
{
maxw=wordCount(text[i]);
sent=text[i];
}
}
String word[]=new String[maxw];
sent=sent+" ";
ln=sent.length();
wc=0;
tmpword="";
for(i=0;i<ln;i++)
{
ch=sent.charAt(i);
if(ch==' ')
{
word[wc++]=tmpword;
tmpword="";
}
else
{
tmpword=tmpword+ch;
}
}
for(i=0;i<wc-1;i++)//sorting
{
for(j=0;j<wc-1-i;j++)
{
if(word[j+1].compareToIgnoreCase(word[j])<0)
{
tmpword=word[j];
word[j]=word[j+1];
word[j+1]=tmpword;
}
}
}
System.out.print("\nLine that contains maximum number of words in asc order\n");
for(i=0;i<wc;i++)
{
System.out.print(word[i]+" ");//displaying the line
}
}//end of main
}//end of class
|
sittercity/bureaucrat
|
lib/bureaucrat/validators/min_value.rb
|
<gh_stars>1-10
require 'bureaucrat/validators/base'
module Bureaucrat
module Validators
class MinValueValidator < BaseValidator
def message
I18n.t('bureaucrat.default_errors.validators.min_value_validator', limit_value: @formatted_value)
end
def code
:min_value
end
def compare(a, b)
a < b
end
end
end
end
|
LoserPanda/plantclient
|
src/components/landingpage/bootstrapcomponents/TableHead.js
|
import React from 'react';
import PropTypes from 'prop-types';
import classNames from 'classnames';
const TableHead = (props) => {
const {
children,
color,
columns,
textWhite,
...attributes
} = props;
const classes = classNames(
(color !== 'dark' && color !== 'light') ? color : `thead-${color}`,
{
'text-white' : textWhite
}
);
return (
<thead {...attributes} className={classes}>
{
columns &&
<tr>
{ columns.map(col => <th key={col.field} className={col.hasOwnProperty('minimal') ? `th-${col.minimal}` : ''}>{col.label}</th>) }
</tr>
}
{children}
</thead>
);
};
TableHead.propTypes = {
children: PropTypes.node,
color: PropTypes.string,
columns: PropTypes.arrayOf(PropTypes.object),
textWhite: PropTypes.bool
};
TableHead.defaultProps = {
textWhite: false
};
export default TableHead;
export { TableHead as MDBTableHead };
|
kobezzza/Collection
|
dist/node/iterators/remove.js
|
'use strict';
/*!
* Collection
* https://github.com/kobezzza/Collection
*
* Released under the MIT license
* https://github.com/kobezzza/Collection/blob/master/LICENSE
*/
var _core = require("../core");
var _types = require("../helpers/types");
var _link = require("../helpers/link");
var _gcc = require("../helpers/gcc");
/**
* Removes elements from the collection by the specified condition/link
*
* @see Collection.prototype.forEach
* @param {($$CollectionFilter|$$CollectionBase|$$CollectionLink)=} [opt_filter] - link, function filter or an array of functions
* @param {?$$CollectionBase=} [opt_params] - additional parameters
* @return {($$CollectionReport|!Promise<$$CollectionReport>)}
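*
* A usage sketch (illustrative only; it assumes the collection wrapper exported
* by the library is available as $C):
*
*     $C([1, 2, 3, 4]).remove((el) => el > 2);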
*/
_core.Collection.prototype.remove = function (opt_filter, opt_params) {
let p = opt_params || {};
if (!(0, _types.isFunction)(opt_filter) && ((0, _types.isArray)(opt_filter) && !(0, _types.isFunction)(opt_filter[1]) || opt_filter != null && typeof opt_filter !== 'object')) {
return (0, _link.byLink)(this.data, opt_filter, {
delete: true
});
}
if (!(0, _types.isArray)(opt_filter) && !(0, _types.isFunction)(opt_filter)) {
p = opt_filter || p;
opt_filter = null;
}
this._initParams(p, opt_filter);
p = (0, _gcc.any)(Object.assign(Object.create(this.p), p));
const isRealArray = p.type === 'array' && (0, _types.isArray)(this.data);
if (_types.iterators[p.type]) {
throw new TypeError('Incorrect data type');
}
const mult = p.mult !== false,
res = [];
if (mult) {
p.result = res;
} else {
p.result = {
notFound: true,
result: false,
key: undefined,
value: undefined
};
}
let fn;
switch (p.type) {
case 'map':
fn = (value, key, data) => {
data.delete(key);
const o = {
result: !data.has(key),
key,
value
};
if (mult) {
res.push(o);
} else {
p.result = o;
}
};
break;
case 'set':
fn = (value, key, data) => {
data.delete(value);
const o = {
result: !data.has(value),
key: null,
value
};
if (mult) {
res.push(o);
} else {
p.result = o;
}
};
break;
case 'array':
if (p.reverse) {
fn = (value, key, data) => {
if (isRealArray) {
data.splice(key, 1);
} else {
_link.splice.call(data, key, 1);
}
const o = {
result: data[key] !== value,
key,
value
};
if (mult) {
res.push(o);
} else {
p.result = o;
}
};
} else {
let rm = 0;
if (p.live) {
fn = (value, key, data, ctx) => {
if (isRealArray) {
data.splice(key, 1);
} else {
_link.splice.call(data, key, 1);
}
ctx.i(-1);
const o = {
result: data[key] !== value,
key: key + rm,
value
};
if (mult) {
res.push(o);
} else {
p.result = o;
}
rm++;
};
} else {
fn = (value, key, data, ctx) => {
const ln = ctx.length();
const f = length => {
if (isRealArray) {
data.splice(key, 1);
} else {
_link.splice.call(data, key, 1);
}
ctx.i(-1);
const o = {
result: data[key] !== value,
key: key + rm,
value
};
if (mult) {
res.push(o);
} else {
p.result = o;
}
if (++rm === length) {
return ctx.break;
}
};
if ((0, _types.isNumber)(ln)) {
f(ln);
} else {
return ctx.wait(ln).then(f);
}
};
}
}
break;
default:
fn = (value, key, data) => {
delete data[key];
const o = {
result: key in data === false,
key,
value
};
if (mult) {
res.push(o);
} else {
p.result = o;
}
};
}
const returnVal = (0, _gcc.any)(this.forEach((0, _gcc.any)(fn), p));
if (returnVal !== this) {
return returnVal;
}
return p.result;
};
|
vvelikodny/aws-sdk-go-v2
|
service/alexaforbusiness/api_op_ListGatewayGroups.go
|
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package alexaforbusiness
import (
"context"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/internal/awsutil"
)
type ListGatewayGroupsInput struct {
_ struct{} `type:"structure"`
// The maximum number of gateway group summaries to return. The default is 50.
MaxResults *int64 `min:"1" type:"integer"`
// The token used to paginate through multiple pages of gateway group summaries.
NextToken *string `min:"1" type:"string"`
}
// String returns the string representation
func (s ListGatewayGroupsInput) String() string {
return awsutil.Prettify(s)
}
// Validate inspects the fields of the type to determine if they are valid.
func (s *ListGatewayGroupsInput) Validate() error {
invalidParams := aws.ErrInvalidParams{Context: "ListGatewayGroupsInput"}
if s.MaxResults != nil && *s.MaxResults < 1 {
invalidParams.Add(aws.NewErrParamMinValue("MaxResults", 1))
}
if s.NextToken != nil && len(*s.NextToken) < 1 {
invalidParams.Add(aws.NewErrParamMinLen("NextToken", 1))
}
if invalidParams.Len() > 0 {
return invalidParams
}
return nil
}
type ListGatewayGroupsOutput struct {
_ struct{} `type:"structure"`
// The gateway groups in the list.
GatewayGroups []GatewayGroupSummary `type:"list"`
// The token used to paginate through multiple pages of gateway group summaries.
NextToken *string `min:"1" type:"string"`
}
// String returns the string representation
func (s ListGatewayGroupsOutput) String() string {
return awsutil.Prettify(s)
}
const opListGatewayGroups = "ListGatewayGroups"
// ListGatewayGroupsRequest returns a request value for making API operation for
// Alexa For Business.
//
// Retrieves a list of gateway group summaries. Use GetGatewayGroup to retrieve
// details of a specific gateway group.
//
// // Example sending a request using ListGatewayGroupsRequest.
// req := client.ListGatewayGroupsRequest(params)
// resp, err := req.Send(context.TODO())
// if err == nil {
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/alexaforbusiness-2017-11-09/ListGatewayGroups
func (c *Client) ListGatewayGroupsRequest(input *ListGatewayGroupsInput) ListGatewayGroupsRequest {
op := &aws.Operation{
Name: opListGatewayGroups,
HTTPMethod: "POST",
HTTPPath: "/",
Paginator: &aws.Paginator{
InputTokens: []string{"NextToken"},
OutputTokens: []string{"NextToken"},
LimitToken: "MaxResults",
TruncationToken: "",
},
}
if input == nil {
input = &ListGatewayGroupsInput{}
}
req := c.newRequest(op, input, &ListGatewayGroupsOutput{})
return ListGatewayGroupsRequest{Request: req, Input: input, Copy: c.ListGatewayGroupsRequest}
}
// ListGatewayGroupsRequest is the request type for the
// ListGatewayGroups API operation.
type ListGatewayGroupsRequest struct {
*aws.Request
Input *ListGatewayGroupsInput
Copy func(*ListGatewayGroupsInput) ListGatewayGroupsRequest
}
// Send marshals and sends the ListGatewayGroups API request.
func (r ListGatewayGroupsRequest) Send(ctx context.Context) (*ListGatewayGroupsResponse, error) {
r.Request.SetContext(ctx)
err := r.Request.Send()
if err != nil {
return nil, err
}
resp := &ListGatewayGroupsResponse{
ListGatewayGroupsOutput: r.Request.Data.(*ListGatewayGroupsOutput),
response: &aws.Response{Request: r.Request},
}
return resp, nil
}
// NewListGatewayGroupsPaginator returns a paginator for ListGatewayGroups.
// Use Next method to get the next page, and CurrentPage to get the current
// response page from the paginator. Next will return false, if there are
// no more pages, or an error was encountered.
//
// Note: This operation can generate multiple requests to a service.
//
// // Example iterating over pages.
// req := client.ListGatewayGroupsRequest(input)
// p := alexaforbusiness.NewListGatewayGroupsPaginator(req)
//
// for p.Next(context.TODO()) {
// page := p.CurrentPage()
// }
//
// if err := p.Err(); err != nil {
// return err
// }
//
func NewListGatewayGroupsPaginator(req ListGatewayGroupsRequest) ListGatewayGroupsPaginator {
return ListGatewayGroupsPaginator{
Pager: aws.Pager{
NewRequest: func(ctx context.Context) (*aws.Request, error) {
var inCpy *ListGatewayGroupsInput
if req.Input != nil {
tmp := *req.Input
inCpy = &tmp
}
newReq := req.Copy(inCpy)
newReq.SetContext(ctx)
return newReq.Request, nil
},
},
}
}
// ListGatewayGroupsPaginator is used to paginate the request. This can be done by
// calling Next and CurrentPage.
type ListGatewayGroupsPaginator struct {
aws.Pager
}
func (p *ListGatewayGroupsPaginator) CurrentPage() *ListGatewayGroupsOutput {
return p.Pager.CurrentPage().(*ListGatewayGroupsOutput)
}
// ListGatewayGroupsResponse is the response type for the
// ListGatewayGroups API operation.
type ListGatewayGroupsResponse struct {
*ListGatewayGroupsOutput
response *aws.Response
}
// SDKResponseMetdata returns the response metadata for the
// ListGatewayGroups request.
func (r *ListGatewayGroupsResponse) SDKResponseMetdata() *aws.Response {
return r.response
}
|
nqbinh47/Maze-Runner
|
src/Coordinate.cpp
|
<gh_stars>1-10
#include "Coordinate.hpp"
MazeCoordinate::MazeCoordinate(int x, int y) : x(x), y(y)
{
}
MazeCoordinate::MazeCoordinate()
{
}
MazeCoordinate::~MazeCoordinate()
{
}
|
xcesco/Kripton
|
kripton-core/src/main/java/com/abubusoft/kripton/common/UrlUtils.java
|
<filename>kripton-core/src/main/java/com/abubusoft/kripton/common/UrlUtils.java<gh_stars>100-1000
/*******************************************************************************
* Copyright 2015, 2017 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.abubusoft.kripton.common;
import java.net.MalformedURLException;
import java.net.URL;
/**
* The Class UrlUtils.
*/
public abstract class UrlUtils {
private UrlUtils() {
}
/**
* Write.
*
* @param value the value
* @return the string
*/
public static String write(URL value) {
if (value==null) return null;
return value.toString();
}
/**
* Converts a string into a URL.
*
* @param value the string to convert
* @return the url, or null if the string is null or malformed
*/
public static URL read(String value) {
if (value == null)
return null;
try {
return new URL(value);
} catch (MalformedURLException e) {
e.printStackTrace();
return null;
}
}
}
|
juanfelipe82193/opensap
|
sapui5-sdk-1.74.0/resources/sap/ushell/services/Personalization-dbg.js
|
<reponame>juanfelipe82193/opensap
// Copyright (c) 2009-2017 SAP SE, All Rights Reserved
/**
* @fileOverview The Unified Shell's personalization service, which provides generic read and write access to the currently logged on user's
* personalization settings for the app currently executed in the shell.
*
* @version 1.74.0
*/
sap.ui.define([
"sap/ushell/utils",
"sap/ushell/services/_Personalization/utils",
"sap/ushell/services/_Personalization/constants",
"sap/ushell/services/_Personalization/ContextContainer",
"sap/ushell/services/_Personalization/WindowAdapter",
"sap/ushell/services/_Personalization/TransientPersonalizer", // private
"sap/ushell/services/_Personalization/PersonalizationContainer", // private
"sap/ushell/services/_Personalization/Personalizer", // private
"sap/ushell/services/_Personalization/VariantSetAdapter",
"sap/ushell/services/_Personalization/Variant",
"sap/ushell/services/_Personalization/VariantSet",
"sap/ushell/services/_Personalization/WindowAdapterContainer",
"sap/ui/base/ManagedObject",
"sap/ui/thirdparty/jquery"
], function (
utils,
personalizationUtils,
publicConstants,
ContextContainer,
WindowAdapter,
TransientPersonalizer,
PersonalizationContainer,
Personalizer,
VariantSetAdapter,
Variant,
VariantSet,
WindowAdapterContainer,
ManagedObject,
jQuery
) {
"use strict";
/* global Uint32Array */
// TODO conditional loading
/*
* Implementation note:
*
* ITEM#<itemkey>
* VARIANTSET#<variantset>
* sap-ushell-container-scope : {}
* sap-ushell-container-
*/
/**
* This method MUST be called by the Unified Shell's container only, others
* MUST call <code>sap.ushell.Container.getService("Personalization")</code>.
* Constructs a new instance of the personalization service.
*
* @name sap.ushell.services.Personalization
* @class The Unified Shell's personalization service, which provides a personalizer object that handles all personalization operations.
* @param {object} oAdapter the service adapter for the personalization service, as already provided by the container
* @public
* @constructor
* @see sap.ushell.services.Container#getService
* @since 1.15.0
*/
function Personalization (oAdapter, oContainerInterface, sParameter, oConfig) {
this._oConfig = (oConfig && oConfig.config) || {};
this._seed = (oConfig && oConfig.config && typeof oConfig.config.seed === "string" && oConfig.config.seed) || "12345678901AFERANDOMBETTER";
// suitable random string provided by the backend, should be an unpredictable random value of cryptographic strength (see abap platform)
while (this._seed.length < 40) {
this._seed = this._seed + this._seed;
}
this._oAppVariantAdapterWithBackendAdapter = this._configureAppVariantStorage(this._oConfig.appVariantStorage);
this._oAdapterWithBackendAdapter = {
lazy: false,
instance: new WindowAdapter(this, oAdapter)
};
this._oAdapterWindowOnly = {
lazy: false,
instance: new WindowAdapter(this, undefined)
};
this._oContainerMap = new utils.Map();
// map: sPrefixedContainerKey -> promise object of getPersonalizationContainer
this._oPendingOperationsMap = new utils.Map();
// map: sContainerKey -> pending operation (deferred object, potentially extended with _sapTimeoutId, _sapFnSave)
}
Personalization.prototype.SAVE_DEFERRED_DROPPED = "Deferred save dropped (OK) - Data superseded by subsequent save";
//constants for scope of personalization service
Personalization.prototype.constants = publicConstants;
/**
* Configures the adapter to store app variants. When app variant storage is enabled,
* personalization on app variants is handled and stored using a separate adapter.
*
* @param {object} oAppVariantStorageConfig The service configuration for app variant storage.
* @return {object} An object like:
* <pre>
* {
* lazy: true,
* create: <function>
* }
* </pre>
* Where <function> returns a promise that resolves with the app variant adapter or rejects with an error message.
* @private
*/
Personalization.prototype._configureAppVariantStorage = function (oAppVariantStorageConfig) {
var that = this,
sDefaultAppVariantAdapter = "sap.ushell.adapters.AppVariantPersonalizationAdapter";
if (!oAppVariantStorageConfig) {
// default
oAppVariantStorageConfig = { adapter: { module: sDefaultAppVariantAdapter } };
}
if (Object.keys(oAppVariantStorageConfig).length === 0 || oAppVariantStorageConfig.enabled === false) {
return;
}
var sAdapterModuleName = (oAppVariantStorageConfig.adapter && oAppVariantStorageConfig.adapter.module) || sDefaultAppVariantAdapter,
sAdapterModulePath = sAdapterModuleName.split(".").join("/");
// lazy load
var fnCreate = function () {
that._oAppVariantAdapterLoadPromise = that._oAppVariantAdapterLoadPromise || (function () {
var oDeferred = new jQuery.Deferred();
try {
sap.ui.require([sAdapterModulePath], fnRequireSuccess);
} catch (oRequireError) {
oDeferred.reject(oRequireError);
}
function fnRequireSuccess (AppVariantPersonalizationAdapter) {
try {
var oAdapter = new AppVariantPersonalizationAdapter();
var oWrappedAdapter = new WindowAdapter(that, oAdapter);
oDeferred.resolve(oWrappedAdapter);
} catch (oError) {
oDeferred.reject(oError);
}
}
return oDeferred.promise();
})();
return that._oAppVariantAdapterLoadPromise;
};
return {
lazy: true,
create: fnCreate
};
};
/**
* Returns a generated key
* This key is suitably random depending on the platform, but it is viable to brute force attacks and
* storages based on it shall not be used for sensitive data
*
* @returns {string} 40 character string consisting of A-Z and 0-9 which can be used as a generated key for personalization container.
* Every invocation returns a new key. Seed of Math.random is OS Random Seed
*
* Actual randomness of browser random is at most 2^128 -1 (Period length), way less on older platforms which is way
* below the 36^40 ~ 2^(5.16*40) =~ 2^206 suggested by the key. or 2^(196) for 38 characters used in the appstate.
*
* To support suitable randomness on platforms not having a crypto api we also mix in a seed value generated from the server
* (btw: in general using only lower bits of random values is not a good pattern)
*
* Note that the appstate is not intended for any security critical content
*
* @public
* @alias sap.ushell.services.Personalization#getGeneratedKey
* @since 1.28.0
*/
Personalization.prototype.getGeneratedKey = function () {
var sChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789",
sCharLen = sChars.length,
sResult = "",
that = this;
// to return a number between [0 ... CHARLEN-1]
var fnRandom = function (i) {
return (that._seed.charCodeAt(i) + Math.floor(Math.random() * sCharLen)) % sCharLen;
};
if (window && window.crypto && window.crypto.getRandomValues) {
var arr40 = new Uint32Array(40);
window.crypto.getRandomValues(arr40);
fnRandom = function (i) {
return (arr40[i] + that._seed.charCodeAt(i)) % sChars.length;
};
}
var getRandomAlphaNumeric = function (i) {
return sChars[fnRandom(i)];
};
while (sResult.length < 40) {
sResult += getRandomAlphaNumeric(sResult.length);
}
return sResult;
};
/**
* Returns a personalizer object which handles personalization by asynchronous operations storing
* the personalization data immediately via the connected adapter.
* For each operation a round trip is executed.
*
* Do not mix the usage of a personalizer and a personalization container for one containerKey.
*
* @param {object} oPersId JSON object consisting of the following parts:
* container - Identifies the set of personalization data that is loaded/saved as one bundle from the front-end server.
* item - The name of the object the personalization is applied to.
* @param {object} oScope - scope object<br/>
* currently the validity property of the scope object is relevant:
* oScope.validity : validity of the container persistence in minutes<br/>
* oScope.keyCategory : Type or category of key<br/>
* oScope.writeFrequency : Expected frequency how often users will use this container to store data inside<br/>
* oScope.clientStorageAllowed : Defines if storage on client side should be allowed or not<br/>
* oScope.shared: Indicates the container is intended to be shared across multiple applications<br/>
* E.g. <code> { validity : 30}</code> indicates a validity of the data for 30 minutes.
* @param {sap.ui.core.Component} oComponent Since 1.27.0.
* SAPUI5 component which uses the personalizer. This allows the stored data to be associated with the application.
* @returns {object} {@link sap.ushell.services.Personalizer} which provides generic read and write access to
* the currently logged on user's personalization settings.
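*
* A minimal usage sketch (the container and item names are illustrative only; the
* service instance is assumed to have been obtained via
* sap.ushell.Container.getService("Personalization")):
* <pre>
* var oPersonalizer = oPersonalizationService.getPersonalizer(
*     { container: "myAppSettings", item: "tableColumns" },
*     { validity: Infinity },
*     oComponent);
* </pre>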
* @public
* @alias sap.ushell.services.Personalization#getPersonalizer
* @since 1.15.0
*/
Personalization.prototype.getPersonalizer = function (oPersId, oScope, oComponent) {
oComponent = oComponent || this._getApplicationComponent();
return new Personalizer(this, this._oAdapterWithBackendAdapter.instance, oPersId, oScope, oComponent);
};
/**
* Attempts to retrieve the component of the currently running application.
*
* @return {object} sap.ui.core.UIComponent
* @private
*/
Personalization.prototype._getApplicationComponent = function () {
var oComponent;
var sOwnerId = ManagedObject._sOwnerId;
if (sOwnerId) {
oComponent = sap.ui.getCore().getComponent(sOwnerId);
if (oComponent instanceof sap.ui.core.UIComponent) {
return oComponent;
}
}
return undefined;
};
/**
* Returns a transient personalizer object which handles personalization by asynchronous operations storing
* the personalization data transiently as an object property.
* Primary usage of the transient personalizer is a personalization scenario with variants where
* the transient personalizer is used as a buffer for table personalization data.
*
* @returns {object} {@link sap.ushell.services.TransientPersonalizer} which provides asynchronous read and write access
* to a transient personalization data storage.
* @public
* @alias sap.ushell.services.Personalization#getTransientPersonalizer
* @since 1.18.0
*/
Personalization.prototype.getTransientPersonalizer = function () {
return new TransientPersonalizer();
};
/**
* Factory method to obtain a Data Context object, which is a local copy of the persistence layer data.
* The Container data is asynchronously read on creation if present, otherwise an initial object is created.
* The Container data can then be *synchronously* modified (getItemValue, setItemValue).
* Only on invoking the save()/saveDeferred() method the data is transferred to the persistence.
* This allows the application to perform multiple local modifications and delay the save operation.
*
* Every getContainer operation returns a new local copy, containing the full data at the point of creation.
*
* Executing load() on the container reloads the data from the persistence, discarding local changes.
*
* Note that the container allows the application to control the round trips to the front-end server persistence.
* The factory method getContainer is asynchronous and loads the container via the connected adapter from the front-end server.
* All operations (but for the save operation) are executed synchronously, operating on the local data.
* This allows the application to control the round trips to the front-end server persistence.
*
* A container can contain a set of items, identified by a key.
*
* You can wrap a container in a VariantSetAdapter to read and write a more complex structure (with multiple keys (variantSet,variant,item)).
*
* Do not mix up the usage of a personalizer and a container for one containerKey.
* Do not use a PersonalizationContainer and a Container for the same key except for migration scenarios.
*
* scope / validity parameter (@since 1.22.0):
* An unspecified (undefined validity) or infinite (Infinity) validity indicates that data is persisted in the
* Personalization data of the front-end server. A round trip is executed on an initial get and at least every save operation.
* Data is stored per user and retained indefinitely at the front-end server.
*
* The validity parameter allows a designated storage validity for the created container.
* A 0 validity indicates the data is only persisted within the Fiori Launchpad window.
* No round trips to the front-end server are executed. Data is lost if the Fiori Launchpad window state is lost
* (e.g. by navigating to a different page, pressing F5 (reload page) or duplicating the window).
*
* For versions > 1.24 it may happen that for cross-app navigation a reload of the Fiori Launchpad is triggered.
* In this case storing the personalization data in the Fiori Launchpad window would lead to data loss.
* To overcome this a validity 0 is automatically changed to a validity 1440 (24h; storage on the front-end server).
* This is only done if a reload of the Fiori Launchpad is triggered for a cross-app navigation.
*
* Security: It is the responsibility of the application to not persist information relevant to auditing or security
* using the PersonalizationService with inappropriate validity models.
* No mechanisms exist to destroy or selectively destroy application-specific data in the front-end server persistence (especially for validity Infinity).
*
* For non-zero validity scopes, data will be transmitted and persisted in the front-end server system.
*
* For limited validity, actual deletion of data on the front-end server is subject to explicit cleanup
* execution of front-end server jobs and not guaranteed.
* The data may still be persisted and retrievable.
* The interface only assures that expired data is no longer exposed to the application code in the Fiori Launchpad.
*
* The ContainerKey uniquely defines the Container, validity is not part of the key (there are no separate namespaces per validity).
*
* In general, mixing different validity models for a given container key is not supported.
* Fast chaining of different methods may source arbitrary persistence layers.
* The validity of the resulting object in the done function of a promise is the last get validity.
*
* The validity associated with the last getContainer or createEmptyContainer determines the current
* validity of the container and the validity used during the next save operation.
*
* Naturally, if a delete or get with validity 0 is issued, it will *not* delete or retrieve a front-end server persistent storage.
* Thus a sequence delete( [validity 0])/wait for promise, getContainer(sKey,{ validity : Infinity}) may return a valid dataset.
*
* @param {string} sContainerKey - identifies the container. The string length is restricted to 40 characters
* @param {Object} oScope - scope object. Currently the validity property of the scope object is relevant:
* E.g. <code> { validity : 30}</code> indicates a validity of the data for 30 minutes.<br/>
* oScope.validity : validity of the container persistence in minutes<br/>
* valid values include:
* 0 ( per FLP Window), <br/>
* Infinity, undefined (front-end server persistence per user ) [Default] <br/>
* nn Minutes (front-end server persistence per user, ignored if older than nn minutes)
* oScope.shared To indicate that this container is intended to be shared by several applications<br/>
* @param {sap.ui.core.Component} oComponent Since 1.27.0. SAPUI5 component which uses the container.
* This allows the stored data to be associated with the application.
* @returns {object} Promise object whose done function returns a {@link sap.ushell.services.Personalization.ContextContainer} object as parameter.
* The container provides setItemValue / getItemValue methods to synchronously operate on personalization data.
* By wrapping it in a VariantSetAdapter, an alternate interface to maintain variants can be obtained.
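*
* A minimal usage sketch (container key, validity and item key are illustrative only;
* the service instance is assumed to have been obtained via
* sap.ushell.Container.getService("Personalization")):
* <pre>
* oPersonalizationService.getContainer("myAppState", { validity: 30 }, oComponent)
*     .done(function (oContainer) {
*         oContainer.setItemValue("filters", { region: "EMEA" });
*         oContainer.save();
*     });
* </pre>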
* @public
* @alias sap.ushell.services.Personalization#getContainer
* @since 1.22.0
*/
Personalization.prototype.getContainer = function (sContainerKey, oScope, oComponent) {
oComponent = oComponent || this._getApplicationComponent();
return this._createContainer(sContainerKey, oScope, false, oComponent);
};
/**
* Factory method to obtain an empty Data Context object,
* to be used when data present in a prior context is not relevant
* (e.g. when using a "uniquely" generated key and planning to overwrite any colliding front-end server data).
*
* The call always returns a cleared container.
*
* Note that an existing container at the front-end server is not actually deleted or overwritten unless a save operation is executed.
*
* An initial object is returned.
*
* @param {string} sContainerKey - identifies the container. The string length is restricted to 40 characters
* @param {Object} oScope - scope object. Currently the validity property of the scope object is relevant:
* E.g. <code> { validity : 30}</code> indicates a validity of the data for 30 minutes.<br/>
* oScope.validity : validity of the container persistence in minutes
* valid values include:
* 0 (per FLP window),
* Infinity, undefined (front-end server persistence per user) [Default]
* nn Minutes (front-end server persistence per user, ignored if older than nn minutes)
* @param {sap.ui.core.Component} oComponent Since 1.27.0. SAPUI5 component which uses the container.
* This allows to associate the stored data with the application.
* @returns {object} Promise object whose done function returns a {@link sap.ushell.services.Personalization.ContextContainer}
* object as parameter. The personalization container provides two different interfaces to synchronously operate on personalization data.
* In the item mode the container contains items as name value pairs for personalization data.
* In the variant mode the container contains variant sets which contain variants containing items.
* @public
* @alias sap.ushell.services.Personalization#createEmptyContainer
* @since 1.22.0
*/
Personalization.prototype.createEmptyContainer = function (sContainerKey, oScope, oComponent) {
oComponent = oComponent || this._getApplicationComponent();
return this._createContainer(sContainerKey, oScope, true, oComponent);
};
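// Usage sketch (illustrative only): createEmptyContainer is intended for cases where prior data
// is irrelevant, e.g. with a generated key; nothing is deleted or overwritten at the front-end
// server until save() is called. The key and the variables sGeneratedId / oDraftData below are hypothetical.
//
// oPersonalizationService.createEmptyContainer("my.app.draft." + sGeneratedId, { validity: Infinity }, oComponent)
//     .done(function (oContainer) {
//         oContainer.setItemValue("draft", oDraftData);
//         oContainer.save(); // overwrites any colliding front-end server data
//     });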
Personalization.prototype._createContainer = function (sContainerKey, oScope, bCreateEmpty, oComponent) {
var that = this,
bLaunchpadReload,
oTransientAdapter,
oPersistentAdapter,
oChosenAdapter,
oAdjustedScope,
sPrefixedContainerKey,
bUseAppVariantStorage,
oDeferred = new jQuery.Deferred();
if (typeof sContainerKey !== "string") {
throw new utils.Error("sContainerKey is not a string: sap.ushell.services.Personalization", " " /* Empty string for missing component information */);
}
bLaunchpadReload = personalizationUtils.isLaunchpadReload();
bUseAppVariantStorage = !!(
personalizationUtils.isAppVariant(oComponent)
&& (!oScope || !oScope.shared)
&& this._oAppVariantAdapterWithBackendAdapter
);
// -- adjust scope
oAdjustedScope = personalizationUtils.adjustScope(oScope, bLaunchpadReload);
if (bUseAppVariantStorage) {
var oManifest = oComponent.getManifestObject();
oAdjustedScope.component = oComponent;
oAdjustedScope.appVarId = oManifest.getEntry("/sap.ui5/appVariantId");
oAdjustedScope.appVersion = oManifest.getEntry("/sap.app/applicationVersion/version");
}
// -- add prefix
sPrefixedContainerKey = personalizationUtils.addContainerPrefix(sContainerKey);
// -- choose adapter
oPersistentAdapter = bUseAppVariantStorage
? this._oAppVariantAdapterWithBackendAdapter
: this._oAdapterWithBackendAdapter;
oTransientAdapter = that._oAdapterWindowOnly;
oChosenAdapter = personalizationUtils.pickAdapter(oScope, bLaunchpadReload, oTransientAdapter, oPersistentAdapter);
personalizationUtils.loadAdapter(oChosenAdapter).then(function (oLoadedAdapter) {
var oLoadPromise,
oContextContainer = new ContextContainer(that, oLoadedAdapter, sPrefixedContainerKey, oAdjustedScope, oComponent);
// Historically, a sequence getContainer / load was always called.
// If an adapter supports returning an initialized container without requiring a subsequent load, it can set the flag
// supportsGetWithoutSubsequentLoad and the load call will be omitted in case an empty container is required.
var bSupportsGetWithoutSubsequentLoad = (oLoadedAdapter && oLoadedAdapter.supportsGetWithoutSubsequentLoad === true);
if (bCreateEmpty && bSupportsGetWithoutSubsequentLoad) {
oLoadPromise = new jQuery.Deferred();
oLoadPromise.resolve(oContextContainer);
} else {
oLoadPromise = oContextContainer.load();
}
oLoadPromise.fail(function () {
oDeferred.reject();
}).done(function () {
if (bCreateEmpty || oContextContainer._isExpired()) {
oContextContainer.clear();
}
oDeferred.resolve(oContextContainer);
});
}, function (oError) {
oDeferred.reject(oError);
});
return oDeferred.promise();
};
/**
* Asynchronously starts a deletion request for the given container identified by sContainerKey.
* Can be called without having ever called getContainer with the corresponding key
*
* Note: After invoking this operation, the state of other Containers obtained for the same key is undefined!
* If you want to use the container after deletion, it is strongly recommended to obtain
* a new instance of a container for the given key *after* the promise has returned.
*
* Note: Invoking this operation while another save or load operation is under way may result in failure.
*
* @param {string} sContainerKey identifies the container
* @param {Object} [oScope] scope object; the validity property determines which persistence (FLP window or front-end server) is addressed
* @returns {object} promise for the deletion operation
* @public
* @alias sap.ushell.services.Personalization#delContainer
* @since 1.22.0
*/
Personalization.prototype.delContainer = function (sContainerKey, oScope) {
// delete the bag, the adapter container & the container
var oDeferred = {},
oPrior,
sPrefixedContainerKey = "",
that = this;
oScope = that._adjustScope(oScope, false /* bLaunchpadReload, keep 1.22 compatibility */);
sPrefixedContainerKey = personalizationUtils.addContainerPrefix(sContainerKey);
oDeferred = new jQuery.Deferred();
oPrior = that._pendingContainerOperations_cancelAddNext(sContainerKey, null);
oPrior.always(function () {
that.getContainer(sContainerKey, oScope) // delays to oPrior! registers a new op!
.fail(function () {
that._pendingContainerOperations_cancelAddNext(sContainerKey, oDeferred); // reinstall oPrior (!)
oDeferred.reject();
})
.done(function (/*oContainer*/) {
var oAdapter;
// install the "latest" deferred
that._pendingContainerOperations_cancelAddNext(sContainerKey, oDeferred); // the getContainer above executed a load --> no flush required
oAdapter = oScope.validity === 0
? that._oAdapterWindowOnly
: that._oAdapterWithBackendAdapter;
personalizationUtils.loadAdapter(oAdapter).then(function (oLoadedAdapter) {
oLoadedAdapter.delAdapterContainer(sPrefixedContainerKey, oScope)
.fail(function () {
oDeferred.reject();
})
.done(function () {
oDeferred.resolve();
});
}, function () {
oDeferred.reject();
});
});
});
return oDeferred.promise();
};
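// Usage sketch (illustrative only): deleting a container and obtaining a fresh instance only
// after the deletion promise has resolved, as recommended above. The key is hypothetical.
//
// oPersonalizationService.delContainer("my.app.settings", { validity: Infinity })
//     .done(function () {
//         oPersonalizationService.getContainer("my.app.settings", { validity: Infinity }, oComponent)
//             .done(function (oNewContainer) { /* work with the fresh container */ });
//     });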
// return old promise, add oDeferred as new, if null, retain old!
Personalization.prototype._pendingContainerOperations_flushAddNext = function (sContainerKey, oDeferred) {
var oPendingOpDeferred,
fnSave;
oPendingOpDeferred = this._oPendingOperationsMap.get(sContainerKey);
if (!oPendingOpDeferred) {
oPendingOpDeferred = new jQuery.Deferred();
oPendingOpDeferred.resolve();
}
if (oDeferred !== null) {
this._oPendingOperationsMap.put(sContainerKey, oDeferred);
}
if (!oPendingOpDeferred || oPendingOpDeferred.state() !== "pending") {
return oPendingOpDeferred;
}
clearTimeout(oPendingOpDeferred._sapTimeoutId); //system function!
oPendingOpDeferred._sapTimeoutId = undefined;
if (typeof oPendingOpDeferred._sapFnSave === "function") {
fnSave = oPendingOpDeferred._sapFnSave;
oPendingOpDeferred._sapFnSave = undefined; // function can only be triggered at most one time
fnSave();
}
return oPendingOpDeferred;
};
Personalization.prototype._pendingContainerOperations_cancelAddNext = function (sContainerKey, oDeferred) {
var oPendingOpDeferred;
oPendingOpDeferred = this._oPendingOperationsMap.get(sContainerKey);
if (!oPendingOpDeferred) {
oPendingOpDeferred = new jQuery.Deferred();
oPendingOpDeferred.resolve();
}
if (oDeferred !== null) {
this._oPendingOperationsMap.put(sContainerKey, oDeferred);
}
if (!oPendingOpDeferred || oPendingOpDeferred.state() !== "pending") {
return oPendingOpDeferred;
}
if (oPendingOpDeferred._sapTimeoutId) {
clearTimeout(oPendingOpDeferred._sapTimeoutId);
oPendingOpDeferred._sapTimeoutId = undefined;
oPendingOpDeferred.resolve(Personalization.prototype.SAVE_DEFERRED_DROPPED);
}
return oPendingOpDeferred;
};
/**
* This interface is deprecated since 1.22, please use getContainer / delContainer.
*
* Note: the underlying storage model for objects stored with getContainer / getPersonalizationContainer is identical.<br/>
* Thus you can safely migrate your client implementation from the deprecated getPersonalizationContainer to getContainer without loss of data.
* One may even run a mixed set of applications on the same container keys.
* The sole differences are w.r.t. client-side handling of the Context data within one session.
*
* If you want to use the variant interface, use the following pattern
* <code>
* getContainer(sContainerKey).done(function(oContainer) {
* var variantSetAdapter = new Personalization.VariantSetAdapter(oContainer);
* });
* </code>
*
* Factory method to obtain a personalization container object which is a client-local buffer for personalization data.
* The Container data is asynchronously read on creation (if present, otherwise an initial object is created).
* The Container data can then be *synchronously* modified (read/write/delete).
* Only on invoking the save() method the data is persisted at the front-end server.
* This allows the application to perform multiple local modifications and delay the save operation.
* The factory method getPersonalizationContainer is asynchronous and loads the container via the connected adapter from the front-end server.
* All operations (except for the save operation) are executed synchronously, operating on the local data.
* This allows the application to control the round trips to the front-end server persistence.
*
* A personalization container can contain items as well as variant sets.
* Variant sets have the following structure:
* variantSet.variant.item
* A variant set encloses several variants of the same data.
*
* Example: An application has two types of variants.
* Variant type 1 contains filter values for a query, which are stored in item 1 of
* the variant, and personalization data for a table, which are stored in item 2 of the variant.
* Variant type 2 contains a setting (item 3) that is independent of the filtering and the table settings.
* It might be used for a different screen than the variants of type 1.
* In this example you would have 2 variant sets, one for each variant type.
*
* Do not mix up the usage of a personalizer and a personalization container for one containerKey.
*
* @param {string} sContainerKey - identifies the container
* @returns {object} Promise object whose done function returns a {@link sap.ushell.services.PersonalizationContainer} object as parameter.
* The personalization container provides two different interfaces to synchronously operate on personalization data.
* In the item mode the container contains items as name-value pairs for personalization data.
* In the variant mode the container contains variant sets which contain variants containing items.
* @deprecated use getContainer()
* @public
* @alias sap.ushell.services.Personalization#getPersonalizationContainer
* @since 1.18.0
*/
Personalization.prototype.getPersonalizationContainer = function (sContainerKey) {
var sPrefixedContainerKey = "",
oPromiseContainer = {},
oDeferred = {};
if (typeof sContainerKey !== "string") {
throw new utils.Error("sContainerKey is not a string: sap.ushell.services.Personalization", " "); // Empty string for missing component information
}
sPrefixedContainerKey = personalizationUtils.addContainerPrefix(sContainerKey);
if (this._oContainerMap.containsKey(sPrefixedContainerKey)) {
return this._oContainerMap.get(sPrefixedContainerKey).promise();
}
oDeferred = new jQuery.Deferred();
oPromiseContainer = new PersonalizationContainer(this._oAdapterWithBackendAdapter.instance, sPrefixedContainerKey);
oPromiseContainer
.done(function (oContainer) {
oDeferred.resolve(oContainer);
})
.fail(function (oContainer) {
oDeferred.reject(oContainer);
});
this._oContainerMap.put(sPrefixedContainerKey, oDeferred);
return oDeferred.promise();
};
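// Usage sketch (illustrative only): maintaining variants via the VariantSetAdapter on top of a
// container obtained with getContainer, mirroring the variantSet.variant.item structure described
// above. The key, variant set name and item names are hypothetical, and the adapter/variant method
// names are assumed from the VariantSetAdapter / VariantSet / Variant API.
//
// oPersonalizationService.getContainer("my.app.variants", { validity: Infinity }, oComponent)
//     .done(function (oContainer) {
//         var oAdapter = new Personalization.VariantSetAdapter(oContainer);
//         var oVariantSet = oAdapter.getVariantSet("filterVariants") || oAdapter.addVariantSet("filterVariants");
//         var oVariant = oVariantSet.addVariant("My default filter");
//         oVariant.setItemValue("query", { country: "DE" });  // item 1: filter values
//         oVariant.setItemValue("table", { sortBy: "name" }); // item 2: table settings
//         oContainer.save();
//     });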
/**
* @deprecated Please use getContainer / delContainer
*
* Asynchronously starts a deletion request for the given container identified by sContainerKey.
* Can be called without having ever created a personalization container.
*
* Note: After invoking this operation, the state of other PersonalizationContainers obtained for the same key is undefined!
* If you want to use the container after deletion, it is strongly recommended to obtain a new instance
* of PersonalizationContainer for the given key *after* the promise has returned.
*
* Note: Invoking this operation while another save or load operation is under way may result in failure.
*
* @param {string} sContainerKey identifies the container
* @returns {object} promise for the deletion operation
* @deprecated
* @public
* @alias sap.ushell.services.Personalization#delPersonalizationContainer
* @since 1.18.0
*/
Personalization.prototype.delPersonalizationContainer = function (sContainerKey) {
// delete the bag, the adapter container & the container
var oDeferred = {},
sPrefixedContainerKey = "",
that = this;
sPrefixedContainerKey = personalizationUtils.addContainerPrefix(sContainerKey);
oDeferred = new jQuery.Deferred();
this.getPersonalizationContainer(sContainerKey)
.fail(function () {
oDeferred.reject();
})
.done(function (/*oContainer*/) {
that._oAdapterWithBackendAdapter.instance.delAdapterContainer(sPrefixedContainerKey)
.fail(function () {
oDeferred.reject();
})
.done(function () {
that._oContainerMap.remove(sPrefixedContainerKey);
oDeferred.resolve();
});
});
return oDeferred.promise();
};
Personalization.prototype._adjustScope = personalizationUtils.adjustScope;
Personalization.hasNoAdapter = false;
Personalization.ContextContainer = ContextContainer;
Personalization.Variant = Variant;
Personalization.VariantSet = VariantSet;
Personalization.VariantSetAdapter = VariantSetAdapter;
Personalization.WindowAdapter = WindowAdapter;
Personalization.WindowAdapterContainer = WindowAdapterContainer;
return Personalization;
}, true /* bExport */);
|
HutRubberDuck/super-mini-divar
|
src/model/advertising.py
|
<reponame>HutRubberDuck/super-mini-divar<gh_stars>0
from sqlalchemy import String, Integer, Column, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from src.core.database import Model
class Advertising(Model):
__tablename__ = 'ads'
id: int = Column(Integer, primary_key=True)
title: str = Column(String(255), nullable=False)
description: str = Column(String(1024), nullable=False)
created_at: str = Column(DateTime, nullable=False)
expired_at: str = Column(DateTime, nullable=False)
user_id: int = Column(Integer, ForeignKey("users.id"))
accepted_by_admin_id: int = Column(Integer, ForeignKey("admins.user_id"))
district_id: int = Column(Integer, ForeignKey("districts.id"))
category_id: int = Column(Integer, ForeignKey("categories.id"))
user = relationship("User", back_populates="ads")
admin = relationship("Admin", back_populates="advertising")
infos = relationship("Info", back_populates="advertising")
district = relationship("District", back_populates="ads")
category = relationship("Category", back_populates="ads")
photos = relationship("Photo", back_populates="advertising")
features = relationship("Feature", back_populates="advertising")
report = relationship("Report", back_populates="advertising")
def __repr__(self):
return f"<Advertising name={self.title}>"
|
leeyazhou/crpc
|
crpc-transport/crpc-transport-netty/src/main/java/com/github/leeyazhou/crpc/transport/netty/handler/NettyServerHandler.java
|
/**
* Copyright © 2016~2020 CRPC (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.leeyazhou.crpc.transport.netty.handler;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import com.github.leeyazhou.crpc.core.Constants;
import com.github.leeyazhou.crpc.core.URL;
import com.github.leeyazhou.crpc.core.logger.Logger;
import com.github.leeyazhou.crpc.core.logger.LoggerFactory;
import com.github.leeyazhou.crpc.rpc.Handler;
import com.github.leeyazhou.crpc.rpc.Invocation;
import com.github.leeyazhou.crpc.transport.connection.ConnectionManager;
import com.github.leeyazhou.crpc.transport.netty.NettyConnection;
import com.github.leeyazhou.crpc.transport.netty.util.ChannelUtil;
import com.github.leeyazhou.crpc.transport.protocol.payload.Payload;
import com.github.leeyazhou.crpc.transport.protocol.payload.RequestPayloadBody;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.timeout.IdleState;
import io.netty.handler.timeout.IdleStateEvent;
/**
* @author zach
* @author leeyazhou
*
*/
@Sharable
public class NettyServerHandler extends SimpleChannelInboundHandler<Payload> {
private static final Logger logger = LoggerFactory.getLogger(NettyServerHandler.class);
private AtomicInteger idleCount = new AtomicInteger(0);
private final int IDLE_MAX_COUNT = 60;
private static final boolean DEBUG_ENABLED = logger.isDebugEnabled();
private final Handler<?> serverHandler;
private final ConnectionManager connectionManager;
public NettyServerHandler(Handler<?> serverHandler, ConnectionManager channelManager) {
this.serverHandler = serverHandler;
this.connectionManager = channelManager;
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
if (!(cause instanceof IOException)) {
logger.error("catch some exception not IOException", cause);
}
if (DEBUG_ENABLED) {
logger.debug("connection reset , address : " + ctx.channel().remoteAddress(), cause);
}
}
@Override
protected void channelRead0(final ChannelHandlerContext ctx, final Payload payload) throws Exception {
idleCount.set(0);
RequestPayloadBody request = (RequestPayloadBody) payload.getPayloadBody();
Invocation invocation = new Invocation();
invocation.setArgs(request.getArgs());
invocation.setArgTypes(request.getArgTypes());
invocation.setServiceTypeName(request.getServiceTypeName());
invocation.setMethodName(request.getMethodName());
invocation.setOneWay(request.isOneWay());
invocation.setTimeout(request.getTimeout());
URL url = ChannelUtil.toUrl(ctx);
invocation.addAttachement("url", url);
invocation.addAttachement("payload", payload);
serverHandler.handle(invocation);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
super.channelActive(ctx);
if (logger.isInfoEnabled()) {
logger.info("channel active from " + ctx.channel().remoteAddress());
}
URL url = ChannelUtil.toUrl(ctx);
connectionManager.addServerConnection(new NettyConnection(ctx.channel(), url));
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof IdleStateEvent) {
IdleStateEvent event = (IdleStateEvent) evt;
if (event.state() == IdleState.ALL_IDLE && idleCount.incrementAndGet() >= IDLE_MAX_COUNT) {
if (logger.isInfoEnabled()) {
logger.info("channel:" + ctx + " is idle for " + idleCount.get() * Constants.DEFAULT_HEARTBEAT_TIMEOUT
+ " second, so close it !");
}
idleCount.set(0);
ctx.channel().close();
}
}
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
super.channelInactive(ctx);
URL url = ChannelUtil.toUrl(ctx);
connectionManager.removeServerChannel(url.getAddress());
}
}
|
sknyuki/Homework
|
JisangLee/java/homework/fifth/src/Hw7.java
|
<gh_stars>10-100
public class Hw7 {
public static void main(String[] args) {
//The company has 7 employees.
//They all joined at the same time and started at 35,000,000 won.
//Each person's annual raise rate is 1 ~ 10% per year.
//Let's run a random simulation of the salary they will receive after 5 years.
//Since there are 7 employees, allocating an array of size 7 should do..
//So 35,000,000 won should be set as the initial value...
//Since each raise rate is 1 ~ 10% per year, using a random function should do...
//It is the salary after 5 years, so each year multiply the salary by the raise rate, and the next year multiply the raised salary by the raise rate again...
final int EMP_NUM = 7; //set to 7 because there are 7 employees
final int INIT_SALARY = 3500; //the starting salary is 3,500, so set to 3,500
//The raise rate is 1 ~ 10%, but with MAX = 10 and MIN = 1 decimal places cannot be represented
//so set MAX to 10000 and MIN to 1000 and divide by 1000
final int MAX = 10000;
final int MIN = 1000;
final int END_YEAR = 5; //the salary is received after 5 years, so set to 5
final float BIAS = 1000;
final float PERCENT = 1 / 100.f; //the random value must be multiplied by 0.01, so set to 1 / 100..
int range = MAX - MIN + 1;
float percent;
float[] emp = new float[EMP_NUM];
for (int i = 0; i < EMP_NUM; i++) { //arrays start at 0, so employee 1 is index 0
emp[i] = INIT_SALARY; //set the starting salary
}
for (int i = 1; i < END_YEAR; i++) {
//arrays start at 0 and the calculation begins with the salary received in year 2, so i = 1
//it is the salary after 5 years, but arrays start at 0, so use i < END_YEAR; the comparison must not include equality.
for (int j = 0; j < EMP_NUM; j++) {
//to compute the salary for employee 1 through employee 7
//arrays start at 0, so j = 0, and the comparison with EMP_NUM must not include equality.
// (int) (Math.random() * range + MIN) - 1000 ~ 10000: A
// A / BIAS = 1 ~ 10: B
// B * PERCENT = 0.01 ~ 0.1
percent = (float) ((int) (Math.random() * range + MIN) / BIAS) * PERCENT;
// if the raise rate is 1%
// final salary = current salary + current salary * raise rate
// = current salary * (1 + raise rate)
// emp[j] = emp[j] * (1 + percent);
emp[j] += (emp[j] * percent); // emp[j] = emp[j] + emp[j] * percent
System.out.printf("연봉[%d] = %f, 증가율 = %f\n", j, emp[j], percent);
}
System.out.println(); //printed to separate each year from the next
//int[] employee = new int[END];
/*final int SALARY = 3500; //initial salary value
for (int i = START - 1; i < END; i++) { //arrays start at 0, so subtract 1 from START...
employee[i] = SALARY; //salary received in the first year
//System.out.println("arr[%d]: %d\n",i,employee[i]);
}
for (int i = 1; i < END_YEAR; i++) {
for (int j = 0; j < END; j++) { //the first year has already been received, so run from j=2; it is the salary after 5 years...
int rand = (int) (Math.random() * range + MIN);
employee[j] = employee[j] + (employee[j] * (rand / 100));
System.out.printf("arr[%d]: %d\n ", j, employee[j]);*/
}
}
}
|
eno-lang/enolib
|
python/tests/generated/api/section/test_assert_all_touched.py
|
import enolib
def test_asserting_everything_was_touched_on_an_empty_document_produces_the_expected_result():
input = ("")
enolib.parse(input).assert_all_touched()
assert bool('it passes') is True
def test_asserting_everything_was_touched_on_an_untouched_document_containing_a_single_field_raises_the_expected_validationerror():
error = None
input = ("field: value")
try:
enolib.parse(input).assert_all_touched()
except enolib.ValidationError as _error:
if isinstance(_error, enolib.ValidationError):
error = _error
else:
raise _error
assert type(error) is enolib.ValidationError
text = ("This element was not expected, make sure it is at the right place in the document and that its key is not mis-typed.")
assert error.text == text
snippet = (" Line | Content\n"
" > 1 | field: value")
assert error.snippet == snippet
assert error.selection['from']['line'] == 0
assert error.selection['from']['column'] == 0
assert error.selection['to']['line'] == 0
assert error.selection['to']['column'] == 12
def test_asserting_everything_was_touched_on_an_untouched_document_containing_a_single_field_with_a_custom_message_raises_the_expected_validationerror():
error = None
input = ("field: value")
try:
enolib.parse(input).assert_all_touched('my message')
except enolib.ValidationError as _error:
if isinstance(_error, enolib.ValidationError):
error = _error
else:
raise _error
assert type(error) is enolib.ValidationError
text = ("my message")
assert error.text == text
snippet = (" Line | Content\n"
" > 1 | field: value")
assert error.snippet == snippet
assert error.selection['from']['line'] == 0
assert error.selection['from']['column'] == 0
assert error.selection['to']['line'] == 0
assert error.selection['to']['column'] == 12
def test_asserting_everything_was_touched_on_an_untouched_document_containing_a_single_field_with_a_custom_message_function_raises_the_expected_validationerror():
error = None
input = ("field: value")
try:
enolib.parse(input).assert_all_touched(lambda element: f"my generated message for unexpected element '{element.string_key()}'")
except enolib.ValidationError as _error:
if isinstance(_error, enolib.ValidationError):
error = _error
else:
raise _error
assert type(error) is enolib.ValidationError
text = ("my generated message for unexpected element 'field'")
assert error.text == text
snippet = (" Line | Content\n"
" > 1 | field: value")
assert error.snippet == snippet
assert error.selection['from']['line'] == 0
assert error.selection['from']['column'] == 0
assert error.selection['to']['line'] == 0
assert error.selection['to']['column'] == 12
|
Shamraev/motion_imitation
|
third_party/eigen3/include/unsupported/test/cxx11_maxsizevector.cpp
|
<gh_stars>1000+
#include "main.h"
#include <exception> // std::exception
#include <unsupported/Eigen/CXX11/Tensor>
struct Foo
{
static Index object_count;
static Index object_limit;
EIGEN_ALIGN_TO_BOUNDARY(128) int dummy;
Foo(int x=0) : dummy(x)
{
#ifdef EIGEN_EXCEPTIONS
// TODO: Is this the correct way to handle this?
if (Foo::object_count > Foo::object_limit) { std::cout << "\nThrow!\n"; throw Foo::Fail(); }
#endif
std::cout << '+';
++Foo::object_count;
eigen_assert((internal::UIntPtr(this) & (127)) == 0);
}
Foo(const Foo&)
{
std::cout << 'c';
++Foo::object_count;
eigen_assert((internal::UIntPtr(this) & (127)) == 0);
}
~Foo()
{
std::cout << '~';
--Foo::object_count;
}
class Fail : public std::exception {};
};
Index Foo::object_count = 0;
Index Foo::object_limit = 0;
EIGEN_DECLARE_TEST(cxx11_maxsizevector)
{
typedef MaxSizeVector<Foo> VectorX;
Foo::object_count = 0;
for(int r = 0; r < g_repeat; r++) {
Index rows = internal::random<Index>(3,30);
Foo::object_limit = internal::random<Index>(0, rows - 2);
std::cout << "object_limit = " << Foo::object_limit << std::endl;
bool exception_raised = false;
#ifdef EIGEN_EXCEPTIONS
try
{
#endif
std::cout << "\nVectorX m(" << rows << ");\n";
VectorX vect(rows);
for(int i=0; i<rows; ++i)
vect.push_back(Foo());
#ifdef EIGEN_EXCEPTIONS
VERIFY(false); // not reached if exceptions are enabled
}
catch (const Foo::Fail&) { exception_raised = true; }
VERIFY(exception_raised);
#endif
VERIFY_IS_EQUAL(Index(0), Foo::object_count);
{
Foo::object_limit = rows+1;
VectorX vect2(rows, Foo());
VERIFY_IS_EQUAL(Foo::object_count, rows);
}
VERIFY_IS_EQUAL(Index(0), Foo::object_count);
std::cout << '\n';
}
}
|
corindwyer/titus-control-plane
|
titus-server-federation/src/test/java/com/netflix/titus/federation/service/AggregatingJobServiceGatewayWithSingleCellTest.java
|
/*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.titus.federation.service;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableMap;
import com.netflix.titus.api.federation.model.Cell;
import com.netflix.titus.api.model.Page;
import com.netflix.titus.common.runtime.TitusRuntime;
import com.netflix.titus.common.runtime.TitusRuntimes;
import com.netflix.titus.common.util.time.Clocks;
import com.netflix.titus.common.util.time.TestClock;
import com.netflix.titus.federation.service.router.ApplicationCellRouter;
import com.netflix.titus.federation.startup.GrpcConfiguration;
import com.netflix.titus.federation.startup.TitusFederationConfiguration;
import com.netflix.titus.grpc.protogen.Job;
import com.netflix.titus.grpc.protogen.JobDescriptor;
import com.netflix.titus.grpc.protogen.JobQuery;
import com.netflix.titus.grpc.protogen.JobQueryResult;
import com.netflix.titus.grpc.protogen.Task;
import com.netflix.titus.grpc.protogen.TaskQuery;
import com.netflix.titus.grpc.protogen.TaskQueryResult;
import com.netflix.titus.runtime.endpoint.metadata.AnonymousCallMetadataResolver;
import com.netflix.titus.runtime.endpoint.v3.grpc.GrpcJobManagementModelConverters;
import com.netflix.titus.runtime.jobmanager.JobManagerCursors;
import io.grpc.testing.GrpcServerRule;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import rx.observers.AssertableSubscriber;
import rx.subjects.PublishSubject;
import static com.netflix.titus.api.jobmanager.service.JobManagerConstants.UNDEFINED_CALL_METADATA;
import static com.netflix.titus.runtime.endpoint.v3.grpc.GrpcJobQueryModelConverters.toGrpcPage;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class AggregatingJobServiceGatewayWithSingleCellTest {
private static final int TASKS_IN_GENERATED_JOBS = 10;
private final TitusRuntime titusRuntime = TitusRuntimes.internal();
@Rule
public GrpcServerRule remoteFederationRule = new GrpcServerRule().directExecutor();
@Rule
public final GrpcServerRule cell = new GrpcServerRule().directExecutor();
private String stackName;
private AggregatingJobServiceGateway service;
private Map<Cell, GrpcServerRule> cellToServiceMap;
private TestClock clock;
private ServiceDataGenerator dataGenerator;
@Before
public void setUp() {
stackName = UUID.randomUUID().toString();
GrpcConfiguration grpcClientConfiguration = mock(GrpcConfiguration.class);
when(grpcClientConfiguration.getRequestTimeoutMs()).thenReturn(1000L);
TitusFederationConfiguration titusFederationConfiguration = mock(TitusFederationConfiguration.class);
when(titusFederationConfiguration.getStack()).thenReturn(stackName);
when(titusFederationConfiguration.getCells()).thenReturn("one=1");
when(titusFederationConfiguration.getRoutingRules()).thenReturn("one=(app1.*|app2.*);two=(app3.*)");
CellInfoResolver cellInfoResolver = new DefaultCellInfoResolver(titusFederationConfiguration);
ApplicationCellRouter cellRouter = new ApplicationCellRouter(cellInfoResolver, titusFederationConfiguration);
List<Cell> cells = cellInfoResolver.resolve();
cellToServiceMap = ImmutableMap.of(cells.get(0), cell);
RemoteFederationConnector fedConnector = mock(RemoteFederationConnector.class);
when(fedConnector.getChannel()).thenReturn(remoteFederationRule.getChannel());
CellConnector cellConnector = mock(CellConnector.class);
when(cellConnector.getChannels()).thenReturn(cellToServiceMap.entrySet().stream()
.collect(Collectors.toMap(Map.Entry::getKey, cellPairEntry -> cellPairEntry.getValue().getChannel()))
);
when(cellConnector.getChannelForCell(any(Cell.class))).thenAnswer(invocation ->
Optional.ofNullable(cellToServiceMap.get(invocation.<Cell>getArgument(0)))
.map(GrpcServerRule::getChannel)
);
final AggregatingCellClient aggregatingCellClient = new AggregatingCellClient(cellConnector);
final AnonymousCallMetadataResolver anonymousCallMetadataResolver = new AnonymousCallMetadataResolver();
service = new AggregatingJobServiceGateway(
grpcClientConfiguration,
titusFederationConfiguration,
cellConnector,
cellRouter,
aggregatingCellClient,
new AggregatingJobManagementServiceHelper(aggregatingCellClient, grpcClientConfiguration),
titusRuntime
);
clock = Clocks.test();
dataGenerator = new ServiceDataGenerator(clock, TASKS_IN_GENERATED_JOBS);
}
@Test
public void findJobsAddsStackName() {
Random random = new Random();
final List<Job> cellSnapshot = new ArrayList<>();
for (int i = 0; i < 5; i++) {
cellSnapshot.addAll(dataGenerator.newBatchJobs(random.nextInt(10), GrpcJobManagementModelConverters::toGrpcJob));
cellSnapshot.addAll(dataGenerator.newServiceJobs(random.nextInt(10), GrpcJobManagementModelConverters::toGrpcJob));
clock.advanceTime(1, TimeUnit.MINUTES);
}
cell.getServiceRegistry().addService(new CellWithFixedJobsService(cellSnapshot, PublishSubject.create()));
JobQuery query = JobQuery.newBuilder()
.setPage(toGrpcPage(Page.unlimited()))
.build();
final AssertableSubscriber<JobQueryResult> testSubscriber = service.findJobs(query, UNDEFINED_CALL_METADATA).test();
testSubscriber.awaitTerminalEvent(1, TimeUnit.SECONDS);
testSubscriber.assertNoErrors().assertCompleted();
testSubscriber.assertValueCount(1);
final List<JobQueryResult> results = testSubscriber.getOnNextEvents();
assertThat(results).hasSize(1);
// expect stackName to have changed
List<Job> expected = cellSnapshot.stream()
.sorted(JobManagerCursors.jobCursorOrderComparator())
.map(this::withStackName)
.collect(Collectors.toList());
assertThat(results.get(0).getItemsList()).containsExactlyElementsOf(expected);
}
@Test
public void findTasksMergesAllCellsIntoSingleResult() {
List<Task> cellSnapshot = new ArrayList<>();
// 10 jobs on each cell with TASKS_IN_GENERATED_JOBS tasks each
for (int i = 0; i < 5; i++) {
cellSnapshot.addAll(dataGenerator.newBatchJobWithTasks());
cellSnapshot.addAll(dataGenerator.newServiceJobWithTasks());
clock.advanceTime(1, TimeUnit.MINUTES);
}
cell.getServiceRegistry().addService(new CellWithFixedTasksService(cellSnapshot));
TaskQuery query = TaskQuery.newBuilder()
.setPage(toGrpcPage(Page.unlimited()))
.build();
final AssertableSubscriber<TaskQueryResult> testSubscriber = service.findTasks(query, UNDEFINED_CALL_METADATA).test();
testSubscriber.awaitTerminalEvent(1, TimeUnit.SECONDS);
testSubscriber.assertNoErrors().assertCompleted();
testSubscriber.assertValueCount(1);
final List<TaskQueryResult> results = testSubscriber.getOnNextEvents();
assertThat(results).hasSize(1);
// expect stackName to have changed
List<Task> expected = cellSnapshot.stream()
.sorted(JobManagerCursors.taskCursorOrderComparator())
.map(this::withStackName)
.collect(Collectors.toList());
assertThat(results.get(0).getItemsList()).containsExactlyElementsOf(expected);
}
private Job withStackName(Job job) {
JobDescriptor jobDescriptor = job.getJobDescriptor().toBuilder()
.putAttributes("titus.stack", stackName)
.build();
return job.toBuilder().setJobDescriptor(jobDescriptor).build();
}
private Task withStackName(Task task) {
return task.toBuilder()
.putTaskContext("titus.stack", stackName)
.build();
}
}
|
djaodjin/djaodjin-signup
|
signup/migrations/0006_v0_4_7.py
|
<gh_stars>10-100
# Generated by Django 2.2.13 on 2021-02-01 22:49
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
class Migration(migrations.Migration):
dependencies = [
('signup', '0005_0_2_8'),
]
operations = [
migrations.RemoveField(
model_name='contact',
name='verification_key',
),
migrations.AddField(
model_name='contact',
name='email_verification_at',
field=models.DateTimeField(help_text='Date/time when the e-mail verification key was sent', null=True),
),
migrations.AddField(
model_name='contact',
name='email_verification_key',
field=models.CharField(max_length=40, null=True, verbose_name='Email verification key'),
),
migrations.AddField(
model_name='contact',
name='email_verified_at',
field=models.DateTimeField(help_text='Date/time when the e-mail was last verified', null=True),
),
migrations.AddField(
model_name='contact',
name='phone',
field=phonenumber_field.modelfields.PhoneNumberField(help_text='Phone number', max_length=128, null=True, region=None, unique=True, verbose_name='Phone number'),
),
migrations.AddField(
model_name='contact',
name='phone_verification_at',
field=models.DateTimeField(help_text='Date/time when the phone verification key was sent', null=True),
),
migrations.AddField(
model_name='contact',
name='phone_verification_key',
field=models.CharField(max_length=40, null=True, verbose_name='Phone verification key'),
),
migrations.AddField(
model_name='contact',
name='phone_verified_at',
field=models.DateTimeField(help_text='Date/time when the phone number was last verified', null=True),
),
migrations.AddField(
model_name='credentials',
name='extra',
field=models.TextField(null=True),
),
migrations.AlterField(
model_name='activity',
name='account',
field=models.ForeignKey(help_text='Account the activity is associated to', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activities', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='activity',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='Date/time of creation (in ISO format)'),
),
migrations.AlterField(
model_name='activity',
name='created_by',
field=models.ForeignKey(help_text='User that created the activity', null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='activity',
name='text',
field=models.TextField(blank=True, help_text='Free form text description of the activity'),
),
migrations.AlterField(
model_name='contact',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='Date/time of creation (in ISO format)'),
),
migrations.AlterField(
model_name='contact',
name='email',
field=models.EmailField(help_text='E-mail address', max_length=254, null=True, unique=True, verbose_name='E-mail address'),
),
migrations.AlterField(
model_name='contact',
name='extra',
field=models.TextField(help_text='Extra meta data (can be stringify JSON)', null=True),
),
migrations.AlterField(
model_name='contact',
name='full_name',
field=models.CharField(blank=True, help_text='Full name (effectively first name followed by last name)', max_length=60, verbose_name='Full name'),
),
migrations.AlterField(
model_name='contact',
name='nick_name',
field=models.CharField(blank=True, help_text='Short casual name used to address the contact', max_length=60, verbose_name='<NAME>'),
),
migrations.AlterField(
model_name='contact',
name='picture',
field=models.URLField(blank=True, help_text='Profile picture', max_length=2083, null=True, verbose_name='URL to a profile picture'),
),
migrations.AlterField(
model_name='contact',
name='slug',
field=models.SlugField(help_text='Unique identifier shown in the URL bar, effectively the username for profiles with login credentials.', unique=True),
),
migrations.AlterField(
model_name='contact',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='contacts', to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='notification',
name='slug',
field=models.SlugField(help_text='Unique identifier shown in the URL bar', unique=True),
),
]
|
inket/smartPins
|
classdump_SafariShared/NSError-SafariSharedExtras.h
|
<reponame>inket/smartPins<gh_stars>1-10
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import <Foundation/NSError.h>
@interface NSError (SafariSharedExtras)
- (id)safari_privacyPreservingDescription;
- (BOOL)safari_isClientSideNetworkError;
@end
|
JamiePringle/ROMScode-PringleBBLpaper
|
roms_Sep2018/roms/ROMS/Nonlinear/Sediment/sediment_inp.h
|
<reponame>JamiePringle/ROMScode-PringleBBLpaper
SUBROUTINE read_SedPar (model, inp, out, Lwrite)
!
!=======================================================================
! !
! This routine reads in cohesive and non-cohesive sediment model !
! parameters. !
! !
!=======================================================================
!
USE mod_param
USE mod_parallel
USE mod_ncparam
USE mod_scalars
USE mod_sediment
!
implicit none
!
! Imported variable declarations
!
logical, intent(in) :: Lwrite
integer, intent(in) :: model, inp, out
!
! Local variable declarations.
!
integer :: Npts, Nval
integer :: iTrcStr, iTrcEnd
integer :: i, ifield, igrid, itracer, itrc, ng, nline, status
integer :: decode_line, load_i, load_l, load_lbc, load_r
logical, dimension(Ngrids) :: Lbed
logical, dimension(Ngrids) :: Lbottom
logical, dimension(NCS,Ngrids) :: Lmud
logical, dimension(NNS,Ngrids) :: Lsand
real(r8), dimension(Ngrids) :: Rbed
real(r8), dimension(NCS,Ngrids) :: Rmud
real(r8), dimension(NNS,Ngrids) :: Rsand
real(r8), dimension(100) :: Rval
character (len=40 ) :: KeyWord
character (len=256) :: line
character (len=256), dimension(200) :: Cval
!
!-----------------------------------------------------------------------
! Initialize.
!-----------------------------------------------------------------------
!
igrid=1 ! nested grid counter
itracer=0 ! LBC tracer counter
iTrcStr=1 ! first LBC tracer to process
iTrcEnd=NST ! last LBC tracer to process
nline=0 ! LBC multi-line counter
!
!-----------------------------------------------------------------------
! Read in cohesive and non-cohesive model parameters.
!-----------------------------------------------------------------------
!
DO WHILE (.TRUE.)
READ (inp,'(a)',ERR=10,END=20) line
status=decode_line(line, KeyWord, Nval, Cval, Rval)
IF (status.gt.0) THEN
SELECT CASE (TRIM(KeyWord))
CASE ('Lsediment')
Npts=load_l(Nval, Cval, Ngrids, Lsediment)
CASE ('NEWLAYER_THICK')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
newlayer_thick(ng)=Rbed(ng)
END DO
CASE ('MINLAYER_THICK')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
minlayer_thick(ng)=Rbed(ng)
END DO
#ifdef MIXED_BED
CASE ('TRANSC')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
transC(ng)=Rbed(ng)
END DO
CASE ('TRANSN')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
transN(ng)=Rbed(ng)
END DO
#endif
CASE ('BEDLOAD_COEFF')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
bedload_coeff(ng)=Rbed(ng)
END DO
CASE ('LBC(isTvar)')
IF (itracer.lt.NST) THEN
itracer=itracer+1
ELSE
itracer=1 ! next nested grid
END IF
ifield=isTvar(idsed(itracer))
Npts=load_lbc(Nval, Cval, line, nline, ifield, igrid, &
& idsed(iTrcStr), idsed(iTrcEnd), &
& Vname(1,idTvar(idsed(itracer))), LBC)
#if defined ADJOINT || defined TANGENT || defined TL_IOMS
CASE ('ad_LBC(isTvar)')
IF (itracer.lt.NST) THEN
itracer=itracer+1
ELSE
itracer=1 ! next nested grid
END IF
ifield=isTvar(idsed(itracer))
Npts=load_lbc(Nval, Cval, line, nline, ifield, igrid, &
& idsed(iTrcStr), idsed(iTrcEnd), &
& Vname(1,idTvar(idsed(itracer))), ad_LBC)
#endif
CASE ('MUD_SD50')
IF (.not.allocated(Sd50)) allocate (Sd50(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
Sd50(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_CSED')
IF (.not.allocated(Csed)) allocate (Csed(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud )
DO ng=1,Ngrids
DO itrc=1,NCS
Csed(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_SRHO')
IF (.not.allocated(Srho)) allocate (Srho(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
Srho(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_WSED')
IF (.not.allocated(Wsed)) allocate (Wsed(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
Wsed(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_ERATE')
IF (.not.allocated(Erate)) allocate (Erate(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
Erate(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_TAU_CE')
IF (.not.allocated(tau_ce)) allocate (tau_ce(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
tau_ce(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_TAU_CD')
IF (.not.allocated(tau_cd)) allocate (tau_cd(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
tau_cd(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_POROS')
IF (.not.allocated(poros)) allocate (poros(NST,Ngrids))
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
poros(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_TNU2')
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
nl_tnu2(i,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_TNU4')
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
nl_tnu4(i,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('ad_MUD_TNU2')
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
ad_tnu2(i,ng)=Rmud(itrc,ng)
tl_tnu2(i,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('ad_MUD_TNU4')
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
ad_tnu4(i,ng)=Rmud(itrc,ng)
nl_tnu4(i,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_Sponge')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
LtracerSponge(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('MUD_AKT_BAK')
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
Akt_bak(i,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_AKT_fac')
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
ad_Akt_fac(i,ng)=Rmud(itrc,ng)
tl_Akt_fac(i,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_TNUDG')
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
Tnudg(i,ng)=Rmud(itrc,ng)
END DO
END DO
CASE ('MUD_MORPH_FAC')
IF (.not.allocated(morph_fac)) THEN
allocate (morph_fac(NST,Ngrids))
END IF
Npts=load_r(Nval, Rval, NCS*Ngrids, Rmud)
DO ng=1,Ngrids
DO itrc=1,NCS
morph_fac(itrc,ng)=Rmud(itrc,ng)
END DO
END DO
#if defined COHESIVE_BED || defined MIXED_BED
CASE ('MUD_TAUCR_MIN')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
tcr_min(ng)=Rbed(ng)
END DO
CASE ('MUD_TAUCR_MAX')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
tcr_max(ng)=Rbed(ng)
END DO
CASE ('MUD_TAUCR_SLOPE')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
tcr_slp(ng)=Rbed(ng)
END DO
CASE ('MUD_TAUCR_OFF')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
tcr_off(ng)=Rbed(ng)
END DO
CASE ('MUD_TAUCR_TIME')
Npts=load_r(Nval, Rval, Ngrids, Rbed)
DO ng=1,Ngrids
tcr_tim(ng)=Rbed(ng)
END DO
#endif
CASE ('MUD_Ltsrc', 'MUD_Ltracer')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
LtracerSrc(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('MUD_Ltclm')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
LtracerCLM(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('MUD_Tnudge')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsed(itrc)
LnudgeTCLM(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Hout(idmud)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idTvar(idsed(itrc))
Hout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Hout(iMfrac)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idfrac(itrc)
Hout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Hout(iMmass)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idBmas(itrc)
Hout(i,ng)=Lmud(itrc,ng)
END DO
END DO
#ifdef BEDLOAD
CASE ('Hout(iMUbld)')
DO ng=1,Ngrids
DO itrc=1,NCS
IF (idUbld(itrc).eq.0) THEN
IF (Master) WRITE (out,30) 'idUbld'
exit_flag=5
RETURN
END IF
END DO
END DO
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idUbld(itrc)
Hout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Hout(iMVbld)')
DO ng=1,Ngrids
DO itrc=1,NCS
IF (idVbld(itrc).eq.0) THEN
IF (Master) WRITE (out,30) 'idVbld'
exit_flag=5
RETURN
END IF
END DO
END DO
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idVbld(itrc)
Hout(i,ng)=Lmud(itrc,ng)
END DO
END DO
#endif
CASE ('Qout(idmud)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idTvar(idsed(itrc))
Qout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Qout(iSmud)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idsurT(idsed(itrc))
Qout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Qout(iMfrac)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idfrac(itrc)
Qout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Qout(iMmass)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idBmas(itrc)
Hout(i,ng)=Lmud(itrc,ng)
END DO
END DO
#ifdef BEDLOAD
CASE ('Qout(iMUbld)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idUbld(itrc)
Qout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Qout(iMVbld)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idVbld(itrc)
Qout(i,ng)=Lmud(itrc,ng)
END DO
END DO
#endif
#if defined AVERAGES || \
(defined AD_AVERAGES && defined ADJOINT) || \
(defined RP_AVERAGES && defined TL_IOMS) || \
(defined TL_AVERAGES && defined TANGENT)
CASE ('Aout(idmud)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idTvar(idsed(itrc))
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Aout(iMTTav)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idTTav(idsed(itrc))
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Aout(iMUTav)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idUTav(idsed(itrc))
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Aout(iMVTav)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idVTav(idsed(itrc))
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Aout(MHUTav)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=iHUTav(idsed(itrc))
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Aout(MHVTav)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=iHVTav(idsed(itrc))
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
# ifdef BEDLOAD
CASE ('Aout(iMUbld)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idUbld(itrc)
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
CASE ('Aout(iMVbld)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO itrc=1,NCS
i=idVbld(itrc)
Aout(i,ng)=Lmud(itrc,ng)
END DO
END DO
# endif
#endif
#ifdef DIAGNOSTICS_TS
CASE ('Dout(MTrate)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTrate),ng)=Lmud(i,ng)
END DO
END DO
CASE ('Dout(MThadv)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iThadv),ng)=Lmud(i,ng)
END DO
END DO
CASE ('Dout(MTxadv)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTxadv),ng)=Lmud(i,ng)
END DO
END DO
CASE ('Dout(MTyadv)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTyadv),ng)=Lmud(i,ng)
END DO
END DO
CASE ('Dout(MTvadv)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTvadv),ng)=Lmud(i,ng)
END DO
END DO
# if defined TS_DIF2 || defined TS_DIF4
CASE ('Dout(MThdif)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iThdif),ng)=Lmud(i,ng)
END DO
END DO
CASE ('Dout(MTxdif)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTxdif),ng)=Lmud(i,ng)
END DO
END DO
CASE ('Dout(MTydif)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTydif),ng)=Lmud(i,ng)
END DO
END DO
# if defined MIX_GEO_TS || defined MIX_ISO_TS
CASE ('Dout(MTsdif)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTsdif),ng)=Lmud(i,ng)
END DO
END DO
# endif
# endif
CASE ('Dout(MTvdif)')
Npts=load_l(Nval, Cval, NCS*Ngrids, Lmud)
DO ng=1,Ngrids
DO i=1,NCS
itrc=idsed(i)
Dout(idDtrc(itrc,iTvdif),ng)=Lmud(i,ng)
END DO
END DO
#endif
CASE ('SAND_SD50')
IF (.not.allocated(Sd50)) allocate (Sd50(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
Sd50(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_CSED')
IF (.not.allocated(Csed)) allocate (Csed(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand )
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
Csed(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_SRHO')
IF (.not.allocated(Srho)) allocate (Srho(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
Srho(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_WSED')
IF (.not.allocated(Wsed)) allocate (Wsed(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
Wsed(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_ERATE')
IF (.not.allocated(Erate)) allocate (Erate(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
Erate(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_TAU_CE')
IF (.not.allocated(tau_ce)) allocate (tau_ce(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
tau_ce(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_TAU_CD')
IF (.not.allocated(tau_cd)) allocate (tau_cd(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
tau_cd(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_POROS')
IF (.not.allocated(poros)) allocate (poros(NST,Ngrids))
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
poros(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_TNU2')
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
nl_tnu2(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_TNU4')
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
nl_tnu4(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('ad_SAND_TNU2')
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
ad_tnu2(i,ng)=Rsand(itrc,ng)
tl_tnu2(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('ad_SAND_TNU4')
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
ad_tnu4(i,ng)=Rsand(itrc,ng)
tl_tnu4(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_Sponge')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
LtracerSponge(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('SAND_AKT_BAK')
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
Akt_bak(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_AKT_fac')
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
ad_Akt_fac(i,ng)=Rsand(itrc,ng)
tl_Akt_fac(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_TNUDG')
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
Tnudg(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_MORPH_FAC')
IF (.not.allocated(morph_fac)) THEN
allocate (morph_fac(NST,Ngrids))
END IF
Npts=load_r(Nval, Rval, NNS*Ngrids, Rsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=NCS+itrc
morph_fac(i,ng)=Rsand(itrc,ng)
END DO
END DO
CASE ('SAND_Ltsrc', 'SAND_Ltracer')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
LtracerSrc(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('SAND_Ltclm')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
LtracerCLM(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('SAND_Tnudge')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsed(NCS+itrc)
LnudgeTCLM(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Hout(idsand)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idTvar(idsed(NCS+itrc))
Hout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Hout(iSfrac)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idfrac(NCS+itrc)
Hout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Hout(iSmass)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idBmas(NCS+itrc)
Hout(i,ng)=Lsand(itrc,ng)
END DO
END DO
#ifdef BEDLOAD
CASE ('Hout(iSUbld)')
DO ng=1,Ngrids
DO itrc=NCS+1,NST
IF (idUbld(itrc).eq.0) THEN
IF (Master) WRITE (out,30) 'idUbld'
exit_flag=5
RETURN
END IF
END DO
END DO
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idUbld(NCS+itrc)
Hout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Hout(iSVbld)')
DO ng=1,Ngrids
DO itrc=NCS+1,NST
IF (idVbld(itrc).eq.0) THEN
IF (Master) WRITE (out,30) 'idVbld'
exit_flag=5
RETURN
END IF
END DO
END DO
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idVbld(NCS+itrc)
Hout(i,ng)=Lsand(itrc,ng)
END DO
END DO
#endif
CASE ('Qout(idsand)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idTvar(idsed(NCS+itrc))
Qout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Qout(iSsand)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idsurT(idsed(NCS+itrc))
Qout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Qout(iSfrac)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idfrac(NCS+itrc)
Qout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Qout(iSmass)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idBmas(NCS+itrc)
Qout(i,ng)=Lsand(itrc,ng)
END DO
END DO
#ifdef BEDLOAD
CASE ('Qout(iSUbld)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idUbld(NCS+itrc)
Qout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Qout(iSVbld)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idVbld(NCS+itrc)
Qout(i,ng)=Lsand(itrc,ng)
END DO
END DO
#endif
#if defined AVERAGES || \
(defined AD_AVERAGES && defined ADJOINT) || \
(defined RP_AVERAGES && defined TL_IOMS) || \
(defined TL_AVERAGES && defined TANGENT)
CASE ('Aout(idsand)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idTvar(idsed(NCS+itrc))
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Aout(iSTTav)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idTTav(idsed(NCS+itrc))
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Aout(iSUTav)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idUTav(idsed(NCS+itrc))
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Aout(iSVTav)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idVTav(idsed(NCS+itrc))
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Aout(SHUTav)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=iHUTav(idsed(NCS+itrc))
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Aout(SHVTav)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=iHVTav(idsed(NCS+itrc))
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
# ifdef BEDLOAD
CASE ('Aout(iSUbld)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idUbld(NCS+itrc)
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
CASE ('Aout(iSVbld)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO itrc=1,NNS
i=idVbld(NCS+itrc)
Aout(i,ng)=Lsand(itrc,ng)
END DO
END DO
# endif
#endif
#ifdef DIAGNOSTICS_TS
CASE ('Dout(STrate)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTrate),ng)=Lsand(i,ng)
END DO
END DO
CASE ('Dout(SThadv)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iThadv),ng)=Lsand(i,ng)
END DO
END DO
CASE ('Dout(STxadv)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTxadv),ng)=Lsand(i,ng)
END DO
END DO
CASE ('Dout(STyadv)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTyadv),ng)=Lsand(i,ng)
END DO
END DO
CASE ('Dout(STvadv)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTvadv),ng)=Lsand(i,ng)
END DO
END DO
# if defined TS_DIF2 || defined TS_DIF4
CASE ('Dout(SThdif)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iThdif),ng)=Lsand(i,ng)
END DO
END DO
CASE ('Dout(STxdif)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTxdif),ng)=Lsand(i,ng)
END DO
END DO
CASE ('Dout(STydif)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTydif),ng)=Lsand(i,ng)
END DO
END DO
# if defined MIX_GEO_TS || defined MIX_ISO_TS
CASE ('Dout(STsdif)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTsdif),ng)=Lsand(i,ng)
END DO
END DO
# endif
# endif
CASE ('Dout(STvdif)')
Npts=load_l(Nval, Cval, NNS*Ngrids, Lsand)
DO ng=1,Ngrids
DO i=1,NNS
itrc=idsed(NCS+i)
Dout(idDtrc(itrc,iTvdif),ng)=Lsand(i,ng)
END DO
END DO
#endif
CASE ('Hout(ithck)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(ithck)
DO ng=1,Ngrids
Hout(i,ng)=Lbed(ng)
END DO
CASE ('Hout(iaged)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(iaged)
DO ng=1,Ngrids
Hout(i,ng)=Lbed(ng)
END DO
CASE ('Hout(iporo)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(iporo)
DO ng=1,Ngrids
Hout(i,ng)=Lbed(ng)
END DO
#if defined COHESIVE_BED || defined SED_BIODIFF || defined MIXED_BED
CASE ('Hout(ibtcr)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(ibtcr)
DO ng=1,Ngrids
Hout(i,ng)=Lbed(ng)
END DO
#endif
CASE ('Hout(idiff)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(idiff)
DO ng=1,Ngrids
Hout(i,ng)=Lbed(ng)
END DO
CASE ('Hout(isd50)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(isd50)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(idens)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(idens)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(iwsed)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(iwsed)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(itauc)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(itauc)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(irlen)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(irlen)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(irhgt)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(irhgt)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(ibwav)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(ibwav)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(izdef)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izdef)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(izapp)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izapp)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(izNik)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izNik)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(izbio)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izbio)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(izbfm)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izbfm)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(izbld)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izbld)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(izwbl)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izwbl)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(iactv)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(iactv)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Hout(ishgt)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(ishgt)
DO ng=1,Ngrids
Hout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(ithck)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(ithck)
DO ng=1,Ngrids
Qout(i,ng)=Lbed(ng)
END DO
CASE ('Qout(iaged)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(iaged)
DO ng=1,Ngrids
Qout(i,ng)=Lbed(ng)
END DO
CASE ('Qout(iporo)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(iporo)
DO ng=1,Ngrids
Qout(i,ng)=Lbed(ng)
END DO
#if defined COHESIVE_BED || defined SED_BIODIFF || defined MIXED_BED
CASE ('Qout(ibtcr)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(ibtcr)
DO ng=1,Ngrids
Qout(i,ng)=Lbed(ng)
END DO
#endif
CASE ('Qout(idiff)')
Npts=load_l(Nval, Cval, Ngrids, Lbed)
i=idSbed(idiff)
DO ng=1,Ngrids
Qout(i,ng)=Lbed(ng)
END DO
CASE ('Qout(isd50)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(isd50)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(idens)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(idens)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(iwsed)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(iwsed)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(itauc)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(itauc)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(irlen)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(irlen)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(irhgt)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(irhgt)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(ibwav)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(ibwav)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(izdef)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izdef)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(izapp)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izapp)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(izNik)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izNik)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(izbio)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izbio)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(izbfm)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izbfm)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(izbld)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izbld)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(izwbl)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(izwbl)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(iactv)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(iactv)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
CASE ('Qout(ishgt)')
Npts=load_l(Nval, Cval, Ngrids, Lbottom)
i=idBott(ishgt)
DO ng=1,Ngrids
Qout(i,ng)=Lbottom(ng)
END DO
END SELECT
END IF
END DO
10 IF (Master) WRITE (out,40) line
exit_flag=4
RETURN
20 CONTINUE
!
!-----------------------------------------------------------------------
! Report input parameters.
!-----------------------------------------------------------------------
!
IF (Lwrite) THEN
DO ng=1,Ngrids
IF (Lsediment(ng)) THEN
WRITE (out,50) ng
WRITE (out,60)
DO itrc=1,NST
WRITE (out,70) itrc, Sd50(itrc,ng), Csed(itrc,ng), &
& Srho(itrc,ng), Wsed(itrc,ng), &
& Erate(itrc,ng), poros(itrc,ng)
END DO
WRITE (out,80)
DO itrc=1,NST
i=idsed(itrc)
WRITE (out,70) itrc, tau_ce(itrc,ng), tau_cd(itrc,ng), &
& nl_tnu2(i,ng), nl_tnu4(i,ng), &
& Akt_bak(i,ng), Tnudg(i,ng)
END DO
WRITE (out,90)
DO itrc=1,NST
WRITE (out,70) itrc, morph_fac(itrc,ng)
END DO
WRITE (out,100) newlayer_thick(ng)
WRITE (out,110) minlayer_thick(ng)
WRITE (out,120) bedload_coeff(ng)
#ifdef MIXED_BED
WRITE (out,130) transC(ng)
WRITE (out,140) transN(ng)
#endif
DO itrc=1,NST
i=idsed(itrc)
IF (LtracerSponge(i,ng)) THEN
WRITE (out,150) LtracerSponge(i,ng), 'LtracerSponge', &
& i, 'Turning ON sponge on tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
ELSE
WRITE (out,150) LtracerSponge(i,ng), 'LtracerSponge', &
& i, 'Turning OFF sponge on tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END IF
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (LtracerSrc(i,ng)) THEN
WRITE (out,150) LtracerSrc(i,ng), 'LtracerSrc', i, &
& 'Turning ON point sources/Sink on tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
ELSE
WRITE (out,150) LtracerSrc(i,ng), 'LtracerSrc', i, &
& 'Turning OFF point sources/Sink on tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END IF
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (LtracerCLM(i,ng)) THEN
WRITE (out,150) LtracerCLM(i,ng), 'LtracerCLM', i, &
& 'Turning ON processing of climatology tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
ELSE
WRITE (out,150) LtracerCLM(i,ng), 'LtracerCLM', i, &
& 'Turning OFF processing of climatology tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END IF
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (LnudgeTCLM(i,ng)) THEN
WRITE (out,150) LnudgeTCLM(i,ng), 'LnudgeTCLM', i, &
& 'Turning ON nudging of climatology tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
ELSE
WRITE (out,150) LnudgeTCLM(i,ng), 'LnudgeTCLM', i, &
& 'Turning OFF nudging of climatology tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END IF
END DO
IF ((nHIS(ng).gt.0).and.ANY(Hout(:,ng))) THEN
WRITE (out,'(1x)')
DO itrc=1,NST
i=idTvar(idsed(itrc))
IF (Hout(i,ng)) WRITE (out,160) Hout(i,ng), &
& 'Hout(idTvar)', &
& 'Write out sediment', itrc, TRIM(Vname(1,i))
END DO
DO itrc=1,NST
i=idfrac(itrc)
IF (Hout(i,ng)) WRITE (out,160) Hout(i,ng), &
& 'Hout(idfrac)', &
& 'Write out bed fraction, sediment ', itrc, &
& TRIM(Vname(1,i))
END DO
DO itrc=1,NST
i=idBmas(itrc)
IF (Hout(i,ng)) WRITE (out,160) Hout(i,ng), &
& 'Hout(idfrac)', &
& 'Write out mass, sediment ', itrc, &
& TRIM(Vname(1,i))
END DO
#ifdef BEDLOAD
DO itrc=1,NST
i=idUbld(itrc)
IF (Hout(i,ng)) WRITE (out,160) Hout(i,ng), &
& 'Hout(idUbld)', &
& 'Write out bed load at U-points, sediment ', itrc, &
& TRIM(Vname(1,i))
i=idVbld(itrc)
IF (Hout(i,ng)) WRITE (out,160) Hout(i,ng), &
& 'Hout(idVbld)', &
& 'Write out bed load at V-points, sediment ', itrc, &
& TRIM(Vname(1,i))
END DO
#endif
DO itrc=1,MBEDP
i=idSbed(itrc)
IF (Hout(i,ng)) WRITE (out,160) Hout(i,ng), &
& 'Hout(idSbed)', &
& 'Write out BED property ', itrc, TRIM(Vname(1,i))
END DO
DO itrc=1,MBOTP
i=idBott(itrc)
IF (Hout(i,ng)) WRITE (out,160) Hout(i,ng), &
& 'Hout(idBott)', &
& 'Write out BOTTOM property ', itrc, TRIM(Vname(1,i))
END DO
END IF
IF ((nQCK(ng).gt.0).and.ANY(Qout(:,ng))) THEN
WRITE (out,'(1x)')
DO itrc=1,NST
i=idTvar(idsed(itrc))
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idTvar)', &
& 'Write out sediment', itrc, TRIM(Vname(1,i))
END DO
DO itrc=1,NST
i=idsurT(idsed(itrc))
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idTvar)', &
& 'Write out surface sediment', itrc, TRIM(Vname(1,i))
END DO
DO itrc=1,NST
i=idfrac(itrc)
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idfrac)', &
& 'Write out bed fraction, sediment ', itrc, &
& TRIM(Vname(1,i))
END DO
DO itrc=1,NST
i=idBmas(itrc)
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idfrac)', &
& 'Write out mass, sediment ', itrc, &
& TRIM(Vname(1,i))
END DO
#ifdef BEDLOAD
DO itrc=1,NST
i=idUbld(itrc)
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idUbld)', &
& 'Write out bed load at U-points, sediment ', itrc, &
& TRIM(Vname(1,i))
i=idVbld(itrc)
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idVbld)', &
& 'Write out bed load at V-points, sediment ', itrc, &
& TRIM(Vname(1,i))
END DO
#endif
DO itrc=1,MBEDP
i=idSbed(itrc)
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idSbed)', &
& 'Write out BED property ', itrc, TRIM(Vname(1,i))
END DO
DO itrc=1,MBOTP
i=idBott(itrc)
IF (Qout(i,ng)) WRITE (out,160) Qout(i,ng), &
& 'Qout(idBott)', &
& 'Write out BOTTOM property ', itrc, TRIM(Vname(1,i))
END DO
END IF
#if defined AVERAGES || \
(defined AD_AVERAGES && defined ADJOINT) || \
(defined RP_AVERAGES && defined TL_IOMS) || \
(defined TL_AVERAGES && defined TANGENT)
IF ((nAVG(ng).gt.0).and.ANY(Aout(:,ng))) THEN
WRITE (out,'(1x)')
DO itrc=1,NST
i=idTvar(idsed(itrc))
IF (Aout(i,ng)) WRITE (out,160) Aout(i,ng), &
& 'Aout(idTvar)', &
& 'Write out averaged sediment', itrc, &
& TRIM(Vname(1,i))
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (Aout(idTTav(i),ng)) WRITE (out,160) &
& Aout(idTTav(i),ng), 'Aout(idTTav)', &
& 'Write out averaged <t*t> for tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (Aout(idUTav(i),ng)) WRITE (out,160) &
& Aout(idUTav(i),ng), 'Aout(idUTav)', &
& 'Write out averaged <u*t> for tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (Aout(idVTav(i),ng)) WRITE (out,160) &
& Aout(idVTav(i),ng), 'Aout(idVTav)', &
& 'Write out averaged <v*t> for tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (Aout(iHUTav(i),ng)) WRITE (out,160) &
& Aout(iHUTav(i),ng), 'Aout(iHUTav)', &
& 'Write out averaged <Huon*t> for tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END DO
DO itrc=1,NST
i=idsed(itrc)
IF (Aout(iHVTav(i),ng)) WRITE (out,160) &
& Aout(iHVTav(i),ng), 'Aout(iHVTav)', &
& 'Write out averaged <Hvom*t> for tracer ', i, &
& TRIM(Vname(1,idTvar(i)))
END DO
# ifdef BEDLOAD
DO itrc=1,NST
i=idUbld(itrc)
IF (Aout(i,ng)) WRITE (out,160) Aout(i,ng), &
& 'Aout(idUbld)', &
& 'Write out U-bedload, sediment ', itrc, &
& TRIM(Vname(1,i))
i=idVbld(itrc)
IF (Aout(i,ng)) WRITE (out,160) Aout(i,ng), &
& 'Aout(idVbld)', &
& 'Write out V-bedload, sediment ', itrc, &
& TRIM(Vname(1,i))
END DO
# endif
END IF
#endif
#ifdef DIAGNOSTICS_TS
IF ((nDIA(ng).gt.0).and.ANY(Dout(:,ng))) THEN
WRITE (out,'(1x)')
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iTrate),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iTrate)', &
& 'Write out rate of change of tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iThadv),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iThadv)', &
& 'Write out horizontal advection, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iTxadv),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iTxadv)', &
& 'Write out horizontal X-advection, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iTyadv),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iTyadv)', &
& 'Write out horizontal Y-advection, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iTvadv),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iTvadv)', &
& 'Write out vertical advection, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
# if defined TS_DIF2 || defined TS_DIF4
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iThdif),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iThdif)', &
& 'Write out horizontal diffusion, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
DO i=1,NST
itrc=idsed(i)
                IF (Dout(idDtrc(itrc,iTxdif),ng))                       &
& WRITE (out,160) .TRUE., 'Dout(iTxdif)', &
& 'Write out horizontal X-diffusion, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iTydif),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iTydif)', &
& 'Write out horizontal Y-diffusion, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
# if defined MIX_GEO_TS || defined MIX_ISO_TS
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iTsdif),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iTsdif)', &
& 'Write out horizontal S-diffusion, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
# endif
# endif
DO i=1,NST
itrc=idsed(i)
IF (Dout(idDtrc(itrc,iTvdif),ng)) &
& WRITE (out,160) .TRUE., 'Dout(iTvdif)', &
& 'Write out vertical diffusion, tracer ', itrc, &
& TRIM(Vname(1,idTvar(itrc)))
END DO
END IF
#endif
END IF
END DO
END IF
!
!-----------------------------------------------------------------------
! Scale relevant input parameters
!-----------------------------------------------------------------------
!
DO ng=1,Ngrids
DO i=1,NST
Sd50(i,ng)=Sd50(i,ng)*0.001_r8
Wsed(i,ng)=Wsed(i,ng)*0.001_r8
tau_ce(i,ng)=tau_ce(i,ng)/rho0
tau_cd(i,ng)=tau_cd(i,ng)/rho0
nl_tnu4(idsed(i),ng)=SQRT(ABS(nl_tnu4(idsed(i),ng)))
#ifdef ADJOINT
ad_tnu4(idsed(i),ng)=SQRT(ABS(ad_tnu4(idsed(i),ng)))
#endif
#if defined TANGENT || defined TL_IOMS
tl_tnu4(idsed(i),ng)=SQRT(ABS(tl_tnu4(idsed(i),ng)))
#endif
IF (Tnudg(idsed(i),ng).gt.0.0_r8) THEN
Tnudg(idsed(i),ng)=1.0_r8/(Tnudg(idsed(i),ng)*86400.0_r8)
ELSE
Tnudg(idsed(i),ng)=0.0_r8
END IF
END DO
END DO
30 FORMAT (/,' READ_SedPar - variable info not yet loaded, ', a)
40 FORMAT (/,' READ_SedPar - Error while processing line: ',/,a)
50 FORMAT (/,/,' Sediment Parameters, Grid: ',i2.2, &
& /, ' =============================',/)
60 FORMAT (/,1x,'Size',5x,'Sd50',8x,'Csed',8x,'Srho',8x,'Wsed', &
& 8x,'Erate',7x,'poros',/,1x,'Class',4x,'(mm)',7x, &
& '(kg/m3)',5x,'(kg/m3)',5x,'(mm/s)',5x,'(kg/m2/s)',4x, &
& '(nondim)',/)
70 FORMAT (2x,i2,2x,6(1x,1p,e11.4))
80 FORMAT (/,9x,'tau_ce',6x,'tau_cd',6x,'nl_tnu2',5x,'nl_tnu4',5x, &
& 'Akt_bak',6x,'Tnudg',/,9x,'(N/m2)',6x,'(N/m2)',6x, &
& '(m2/s)',6x,'(m4/s)',7x,'(m2/s)',6x,'(day)',/)
90 FORMAT (/,9x,'morph_fac',/,9x,'(nondim)',/)
100 FORMAT (/,' New bed layer formed when deposition exceeds ',e12.5, &
& ' (m).')
110 FORMAT (' Two first layers are combined when 2nd layer smaller ', &
& 'than ',e12.5,' (m).')
120 FORMAT (' Rate coefficient for bed load transport = ',e12.5,/)
130 FORMAT (' Transition for mixed sediment =',e12.5,/)
140 FORMAT (' Transition for cohesive sediment =',e12.5,/)
150 FORMAT (10x,l1,2x,a,'(',i2.2,')',t30,a,i2.2,':',1x,a)
160 FORMAT (10x,l1,2x,a,t29,a,i2.2,':',1x,a)
RETURN
END SUBROUTINE read_SedPar
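!
!  Illustrative sketch (not part of the original source): the CASE branches
!  above parse keyword assignments from the sediment input script, which is
!  assumed here to follow the usual "KEYWORD == values" layout.  The keyword
!  names below are taken from the CASE labels; the values are placeholders
!  only, not from any real application.
!
!    SAND_TNU2 == 5.0d0 5.0d0        ! one value per sand class and grid
!    SAND_Sponge == F F              ! logical switch per sand class
!    Hout(iSfrac) == T T             ! write out bed fraction fields
!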
|
agnor99/OpenBlocks
|
OpenBlocks/broken/main/java/openblocks/enchantments/EnchantmentFlimFlam.java
|
package openblocks.enchantments;
import javax.annotation.Nonnull;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.enchantment.EnchantmentType;
import net.minecraft.inventory.EquipmentSlotType;
import net.minecraft.item.ArmorItem;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.SwordItem;
public class EnchantmentFlimFlam extends Enchantment {
private static final EquipmentSlotType[] ALL_ARMOR = new EquipmentSlotType[] { EquipmentSlotType.FEET, EquipmentSlotType.LEGS, EquipmentSlotType.CHEST, EquipmentSlotType.HEAD, EquipmentSlotType.MAINHAND };
public EnchantmentFlimFlam() {
super(Rarity.RARE, EnchantmentType.ALL, ALL_ARMOR);
setName("openblocks.flimflam");
}
@Override
public int getMaxLevel() {
return 4;
}
@Override
public int getMinEnchantability(int level) {
return 31 + level * 10;
}
@Override
public int getMaxEnchantability(int level) {
return getMinEnchantability(level) + 10;
}
@Override
@Nonnull
public boolean canApplyAtEnchantingTable(ItemStack stack) {
final Item item = stack.getItem();
return (item instanceof ArmorItem) || (item instanceof SwordItem);
}
}
|
research-iobserve/iobserve-analysis
|
service-behavior-analysis/src/main/java/org/iobserve/service/behavior/analysis/clustering/OPTICS.java
|
/***************************************************************************
* Copyright (C) 2017 iObserve Project (https://www.iobserve-devops.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package org.iobserve.service.behavior.analysis.clustering;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;
import mtree.MTree;
/**
* An implementation of the OPTICS algorithm. A detailed explanation of the algorithm can be found
* in the paper "OPTICS: ordering points to identify the clustering structure"
*
* @author <NAME>
*/
public class OPTICS {
    // This comparator orders models by their reachability distance.
    // It is used to keep the priority queue ordered.
    private static Comparator<OpticsData> reachComparator = new Comparator<OpticsData>() {
        @Override
        public int compare(final OpticsData model1, final OpticsData model2) {
            // Double.compare avoids the truncation of casting the difference to int,
            // which would treat distances less than 1.0 apart as equal
            return Double.compare(model1.getReachabilityDistance(), model2.getReachabilityDistance());
        }
    };
private final int minPTs;
private final double maxDistance;
private final MTree<OpticsData> mtree;
private final List<OpticsData> models;
private final List<OpticsData> resultList = new ArrayList<>();
/**
*
* @param mtree
* The M-Tree with the behavior models to be clustered
* @param maxDistance
* The maximal distance two neighbors can have (the epsilon value of the algorithm)
* @param minPTs
     *            The minimal number of neighbors an object must have to be considered a core object
* @param models
* A list of all models to be clustered. They have to be the same as the models in
* the M-Tree
*/
public OPTICS(final MTree<OpticsData> mtree, final double maxDistance, final int minPTs,
final List<OpticsData> models) {
this.mtree = mtree;
this.maxDistance = maxDistance;
this.minPTs = minPTs;
this.models = models;
}
private double reachabilityDistance(final OpticsData model1, final OpticsData model2) {
final double coreDistance = model1.getCoreDistance();
if (coreDistance == OpticsData.UNDEFINED) {
return OpticsData.UNDEFINED;
}
final double distance = model1.distanceTo(model2);
return Math.max(distance, coreDistance);
}
/**
     * Updates the core distance of a model. The core distance is the smallest radius
     * (epsilon value) within which the object has at least minPts neighbors. If that
     * radius would be larger than maxDistance, the core distance is UNDEFINED.
*
* @param model
* The model, of which the core distance should be updated.
*/
private void updateCoreDistance(final OpticsData model) {
final Iterator<MTree<OpticsData>.ResultItem> results = this.getMtree()
.getNearest(model, this.getMaxDistance(), this.getMinPTs()).iterator();
int resultAmount = 0;
OpticsData last = null;
while (results.hasNext()) {
resultAmount++;
last = results.next().data;
}
if (resultAmount < this.getMinPTs()) {
model.setCoreDistance(OpticsData.UNDEFINED);
} else {
model.setCoreDistance(model.distanceTo(last));
}
}
private List<OpticsData> getNeighbors(final OpticsData model) {
final MTree<OpticsData>.Query query = this.mtree.getNearestByRange(model, this.maxDistance);
final Iterator<MTree<OpticsData>.ResultItem> it = query.iterator();
final List<OpticsData> neighbors = new ArrayList<>();
while (it.hasNext()) {
neighbors.add(it.next().data);
}
return neighbors;
}
/**
* This calculates the OPTICS result.
*
* @return An ordered list of the behavior models. The reachability distances of the models are
* important for the evaluation.
*/
public List<OpticsData> calculate() {
for (final OpticsData model : this.models) {
if (!model.isVisited()) {
this.expandClusterOrder(model);
}
}
return this.resultList;
}
/**
     * Updates the reachability distances of all unvisited neighbors around one center model and
     * puts them in the priority queue (if they are not already in it).
*
* @param neighbors
* All neighbors of the center model
* @param centerModel
* The model, from which the update is initialized
* @param seeds
* The current Priority Queue
*/
private void update(final List<OpticsData> neighbors, final OpticsData centerModel,
final PriorityQueue<OpticsData> seeds) {
for (final OpticsData model : neighbors) {
if (!model.isVisited()) {
final double newReachDistance = this.reachabilityDistance(centerModel, model);
if (model.getReachabilityDistance() == OpticsData.UNDEFINED) {
model.setReachabilityDistance(newReachDistance);
seeds.add(model);
} else {
if (newReachDistance < model.getReachabilityDistance()) {
model.setReachabilityDistance(newReachDistance);
// Update the position of the model in priority queue. This can be done by
// removing and adding it back in
seeds.remove(model);
seeds.add(model);
}
}
}
}
}
/**
* Expands the cluster order by adding the next model together with close neighbors to the
* result.
*
* @param model1
* An unvisited behavior model.
*/
private void expandClusterOrder(final OpticsData model1) {
final List<OpticsData> neighbors1 = this.getNeighbors(model1);
model1.setVisited(true);
model1.setReachabilityDistance(OpticsData.UNDEFINED);
this.updateCoreDistance(model1);
this.resultList.add(model1);
if (model1.getCoreDistance() != OpticsData.UNDEFINED) {
final PriorityQueue<OpticsData> seeds = new PriorityQueue<>(5, OPTICS.reachComparator);
this.update(neighbors1, model1, seeds);
while (!seeds.isEmpty()) {
final OpticsData model2 = seeds.poll();
// TODO better naming
final List<OpticsData> neighbors2 = this.getNeighbors(model2);
this.updateCoreDistance(model2);
model2.setVisited(true);
this.resultList.add(model2);
if (model2.getCoreDistance() != OpticsData.UNDEFINED) {
this.update(neighbors2, model2, seeds);
}
}
}
}
public int getMinPTs() {
return this.minPTs;
}
public double getMaxDistance() {
return this.maxDistance;
}
public MTree<OpticsData> getMtree() {
return this.mtree;
}
}
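/*
 * Hypothetical usage sketch (not part of the original file): it shows how the
 * class above might be driven, assuming an MTree<OpticsData> has already been
 * built over the same list of models; the variable names are illustrative only.
 *
 *   MTree<OpticsData> tree = ...;            // M-Tree over all behavior models
 *   List<OpticsData> models = ...;           // the same models, as a flat list
 *   OPTICS optics = new OPTICS(tree, 10.0, 5, models);
 *   List<OpticsData> ordering = optics.calculate();
 *   // clusters can then be extracted from the reachability distances
 *   for (OpticsData m : ordering) {
 *       System.out.println(m.getReachabilityDistance());
 *   }
 */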
|
dysnomian/benchmarks
|
db/migrate/20190808171544_rename_assessment_field.rb
|
class RenameAssessmentField < ActiveRecord::Migration[5.2]
def change
rename_column :plans, :assessment, :assessment_type
end
end
|
ckadner/eclairjs
|
server/src/main/resources/eclairjs/mllib/fpm/PrefixSpan.js
|
/*
* Copyright 2016 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function () {
var JavaWrapper = require(EclairJS_Globals.NAMESPACE + '/JavaWrapper');
var Logger = require(EclairJS_Globals.NAMESPACE + '/Logger');
var Utils = require(EclairJS_Globals.NAMESPACE + '/Utils');
var PrefixSpanModel = require(EclairJS_Globals.NAMESPACE + '/mllib/fpm/PrefixSpanModel');
/**
* :: Experimental ::
*
* A parallel PrefixSpan algorithm to mine frequent sequential patterns.
* The PrefixSpan algorithm is described in J. Pei, et al., PrefixSpan: Mining Sequential Patterns
* Efficiently by Prefix-Projected Pattern Growth ([[http://doi.org/10.1109/ICDE.2001.914830]]).
*
*
* @see [[https://en.wikipedia.org/wiki/Sequential_Pattern_Mining Sequential Pattern Mining
* (Wikipedia)]]
* @memberof module:eclairjs/mllib/fpm
* @classdesc
* Constructs a default instance with default parameters
* {minSupport: `0.1`, maxPatternLength: `10`, maxLocalProjDBSize: `32000000L`}.
* @class
*/
var PrefixSpan = function(jvmObject) {
this.logger = Logger.getLogger("PrefixSpan_js");
if (!jvmObject) {
jvmObject = new org.apache.spark.mllib.fpm.PrefixSpan();
}
JavaWrapper.call(this, jvmObject);
};
PrefixSpan.prototype = Object.create(JavaWrapper.prototype);
PrefixSpan.prototype.constructor = PrefixSpan;
/**
* Get the minimal support (i.e. the frequency of occurrence before a pattern is considered
* frequent).
* @returns {float}
*/
PrefixSpan.prototype.getMinSupport = function() {
return this.getJavaObject().getMinSupport();
};
/**
* Sets the minimal support level (default: `0.1`).
* @param {float} minSupport
* @returns {module:eclairjs/mllib/fpm.PrefixSpan}
*/
PrefixSpan.prototype.setMinSupport = function(minSupport) {
var javaObject = this.getJavaObject().setMinSupport(minSupport);
return new PrefixSpan(javaObject);
};
/**
     * Gets the maximal pattern length (i.e. the length of the longest sequential pattern to consider).
* @returns {integer}
*/
PrefixSpan.prototype.getMaxPatternLength = function() {
return this.getJavaObject().getMaxPatternLength();
};
/**
* Sets maximal pattern length (default: `10`).
* @param {integer} maxPatternLength
* @returns {module:eclairjs/mllib/fpm.PrefixSpan}
*/
PrefixSpan.prototype.setMaxPatternLength = function(maxPatternLength) {
var javaObject = this.getJavaObject().setMaxPatternLength(maxPatternLength);
return new PrefixSpan(javaObject);
};
/**
* Gets the maximum number of items allowed in a projected database before local processing.
* @returns {integer}
*/
PrefixSpan.prototype.getMaxLocalProjDBSize = function() {
return this.getJavaObject().getMaxLocalProjDBSize();
};
/**
* Sets the maximum number of items (including delimiters used in the internal storage format)
* allowed in a projected database before local processing (default: `32000000L`).
* @param {integer} maxLocalProjDBSize
* @returns {module:eclairjs/mllib/fpm.PrefixSpan}
*/
PrefixSpan.prototype.setMaxLocalProjDBSize = function(maxLocalProjDBSize) {
var javaObject = this.getJavaObject().setMaxLocalProjDBSize(maxLocalProjDBSize);
return new PrefixSpan(javaObject);
};
/**
* Finds the complete set of frequent sequential patterns in the input sequences of itemsets.
* @param {module:eclairjs.RDD} data sequences of itemsets.
* @returns {module:eclairjs/mllib/fpm.PrefixSpanModel} a [[module:eclairjs/mllib/fpm.PrefixSpanModel]] that contains the frequent patterns
*/
PrefixSpan.prototype.run = function(data) {
var data_uw = Utils.unwrapObject(data);
var javaObject = this.getJavaObject().run(data_uw);
return new PrefixSpanModel(javaObject);
};
module.exports = PrefixSpan;
})();
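/*
 * Hypothetical usage sketch (not part of the original file), assuming an RDD
 * of sequences (`sequencesRDD`) has already been created elsewhere; variable
 * names are illustrative only:
 *
 *   var prefixSpan = new PrefixSpan()
 *       .setMinSupport(0.5)
 *       .setMaxPatternLength(5);
 *   var model = prefixSpan.run(sequencesRDD);
 *   // model is a PrefixSpanModel holding the frequent sequential patterns
 */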
|
ambrosejcarr/pysmFISH
|
staining_segmentation.py
|
import argparse
import h5py
import numpy as np
from skimage import filters,io,img_as_float,exposure,morphology,segmentation,measure,feature,color
from scipy import ndimage as nd
import itertools
import pickle
import multiprocessing
# Client and LocalCluster are used below to run on a dask scheduler or locally
from dask.distributed import Client, LocalCluster
from pysmFISH import utils
from pysmFISH import object_based_segmentation
def staining_segmentation():
"""
This script will segment the selected staining and output the identified
objects.
All the parameters are entered via argparse.
Parameters:
-----------
scheduler: string
tcp address of the dask.distributed scheduler (ex. tcp://192.168.0.4:7003).
default = False. If False the process will run on the local computer using nCPUs-1
path: string
Path to the processing directory
processing_file: string
Path to the hdf5 file with the staning to process
segmentation_staining: string
Staining to be segmented
"""
# Inputs of the function
parser = argparse.ArgumentParser(description='Segmentation script')
parser.add_argument('-scheduler', default=False, help='dask scheduler address ex. tcp://192.168.0.4:7003')
parser.add_argument('-path', help='processing directory')
parser.add_argument('-processing_file', help='path to the file with the \
staning to process')
parser.add_argument('-segmentation_staining', help='staining to be \
segmented')
args = parser.parse_args()
# Directory to process
processing_directory = args.path
# File to process
processing_file = args.processing_file
# staining to segment
segmentation_staining = args.segmentation_staining
# Dask scheduler address
scheduler_address = args.scheduler
if scheduler_address:
# Start dask client on server or cluster
client=Client(scheduler_address)
else:
    # Start dask client on local machine. It will use all the available
# cores -1
# number of core to use
ncores = multiprocessing.cpu_count()-1
cluster = LocalCluster(n_workers=ncores)
client=Client(cluster)
# Determine the operating system running the code
os_windows, add_slash = utils.determine_os()
# Check training slash in the processing directory
processing_directory=utils.check_trailing_slash(processing_directory,os_windows)
segmentation_parameters = utils.general_yaml_parser(processing_directory+'Staining_segmentation.config.yaml')
# Chunking parameters
chunk_size = segmentation_parameters[segmentation_staining]['image_chunking_parameters']['chunk_size']
percent_padding = segmentation_parameters[segmentation_staining]['image_chunking_parameters']['percent_padding']
# Segmentation parameters
trimming = segmentation_parameters[segmentation_staining]['segmentation_parameters']['trimming']
min_object_size = segmentation_parameters[segmentation_staining]['segmentation_parameters']['min_object_size']
disk_radium_rank_filer = segmentation_parameters[segmentation_staining]['segmentation_parameters']['disk_radium_rank_filer']
min_distance = segmentation_parameters[segmentation_staining]['segmentation_parameters']['min_distance']
threshold_rel = segmentation_parameters[segmentation_staining]['segmentation_parameters']['threshold_rel']
# Load the image (will be modified after the change to hdf5 input)
img = io.imread(processing_file)
# Image chunking
nr_chunks,nc_chunks,Coords_Chunks_list, Coords_Padded_Chunks_list,r_coords_tl_all_padded,\
c_coords_tl_all_padded,r_coords_br_all_padded,c_coords_br_all_padded = \
object_based_segmentation.image_chunking(img,chunk_size,percent_padding)
# Create the chunks idx
Chunks_idxs_linear=np.arange(len(Coords_Padded_Chunks_list),dtype='int32')
# Distribute the chunks idx and distridute them in an array according to the position
# in the chunked image
Chunks_idxs=Chunks_idxs_linear.reshape(nr_chunks,nc_chunks)
# Flatten the array for make it easier the creation of the coords combination
Chunks_idxs_rows=np.ravel(Chunks_idxs)
Chunks_idxs_cols=np.ravel(Chunks_idxs,order='F')
# Calculate coords of the overlapping chunks
Overlapping_chunks_coords=list()
counter=0
left_pos=Chunks_idxs_rows[0]
for el in Chunks_idxs_rows[1:]:
if counter < nc_chunks-1:
Coords_left=Coords_Padded_Chunks_list[left_pos]
Coords_right=Coords_Padded_Chunks_list[el]
row_tl=Coords_left[0]
row_br=Coords_left[1]
col_tl=Coords_right[2]
col_br=Coords_left[3]
Overlapping_chunks_coords.append((row_tl,row_br,col_tl,col_br))
left_pos=el
counter+=1
else:
left_pos=el
counter=0
counter=0
top_pos=Chunks_idxs_cols[0]
for el in Chunks_idxs_cols[1:]:
if counter < nr_chunks-1:
Coords_top=Coords_Padded_Chunks_list[top_pos]
Coords_bottom=Coords_Padded_Chunks_list[el]
row_tl=Coords_bottom[0]
row_br=Coords_top[1]
col_tl=Coords_top[2]
col_br=Coords_top[3]
Overlapping_chunks_coords.append((row_tl,row_br,col_tl,col_br))
counter+=1
top_pos=el
else:
top_pos=el
counter=0
    # Now I use this approach for testing. If the image gets too big to fit in RAM
# then save the files and load them separately in each node
chunked_image_seq = list()
for coords in Coords_Padded_Chunks_list:
chunked_image_seq.append(img[coords[0]:coords[1],coords[2]:coords[3]])
# Run the segmentation
futures_processes = client.map(object_based_segmentation.polyT_segmentation,chunked_image_seq,
min_object_size=min_object_size,
min_distance=min_distance,
disk_radium_rank_filer=disk_radium_rank_filer,
threshold_rel=threshold_rel,
trimming=trimming)
Objects_list = client.gather(futures_processes)
# Recalculate labels and coords
processed_images_data = dict()
max_starting_label = 0
total_data_dict = dict()
for idx, objs_chunk in enumerate(Objects_list):
for label ,cvalues in objs_chunk.items():
new_label=max_starting_label+1
coords = Coords_Padded_Chunks_list[idx][0::2]
total_data_dict[new_label] = cvalues+coords
max_starting_label = new_label
# Calculate all the intersecting objects
futures_processes = client.map(object_based_segmentation.OverlappingCouples,Overlapping_chunks_coords,
TotalDataDict = total_data_dict)
All_intersecting = client.gather(futures_processes)
# Put together the couple with the same label for multiple intersection
# for the labels of objects where there is intersection between multiple regions
# Then scatter all of them and calculate intersection
# Combine the results from the parallel processing
flatten_couple = [el for grp in All_intersecting for el in grp]
# Remove duplicates
flatten_couple=list(set(flatten_couple))
# Create a list of the labels (removing the repeats)
singles=list()
[singles.append(x) for cpl in flatten_couple for x in cpl]
singles=list(set(singles))
# Identify the couples containing singles
Combined_all_singles=list()
for item in singles:
Combined_single=list()
for couple in flatten_couple:
if item in couple:
Combined_single.append(couple)
Combined_all_singles.append(Combined_single)
if Combined_all_singles:
# Combine all the intersecting labeles
start=Combined_all_singles[0]
ComparisonList=Combined_all_singles[1:].copy()
#merged=start.copy()
merged=list()
SavedCombinations=list()
tmp_list=ComparisonList.copy()
KeepGoing=True
Loop=0
while KeepGoing:
Loop+=1
for idx,el in enumerate(ComparisonList):
if set(start).intersection(set(el)):
#merged=list(set(merged)|set(el))
[merged.append(x) for x in el]
tmp_list = [e for e in tmp_list if e != el]
intersection=list(set.intersection(set(merged),set(start)))
if intersection:
merged=list(set.union(set(merged),set(start)))
#merged=list(set(merged))
start=merged.copy()
merged=list()
ComparisonList=tmp_list.copy()
#tmp_list.append(merged)
else:
SavedCombinations.append(start)
start=tmp_list[0]
tmp_list=tmp_list[1:]
ComparisonList=tmp_list.copy()
if len(tmp_list)<1:
[SavedCombinations.append(x) for x in tmp_list]
KeepGoing =False
# Remove all the duplicated labeled that intersect
# in this case the labeled are merged. It will be nice to run an extra
# segmentation on the merged objects
# If it is too slow this step can be parallelised
SavedLab_list=list()
CleanedDict=total_data_dict.copy()
for couple in SavedCombinations:
SaveLab, RemoveLabs,NewCoords=object_based_segmentation.IntersectionCouples(couple,total_data_dict)
SavedLab_list.append(SaveLab)
for lab in RemoveLabs:
del CleanedDict[lab]
CleanedDict[SaveLab]=NewCoords
else:
CleanedDict=total_data_dict
# Calculate all objects properties
all_objects_list = [(key,coords) for key,coords in CleanedDict.items()]
futures_processes = client.map(object_based_segmentation.obj_properties_calculator,all_objects_list)
all_objects_properties_list = client.gather(futures_processes)
# convert the list to a dictionary
all_objects_properties_dict = { k: v for d in all_objects_properties_list for k, v in d.items() }
# Save all the objects
segmented_objs_fname = processing_directory + 'segmented_' + segmentation_staining + '_all_objs_properties.pkl'
pickle.dump(all_objects_properties_dict,open(segmented_objs_fname,'wb'))
if __name__ == "__main__":
staining_segmentation()
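# Illustrative invocation only (paths, staining name and scheduler address are
# placeholders, not values from the original project):
#
#   python staining_segmentation.py \
#       -path /data/experiment_01/ \
#       -processing_file /data/experiment_01/polyT.tif \
#       -segmentation_staining polyT \
#       -scheduler tcp://192.168.0.4:7003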
|
h5y1m141/payment_app
|
app/controllers/customer_api/v1/customer_shipping_addresses_controller.rb
|
module CustomerApi
module V1
class CustomerShippingAddressesController < ApplicationController
before_action :verify_token, only: %i[index]
def index
@customer_shipping_addresses = @current_customer.shipping_addresses
end
end
end
end
|
AroraCromwell/pliigo-cups-agent-fixing-printers
|
lib/PrinterInstallable.js
|
/**
* Created by johannespichler on 30.01.16.
*/
'use strict'
const util = require("util");
const exec = require('child_process').exec;
const _ = require("lodash");
const Printer = require('./Printer');
//const mdns = require('multicast-dns');
class PrinterInstallable extends Printer {
/**
* constructor
* @param queue
* @param opts
*/
constructor(queue, opts) {
// call constructor of parent class
super();
let queuePredefined = false;
if (typeof queue !== "string") {
opts = queue;
} else {
queuePredefined = true;
this._queue = queue;
}
if (!opts || !_.isObject(opts)) {
throw new Error("You may not instantiate a printer without a full system definition...");
}
let defaultOpts = {
uri: "unknown",
uri_pretty: "unknown",
protocol: "unknown",
model: "unknown",
}
if (_.isObject(opts)) {
_.merge(defaultOpts, opts);
}
if (queuePredefined !== true && !defaultOpts['queue']) {
this._queue = defaultOpts.model.replace(/([^a-zA-Z0-9.])/gmi, "_"); // a printer queue may only contain letters, numbers, - and _
} else if(queuePredefined !== true && defaultOpts['queue']){
this._queue = defaultOpts['queue'].replace(/([^a-zA-Z0-9.])/gmi, "_");
} else if(queuePredefined === true){
this._queue = queue.replace(/([^a-zA-Z0-9.])/gmi, "_");
}
this._uri = defaultOpts.uri;
if(defaultOpts.uri != "unknown" && defaultOpts.uri_pretty != "unknown") {
this._uri_pretty = defaultOpts.uri_pretty;
} else if (defaultOpts.uri != "unknown" && defaultOpts.uri_pretty == "unknown") {
this._uri_pretty = decodeURIComponent(this._uri);
}
this._protocol = defaultOpts.protocol;
this._description = defaultOpts.model || "no description provided";
this._model = /(.*?)(?=\s@|$)/mi.exec(defaultOpts.model)[1]; // removes any @ shared indicator like ... "printer @ ubuntuserver"
this._location = "";
}
/**
* sets the location parameter of the printer
*/
_setLocation() {
}
/**
* set the driver uri
* @param driverUri
*/
setDriver(driverUri) {
        // no protocol part: assume a local PPD path and make it absolute
        if (driverUri.indexOf("://") === -1) {
driverUri = "/" + driverUri;
}
this._driverOrPpd = driverUri;
}
/**
* installs printer to CUPS server
* returns true|false of operation success
*
* @returns {boolean}
*/
installOnCupsServer(cb) {
if(this._driverOrPpd === false){
throw new Error("You can not install a printer without a driver of PPD defined");
}
// define command to add printer
let cmd = `lpadmin -p "${this._queue}" -v "${this._uri}" -m "${this._driverOrPpd}" -D "${this._description}" -L "${this._location}" -E`;
// set default media
cmd = `${cmd} -o media=${this.defaultMedia}`;
if (this.isShared === false) {
cmd = `${cmd} -o printer-is-shared=false`
} else {
cmd = `${cmd} -o printer-is-shared=true`
}
// execute printer installation
        exec(cmd, (error, stdout, stderr) => {
            //if(stdout) util.print('stdout: ' + stdout);
            if (stderr) {
                //util.print('stderr: ' + stderr);
                if (typeof cb == 'function') {
                    cb(stderr, false);
                }
                return;
            }
            if (error !== null) {
                //console.log('exec error: ' + error);
                if (typeof cb == 'function') {
                    cb(error.message, false);
                }
                return;
            }
            if (this.isDefault === true) {
                cmd = `lpoptions -d ${this._queue}`;
                exec(cmd, function (error, stdout, stderr) {
                });
            }
            // report success only once lpadmin has completed without errors;
            // previously the success callback fired synchronously and could run twice
            if (typeof cb == 'function') {
                cb(null, true);
            }
        });
}
}
var exports = module.exports = PrinterInstallable;
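/*
 * Hypothetical usage sketch (not part of the original file); the queue name,
 * URI and PPD path are placeholders:
 *
 *   const PrinterInstallable = require('./PrinterInstallable');
 *   const printer = new PrinterInstallable('office_printer', {
 *       uri: 'ipp://192.168.1.50:631/ipp/print',
 *       protocol: 'ipp',
 *       model: 'Generic PostScript Printer'
 *   });
 *   printer.setDriver('/usr/share/ppd/generic.ppd');
 *   printer.installOnCupsServer((err, ok) => {
 *       if (err) console.error(err);
 *   });
 */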
|
mileto94/felicitas
|
felicitas/game_rules/migrations/0008_auto_20190222_1719.py
|
# Generated by Django 2.1.5 on 2019-02-22 17:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('game_rules', '0007_answer_next'),
]
operations = [
migrations.RemoveField(
model_name='answer',
name='next',
),
migrations.AlterField(
model_name='answer',
name='next_poll',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='next_poll', to='game_rules.Poll'),
),
]
|
DimitarPetroww/Softuni-Homeworks
|
JS-applications/Unit testing and modules-lab/06. RGB to Hex/test.js
|
const rgbToHexColor = require("./app")
let { assert } = require("chai")
describe("RGB checker", function () {
it("valid", () => {
assert.equal(rgbToHexColor(25, 125, 255), "#197DFF")
})
it("invalid", () => {
assert.equal(rgbToHexColor(80, 220, 150.5), undefined)
})
it("invalid", () => {
assert.equal(rgbToHexColor(80, 220, 280), undefined)
})
it("invalid", () => {
assert.equal(rgbToHexColor(80, '220', 175), undefined)
})
it("invalid", () => {
assert.equal(rgbToHexColor(-80, 220, 160), undefined)
})
it("valid", () => {
assert.equal(rgbToHexColor(0, 0, 0), "#000000")
})
})
|
gstearmit/data-structure-algorithm
|
src/test/java/ir/sk/adt/queue/ResizingArrayQueueTest.java
|
package ir.sk.adt.queue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Created by sad.kayvanfar on 3/6/2021.
*/
public class ResizingArrayQueueTest {
ResizingArrayQueue<Integer> theArrayQueue;
@Before
public void setUp() throws Exception {
theArrayQueue = new ResizingArrayQueue<>();
theArrayQueue.enqueue(10); // insert 4 items
theArrayQueue.enqueue(20);
theArrayQueue.enqueue(30);
theArrayQueue.enqueue(40);
}
@After
public void tearDown() throws Exception {
}
@Test
public void enqueue() {
theArrayQueue.enqueue(50);
}
    @Test
    public void dequeue() {
        // assuming FIFO semantics: the first element enqueued in setUp() comes out first
        org.junit.Assert.assertEquals(Integer.valueOf(10), theArrayQueue.dequeue());
    }
    @Test
    public void peek() {
        // assuming peek() returns the front element without removing it
        org.junit.Assert.assertEquals(Integer.valueOf(10), theArrayQueue.peek());
        org.junit.Assert.assertEquals(Integer.valueOf(10), theArrayQueue.peek());
    }
}
|
UrsKR/updates-r-simple
|
core/src/test/java/de/idos/updates/server/FileServer.java
|
package de.idos.updates.server;
import de.idos.updates.RootFolderSelector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import java.io.File;
public class FileServer {
Server server = new Server(8080);
public static void main(String[] args) throws Exception {
new FileServer().start();
}
public void start() throws Exception {
SelectChannelConnector connector = new SelectChannelConnector();
connector.setPort(8080);
server.addConnector(connector);
ContextHandler updateContext = createUpdateContext();
ContextHandler rootContext = createRootContext();
ContextHandlerCollection handlers = new ContextHandlerCollection();
handlers.setHandlers(new Handler[]{updateContext, rootContext});
server.setHandler(handlers);
server.start();
}
private ContextHandler createUpdateContext() {
ResourceHandler resource_handler = new ResourceHandler();
resource_handler.setDirectoriesListed(true);
File rootFolder = new RootFolderSelector().getRootFolder();
resource_handler.setResourceBase(new File(rootFolder, "src/test/resources/httpUpdateServerBase").getAbsolutePath());
ContextHandler contextHandler = new ContextHandler();
contextHandler.setHandler(resource_handler);
contextHandler.setContextPath("/updates");
return contextHandler;
}
private ContextHandler createRootContext() {
ContextHandler rootContext = new ContextHandler("/");
rootContext.setHandler(new DefaultHandler());
return rootContext;
}
public void stop() throws Exception {
server.stop();
}
}
|
ImanolGo/IngoLightAndBuilding
|
ProtoPixel/scripts/webapp/remote/libs/widgets/cssvars.js
|
"use strict"
const PPxCSSvars = {
test: "red",
textColor: "#979797",
textColorBold: "#e2e2e2",
inputBg: "#2c2c2c",
inputBgDark: "#202020",
highlight: "#2995d2",
highlight2: "#1e84bd",
background: "#1b1b1b",
border: "#292929",
}
addEventListener("load", () => { setCSSvars(document.body, PPxCSSvars) })
|
Crankums/little-tale-frontend
|
src/components/posts/Post.js
|
import React, { Component } from 'react'
import CommentsContainer from '../../containers/CommentsContainer'
import { connect } from 'react-redux'
import { fetchPosts, updatePosts } from '../../actions/postActions'
import { Link } from 'react-router-dom'
class Post extends Component{
    // deletePost is expected to be injected as a prop by the parent container;
    // it is not mapped in the connect() call at the bottom of this file
    handleDelete(){
this.props.deletePost(this.props.post.id)
console.log(this.props.post.title, " has been deleted.")
}
render(){
const { post } = this.props
return(
<div className='post'>
<h4>{post.title}</h4>
<p>{post.text}</p>
<button id='delete-button' onClick={()=>this.handleDelete()}>Delete Post!</button>
<Link to={{pathname: `/posts/${post.id}/edit`, postId: post.id}}><button>Edit Post</button></Link>
<CommentsContainer post={post}/>
</div>
)
}
}
export default connect(null, {fetchPosts, updatePosts})(Post)
|
BIJOY-SUST/ACM---ICPC
|
Competitive Programing Problem Solutions/Virtual Judge/fact.c
|
#include<stdio.h>
#include<string.h>
int main(){
int n,fact,len;
char k[25];
scanf("%d",&n);
scanf("%s",k);
len = strlen(k);
fact = 1;
while(n>1){
fact = fact*n;
n = n-len;
}
printf("%d\n",fact);
}
/*if(n%2==0){
fact = fact*n;
n = n-len;
}*/
|
ErickMurillo/aprocacaho
|
organizacion/forms.py
|
# -*- coding: UTF-8 -*-
from django.db import models
from models import *
from django import forms
STATUS_CHOICES = (('','Todos'),(1,'ONG'),(2, 'Cooperativa'),(3, 'Unión de Cooperativa'))
class EncuestaOrgConsulta(forms.Form):
def __init__(self, *args, **kwargs):
super(EncuestaOrgConsulta, self).__init__(*args, **kwargs)
self.fields['status'] = forms.ChoiceField(label=u'Estatus',choices=STATUS_CHOICES,required=False)
|
Chen-Tang/chrono-vehicle
|
subsys/ChPowertrain.h
|
// =============================================================================
// PROJECT CHRONO - http://projectchrono.org
//
// Copyright (c) 2014 projectchrono.org
// All right reserved.
//
// Use of this source code is governed by a BSD-style license that can be found
// in the LICENSE file at the top level of the distribution and at
// http://projectchrono.org/license-chrono.txt.
//
// =============================================================================
// Authors: <NAME>, <NAME>
// =============================================================================
//
// Base class for a vehicle powertrain.
//
// =============================================================================
#ifndef CH_POWERTRAIN_H
#define CH_POWERTRAIN_H
#include "core/ChShared.h"
#include "core/ChVector.h"
#include "physics/ChBody.h"
#include "subsys/ChApiSubsys.h"
namespace chrono {
///
/// Base class for a powertrain system.
///
class CH_SUBSYS_API ChPowertrain : public ChShared
{
public:
enum DriveMode {
FORWARD,
NEUTRAL,
REVERSE
};
ChPowertrain();
virtual ~ChPowertrain() {}
/// Return the current engine speed.
virtual double GetMotorSpeed() const = 0;
/// Return the current engine torque.
virtual double GetMotorTorque() const = 0;
/// Return the value of slippage in the torque converter.
virtual double GetTorqueConverterSlippage() const = 0;
/// Return the input torque to the torque converter.
virtual double GetTorqueConverterInputTorque() const = 0;
/// Return the output torque from the torque converter.
virtual double GetTorqueConverterOutputTorque() const = 0;
/// Return the current transmission gear.
virtual int GetCurrentTransmissionGear() const = 0;
/// Return the ouput torque from the powertrain.
/// This is the torque that is passed to a vehicle system, thus providing the
/// interface between the powertrain and vehcicle cosimulation modules.
virtual double GetOutputTorque() const = 0;
/// Return the current mode of the transmission.
DriveMode GetDriveMode() { return m_drive_mode; }
/// Set the mode of the transmission.
virtual void SetDriveMode(DriveMode mmode) = 0;
/// Update the state of this powertrain system at the current time.
/// The powertrain system is provided the current driver throttle input, a
/// value in the range [0,1], and the current angular speed of the transmission
/// shaft (from the driveline).
virtual void Update(
double time, ///< [in] current time
double throttle, ///< [in] current throttle input [0,1]
double shaft_speed ///< [in] current angular speed of the transmission shaft
) = 0;
/// Advance the state of this powertrain system by the specified time step.
virtual void Advance(double step) = 0;
protected:
DriveMode m_drive_mode;
};
} // end namespace chrono
#endif
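// Hypothetical sketch (not part of the original header): a concrete powertrain
// derives from ChPowertrain and implements the pure virtual interface declared
// above. The class name and member variables are illustrative only.
//
//   class MySimplePowertrain : public chrono::ChPowertrain {
//   public:
//     virtual double GetMotorSpeed() const { return m_speed; }
//     virtual double GetMotorTorque() const { return m_torque; }
//     // ... remaining accessors ...
//     virtual void SetDriveMode(DriveMode mmode) { m_drive_mode = mmode; }
//     virtual void Update(double time, double throttle, double shaft_speed) {}
//     virtual void Advance(double step) {}
//   private:
//     double m_speed, m_torque;
//   };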
|
Kait-tt/tacowassa
|
addons/task_qrcode/public/task_qr.js
|
'use strict';
require('./task_qr.scss');
const caches = {};
class TaskQR {
static createQR (taskId, {width = 88, height = 88, margin = 0} = {}) {
if (caches[taskId]) { return caches[taskId]; }
const ele = document.createElement('div');
ele.classList.add('task-qr');
const qr = TaskQR._generateQR(taskId, {width, height, margin});
ele.appendChild(qr);
caches[taskId] = ele;
return ele;
}
static _generateQR (taskId, {width, height, margin}) {
const canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext('2d');
const black = '#000000';
const white = '#ffffff';
const gray = '#e2e2e2';
const borderWidth = 2;
const padding = 4;
const n = 4;
const bw = Math.floor((width - (margin + borderWidth + padding) * 2) / n) - padding;
const bh = Math.floor((height - (margin + borderWidth + padding) * 2) / n) - padding;
const w = (borderWidth + padding + margin) * 2 + (bw + padding) * n;
const h = (borderWidth + padding + margin) * 2 + (bh + padding) * n;
// draw border
ctx.fillStyle = gray;
ctx.fillRect(margin, margin, w - margin * 2, h - margin * 2);
ctx.fillStyle = white;
ctx.fillRect(borderWidth + margin, borderWidth + margin,
w - borderWidth * 2 - margin * 2, h - borderWidth * 2 - margin * 2);
const offset = borderWidth + padding + margin;
// draw find pattern
const fw = Math.floor((bw * 2 + padding * 2) / 7);
const fh = Math.floor((bh * 2 + padding * 2) / 7);
ctx.fillStyle = black;
ctx.fillRect(offset, offset, fw * 7, fh * 7);
ctx.fillStyle = white;
ctx.fillRect(offset + fw, offset + fh, fw * 5, fh * 5);
ctx.fillStyle = black;
ctx.fillRect(offset + fw * 2, offset + fh * 2, fw * 3, fh * 3);
// draw each block
let num = Number(taskId);
for (let y = 0; y < n; y++) {
for (let x = 0; x < n; x++) {
if (y < 2 && x < 2) { continue; }
ctx.fillStyle = num & 1 ? black : white;
num >>= 1;
ctx.fillRect(
offset + (bw + padding) * x + padding / 2,
offset + (bh + padding) * y + padding / 2,
bw, bh);
}
}
return canvas;
}
}
module.exports = TaskQR;
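/*
 * Hypothetical usage sketch (not part of the original file); the task id and
 * container element are placeholders:
 *
 *   const TaskQR = require('./task_qr');
 *   const qrElement = TaskQR.createQR(42, {width: 88, height: 88, margin: 0});
 *   document.body.appendChild(qrElement);   // repeated calls for id 42 reuse the cache
 */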
|
mpsitech/wzsk-Whiznium-StarterK
|
wzskcmbd/gbl/JobWzskActExposure.cpp
|
/**
* \file JobWzskActExposure.cpp
* job handler for job JobWzskActExposure (implementation)
* \copyright (C) 2016-2020 MPSI Technologies GmbH
* \author <NAME> (auto-generation)
* \date created: 5 Dec 2020
*/
// IP header --- ABOVE
#ifdef WZSKCMBD
#include <Wzskcmbd.h>
#else
#include <Wzskd.h>
#endif
#include "JobWzskActExposure.h"
#include "JobWzskActExposure_blks.cpp"
using namespace std;
using namespace Sbecore;
using namespace Xmlio;
// IP ns.cust --- INSERT
// IP ns.spec --- INSERT
// IP Shrdat.subs --- INSERT
/******************************************************************************
class JobWzskActExposure::Shrdat
******************************************************************************/
JobWzskActExposure::Shrdat::Shrdat() :
ShrdatWzsk("JobWzskActExposure", "Shrdat")
{
};
void JobWzskActExposure::Shrdat::init(
XchgWzsk* xchg
, DbsWzsk* dbswzsk
) {
// IP Shrdat.init --- IBEGIN
autoNotManual = true;
Texp = 1e-6;
focus = 0.3;
// IP Shrdat.init --- IEND
};
void JobWzskActExposure::Shrdat::term(
XchgWzsk* xchg
) {
// IP Shrdat.term --- INSERT
};
/******************************************************************************
class JobWzskActExposure
******************************************************************************/
JobWzskActExposure::JobWzskActExposure(
XchgWzsk* xchg
, DbsWzsk* dbswzsk
, const ubigint jrefSup
, const uint ixWzskVLocale
) :
CsjobWzsk(xchg, VecWzskVJob::JOBWZSKACTEXPOSURE, jrefSup, ixWzskVLocale)
{
jref = xchg->addJob(dbswzsk, this, jrefSup);
srcv4l2 = NULL;
srcmcvevp = NULL;
srcicicle = NULL;
srcclnxevb = NULL;
srcarty = NULL;
// IP constructor.cust1 --- INSERT
// IP constructor.spec1 --- INSERT
// IP constructor.cust2 --- IBEGIN
if (srvNotCli) {
if ((xchg->stgwzskglobal.ixWzskVTarget == VecWzskVTarget::APALIS) || (xchg->stgwzskglobal.ixWzskVTarget == VecWzskVTarget::WS)) srcv4l2 = new JobWzskSrcV4l2(xchg, dbswzsk, jref, ixWzskVLocale);
else if (xchg->stgwzskglobal.ixWzskVTarget == VecWzskVTarget::ARTY) srcarty = new JobWzskSrcArty(xchg, dbswzsk, jref, ixWzskVLocale);
else if (xchg->stgwzskglobal.ixWzskVTarget == VecWzskVTarget::CLNXEVB) srcclnxevb = new JobWzskSrcClnxevb(xchg, dbswzsk, jref, ixWzskVLocale);
else if (xchg->stgwzskglobal.ixWzskVTarget == VecWzskVTarget::ICICLE) srcicicle = new JobWzskSrcIcicle(xchg, dbswzsk, jref, ixWzskVLocale);
else if (xchg->stgwzskglobal.ixWzskVTarget == VecWzskVTarget::MCVEVP) srcmcvevp = new JobWzskSrcMcvevp(xchg, dbswzsk, jref, ixWzskVLocale);
};
// IP constructor.cust2 --- IEND
// IP constructor.spec2 --- INSERT
// IP constructor.cust3 --- INSERT
// IP constructor.spec3 --- INSERT
};
JobWzskActExposure::~JobWzskActExposure() {
// IP destructor.spec --- INSERT
// IP destructor.cust --- INSERT
xchg->removeJobByJref(jref);
};
// IP cust --- INSERT
// IP spec --- INSERT
bool JobWzskActExposure::setExposure(
DbsWzsk* dbswzsk
, const bool autoNotManual
, const float Texp // in s
) {
bool retval = true;
if (!srvNotCli) {
if (srv) {
retval = ((JobWzskActExposure*) srv)->setExposure(dbswzsk, autoNotManual, Texp);
} else retval = false;
return retval;
};
lockAccess("setExposure");
// IP setExposure --- IBEGIN
if (srcv4l2) retval = srcv4l2->setExposure(autoNotManual, Texp);
else if (srcarty) retval = srcarty->setExposure(autoNotManual, Texp);
else if (srcicicle) retval = srcicicle->setExposure(autoNotManual, Texp);
if (retval) {
shrdat.wlockAccess(jref, "setExposure");
shrdat.autoNotManual = autoNotManual;
shrdat.Texp = Texp;
shrdat.wunlockAccess(jref, "setExposure");
xchg->triggerSrefCall(dbswzsk, VecWzskVCall::CALLWZSKSHRDATCHG, jref, "autoNotManualTexp");
};
// IP setExposure --- IEND
unlockAccess("setExposure");
return retval;
};
bool JobWzskActExposure::setFocus(
DbsWzsk* dbswzsk
, const float focus // 0..1
) {
bool retval = true;
if (!srvNotCli) {
if (srv) {
retval = ((JobWzskActExposure*) srv)->setFocus(dbswzsk, focus);
} else retval = false;
return retval;
};
lockAccess("setFocus");
// IP setFocus --- IBEGIN
if (srcv4l2) retval = srcv4l2->setFocus(focus);
else if (srcarty) retval = srcarty->setFocus(focus);
else if (srcicicle) retval = srcicicle->setFocus(focus);
if (retval) {
shrdat.wlockAccess(jref, "setFocus");
shrdat.focus = focus;
shrdat.wunlockAccess(jref, "setFocus");
xchg->triggerSrefCall(dbswzsk, VecWzskVCall::CALLWZSKSHRDATCHG, jref, "focus");
};
// IP setFocus --- IEND
unlockAccess("setFocus");
return retval;
};
void JobWzskActExposure::handleRequest(
DbsWzsk* dbswzsk
, ReqWzsk* req
) {
if (req->ixVBasetype == ReqWzsk::VecVBasetype::CMD) {
reqCmd = req;
if (req->cmd == "cmdset") {
} else {
cout << "\tinvalid command!" << endl;
};
if (!req->retain) reqCmd = NULL;
} else if ((req->ixVBasetype == ReqWzsk::VecVBasetype::METHOD) && (req->method->ixVFeatgroup == VecWzskVFeatgroup::VECVJOBWZSKACTEXPOSUREMETHOD)) {
uint ixVMethod = VecVMethod::getIx(req->method->srefIxVMethod);
if ((ixVMethod == VecVMethod::SETEXPOSURE) && (req->method->parsInv.size() == 2) && (req->method->parsRet.size() == 1)) {
*((bool*) (req->method->parsRet[0])) = setExposure(dbswzsk, *((const bool*) (req->method->parsInv[0])), *((const float*) (req->method->parsInv[1])));
} else if ((ixVMethod == VecVMethod::SETFOCUS) && (req->method->parsInv.size() == 1) && (req->method->parsRet.size() == 1)) {
*((bool*) (req->method->parsRet[0])) = setFocus(dbswzsk, *((const float*) (req->method->parsInv[0])));
};
};
};
bool JobWzskActExposure::handleClaim(
DbsWzsk* dbswzsk
, map<ubigint,Sbecore::Claim*>& claims
, const ubigint jrefNewest
) {
bool mod = false;
// IP handleClaim --- INSERT
return mod;
};
|
yangshadip/YAP-SELF
|
Libraries/ITK-4.12.1/include/ITKIOMRCExport.h
|
<reponame>yangshadip/YAP-SELF<gh_stars>0
#ifndef ITKIOMRC_EXPORT_H
#define ITKIOMRC_EXPORT_H
#ifdef ITK_STATIC
# define ITKIOMRC_EXPORT
# define ITKIOMRC_HIDDEN
#else
# ifndef ITKIOMRC_EXPORT
# ifdef ITKIOMRC_EXPORTS
/* We are building this library */
# define ITKIOMRC_EXPORT __declspec(dllexport)
# else
/* We are using this library */
# define ITKIOMRC_EXPORT __declspec(dllimport)
# endif
# endif
# ifndef ITKIOMRC_HIDDEN
# define ITKIOMRC_HIDDEN
# endif
#endif
#ifndef ITKIOMRC_DEPRECATED
# define ITKIOMRC_DEPRECATED __declspec(deprecated)
#endif
#ifndef ITKIOMRC_DEPRECATED_EXPORT
# define ITKIOMRC_DEPRECATED_EXPORT ITKIOMRC_EXPORT ITKIOMRC_DEPRECATED
#endif
#ifndef ITKIOMRC_DEPRECATED_NO_EXPORT
# define ITKIOMRC_DEPRECATED_NO_EXPORT ITKIOMRC_HIDDEN ITKIOMRC_DEPRECATED
#endif
#define DEFINE_NO_DEPRECATED 0
#if DEFINE_NO_DEPRECATED
# define ITKIOMRC_NO_DEPRECATED
#endif
#endif
|
huntc/akka
|
akka-stream/src/main/scala/akka/stream/Transformer.scala
|
<reponame>huntc/akka
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.stream
import scala.collection.immutable
/**
* General interface for stream transformation.
*
* It is possible to keep state in the concrete [[Transformer]] instance with
* ordinary instance variables. The [[Transformer]] is executed by an actor and
 * therefore you do not have to add any additional thread safety or memory
* visibility constructs to access the state from the callback methods.
*
* @see [[akka.stream.scaladsl.Flow#transform]]
* @see [[akka.stream.javadsl.Flow#transform]]
*/
abstract class Transformer[-T, +U] {
/**
* Invoked for each element to produce a (possibly empty) sequence of
* output elements.
*/
def onNext(element: T): immutable.Seq[U]
/**
* Invoked after handing off the elements produced from one input element to the
* downstream consumers to determine whether to end stream processing at this point;
* in that case the upstream subscription is canceled.
*/
def isComplete: Boolean = false
/**
* Invoked before the Transformer terminates (either normal completion or after an onError)
* to produce a (possibly empty) sequence of elements in response to the
* end-of-stream event.
*
* This method is only called if [[Transformer#onError]] does not throw an exception. The default implementation
* of [[Transformer#onError]] throws the received cause forcing the error to propagate downstream immediately.
*
* @param e Contains a non-empty option with the error causing the termination or an empty option
* if the Transformer was completed normally
*/
def onTermination(e: Option[Throwable]): immutable.Seq[U] = Nil
/**
* Invoked when failure is signaled from upstream. If this method throws an exception, then onError is immediately
* propagated downstream. If this method completes normally then [[Transformer#onTermination]] is invoked as a final
* step, passing the original cause.
*/
def onError(cause: Throwable): Unit = throw cause
/**
* Invoked after normal completion or error.
*/
def cleanup(): Unit = ()
/**
* Name of this transformation step. Used as part of the actor name.
* Facilitates debugging and logging.
*/
def name: String = "transform"
}
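// Illustrative sketch (not part of the original source): a stateful Transformer
// that keeps a running sum in an ordinary instance variable, as described in the
// class documentation above.
//
//   class RunningSum extends Transformer[Int, Int] {
//     private var sum = 0
//     override def onNext(element: Int): immutable.Seq[Int] = {
//       sum += element
//       immutable.Seq(sum)
//     }
//   }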
|
madbadPi/TelerikAcademy
|
VCPlusPlus/VCPlusPlusLoops/FactorialExpr/FactorialExpr.cpp
|
<reponame>madbadPi/TelerikAcademy<gh_stars>0
#include <iostream>
#include <string>
#include <vector>
// computes n! = n * (n - 1) * ... * 1
long double factorial(int n)
{
    long double result = 1;
    while (n > 0)
    {
        result *= n;
        n--;
    }
    return result;
}
int main()
{
std::string nnumstr;
std::string xnumstr;
std::cout << "Enter integer number N: ";
getline(std::cin, nnumstr);
std::cout << "Enter integer number X: ";
getline(std::cin, xnumstr);
int n = stoi(nnumstr);
int x = stoi(xnumstr);
    long double sum = 1;
    long double xPower = 1;
    for (int i = 1; i <= n; i++)
    {
        xPower *= x;
        sum += factorial(i) / xPower;
    }
    std::cout << "Result of the sum" << std::endl << "S = 1 + 1!/X + 2!/X^2 + ... + N!/X^N";
    std::cout << " is " << sum;
std::cout << std::endl;
return 0;
}
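// Worked check (illustrative, not part of the original program): for N = 2 and
// X = 2 the series is S = 1 + 1!/2 + 2!/2^2 = 1 + 0.5 + 0.5 = 2.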
|
lavaorg/northstar
|
rte-lua/modules/nsQL/stats.go
|
<gh_stars>0
/*
Copyright (C) 2017 Verizon. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nsQL
import (
"github.com/lavaorg/lrtx/stats"
)
var (
NsQL = stats.New("nsQL")
Connect = NsQL.NewCounter("Connect")
Disconnect = NsQL.NewCounter("Disconnect")
Query = NsQL.NewCounter("Query")
QueryDirect = NsQL.NewCounter("QueryDirect")
ErrConnect = NsQL.NewCounter("ErrConnect")
ErrDisconnect = NsQL.NewCounter("ErrDisconnect")
ErrQuery = NsQL.NewCounter("ErrQuery")
ErrQueryDirect = NsQL.NewCounter("ErrQueryDirect")
)
|
skoussa/hashbrown-cms
|
src/Client/Entity/View/Modal/CreateUser.js
|
'use strict';
/**
* The modal for creating users
*
* @memberof HashBrown.Client.Entity.View.Modal
*/
class CreateUser extends HashBrown.Entity.View.Modal.ModalBase {
/**
* Constructor
*/
constructor(params) {
super(params);
let charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
let password = '';
for(let i = 0, n = charset.length; i < 8; ++i) {
password += charset.charAt(Math.floor(Math.random() * n));
}
this.model = {
password: password
};
this.template = require('template/modal/createUser');
}
/**
* Event: Clicked email
*/
onClickEmail() {
let subject = 'Invitation to HashBrown CMS';
let url = `${location.protocol}//${location.host}`;
let body = [
`Hello ${this.model.fullName || this.model.username}!`,
``,
`You have been invited by ${HashBrown.Context.user.fullName || HashBrown.Context.user.username} to join a HashBrown CMS instance.`,
`Please visit this URL to login:`,
url,
``,
`Username: ${this.model.username}`,
`Password: ${<PASSWORD>}`,
``,
`Make sure to change your password soon after your first login`
].join('%0D%0A');
location.href = `mailto:${this.model.email}?subject=${subject}&body=${body}`;
this.close();
}
/**
* Event: Clicked create
*/
async onClickCreate() {
try {
if(!this.model.username || this.model.username.length < 2) {
throw new Error('The username is too short');
}
if(!this.model.password || this.model.password.length < 2) {
throw new Error('The password is too short');
}
await HashBrown.Service.ResourceService.new(HashBrown.Entity.Resource.User, 'users', '', this.model);
this.setState('success');
this.trigger('change');
} catch(e) {
this.setErrorState(e);
}
}
/**
* Event: Input password
*/
onInputPassword(password) {
this.model.password = password;
}
/**
* Event: Input email
*/
onInputEmail(email) {
this.model.email = email;
}
/**
* Event: Input name
*/
onInputUsername(username) {
this.model.username = username;
}
/**
* Event: Input full name
*/
onInputFullName(fullName) {
this.model.fullName = fullName;
}
}
module.exports = CreateUser;
|
profmikegreene/lrnwebcomponents
|
elements/simple-login/lib/simple-camera-snap.js
|
import "./simple-login-avatar.js";
import "./simple-login-camera.js";
import "@polymer/paper-icon-button/paper-icon-button.js";
import "@polymer/paper-tooltip/paper-tooltip.js";
class SimpleCameraSnap extends HTMLElement {
constructor(delayRender = false) {
super();
import("@polymer/iron-icons/image-icons.js");
this.tag = SimpleCameraSnap.tag;
this.template = document.createElement("template");
this.attachShadow({ mode: "open" });
if (!delayRender) {
this.render();
}
}
static get tag() {
return "simple-camera-snap";
}
render() {
this.shadowRoot.innerHTML = null;
this.template.innerHTML = this.html;
if (window.ShadyCSS) {
window.ShadyCSS.prepareTemplate(this.template, this.tag);
}
this.shadowRoot.appendChild(this.template.content.cloneNode(true));
}
get html() {
return `
<style>
:host {
        /* style simple-login-camera according to simple-camera-snap styles */
--simple-login-camera-background: var(--simple-camera-snap-color, #36bed4);
--simple-login-camera-error: var(--simple-camera-snap-error, red);
--simple-login-avatar-color: var(--simple-camera-snap-color, #36bed4);
--simple-login-camera-width: var(--simple-camera-snap-width, 200px);
--simple-login-camera-height: var(--simple-camera-snap-height, 200px);
        /* style simple-login-avatar according to simple-camera-snap styles */
--simple-login-avatar-background: var(--simple-camera-snap-background, white);
--simple-login-avatar-border-radius: var(--simple-camera-snap-border-radius,100%);
--simple-login-avatar-width: var(--simple-camera-snap-width, 200px);
--simple-login-avatar-height: var(--simple-camera-snap-height, 200px);
}
#selfie {
position: absolute;
margin: 0;
width: var(--simple-camera-snap-width, 200px);
height: var(--simple-camera-snap-height, 200px);
display: flex;
justify-content: center;
}
#snap {
color: var(--simple-camera-snap-color, #36bed4);
background-color: var(--simple-camera-snap-background, white);
border-radius: var(--simple-camera-snap-button-border-radius);
opacity: var(--simple-camera-snap-button-opacity);
}
.has-snap {
z-index: 3;
}
#selfie img {
z-index: 2;
position: absolute;
width: calc(var(--simple-camera-snap-height, 200px) * 16 / 9);
height: var(--simple-camera-snap-height, 200px);
}
.buttons {
display: flex;
width: var(--simple-camera-snap-width, 200px);
justify-content: space-evenly;
position: var(--simple-camera-snap-button-container-position);
bottom: var(--simple-camera-snap-button-container-bottom);
z-index: var(--simple-camera-snap-button-container-z-index);
}
</style>
<simple-login-avatar>
<div id="selfie"></div>
<simple-login-camera id="camera" autoplay></simple-login-camera>
</simple-login-avatar>
<div class="buttons">
<paper-icon-button id="snap" icon="image:camera-alt"></paper-icon-button>
<paper-tooltip for="snap">Take Photo</paper-tooltip>
</div>
`;
}
connectedCallback() {
    // hide the snap button when camera access is unavailable,
    // e.g. an insecure (http) context where navigator.mediaDevices is not exposed
if (!navigator.mediaDevices) {
this.shadowRoot.querySelector("#snap").style.display = "none";
}
this.shadowRoot
.querySelector("#snap")
.addEventListener("click", this.snapPhoto.bind(this));
}
disconnectedCallback() {
this.shadowRoot
.querySelector("#snap")
.removeEventListener("click", this.snapPhoto.bind(this));
}
async snapPhoto(e) {
const camera = this.shadowRoot.querySelector("#camera");
if (camera.hasAttribute("autoplay")) {
let raw = await camera.takeASnap();
let img = await camera.takeASnap().then(camera.renderImage);
camera.removeAttribute("autoplay");
const selfie = this.shadowRoot.querySelector("#selfie");
selfie.innerHTML = "";
selfie.appendChild(img);
      // dispatch an event so other components can find the image
this.dispatchEvent(
new CustomEvent("simple-camera-snap-image", {
bubbles: true,
composed: true,
cancelable: true,
detail: {
img: img,
raw: raw
}
})
);
selfie.classList.add("has-snap");
} else {
this.clearPhoto(e);
}
}
clearPhoto(e) {
const camera = this.shadowRoot.querySelector("#camera");
camera.setAttribute("autoplay", "autoplay");
const selfie = this.shadowRoot.querySelector("#selfie");
selfie.innerHTML = "";
selfie.classList.remove("has-snap");
}
}
window.customElements.define(SimpleCameraSnap.tag, SimpleCameraSnap);
export { SimpleCameraSnap };
|
keveinliu/pipy
|
docs/reference/MessageEnd.js
|
/**
* MessageEnd marks the end of a message in an event stream.
* It also contains meta-info of that message in its optional tail property.
*/
class MessageEnd {
/**
* Creates an instance of MessageEnd.
*
* @param {Object} [tail] Message meta-info in the tail.
*/
constructor(tail) {}
/**
* Message meta-info in the tail.
*
* @type {Object}
   * @readonly
*/
tail = null;
}
|
royschut/solid-filemanager
|
dist/cjs/Components/Dialogs/Copy/Copy.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var react_1 = require("react");
var react_redux_1 = require("react-redux");
var Actions_1 = require("../../../Actions/Actions");
var actionTypes_1 = require("../../../Actions/actionTypes");
var ChooseLocation_1 = require("../ChooseLocation/ChooseLocation");
function CopyDialog(props) {
var initialHost = props.initialHost, initialPath = props.initialPath, selectedItems = props.selectedItems, open = props.open, handleClose = props.handleClose, copy = props.copy;
return react_1.default.createElement(ChooseLocation_1.default, { open: open, actionName: "Copy", initialHost: initialHost, initialPath: initialPath, handleClose: handleClose, handleSubmit: function (location) { return copy(selectedItems, location); } });
}
var mapStateToProps = function (state) {
return {
open: state.visibleDialogs.COPY,
initialHost: state.account.host || '',
initialPath: state.path,
selectedItems: state.items.selected,
};
};
var mapDispatchToProps = function (dispatch) {
return {
handleClose: function () {
dispatch(Actions_1.closeDialog(actionTypes_1.DIALOGS.COPY));
},
copy: function (selectedItems, targetLocation) {
dispatch(Actions_1.copyItems(selectedItems, targetLocation));
},
};
};
exports.default = react_redux_1.connect(mapStateToProps, mapDispatchToProps)(CopyDialog);
|
edussuriyac/testgrid_wso2
|
web/src/main/java/org/wso2/testgrid/web/sso/SSOContextEventListener.java
|
<reponame>edussuriyac/testgrid_wso2
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.testgrid.web.sso;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.identity.sso.agent.SSOAgentConstants;
import org.wso2.carbon.identity.sso.agent.SSOAgentException;
import org.wso2.carbon.identity.sso.agent.bean.SSOAgentConfig;
import org.wso2.carbon.identity.sso.agent.saml.SSOAgentX509Credential;
import org.wso2.testgrid.common.config.ConfigurationContext;
import org.wso2.testgrid.common.exception.TestGridException;
import org.wso2.testgrid.web.api.SSOService;
import java.util.Properties;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
/**
* Implementation of context event listener which contains the SSO related details (X509 Credential of TestGrid,
* configurations in SSO property file) that will be used when generating SAML request.
*/
public class SSOContextEventListener implements ServletContextListener {
private static final Logger logger = LoggerFactory.getLogger(SSOService.class);
private static Properties properties = new Properties();
/**
* Fetch relevant details from
* {@link org.wso2.testgrid.web.utils.Constants#SSO_PROPERTY_FILE_NAME} property file and
* {@link org.wso2.testgrid.web.utils.Constants#JKS_FILE_NAME} JKS file.
*/
public void contextInitialized(ServletContextEvent servletContextEvent) {
String isSsoEnabled = ConfigurationContext.getProperty(ConfigurationContext.ConfigurationProperties.ENABLE_SSO);
if (!Boolean.valueOf(isSsoEnabled)) {
return;
}
SSOConfigurationReader ssoConfigurationReader = new SSOConfigurationReader();
try {
SSOAgentX509Credential credential = ssoConfigurationReader.getIdPX509Credential();
SSOAgentConfig config = new SSOAgentConfig();
config.initConfig(ssoConfigurationReader.getSSOProperties());
config.getSAML2().setSSOAgentX509Credential(credential);
servletContextEvent.getServletContext().
setAttribute(SSOAgentConstants.CONFIG_BEAN_NAME, config);
} catch (SSOAgentException | TestGridException e) {
logger.error(e.getMessage(), e);
}
}
public void contextDestroyed(ServletContextEvent servletContextEvent) {
}
    /**
     * Get the SSO properties used by this listener.
     *
     * @return Properties
     */
public static Properties getProperties() {
return properties;
}
}
|
couchbase/ns_server
|
priv/public/ui/app/mn_admin/mn_admin_config.js
|
<filename>priv/public/ui/app/mn_admin/mn_admin_config.js<gh_stars>10-100
/*
Copyright 2015-Present Couchbase, Inc.
Use of this software is governed by the Business Source License included in
the file licenses/BSL-Couchbase.txt. As of the Change Date specified in that
file, in accordance with the Business Source License, use of this software will
be governed by the Apache License, Version 2.0, included in the file
licenses/APL2.txt.
*/
import angular from "angular";
import ngAnimate from "angular-animate";
import uiSelect from "ui-select";
import uiBootstrap from "angular-ui-bootstrap";
import uiRouter from "@uirouter/angularjs";
import {downgradeInjectable} from "@angular/upgrade/static";
import mnAdminController from "./mn_admin_controller.js";
import mnAlertsService from "../components/mn_alerts.js";
import mnPoolDefault from "../components/mn_pool_default.js";
import mnPoll from "../components/mn_poll.js";
import mnFilters from "../components/mn_filters.js";
import mnHelper from "../components/mn_helper.js";
import mnSpinner from "../components/directives/mn_spinner.js";
import mnMainSpinner from "../components/directives/mn_main_spinner.js";
import mnLaunchpad from "../components/directives/mn_launchpad.js";
import mnPluggableUiRegistry from "../components/mn_pluggable_ui_registry.js";
import mnSettingsAutoFailoverService from "./mn_settings_auto_failover_service.js";
import mnSettingsClusterService from "./mn_settings_cluster_service.js";
import mnAuthService from "../mn_auth/mn_auth_service.js";
import mnPermissions from "../components/mn_permissions.js";
import mnElementCrane from "../components/directives/mn_element_crane/mn_element_crane.js";
import mnDragAndDrop from "../components/directives/mn_drag_and_drop.js";
import mnTasksDetails from "../components/mn_tasks_details.js";
import mnLostConnection from "./mn_lost_connection_config.js";
import {MnAdminService} from "../mn.admin.service.js";
import {MnSessionService} from "../mn.session.service.js";
import mnDetailStatsModule from "../components/directives/mn_detail_stats_controller.js";
import mnSelect from "../components/directives/mn_select/mn_select.js";
export default 'mnAdmin';
angular.module('mnAdmin', [
ngAnimate,
uiBootstrap,
uiRouter,
uiSelect,
mnPoll,
mnFilters,
mnAlertsService,
mnPoolDefault,
mnAuthService,
mnHelper,
mnSpinner,
mnMainSpinner,
mnTasksDetails,
mnLaunchpad,
mnPluggableUiRegistry,
mnLostConnection,
mnPermissions,
mnElementCrane,
mnDragAndDrop,
mnSettingsAutoFailoverService,
mnSettingsClusterService,
mnDetailStatsModule,
mnSelect
]).config(mnAdminConfig)
.controller('mnAdminController', mnAdminController)
.factory('mnAdminService', downgradeInjectable(MnAdminService))
.factory('mnSessionService', downgradeInjectable(MnSessionService));
//https://github.com/angular-ui/ui-select/issues/1560
angular.module('ui.select').run(function($animate) {
var origEnabled = $animate.enabled
$animate.enabled = function (elem) {
if (arguments.length !== 1) {
return origEnabled.apply($animate, arguments);
} else if (origEnabled(elem)) {
return (/enable-ng-animation/).test(elem.classNames);
}
return false
}
});
function mnAdminConfig($stateProvider, $urlMatcherFactoryProvider, mnPluggableUiRegistryProvider, $httpProvider) {
$httpProvider.interceptors.push(['$q', '$injector', interceptorOf401]);
function interceptorOf401($q, $injector) {
return {
responseError: function (rejection) {
if (rejection.status === 401 &&
rejection.config.url !== "/pools" &&
rejection.config.url !== "/controller/changePassword" &&
rejection.config.url !== "/uilogout" &&
($injector.get('$state').includes('app.admin') ||
$injector.get('$state').includes('app.wizard')) &&
!rejection.config.headers["ignore-401"] &&
!$injector.get('mnLostConnectionService').getState().isActive) {
$injector.get('mnAuthService').logout();
}
return $q.reject(rejection);
}
};
}
function valToString(val) {
return val != null ? val.toString() : val;
}
$urlMatcherFactoryProvider.type("string", {
encode: valToString,
decode: valToString,
is: function (val) {
return (/[^/]*/).test(val);
}
});
mnPluggableUiRegistryProvider.registerConfig({
name: 'Indexes',
state: 'app.admin.gsi',
includedByState: 'app.admin.gsi',
plugIn: 'workbenchTab',
index: 2,
ngShow: "rbac.cluster.bucket['.'].n1ql.index.read"
});
$stateProvider
.state('app.admin', {
url: "?commonBucket&commonScope&commonCollection&scenarioZoom&scenario",
abstract: true,
data: {
requiresAuth: true
},
params: {
openedGroups: {
value: [],
array: true,
dynamic: true
},
commonBucket: {
value: null,
dynamic: true
},
commonScope: {
value: null,
dynamic: true
},
commonCollection: {
value: null,
dynamic: true
},
scenario: {
value: null,
dynamic: true
},
scenarioZoom: {
value: "minute"
}
},
resolve: {
poolDefault: function (mnPoolDefault) {
return mnPoolDefault.getFresh();
},
pools: function (mnPools) {
return mnPools.get();
},
permissions: function (mnPermissions) {
return mnPermissions.check();
},
whoami: function (mnAuthService) {
return mnAuthService.whoami();
}
},
views: {
"": {
controller: 'mnAdminController as adminCtl',
templateUrl: 'app/mn_admin/mn_admin.html'
},
"<EMAIL>": {
templateUrl: 'app/mn_admin/mn_lost_connection.html',
controller: 'mnLostConnectionController as lostConnCtl'
}
}
});
}
|
RedBrumbler/BeatSaber-Quest-Codegen
|
include/System/Net/TimerThread_InfiniteTimerQueue.hpp
|
// Autogenerated from CppHeaderCreator
// Created by Sc2ad
// =========================================================================
#pragma once
// Begin includes
#include "beatsaber-hook/shared/utils/typedefs.h"
#include "beatsaber-hook/shared/utils/byref.hpp"
// Including type: System.Net.TimerThread
#include "System/Net/TimerThread.hpp"
// Including type: System.Net.TimerThread/System.Net.Queue
#include "System/Net/TimerThread_Queue.hpp"
#include "beatsaber-hook/shared/utils/il2cpp-utils-methods.hpp"
#include "beatsaber-hook/shared/utils/il2cpp-utils-properties.hpp"
#include "beatsaber-hook/shared/utils/il2cpp-utils-fields.hpp"
#include "beatsaber-hook/shared/utils/utils.h"
// Completed includes
#include "beatsaber-hook/shared/utils/il2cpp-type-check.hpp"
NEED_NO_BOX(::System::Net::TimerThread::InfiniteTimerQueue);
DEFINE_IL2CPP_ARG_TYPE(::System::Net::TimerThread::InfiniteTimerQueue*, "System.Net", "TimerThread/InfiniteTimerQueue");
// Type namespace: System.Net
namespace System::Net {
// Size: 0x14
#pragma pack(push, 1)
// Autogenerated type: System.Net.TimerThread/System.Net.InfiniteTimerQueue
// [TokenAttribute] Offset: FFFFFFFF
class TimerThread::InfiniteTimerQueue : public ::System::Net::TimerThread::Queue {
public:
// System.Void .ctor()
// Offset: 0x1B14898
// Implemented from: System.Object
// Base method: System.Void Object::.ctor()
template<::il2cpp_utils::CreationType creationType = ::il2cpp_utils::CreationType::Temporary>
static TimerThread::InfiniteTimerQueue* New_ctor() {
static auto ___internal__logger = ::Logger::get().WithContext("::System::Net::TimerThread::InfiniteTimerQueue::.ctor");
return THROW_UNLESS((::il2cpp_utils::New<TimerThread::InfiniteTimerQueue*, creationType>()));
}
}; // System.Net.TimerThread/System.Net.InfiniteTimerQueue
#pragma pack(pop)
}
#include "beatsaber-hook/shared/utils/il2cpp-utils-methods.hpp"
// Writing MetadataGetter for method: System::Net::TimerThread::InfiniteTimerQueue::New_ctor
// Il2CppName: .ctor
// Cannot get method pointer of value based method overload from template for constructor!
// Try using FindMethod instead!
|
Neoksi/Moxy
|
sample-github/src/test/java/com/arellomobile/mvp/sample/github/mvp/presenters/SplashPresenterTest.java
|
package com.arellomobile.mvp.sample.github.mvp.presenters;
import com.arellomobile.mvp.sample.github.mvp.common.AuthUtils;
import com.arellomobile.mvp.sample.github.mvp.views.SplashView;
import com.arellomobile.mvp.sample.github.test.GithubSampleTestRunner;
import com.arellomobile.mvp.sample.github.test.TestComponentRule;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.mockito.Mockito.verify;
@RunWith(GithubSampleTestRunner.class)
public class SplashPresenterTest {
@Rule
public TestComponentRule testComponentRule = new TestComponentRule();
@Mock
SplashView splashView;
private SplashPresenter presenter;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
presenter = new SplashPresenter();
presenter.getAttachedViews().add(splashView);
}
@Test
public void splash_shouldAuthorizedStateFalse() {
AuthUtils.setToken(null);
presenter.attachView(splashView);
verify(splashView).setAuthorized(false);
}
@Test
public void splash_shouldAuthorizedStateTrue() {
AuthUtils.setToken("token");
presenter.attachView(splashView);
verify(splashView).setAuthorized(true);
}
}
|
Baremetrics/pharos-cluster
|
spec/pharos/phase_spec.rb
|
require "pharos/phase"
describe Pharos::Phase do
let(:host) { double(:host) }
let(:config) { double(:config) }
let(:cluster_context) { {} }
let(:subject) { described_class.new(host, config: config, cluster_context: cluster_context) }
describe '#worker_pool' do
it 'returns FixedThreadPool' do
pool = subject.worker_pool('foo', 2)
expect(pool).to be_instance_of(Concurrent::FixedThreadPool)
end
it 'returns the same pool if asked twice' do
pool1 = subject.worker_pool('foo', 2)
pool2 = subject.worker_pool('foo', 2)
expect(pool1).to eq(pool2)
end
it 'returns a different pool if asked twice with different name' do
pool1 = subject.worker_pool('foo', 2)
pool2 = subject.worker_pool('bar', 2)
expect(pool1).not_to eq(pool2)
end
end
describe '#throttled_work' do
it 'runs given block' do
value = subject.throttled_work('foo', 2) do
'bar'
end
expect(value).to eq('bar')
end
    it 're-raises exceptions' do
expect {
subject.throttled_work('foo', 2) do
raise 'bar'
end
}.to raise_error(StandardError)
end
end
end
|
Centaurioun/modernmt
|
src/commons/src/main/java/eu/modernmt/io/WordCounter.java
|
<gh_stars>100-1000
package eu.modernmt.io;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created by davide on 26/08/17.
*/
public class WordCounter {
private static final Pattern DELIMITERS_REGEX = Pattern.compile("[\\s\\p{Punct}\\u00A0]+");
private static final Pattern CJK_REGEX = Pattern.compile("[\\p{IsHan}\\p{IsKatakana}\\p{IsHiragana}]");
    /**
     * Counts the number of words in a corpus line.
     *
     * @param line the line whose words are to be counted
     * @return the word count
     */
public static int count(String line) {
int wordCount = 0;
Matcher delimitersMatcher = DELIMITERS_REGEX.matcher(line);
int wordStart = 0;
while (delimitersMatcher.find()) {
int wordEnd = delimitersMatcher.start();
if (wordStart == wordEnd)
continue;
wordCount += countWordsInToken(line.substring(wordStart, wordEnd));
wordStart = delimitersMatcher.end();
}
if (wordStart != line.length()) {
wordCount += countWordsInToken(line.substring(wordStart, line.length()));
}
return wordCount;
}
    /**
     * Counts the number of words in a token delimited by whitespace or
     * punctuation symbols, treating each CJK character as a separate word.
     *
     * @param token the token whose words are counted
     * @return the number of words in the token
     */
private static int countWordsInToken(String token) {
int wordCount = 0;
//start index of previous CJK in token
int prev = -1;
//start index of current CJK in token
int cur = 0;
Matcher cjkMatcher = CJK_REGEX.matcher(token);
while (cjkMatcher.find()) {
// if the matcher finds a CJK character, it is a separate word
// so increment the word count
wordCount++;
// moreover, if the previous CJK is more distant than 1 from the current one,
// it means there is another word in between
// so increment the word count
cur = cjkMatcher.start();
if (cur > prev + 1)
wordCount++;
prev = cur;
}
// if the last CJK character is not the last character,
// it means that there is one last word to count
// so increment the word count
if (prev < token.length() - 1)
wordCount++;
return wordCount;
}
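    // Illustrative example (not part of the original class): for the line
    // "hello 你好 world" the delimiters split it into the tokens "hello", "你好"
    // and "world"; the two CJK characters count as separate words, so
    // WordCounter.count("hello 你好 world") == 4.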
}
|
MartinGeisse/public
|
web-ide/src/name/martingeisse/webide/features/simvm/model/AbstractSimulationModelElement.java
|
<reponame>MartinGeisse/public
/**
* Copyright (c) 2010 <NAME>
*
* This file is distributed under the terms of the MIT license.
*/
package name.martingeisse.webide.features.simvm.model;
import name.martingeisse.common.util.ReturnValueUtil;
/**
* Base class for simulation model elements.
*/
public abstract class AbstractSimulationModelElement implements ISimulationModelElement {
/**
* the customTitle
*/
private String customTitle;
/**
* Getter method for the title.
*
* @return the title
*/
@Override
public final String getTitle() {
if (customTitle != null) {
return customTitle;
}
return ReturnValueUtil.nullNotAllowed(getDefaultTitle(), "getDefaultTitle");
}
/**
* Setter method for the title. Pass null to revert
* to the default title.
*
* @param title the title to set, or null to use the default
*/
public final void setTitle(String title) {
this.customTitle = title;
}
/**
* Returns the default title for this element, used
* as the default for {@link #getTitle()}. The default
* implementation returns {@link #toString()}.
*
* @return the default title
*/
protected String getDefaultTitle() {
return toString();
}
}
|
thomoncik/hexadoku
|
include/View/Game/InsertionGameView.hpp
|
//
// Created by <NAME> on 2019-06-05.
//
#ifndef HEXADOKU_INSERTIONGAMEVIEW_HPP
#define HEXADOKU_INSERTIONGAMEVIEW_HPP
#include "AbstractGameView.hpp"
class InsertionGameView : public AbstractGameView {
public:
explicit InsertionGameView(Board board, std::string gameTime);
private:
void DrawMovementInfo(int x, int y) const override;
void DrawActionsInfo(int x, int y) const override;
};
#endif //HEXADOKU_INSERTIONGAMEVIEW_HPP
|
baweaver/middleman
|
middleman-core/lib/middleman-core/sitemap/extensions/import.rb
|
require 'set'
require 'middleman-core/contracts'
module Middleman
module Sitemap
module Extensions
class Import < ConfigExtension
self.resource_list_manipulator_priority = 1
# Expose methods
expose_to_config :import_file, :import_path
ImportFileDescriptor = Struct.new(:from, :to) do
def execute_descriptor(app, resources)
source = ::Middleman::SourceFile.new(Pathname(from).relative_path_from(app.source_dir), Pathname(from), app.source_dir, Set.new(%i[source binary]), 0)
resources + [
::Middleman::Sitemap::Resource.new(app.sitemap, to, source)
]
end
end
ImportPathDescriptor = Struct.new(:from, :renameProc) do
def execute_descriptor(app, resources)
resources + ::Middleman::Util.glob_directory(File.join(from, '**/*'))
.reject { |path| File.directory?(path) }
.map do |path|
target_path = Pathname(path).relative_path_from(Pathname(from).parent).to_s
::Middleman::Sitemap::Resource.new(
app.sitemap,
renameProc.call(target_path, path),
path
)
end
end
end
# Import an external file into `source`
# @param [String] from The original path.
# @param [String] to The new path.
# @return [void]
Contract String, String => ImportFileDescriptor
def import_file(from, to)
ImportFileDescriptor.new(
File.expand_path(from, @app.root),
::Middleman::Util.normalize_path(to)
)
end
# Import an external glob into `source`
# @param [String] from The original path.
# @param [Proc] block Renaming method
# @return [void]
Contract String, Maybe[Proc] => ImportPathDescriptor
def import_path(from, &block)
ImportPathDescriptor.new(
from,
block_given? ? block : proc { |path| path }
)
end
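        # Illustrative usage from a project's config.rb (both methods are exposed
        # to config above; the paths shown here are hypothetical):
        #
        #   import_file File.expand_path('../shared/humans.txt', __dir__), 'humans.txt'
        #   import_path '../shared/images' do |target_path, _source_path|
        #     File.join('imported', target_path)
        #   end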
end
end
end
end
|
OnGameSteemTron/ongameapp
|
client/js/collections/startload.js
|
<gh_stars>1-10
import { Session } from 'meteor/session';
Session.set('load',0)
startload = {
loadFromSteem: function () {
// Content.getContentByCreated('ongame', 100, 'ongame', function (error) {
// if (error)
// console.log(error)
// })
// Content.getContentByCreated('ongame-news', 100, 'ongame', function (error) {
// if (error)
// console.log(error)
// })
// Content.getContentByCreated('ongame-streaming', 100, 'ongame', function (error) {
// if (error)
// console.log(error)
// })
// Content.getContentByCreated('ongame-video', 100, 'ongame', function (error) {
// if (error)
// console.log(error)
// })
// Content.getContentByCreated('ongame-review', 100, 'ongame', function (error) {
// if (error)
// console.log(error)
// })
// Content.getContentByCreated('ongame-screenshot', 100, 'ongame', function (error) {
// if (error)
// console.log(error)
// })
// Content.getContentByCreated('ongame-tips', 100, 'ongame', function (error) {
// if (error)
// console.log(error)
// })
Content.getContentByCreated('xbox', 10, 'xbox', function (error) {
if (error)
console.log(error)
else Session.set('load',Session.get('load')+1)
})
Content.getContentByCreated('playstation', 10, 'psx', function (error) {
if (error)
console.log(error)
else Session.set('load',Session.get('load')+1)
})
}
}
|
jabusjavus/Fundamental-Algorithms
|
fundamentals/DataStructures/src/com/appmit/chapter1/q6_compression/Question.java
|
package com.appmit.chapter1.q6_compression;
public class Question {
    // run-length encodes s (e.g. "aab" -> "a2b1"); returns the original string
    // if the encoded form would not be shorter
    private static String compressed(String s) {
        StringBuilder builder = new StringBuilder();
        char[] chars = s.toCharArray();
        char previous = chars[0];
        int count = 0;
        for (char current : chars) {
            if (current != previous) {
                builder.append(previous);
                builder.append(count);
                count = 0;
            }
            count++;
            previous = current;
        }
        builder.append(previous);
        builder.append(count);
        if (builder.length() >= chars.length) {
            return s;
        }
        return builder.toString();
    }
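    // Illustrative examples (not part of the original class):
    //   compressed("aabcccccaaa") -> "a2b1c5a3"
    //   compressed("abc")         -> "abc"  (the encoded form "a1b1c1" is not shorter)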
public static void main(String[] args){
String test = "abcdefgggggggg";
System.out.println(compressed(test));
}
}
|
creatorrr/devicehive-java-server
|
devicehive-common/src/main/java/com/devicehive/model/eventbus/Filter.java
|
<filename>devicehive-common/src/main/java/com/devicehive/model/eventbus/Filter.java<gh_stars>100-1000
package com.devicehive.model.eventbus;
/*
* #%L
* DeviceHive Common Module
* %%
* Copyright (C) 2016 - 2017 DataArt
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableReader;
import com.hazelcast.nio.serialization.PortableWriter;
import java.io.IOException;
import java.util.Objects;
import java.util.StringJoiner;
public class Filter implements Portable {
public static final int FACTORY_ID = 1;
public static final int CLASS_ID = 4;
private Long networkId;
private Long deviceTypeId;
private String deviceId;
private String eventName;
private String name;
public Filter() {
}
public Filter(Long networkId, Long deviceTypeId, String deviceId, String eventName, String name) {
this.networkId = networkId;
this.deviceTypeId = deviceTypeId;
this.deviceId = deviceId;
this.eventName = eventName;
this.name = name;
}
public Long getNetworkId() {
return networkId;
}
public void setNetworkId(Long networkId) {
this.networkId = networkId;
}
public Long getDeviceTypeId() {
return deviceTypeId;
}
public void setDeviceTypeId(Long deviceTypeId) {
this.deviceTypeId = deviceTypeId;
}
public String getDeviceId() {
return deviceId;
}
public void setDeviceId(String deviceId) {
this.deviceId = deviceId;
}
public String getEventName() {
return eventName;
}
public void setEventName(String eventName) {
this.eventName = eventName;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getFirstKey() {
StringJoiner joiner = new StringJoiner(",");
joiner.add(networkId != null ? networkId.toString() : "*")
.add(deviceTypeId != null ? deviceTypeId.toString() : "*")
.add(deviceId != null ? deviceId : "*");
return joiner.toString();
}
public String getDeviceIgnoredFirstKey() {
StringJoiner joiner = new StringJoiner(",");
joiner.add(networkId != null ? networkId.toString() : "*")
.add(deviceTypeId != null ? deviceTypeId.toString() : "*")
.add("*");
return joiner.toString();
}
public String getSecondKey() {
StringJoiner joiner = new StringJoiner(",");
joiner.add(eventName != null ? eventName : "*")
.add(name != null ? name : "*");
return joiner.toString();
}
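    // Illustrative example (not part of the original class): a filter with
    // networkId = 1, deviceTypeId = null, deviceId = "d1", eventName = "notification"
    // and name = null yields getFirstKey() == "1,*,d1",
    // getDeviceIgnoredFirstKey() == "1,*,*" and getSecondKey() == "notification,*".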
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Filter)) return false;
Filter that = (Filter) o;
return Objects.equals(networkId, that.networkId) &&
Objects.equals(deviceTypeId, that.deviceTypeId) &&
Objects.equals(deviceId, that.deviceId) &&
Objects.equals(eventName, that.eventName) &&
Objects.equals(name, that.name);
}
@Override
public int hashCode() {
return Objects.hash(networkId, deviceTypeId, deviceId, eventName, name);
}
@Override
public String toString() {
return "Filter{" +
"networkId=" + networkId +
", deviceTypeId=" + deviceTypeId +
", deviceId=" + deviceId +
", eventName=" + eventName +
", name=" + name +
'}';
}
@Override
public int getFactoryId() {
return FACTORY_ID;
}
@Override
public int getClassId() {
return CLASS_ID;
}
@Override
public void writePortable(PortableWriter writer) throws IOException {
writer.writeLong("networkId", Objects.nonNull(networkId) ? networkId : 0);
writer.writeLong("deviceTypeId", Objects.nonNull(deviceTypeId) ? deviceTypeId : 0);
writer.writeUTF("deviceId", deviceId);
writer.writeUTF("eventName", eventName);
writer.writeUTF("name", name);
}
@Override
public void readPortable(PortableReader reader) throws IOException {
networkId = reader.readLong("networkId");
deviceTypeId = reader.readLong("deviceTypeId");
deviceId = reader.readUTF("deviceId");
eventName = reader.readUTF("eventName");
name = reader.readUTF("name");
}
}
|
restful-api-description-language/RADL
|
java/core/src/test/java/radl/test/RandomData.java
|
<gh_stars>10-100
/*
* Copyright © EMC Corporation. All rights reserved.
*/
package radl.test;
import java.security.SecureRandom;
/**
* Random pieces of data. Useful for testing, for instance.
*/
public final class RandomData {
private static final int MIN_STRING_LENGTH = 3;
private static final int MAX_STRING_LENGTH = 64;
private static final int MIN_INTEGER = 0;
private static final int MAX_INTEGER = 1000;
private final SecureRandom random;
public RandomData() {
this(new SecureRandom());
}
public RandomData(SecureRandom random) {
this.random = random;
}
public String string() {
return string(integer(MIN_STRING_LENGTH, MAX_STRING_LENGTH));
}
public String string(int length) {
StringBuilder result = new StringBuilder();
for (int i = 0; i < length; i++) {
result.append(lowercaseLetter());
}
return result.toString();
}
public char lowercaseLetter() {
return (char)('a' + integer('z' - 'a' + 1));
}
public int integer() {
return integer(MAX_INTEGER);
}
public int integer(int max) {
return integer(MIN_INTEGER, max);
}
public int integer(int min, int max) {
ensureMinMax(min, max);
return min + random.nextInt(max - min);
}
private void ensureMinMax(int min, int max) {
if (min >= max) {
throw new IllegalArgumentException(String.format("Min (%d) must be less than max (%d)", min, max));
}
}
public boolean logical() {
return logical(50);
}
public boolean logical(int percent) {
ensureMinMax(0, percent);
ensureMinMax(percent, 101);
return integer(100) < percent;
}
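  // Illustrative usage in a test (not part of the original class):
  //   RandomData random = new RandomData();
  //   String name = random.string(8);          // 8 random lowercase letters
  //   int port = random.integer(1024, 65536);  // an integer in [1024, 65535]
  //   boolean flag = random.logical(25);       // true roughly 25% of the time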
}
|
anta/libzwaveip
|
examples/reference_apps/tokenizer.c
|
<filename>examples/reference_apps/tokenizer.c
/*
* Copyright 2016 Sigma Designs, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* tokenizer.c
*
* Created on: Aug 18, 2016
* Author: jbu
*
* From http://stackoverflow.com/a/8106894/106280
* License: unknown.
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "tokquote/tokquote.h"
char** tokenize(const char* const input) {
char* str = strdup(input);
int count = 0;
int capacity = 10;
char** result = malloc(capacity * sizeof(*result));
char* tok = tokquote(str, " ");
while (1) {
if (count >= capacity)
result = realloc(result, (capacity *= 2) * sizeof(*result));
result[count++] = tok ? strdup(tok) : tok;
if (!tok) break;
tok = tokquote(NULL, " ");
}
free(str);
return result;
}
int free_tokenlist(char** toklist) {
char** it;
for (it = toklist; it && *it; ++it) {
free(*it);
}
free(toklist);
return 0;
}
unsigned int token_count(char** toklist) {
unsigned int i = 0;
while (0 != *toklist) {
i++;
toklist++;
}
return i;
}
#ifdef NOT_USED
int main() {
char** tokens = tokenize("test string.");
char** it;
for (it = tokens; it && *it; ++it) {
printf("%s\n", *it);
free(*it);
}
free(tokens);
return 0;
}
#endif
|
wv-aut/experience-plus-react-coupled
|
src/routes/User/userElements/BirthDateForm/components/BirthDateForm.js
|
<reponame>wv-aut/experience-plus-react-coupled<gh_stars>0
import React, { Component } from 'react'
import { API } from '../../../config/formFields.config'
import { checkIfFieldIsRequired, showErrorMessage } from '../../../config/requiredFields.config'
class BirthDateForm extends Component {
_getOptions (count = null) {
let option = []
let months = [
'',
'Jänner',
'Februar',
'März',
'April',
'Mai',
'Juni',
'Juli',
'August',
'September',
'Oktober',
'November',
'Dezember'
]
for (let i = 1; i <= count; i++) {
option.push(<option key={i} value={i < 10 ? '0' + i : i}>{count === 12 ? months[i] : i}</option>)
}
return option
}
_checkIfFieldisRequired (fieldname) {
return checkIfFieldIsRequired(fieldname, this.props.user.data, this.props.location.pathname)
}
  /**
   * Convert a boolean value to its string form so it can be compared
   * against the radio input values
   * @param {string|boolean} value
   * @return {string}
   */
_checkIfBoolean (value) {
if (typeof value === 'boolean') {
return value.toString()
} else {
return value
}
}
renderTaxOptOutForCompany (companyName = false) {
return (
<div className='form-row radio'>
<ul>
<li>
<label>
<input
type='radio'
data-form='taxOptOut'
defaultChecked={this._checkIfBoolean(this.props.user.data.taxOptOut) === 'true'}
value='true'
onChange={(e) => this.props.changeInputWithValidation(e, this.props)}
name='tax-opt-out'
/>
{companyName &&
<p>Ich möchte meine Spenden auch weiterhin als Betriebsausgaben berücksichtigen.
Sie sind daher nicht von der Übermittlungspflicht erfasst.
Sie erhalten in Zukunft automatisch eine Jahresspendenbestätigung.</p>
}
{!companyName &&
<p>Nein, ich möchte meine Spenden nicht absetzen und mache von meinem Widerrufsrecht Gebrauch.</p>
}
<div className='check'>
<div className='inside' />
</div>
</label>
</li>
</ul>
</div>
)
}
render () {
return (
<div>
{this.props.user.dataTemp.companyName &&
this.renderTaxOptOutForCompany(this.props.user.data.companyName)}
<div className='form-row radio'>
<ul>
<li>
<label>
<input
type='radio'
data-form='taxOptOut'
defaultChecked={this._checkIfBoolean(this.props.user.data.taxOptOut) === 'false'}
value='false'
onChange={(e) => this.props.changeInputWithValidation(e, this.props)}
name='tax-opt-out'
/>
{this.props.user.dataTemp.companyName &&
<p>Ich möchte doch meine Spenden in Zukunft privat als Sonderausgabe absetzen. Bitte füllen Sie die unten angeführten Felder zusammen mit Ihrem Geburtsdatum aus:</p>
}
{!this.props.user.dataTemp.companyName &&
<p>Ja, ich stimme der automatischen Spendenabsetzbarkeit zu. Bitte geben Sie dafür Ihr Geburtsdatum bekannt:</p>
}
<div className='check'>
<div className='inside' />
</div>
</label>
</li>
</ul>
</div>
<div className={'form-row ' + (this._checkIfFieldisRequired(API.BIRTH_DATE) && 'required label')}>
<label className='grid-12-3'>
<span>Tag:</span>
<select
data-dateelement='day'
data-fulldate={this.props.user.data.birthdate}
data-required={this._checkIfFieldisRequired(API.BIRTH_DATE)}
className={this._checkIfFieldisRequired(API.BIRTH_DATE) &&
showErrorMessage(this.props.user.data.birthdate.split('-')[2])
}
onChange={this.props.changeDate}
value={this.props.user.data.birthdate.split('-')[2]}>
<option value='00'>Tag</option>
{this._getOptions(31)}
</select>
<span className='error'>Bitte wählen Sie Ihren Geburts-Tag aus.</span>
</label>
<label className='grid-12-5'>
<span className={this._checkIfFieldisRequired(API.BIRTH_DATE)}>Monat:</span>
<select
data-dateelement='month'
data-fulldate={this.props.user.data.birthdate}
data-required={this._checkIfFieldisRequired(API.BIRTH_DATE)}
className={this._checkIfFieldisRequired(API.BIRTH_DATE) &&
showErrorMessage(this.props.user.data.birthdate.split('-')[1])
}
onChange={this.props.changeDate}
value={this.props.user.data.birthdate.split('-')[1]}>
<option value='00'>Monat</option>
{this._getOptions(12)}
</select>
<span className='error'>Bitte wählen Sie Ihr Geburts-Monat aus.</span>
</label>
<label className='grid-12-4'>
<span>Jahr:</span>
<input
data-dateelement='year'
data-fulldate={this.props.user.data.birthdate}
data-required={this._checkIfFieldisRequired(API.BIRTH_DATE)}
className={this._checkIfFieldisRequired(API.BIRTH_DATE) &&
showErrorMessage(this.props.user.data.birthdate.split('-')[0], 1900, 2010)
}
onChange={this.props.changeDate}
maxLength='4'
type='number'
name='birth-year'
defaultValue={this.props.user.data.birthdate.split('-')[0]} />
<span className='error'>Bitte tragen Sie Ihr Geburts-Jahr ein.</span>
</label>
</div>
{!this.props.user.dataTemp.companyName &&
this.renderTaxOptOutForCompany(this.props.user.data.registeredCompany)
}
</div>
)
}
}
BirthDateForm.propTypes = {
changeDate: React.PropTypes.func,
changeInput: React.PropTypes.func,
changeInputWithValidation: React.PropTypes.func,
user: React.PropTypes.object,
location: React.PropTypes.object
}
export default BirthDateForm
|
JoPintoPaul/cgt-property-disposals-frontend
|
app/uk/gov/hmrc/cgtpropertydisposalsfrontend/controllers/returns/gainorlossafterreliefs/GainOrLossAfterReliefsController.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.cgtpropertydisposalsfrontend.controllers.returns.gainorlossafterreliefs
import cats.data.EitherT
import cats.instances.future._
import cats.syntax.either._
import com.google.inject.Inject
import play.api.data.Form
import play.api.data.Forms.{mapping, of}
import play.api.mvc.{Action, AnyContent, MessagesControllerComponents, Result}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.config.{ErrorHandler, ViewConfig}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.controllers.SessionUpdates
import uk.gov.hmrc.cgtpropertydisposalsfrontend.controllers.actions.{AuthenticatedAction, RequestWithSessionData, SessionDataAction, WithAuthAndSessionDataAction}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.controllers.returns.StartingToAmendToFillingOutReturnBehaviour
import uk.gov.hmrc.cgtpropertydisposalsfrontend.models.ConditionalRadioUtils.InnerOption
import uk.gov.hmrc.cgtpropertydisposalsfrontend.models.{ConditionalRadioUtils, FormUtils}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.models.JourneyStatus.{FillingOutReturn, StartingToAmendReturn}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.models.finance.{AmountInPence, MoneyUtils}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.models.finance.MoneyUtils.validateAmountOfMoney
import uk.gov.hmrc.cgtpropertydisposalsfrontend.models.ids.UUIDGenerator
import uk.gov.hmrc.cgtpropertydisposalsfrontend.models.returns.DraftReturn
import uk.gov.hmrc.cgtpropertydisposalsfrontend.repos.SessionStore
import uk.gov.hmrc.cgtpropertydisposalsfrontend.services.returns.{FurtherReturnCalculationEligibility, FurtherReturnCalculationEligibilityUtil, ReturnsService}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.util.{Logging, toFuture}
import uk.gov.hmrc.cgtpropertydisposalsfrontend.util.Logging._
import uk.gov.hmrc.cgtpropertydisposalsfrontend.{controllers, views}
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import scala.concurrent.{ExecutionContext, Future}
class GainOrLossAfterReliefsController @Inject() (
val authenticatedAction: AuthenticatedAction,
val sessionDataAction: SessionDataAction,
val sessionStore: SessionStore,
val errorHandler: ErrorHandler,
returnsService: ReturnsService,
uuidGenerator: UUIDGenerator,
cc: MessagesControllerComponents,
gainOrLossAfterReliefsPage: views.html.returns.gainorlossafterreliefs.gain_or_loss_after_reliefs,
val glarCalculatorEligibilityUtil: FurtherReturnCalculationEligibilityUtil,
checkYourAnswersPage: views.html.returns.gainorlossafterreliefs.check_your_answers
)(implicit viewConfig: ViewConfig, ec: ExecutionContext)
extends FrontendController(cc)
with WithAuthAndSessionDataAction
with SessionUpdates
with Logging
with StartingToAmendToFillingOutReturnBehaviour {
import GainOrLossAfterReliefsController._
def enterGainOrLossAfterReliefs(): Action[AnyContent] =
authenticatedActionWithSessionData.async { implicit request =>
withFillingOutReturnAndAnswers { (fillingOutReturn, draftReturn, answer) =>
glarCalculatorEligibilityUtil
.isEligibleForFurtherReturnOrAmendCalculation(fillingOutReturn)
.fold[Result](
{ e =>
logger.warn("Could not check for calculation eligibility", e)
errorHandler.errorResult()
},
furtherReturnEligibility =>
Ok(
gainOrLossAfterReliefsPage(
answer.fold(gainOrLossAfterReliefsForm)(value => gainOrLossAfterReliefsForm.fill(value.inPounds())),
answer.fold(controllers.returns.routes.TaskListController.taskList())(_ =>
routes.GainOrLossAfterReliefsController.checkYourAnswers()
),
fillingOutReturn.subscribedDetails.isATrust,
draftReturn.representativeType(),
draftReturn.triageAnswers().isLeft,
fillingOutReturn.isAmendReturn,
furtherReturnEligibility match {
case FurtherReturnCalculationEligibility.Eligible(calculation, _, _) => Some(calculation)
case FurtherReturnCalculationEligibility.Ineligible(_) => None
}
)
)
)
}
}
def enterGainOrLossAfterReliefsSubmit: Action[AnyContent] =
authenticatedActionWithSessionData.async { implicit request =>
withFillingOutReturnAndAnswers { case (fillingOutReturn, draftReturn, answer) =>
gainOrLossAfterReliefsForm
.bindFromRequest()
.fold(
formWithErrors =>
glarCalculatorEligibilityUtil
.isEligibleForFurtherReturnOrAmendCalculation(fillingOutReturn)
.fold(
err => {
logger.warn("Could not check for calculation eligibility", err)
errorHandler.errorResult()
},
eligibility =>
BadRequest(
gainOrLossAfterReliefsPage(
formWithErrors,
answer.fold(controllers.returns.routes.TaskListController.taskList())(_ =>
routes.GainOrLossAfterReliefsController.checkYourAnswers()
),
fillingOutReturn.subscribedDetails.isATrust,
draftReturn.representativeType(),
draftReturn.triageAnswers().isLeft,
fillingOutReturn.isAmendReturn,
eligibility match {
case FurtherReturnCalculationEligibility.Eligible(calculation, _, _) => Some(calculation)
case FurtherReturnCalculationEligibility.Ineligible(_) => None
}
)
)
),
value =>
if (answer.map(_.inPounds()).contains(value))
Redirect(
routes.GainOrLossAfterReliefsController.checkYourAnswers()
)
else {
val updatedAmount = AmountInPence.fromPounds(value)
val updatedDraftReturn =
draftReturn.fold(
_.copy(
gainOrLossAfterReliefs = Some(updatedAmount),
exemptionAndLossesAnswers = None,
yearToDateLiabilityAnswers = None
),
_.copy(
gainOrLossAfterReliefs = Some(updatedAmount),
exemptionAndLossesAnswers = None,
yearToDateLiabilityAnswers = None
),
_.copy(
gainOrLossAfterReliefs = Some(updatedAmount),
exemptionAndLossesAnswers = None,
yearToDateLiabilityAnswers = None
),
_.copy(
gainOrLossAfterReliefs = Some(updatedAmount),
exemptionAndLossesAnswers = None,
yearToDateLiabilityAnswers = None
),
_.copy(
gainOrLossAfterReliefs = Some(updatedAmount),
exemptionAndLossesAnswers = None,
yearToDateLiabilityAnswers = None
)
)
val updatedJourney = fillingOutReturn.copy(draftReturn = updatedDraftReturn)
val result = for {
_ <- returnsService.storeDraftReturn(updatedJourney)
_ <- EitherT(
updateSession(sessionStore, request)(
_.copy(journeyStatus = Some(updatedJourney))
)
)
} yield ()
result.fold(
{ e =>
logger.warn("Could not update draft return", e)
errorHandler.errorResult()
},
_ =>
Redirect(
routes.GainOrLossAfterReliefsController.checkYourAnswers()
)
)
}
)
}
}
def checkYourAnswers(): Action[AnyContent] =
authenticatedActionWithSessionData.async { implicit request =>
withFillingOutReturnAndAnswers { (journeyStatus, draftReturn, answers) =>
answers match {
case Some(completeInitialGainOrLossAnswers) =>
Ok(
checkYourAnswersPage(
completeInitialGainOrLossAnswers,
journeyStatus.subscribedDetails.isATrust,
draftReturn.representativeType(),
draftReturn.triageAnswers().isLeft
)
)
case None =>
Redirect(
routes.GainOrLossAfterReliefsController.enterGainOrLossAfterReliefs()
)
}
}
}
def checkYourAnswersSubmit(): Action[AnyContent] =
authenticatedActionWithSessionData.async { implicit request =>
withFillingOutReturnAndAnswers { (_, _, _) =>
Redirect(controllers.returns.routes.TaskListController.taskList())
}
}
private def withFillingOutReturnAndAnswers(
processReturnAndAnswersIntoResult: (
FillingOutReturn,
DraftReturn,
Option[AmountInPence]
) => Future[Result]
)(implicit request: RequestWithSessionData[_]): Future[Result] =
request.sessionData.flatMap(_.journeyStatus) match {
case Some(s: StartingToAmendReturn) =>
convertFromStartingAmendToFillingOutReturn(s, sessionStore, errorHandler, uuidGenerator)
case Some(
fillingOutReturn @ FillingOutReturn(
_,
_,
_,
d,
_,
_
)
) if fillingOutReturn.isFurtherOrAmendReturn.contains(true) =>
processReturnAndAnswersIntoResult(
fillingOutReturn,
d,
d.gainOrLossAfterReliefs
)
case _ => Redirect(controllers.routes.StartController.start())
}
}
object GainOrLossAfterReliefsController {
val gainOrLossAfterReliefsForm: Form[BigDecimal] = {
val (outerId, gainId, lossId) = ("gainOrLossAfterReliefs", "gainAfterReliefs", "lossAfterReliefs")
def innerOption(id: String): InnerOption[BigDecimal] =
InnerOption { data =>
FormUtils
.readValue(id, data, identity)
.flatMap(
validateAmountOfMoney(
id,
_ <= 0,
_ > MoneyUtils.maxAmountOfPounds
)(_)
)
.leftMap {
Seq(_)
}
}
val formatter = ConditionalRadioUtils.formatter(outerId)(
List(
Left(innerOption(gainId)),
Left(innerOption(lossId).map(_ * -1)),
Right(BigDecimal(0))
)
) { d =>
if (d > 0)
Map(
outerId -> "0",
gainId -> MoneyUtils.formatAmountOfMoneyWithoutPoundSign(d)
)
else if (d < 0)
Map(
outerId -> "1",
lossId -> MoneyUtils.formatAmountOfMoneyWithoutPoundSign((d * -1))
)
else
Map(outerId -> "2")
}
Form[BigDecimal](
mapping(
"" -> of(formatter)
)(identity)(Some(_))
)
}
}
|