repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
augustoluiz/us-ms-java-gerenciador-usuarios
|
src/main/java/com/pibitaim/us/msjavagerenciadorusuarios/data/form/TelefonesUsuarioForm.java
|
package com.pibitaim.us.msjavagerenciadorusuarios.data.form;
import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import javax.validation.constraints.NotNull;
/**
 * Form/DTO for binding a user's phone ("telefone") record from a request.
 * Lombok {@code @Data} generates getters, setters, equals/hashCode and toString.
 * Every field is mandatory (Bean Validation {@code @NotNull} fires on binding).
 * NOTE(review): the {@code @Getter}/{@code @Setter} imports in this file look
 * redundant given {@code @Data} — confirm and drop if unused.
 */
@Data
public class TelefonesUsuarioForm {
// Identifier of the phone registration record.
@NotNull
private Long codigoCadastroTelefone;
// CPF/CNPJ (tax id) of the owning user — inferred from the name; confirm format.
@NotNull
private Long cpfCnpjUsuario;
// Priority level of this phone among the user's phones.
@NotNull
private Integer nivelPrioridade;
// Whether this is the user's primary phone.
@NotNull
private Boolean telefonePrincipal;
}
|
Cristiyi/csxft
|
service/es_delete/es_delete_house_type_image.go
|
/**
* @Description:
* @File: es_delete_house_type_image
* @Date: 2020/11/9 0009 17:52
*/
package es_delete
import "csxft/serializer"
// DeleteHouseTypeImageService holds the request parameters for deleting a
// project's floor-plan images. (Original comment: 删除户型图服务 —
// "delete floor-plan image service".)
type DeleteHouseTypeImageService struct {
ProjectId int `form:"project_id" json:"project_id" binding:"required"`
}
// DeleteHouseTypeImageService deletes the floor-plan images belonging to the
// bound ProjectId and unconditionally reports success.
// (Original comment: 根据楼盘删除户型图服务 — "delete floor-plan images by project".)
// NOTE(review): any error from DeleteHouseTypeImage is not surfaced here;
// the response is always Code 200 / "success".
func (service *DeleteHouseTypeImageService) DeleteHouseTypeImageService() serializer.Response {
DeleteHouseTypeImage(service.ProjectId)
return serializer.Response{
Code: 200,
Msg: "success",
}
}
|
maciejg-git/vue-bootstrap-icons
|
dist-mdi/mdi/cog-transfer.js
|
import { h } from 'vue'
// Auto-generated Vue 3 render component for the Material Design Icon
// "cog-transfer" (24x24 viewBox, fill follows currentColor). The SVG path
// data is baked in at build time and injected via innerHTML — do not hand-edit.
export default {
name: "CogTransfer",
vendor: "Mdi",
type: "",
tags: ["cog","transfer"],
render() {
return h(
"svg",
{"xmlns":"http://www.w3.org/2000/svg","width":"24","height":"24","viewBox":"0 0 24 24","class":"v-icon","fill":"currentColor","data-name":"mdi-cog-transfer","innerHTML":"<path d='M14 18V16L10 19.5L14 23V21H17V18H14M22 15.5L18 12V14H15V17H18V19L22 15.5M21.66 8.73L19.66 5.27C19.54 5.05 19.28 4.96 19.05 5.05L16.56 6.05C16.05 5.64 15.5 5.31 14.87 5.05L14.5 2.42C14.46 2.18 14.25 2 14 2H10C9.75 2 9.54 2.18 9.5 2.42L9.13 5.07C8.5 5.33 7.96 5.66 7.44 6.07L5 5.05C4.77 4.96 4.5 5.05 4.39 5.27L2.39 8.73C2.26 8.94 2.31 9.22 2.5 9.37L4.57 11C4.53 11.33 4.5 11.67 4.5 12C4.5 12.33 4.53 12.67 4.57 13L2.46 14.63C2.26 14.78 2.21 15.06 2.34 15.27L4.34 18.73C4.45 19 4.74 19.11 5 19L5 19L7.5 18C7.74 18.19 8 18.37 8.26 18.53L11.77 15.53C9.84 15.4 8.38 13.73 8.5 11.8C8.65 9.87 10.32 8.41 12.25 8.55C13.69 8.64 14.92 9.62 15.35 11H19.43L21.54 9.37C21.73 9.22 21.78 8.94 21.66 8.73Z' />"},
)
}
}
|
vacaly/Alink
|
core/src/main/java/com/alibaba/alink/operator/batch/regression/DecisionTreeRegPredictBatchOp.java
|
package com.alibaba.alink.operator.batch.regression;
import org.apache.flink.ml.api.misc.param.Params;
import com.alibaba.alink.operator.common.tree.predictors.RandomForestModelMapper;
import com.alibaba.alink.operator.batch.utils.ModelMapBatchOp;
import com.alibaba.alink.params.regression.DecisionTreeRegPredictParams;
/**
 * Batch operator that scores input rows with a fitted decision-tree
 * regression model, delegating the per-row prediction to
 * {@link RandomForestModelMapper} via {@link ModelMapBatchOp}.
 * NOTE(review): the tree regressor reuses the random-forest model mapper —
 * presumably a single tree is handled as a forest of one; confirm intent.
 */
public final class DecisionTreeRegPredictBatchOp extends ModelMapBatchOp <DecisionTreeRegPredictBatchOp> implements
DecisionTreeRegPredictParams <DecisionTreeRegPredictBatchOp> {
// Convenience constructor with default (null) parameters.
public DecisionTreeRegPredictBatchOp() {
this(null);
}
// Passes the mapper factory and params to the generic model-map batch op.
public DecisionTreeRegPredictBatchOp(Params params) {
super(RandomForestModelMapper::new, params);
}
}
|
serghalak/otus_java_2020_03
|
L29-executors/src/main/java/ru/otus/executors/CompletableFutureDemo.java
|
package ru.otus.executors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
 * Demonstrates common {@link CompletableFuture} composition patterns.
 * {@code main} enables one demo at a time; the rest stay commented out.
 */
public class CompletableFutureDemo {
private static final Logger logger = LoggerFactory.getLogger(CompletableFutureDemo.class);
public static void main(String[] args) throws ExecutionException, InterruptedException {
//simpleAsync();
//asyncThen();
//asyncError();
//asyncAcceptBoth();
asyncAcceptEither();
}
// Blocking retrieval: get() waits for the async task's result; the calling
// thread stays free only until that call.
private static void simpleAsync() throws ExecutionException, InterruptedException {
logger.info("start");
CompletableFuture<String> future1 = CompletableFuture.supplyAsync(() -> task(1));
logger.info("thread is not blocked");
logger.info("result:{}", future1.get());
}
// Non-blocking callback: thenAccept consumes the value when ready; join()
// only keeps the demo alive until completion.
private static void asyncThen() {
logger.info("start");
CompletableFuture<String> future2 = CompletableFuture.supplyAsync(() -> task(2));
future2.thenAccept(val -> logger.info("result:{}", val));
future2.join();
}
// Error handling: exceptionally maps the failure to a fallback value (the
// exception message), which the downstream stage then consumes.
private static void asyncError() {
CompletableFuture<String> future3 = CompletableFuture.supplyAsync(CompletableFutureDemo::errorAction);
future3.exceptionally(Throwable::getMessage).thenAccept(msg -> logger.info("msg:{}", msg));
}
// Combining: thenAcceptBoth waits for BOTH futures and consumes both results.
private static void asyncAcceptBoth() {
logger.info("start");
CompletableFuture<String> futureT1 = CompletableFuture.supplyAsync(() -> task(100));
CompletableFuture<String> futureT2 = CompletableFuture.supplyAsync(() -> task(200));
CompletableFuture<Void> joinedResult = futureT1.thenAcceptBoth(futureT2, (s1, s2) -> logger.info("joinedResult: {}, {}", s1, s2));
joinedResult.join();
}
// Racing: acceptEither consumes whichever future completes first — here
// futureT2, since futureT1 sleeps an extra second before starting its task.
private static void asyncAcceptEither() {
logger.info("start");
CompletableFuture<String> futureT1 = CompletableFuture.supplyAsync(() -> {
sleep(1);
return task(100);
}
);
CompletableFuture<String> futureT2 = CompletableFuture.supplyAsync(() -> task(200));
CompletableFuture<Void> firstResult = futureT1.acceptEither(futureT2, s -> logger.info("firstResult: {}", s));
firstResult.join();
}
// Always throws; used by the asyncError demo.
private static String errorAction() {
throw new RuntimeException("error for Test");
}
// Simulated 5-second unit of work identified by id.
private static String task(int id) {
sleep(5);
logger.info("task is done: {}", id);
return "done" + id;
}
// Interruption-friendly sleep: restores the interrupt flag instead of
// propagating the checked exception.
private static void sleep(int seconds) {
try {
Thread.sleep(TimeUnit.SECONDS.toMillis(seconds));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
|
ModWU/DYHelper2
|
src/dyhelper/com/util/ImsiUtil.java
|
<reponame>ModWU/DYHelper2
package dyhelper.com.util;
import android.content.Context;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
 * Reads IMSI/IMEI identifiers on single- and dual-SIM Android devices.
 *
 * Dual-SIM support predates the public Android API, so each vendor chip
 * family (Qualcomm, MTK/Gemini, Spreadtrum) is probed via reflection in
 * turn; the first probe that succeeds wins. Every probe swallows
 * reflection failures and returns null so the caller falls through to the
 * next strategy.
 *
 * NOTE(review): getDeviceId/getSubscriberId require READ_PHONE_STATE and
 * the reflective vendor APIs only exist on older ROMs — confirm the
 * supported API-level range.
 */
public class ImsiUtil {
    private Integer simId_1 = 0;
    private Integer simId_2 = 1;
    private String imsi_1 = "";
    private String imsi_2 = "";
    private String imei_1 = "";
    private String imei_2 = "";
    private Context mContext;

    /** Value object holding the detected chip name plus both SIMs' IMSI/IMEI. */
    public static class IMSInfo {
        public String chipName;
        public String imsi_1;
        public String imei_1;
        public String imsi_2;
        public String imei_2;
    }

    public ImsiUtil(Context mContext) {
        this.mContext = mContext;
    }

    /**
     * Probes each known chip family in order (Qualcomm, MTK Gemini, MTK
     * variant, Spreadtrum) and finally falls back to the standard
     * single-SIM API.
     *
     * @return the first successful probe result, or null if every probe failed
     */
    public IMSInfo getIMSInfo() {
        IMSInfo imsInfo = initQualcommDoubleSim();
        if (imsInfo != null) {
            return imsInfo;
        }
        imsInfo = initMtkDoubleSim();
        if (imsInfo != null) {
            return imsInfo;
        }
        imsInfo = initMtkSecondDoubleSim();
        if (imsInfo != null) {
            return imsInfo;
        }
        imsInfo = initSpreadDoubleSim();
        if (imsInfo != null) {
            return imsInfo;
        }
        // getIMSI() itself returns null when no usable IMSI is found, which
        // matches the original fall-through behaviour exactly.
        return getIMSI();
    }

    /**
     * MTK (MediaTek) dual-SIM probe using the Gemini reflection API.
     *
     * @return populated IMSInfo, or null if this device is not an MTK Gemini chip
     */
    public IMSInfo initMtkDoubleSim() {
        IMSInfo imsInfo = null;
        try {
            TelephonyManager tm = (TelephonyManager) mContext
                    .getSystemService(Context.TELEPHONY_SERVICE);
            // SIM slot ids live as constants on the internal Phone class.
            Class<?> c = Class.forName("com.android.internal.telephony.Phone");
            Field fields1 = c.getField("GEMINI_SIM_1");
            fields1.setAccessible(true);
            simId_1 = (Integer) fields1.get(null);
            Field fields2 = c.getField("GEMINI_SIM_2");
            fields2.setAccessible(true);
            simId_2 = (Integer) fields2.get(null);
            // Gemini accessors take the SIM slot id as their only parameter.
            Method m = TelephonyManager.class.getDeclaredMethod(
                    "getSubscriberIdGemini", int.class);
            imsi_1 = (String) m.invoke(tm, simId_1);
            imsi_2 = (String) m.invoke(tm, simId_2);
            Method m1 = TelephonyManager.class.getDeclaredMethod(
                    "getDeviceIdGemini", int.class);
            imei_1 = (String) m1.invoke(tm, simId_1);
            imei_2 = (String) m1.invoke(tm, simId_2);
            imsInfo = new IMSInfo();
            imsInfo.chipName = "MTK鑺墖";
            imsInfo.imei_1 = imei_1;
            imsInfo.imei_2 = imei_2;
            imsInfo.imsi_1 = imsi_1;
            imsInfo.imsi_2 = imsi_2;
        } catch (Exception e) {
            // Reflection target missing: not an MTK Gemini ROM.
            return null;
        }
        return imsInfo;
    }

    /**
     * Second MTK probe variant: obtains per-slot TelephonyManager instances
     * through the reflective "getDefault(int)" factory.
     *
     * @return populated IMSInfo, or null if this variant is unsupported
     */
    public IMSInfo initMtkSecondDoubleSim() {
        IMSInfo imsInfo = null;
        try {
            TelephonyManager tm = (TelephonyManager) mContext
                    .getSystemService(Context.TELEPHONY_SERVICE);
            Class<?> c = Class.forName("com.android.internal.telephony.Phone");
            Field fields1 = c.getField("GEMINI_SIM_1");
            fields1.setAccessible(true);
            simId_1 = (Integer) fields1.get(null);
            Field fields2 = c.getField("GEMINI_SIM_2");
            fields2.setAccessible(true);
            simId_2 = (Integer) fields2.get(null);
            // One TelephonyManager per SIM slot, then use the normal API on each.
            Method mx = TelephonyManager.class.getMethod("getDefault",
                    int.class);
            TelephonyManager tm1 = (TelephonyManager) mx.invoke(tm, simId_1);
            TelephonyManager tm2 = (TelephonyManager) mx.invoke(tm, simId_2);
            imsi_1 = tm1.getSubscriberId();
            imsi_2 = tm2.getSubscriberId();
            imei_1 = tm1.getDeviceId();
            imei_2 = tm2.getDeviceId();
            imsInfo = new IMSInfo();
            imsInfo.chipName = "MTK鑺墖";
            imsInfo.imei_1 = imei_1;
            imsInfo.imei_2 = imei_2;
            imsInfo.imsi_1 = imsi_1;
            imsInfo.imsi_2 = imsi_2;
        } catch (Exception e) {
            return null;
        }
        return imsInfo;
    }

    /**
     * Spreadtrum dual-SIM probe: the second slot is exposed as a separately
     * named system service resolved via PhoneFactory.getServiceName.
     *
     * @return populated IMSInfo, or null if this device is not a Spreadtrum chip
     */
    public IMSInfo initSpreadDoubleSim() {
        IMSInfo imsInfo = null;
        try {
            Class<?> c = Class
                    .forName("com.android.internal.telephony.PhoneFactory");
            Method m = c.getMethod("getServiceName", String.class, int.class);
            String spreadTmService = (String) m.invoke(c,
                    Context.TELEPHONY_SERVICE, 1);
            // Slot 1 via the standard service, slot 2 via the vendor service name.
            TelephonyManager tm = (TelephonyManager) mContext
                    .getSystemService(Context.TELEPHONY_SERVICE);
            imsi_1 = tm.getSubscriberId();
            imei_1 = tm.getDeviceId();
            TelephonyManager tm1 = (TelephonyManager) mContext
                    .getSystemService(spreadTmService);
            imsi_2 = tm1.getSubscriberId();
            imei_2 = tm1.getDeviceId();
            imsInfo = new IMSInfo();
            imsInfo.chipName = "灞曡鑺墖";
            imsInfo.imei_1 = imei_1;
            imsInfo.imei_2 = imei_2;
            imsInfo.imsi_1 = imsi_1;
            imsInfo.imsi_2 = imsi_2;
        } catch (Exception e) {
            return null;
        }
        return imsInfo;
    }

    /**
     * Qualcomm dual-SIM probe via the "phone_msim" service and the
     * MSimTelephonyManager reflection API.
     *
     * @return populated IMSInfo, or null if this device is not a Qualcomm MSim chip
     */
    public IMSInfo initQualcommDoubleSim() {
        IMSInfo imsInfo = null;
        try {
            Class<?> cx = Class.forName("android.telephony.MSimTelephonyManager");
            Object obj = mContext.getSystemService("phone_msim");
            Method md = cx.getMethod("getDeviceId", int.class);
            Method ms = cx.getMethod("getSubscriberId", int.class);
            imei_1 = (String) md.invoke(obj, simId_1);
            imei_2 = (String) md.invoke(obj, simId_2);
            imsi_1 = (String) ms.invoke(obj, simId_1);
            imsi_2 = (String) ms.invoke(obj, simId_2);
            int statephoneType_2 = 0;
            boolean flag = false;
            try {
                // NOTE(review): these methods are looked up with an int
                // parameter but invoked with no arguments, which throws
                // IllegalArgumentException — swallowed below, so the
                // defaults (0/false) are always reported. Confirm the
                // intended reflective signatures.
                Method mx = cx.getMethod("getPreferredDataSubscription", int.class);
                Method is = cx.getMethod("isMultiSimEnabled", int.class);
                statephoneType_2 = (Integer) mx.invoke(obj);
                flag = (Boolean) is.invoke(obj);
            } catch (Exception e) {
                // Best-effort diagnostics only; failure here is non-fatal.
            }
            imsInfo = new IMSInfo();
            imsInfo.chipName = "楂橀�氳姱鐗�-getPreferredDataSubscription:" + statephoneType_2 + ",flag:" + flag;
            imsInfo.imei_1 = imei_1;
            imsInfo.imei_2 = imei_2;
            imsInfo.imsi_1 = imsi_1;
            imsInfo.imsi_2 = imsi_2;
        } catch (Exception e) {
            return null;
        }
        return imsInfo;
    }

    /**
     * Standard single-SIM API fallback.
     *
     * @return IMSInfo for SIM 1 only, or null when the IMSI is missing or
     *         implausibly short (fewer than 10 characters)
     */
    public IMSInfo getIMSI() {
        try {
            TelephonyManager tm = (TelephonyManager) mContext
                    .getSystemService(Context.TELEPHONY_SERVICE);
            imsi_1 = tm.getSubscriberId();
            imei_1 = tm.getDeviceId();
        } catch (Exception e) {
            return null;
        }
        if (TextUtils.isEmpty(imsi_1) || imsi_1.length() < 10) {
            return null;
        }
        IMSInfo imsInfo = new IMSInfo();
        imsInfo.chipName = "鍗曞崱鑺墖";
        imsInfo.imei_1 = imei_1;
        imsInfo.imei_2 = "";
        imsInfo.imsi_1 = imsi_1;
        imsInfo.imsi_2 = "";
        return imsInfo;
    }
}
|
ykyuen/dockerbeat
|
vendor/github.com/elastic/beats/filebeat/harvester/processor/timeout.go
|
<filename>vendor/github.com/elastic/beats/filebeat/harvester/processor/timeout.go
package processor
import (
"errors"
"time"
)
var (
// errTimeout is the default error returned when no signal error is configured.
errTimeout = errors.New("timeout")
)
// timeoutProcessor will signal some configurable timeout error if no
// new line can be returned in time.
type timeoutProcessor struct {
reader LineProcessor // underlying line source
timeout time.Duration // maximum wait per Next() call
signal error // error returned on timeout (defaults to errTimeout)
running bool // whether the background reader goroutine is active
ch chan lineMessage // buffered (size 1) hand-off from the reader goroutine
}
// lineMessage pairs a line with its read error so both travel over one channel.
type lineMessage struct {
line Line
err error
}
// newTimeoutProcessor wraps an input LineProcessor so that Next returns the
// configured signal error if no line arrives within timeout. A nil signal
// falls back to the package-level errTimeout.
func newTimeoutProcessor(in LineProcessor, signal error, timeout time.Duration) *timeoutProcessor {
	p := &timeoutProcessor{
		reader:  in,
		timeout: timeout,
		ch:      make(chan lineMessage, 1),
	}
	if signal != nil {
		p.signal = signal
	} else {
		p.signal = errTimeout
	}
	return p
}
// Next returns the next line. If no line was returned before timeout, the
// configured timeout error is returned.
// For handling timeouts a goroutine is started that reads lines from the
// configured line processor. Only when the underlying processor returns an
// error will that goroutine finish.
func (p *timeoutProcessor) Next() (Line, error) {
if !p.running {
// Lazily start the background reader on first use, and restart it after a
// previous reader exited due to a read error.
p.running = true
go func() {
for {
line, err := p.reader.Next()
p.ch <- lineMessage{line, err}
if err != nil {
break
}
}
}()
}
select {
case msg := <-p.ch:
if msg.err != nil {
// Reader goroutine has exited; allow a restart on the next call.
p.running = false
}
return msg.line, msg.err
case <-time.After(p.timeout):
// No line within the deadline. The reader goroutine keeps running and its
// next message stays buffered in ch for a later call.
return Line{}, p.signal
}
}
|
Libertus-Lab/Game-Pencil-Engine
|
src/pawgui/pawgui_theme_controller.cpp
|
<filename>src/pawgui/pawgui_theme_controller.cpp
/*
pawgui_theme_controller.cpp
This file is part of:
PawByte Ambitious Working GUI(PAWGUI)
https://www.pawbyte.com/pawgui
Copyright (c) 2014-2021 <NAME>, <NAME>.
Copyright (c) 2014-2021 PawByte LLC.
Copyright (c) 2014-2021 PawByte Ambitious Working GUI(PAWGUI) contributors ( Contributors Page )
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the “Software”), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-PawByte Ambitious Working GUI(PAWGUI) <https://www.pawbyte.com/pawgui>
*/
#include "pawgui_theme_controller.h"
namespace pawgui
{
    themes_controller::themes_controller()
    {
    }

    // NOTE(review): add_theme allocates themes with new and stores raw
    // pointers in theme_list, but this destructor does not delete them —
    // confirm ownership; this looks like a leak unless freed elsewhere.
    themes_controller::~themes_controller()
    {
    }

    /**
     * Creates a new theme with the given name, registers it in theme_list,
     * and returns it. The controller retains the pointer in its list.
     */
    gui_theme * themes_controller::add_theme(std::string theme_name, bool customTheme )
    {
        gui_theme * newTemplate = new gui_theme(theme_name, customTheme);
        theme_list.push_back( newTemplate);
        return newTemplate;
    }

    /**
     * Looks up a theme by name; returns nullptr when not found.
     * Searches newest-first, so if names collide the most recently added
     * theme wins.
     */
    gui_theme * themes_controller::find_theme(std::string theme_name )
    {
        // An empty name can never match — skip the scan entirely.
        if( !theme_name.empty() )
        {
            for( int i = (int)theme_list.size()-1; i >=0; i-- )
            {
                if( theme_list[i]->theme_name == theme_name)
                {
                    return theme_list[i];
                }
            }
        }
        return nullptr;
    }
}
|
gy2006/flow-platform-x
|
core/src/main/java/com/flowci/core/job/service/StepService.java
|
<reponame>gy2006/flow-platform-x<gh_stars>0
/*
* Copyright 2018 flow.ci
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.flowci.core.job.service;
import com.flowci.core.job.domain.Job;
import com.flowci.domain.ExecutedCmd;
import com.flowci.tree.Node;
import java.util.List;
/**
 * Service managing the executed-cmd steps of a {@link Job}.
 * @author yang
 */
public interface StepService {
/**
 * Create the initial executed cmd list for the job — presumably one per
 * tree node; confirm against the implementation.
 */
List<ExecutedCmd> init(Job job);
/**
 * Get executed cmd for job and node
 */
ExecutedCmd get(Job job, Node node);
/**
 * Get executed cmd from cmd id
 */
ExecutedCmd get(String cmdId);
/**
 * List step of executed cmd for job
 */
List<ExecutedCmd> list(Job job);
/**
 * Get step list in string, {name}={stats};{name}={stats}
 * No steps after current node
 */
String toVarString(Job job, Node current);
/**
 * Change step status, and put steps string to job context
 */
void statusChange(Job job, Node node, ExecutedCmd.Status status, String err);
/**
 * Change status directly on an executed cmd entity.
 */
void statusChange(ExecutedCmd entity, ExecutedCmd.Status status, String err);
/**
 * Update the properties related to the cmd's execution result.
 */
void resultUpdate(ExecutedCmd result);
/**
 * Delete steps by flow id
 */
Long delete(String flowId);
}
|
mojotti/near_buy
|
react-native/__tests__/reducers/AuthorizationReducer.test.js
|
import { authorizationReducer } from '../../src/redux/reducers/AuthorizationReducer';
// Shared fixtures for the authorization reducer tests.
const testUsername = 'testUsername';
// Placeholder token value (scrubbed in this copy) — any string works here.
const testToken = '<PASSWORD>Token';
const defaultState = {
isLoggedIn: false,
username: '',
token: '',
id: ''
};
// NOTE(review): unlike defaultState, this state has no `id` key. The LOGIN
// test below compares the reducer output against it with toEqual — confirm
// whether the reducer is really expected to drop `id` on login.
const loggedInState = {
isLoggedIn: true,
username: testUsername,
token: testToken,
};
// Unknown action types must leave the state unchanged.
test('return default state when invalid type passed to authorizationReducer', () => {
const invalidActionType = { type: 'invalid' };
expect(authorizationReducer(defaultState, invalidActionType)).toEqual(
defaultState,
);
});
// LOGIN stores the username and token and flips isLoggedIn.
test('return logged in state when logged in with correct details', () => {
const login = {
type: 'LOGIN',
username: testUsername,
token: testToken,
};
expect(authorizationReducer(defaultState, login)).toEqual(loggedInState);
});
// LOGOUT restores the default (anonymous) state.
test('return logged out state when logging out with correct details', () => {
const logout = {
type: 'LOGOUT',
};
expect(authorizationReducer(loggedInState, logout)).toEqual(defaultState);
});
|
gamblor21/Adafruit_Learning_System_Guides
|
Chromakey_Light_Ring/code.py
|
# SPDX-FileCopyrightText: 2021 <NAME> @todbot and <NAME> for Adafruit Industries
# SPDX-License-Identifier: MIT
# QT Py encoder based on https://github.com/todbot/qtpy-knob
# Retroreflective chromakey light ring
# Mount a rotary encoder directly to an Adafruit QT Py,
# add some neopixels to get a color/brightness controller
#
import time
import board
from digitalio import DigitalInOut, Direction, Pull
import neopixel
import rotaryio
# NOTE(review): this copy arrived with all indentation stripped; the block
# nesting below is reconstructed from the logic — confirm against the
# upstream qtpy-knob / Learning System Guides source.

# Current ring brightness (0.0-1.0), adjusted by the rotary encoder.
dim_val = 0.2
NUM_PIX = 24
PIX_TYPE = "RGB" # RGB or RGBW
# Choose byte order and palette to match the pixel type; RGBW strips carry a
# dedicated white channel, so WHITE uses that channel instead of full RGB.
if PIX_TYPE == "RGB":
    ORDER = (1, 0, 2)
    GREEN = (0, 255, 0)
    BLUE = (0, 0, 255)
    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0)
else:
    ORDER = (1, 0, 2, 3)
    GREEN = (0, 255, 0, 0)
    BLUE = (0, 0, 255, 0)
    WHITE = (0, 0, 0, 255)
    BLACK = (0, 0, 0, 0)
# Palette cycled by the encoder push-button.
colors = [GREEN, BLUE, WHITE, BLACK]
current_color = 0
ring = neopixel.NeoPixel(
    board.MISO, NUM_PIX, brightness=0.2, auto_write=False, pixel_order=ORDER
)
ring.fill(colors[current_color])
ring.show()
# button of rotary encoder
button = DigitalInOut(board.MOSI)
button.pull = Pull.UP
# Use pin A2 as a fake ground for the rotary encoder
fakegnd = DigitalInOut(board.A2)
fakegnd.direction = Direction.OUTPUT
fakegnd.value = False
encoder = rotaryio.IncrementalEncoder(board.A3, board.A1)
print("---Chromakey Light Ring---")
last_encoder_val = encoder.position
ring_pos = 0
rainbow_pos = 0
last_time = time.monotonic()
ring_on = True
while True:
    encoder_diff = last_encoder_val - encoder.position # encoder clicks since last read
    last_encoder_val = encoder.position
    if button.value is False: # button pressed (active-low with pull-up)
        # Advance to the next palette color.
        current_color = (current_color + 1) % len(colors)
        ring.fill(colors[current_color])
        ring.show()
        time.sleep(0.5) # debounce
    else:
        # Rotation adjusts brightness in 1% steps, clamped to [0, 1].
        if encoder_diff > 0:
            if dim_val >= 0.01:
                dim_val = (dim_val - 0.01) % 1.0
            ring.brightness = dim_val
            ring.show()
        elif encoder_diff < 0:
            if dim_val <= 0.99:
                dim_val = (dim_val + 0.01) % 1.0
            ring.brightness = dim_val
            ring.show()
    time.sleep(0.01)
|
amila1998/Research-Project-Management-Tool
|
client2/src/components/ForgotPassword/Forgot.js
|
<reponame>amila1998/Research-Project-Management-Tool
const Forgot = () => {
return (
<div className="resetpsaaword">
<form className="" novalidate>
<div className="">
<label htmlFor="validationCustom03" className="form-label">Email</label>
<input type="email" className="form-control" id="validationCustom03" required/>
</div>
<div className="column">
<div className="login_btn">
<button>send</button>
</div>
</div>
</form>
</div>
);
};
export default Forgot;
|
carter-gao/AutoTestFramework
|
common/api/baseTestCase.py
|
#!/usr/bin/env python
# coding:utf-8
# @Author: Carter.Gao
# @Email: <EMAIL>
# @Date: 2019/9/23 20:36
# @IDE: PyCharm
# @About: 接口测试用例基类
from time import time
from unittest import TestCase
from common.logger import Logger
from common.dataFactory import FakeData
from common.api.apiOperateExcel import BackFillToExcel
from common.api.dataCompare import DataCompare
class BaseTestCase(TestCase):
    """Base class for API test cases.

    Wires together logging, fake test-data generation, Excel result
    back-filling and expected/actual response comparison for every case.
    (Source arrived indentation-stripped; structure reconstructed.)
    """

    # Shared logger for the whole API test run.
    log = Logger('执行API测试').get_logger()

    @classmethod
    def setUpClass(cls) -> None:
        """Create the shared helpers once per test class."""
        cls.log.info('{}开始执行{}'.format('=' * 50, '=' * 50))
        # Random/fake test-data factory.
        cls.faker = FakeData()
        # Writes per-case results back into the Excel workbook.
        cls.back_fill = BackFillToExcel()
        # Generator used as a 1-based test-case counter.
        cls._count_case = (x for x in range(1, 1000))

    @classmethod
    def tearDownClass(cls) -> None:
        """Persist the back-filled Excel results after all cases ran."""
        cls.back_fill.save_excel()
        cls.log.info('{}执行完毕{}'.format('=' * 50, '=' * 50))

    def setUp(self) -> None:
        """Advance the case counter and pre-fill default result cells."""
        # Each case consumes the next counter value, starting at 1.
        self.count = next(self._count_case)
        self.log.info('{}准备执行第{}个用例{}'.format('-' * 25, self.count, '-' * 25))
        # The case number fixes which Excel row this case's results go to.
        self.back_fill.fill_case_number(self.count)
        # Default the verdict to SUCCESS; check_result overwrites on failure.
        self.back_fill.fill_judgement_result()

    @property
    def timestamp(self):
        """Current Unix timestamp in whole seconds."""
        return int(time())

    def check_result(self, excepted: dict, response: dict):
        """Compare expected vs. actual response and record the verdict.

        :param excepted: expected payload
        :param response: actual payload
        :raises AssertionError: when the comparison finds any difference
        """
        # Write expectation and response back to the Excel report.
        self.back_fill.fill_excepted(excepted)
        self.back_fill.fill_response(response)
        self.log.info('预期结果:{}'.format(excepted))
        compare_result = DataCompare().compare(excepted, response)
        # Record the raw diff as well.
        self.back_fill.fill_compare_result(compare_result)
        try:
            # An empty diff list means expected and actual match.
            # (assertEqual gives a useful diff message, unlike assertTrue(x == []).)
            self.assertEqual(compare_result, [])
        except AssertionError:
            self.log.error('执行结果:FAILURE')
            # Overwrite the default SUCCESS verdict with FAILURE.
            self.back_fill.fill_judgement_result(result=0)
            raise AssertionError(f'预期结果:{excepted},比对结果:{compare_result}')
        else:
            self.log.info('执行结果:SUCCESS')
|
zhangkn/iOS14Header
|
System/Library/PrivateFrameworks/ScreenshotServices.framework/SSScreenCapturerPresentationOptions.h
|
<gh_stars>1-10
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, September 27, 2020 at 12:26:14 PM Mountain Standard Time
* Operating System: Version 14.0 (Build 18A373)
* Image Source: /System/Library/PrivateFrameworks/ScreenshotServices.framework/ScreenshotServices
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
// Options object controlling how a screen capture is presented.
// Generated by classdump-dyld from ScreenshotServices.framework (iOS 14);
// presentationMode is an opaque numeric value whose semantics are not
// recoverable from the dump — do not hand-edit this header.
@interface SSScreenCapturerPresentationOptions : NSObject {
unsigned long long _presentationMode;
}
@property (assign,nonatomic) unsigned long long presentationMode; //@synthesize presentationMode=_presentationMode - In the implementation block
-(unsigned long long)presentationMode;
-(void)setPresentationMode:(unsigned long long)arg1 ;
-(id)description;
@end
|
wanyufe/eks-anywhere
|
pkg/curatedpackages/mocks/installer.go
|
// Code generated by MockGen. DO NOT EDIT.
// Source: pkg/curatedpackages/packagecontrollerclient.go
// Package mocks is a generated GoMock package.
package mocks
import (
context "context"
reflect "reflect"
gomock "github.com/golang/mock/gomock"
)
// NOTE: this file is generated by MockGen (see header) — regenerate with
// mockgen instead of hand-editing.
// MockChartInstaller is a mock of ChartInstaller interface.
type MockChartInstaller struct {
ctrl *gomock.Controller
recorder *MockChartInstallerMockRecorder
}
// MockChartInstallerMockRecorder is the mock recorder for MockChartInstaller.
type MockChartInstallerMockRecorder struct {
mock *MockChartInstaller
}
// NewMockChartInstaller creates a new mock instance.
func NewMockChartInstaller(ctrl *gomock.Controller) *MockChartInstaller {
mock := &MockChartInstaller{ctrl: ctrl}
mock.recorder = &MockChartInstallerMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockChartInstaller) EXPECT() *MockChartInstallerMockRecorder {
return m.recorder
}
// InstallChart mocks base method.
func (m *MockChartInstaller) InstallChart(ctx context.Context, chart, ociURI, version, kubeconfigFilePath string, values []string) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "InstallChart", ctx, chart, ociURI, version, kubeconfigFilePath, values)
ret0, _ := ret[0].(error)
return ret0
}
// InstallChart indicates an expected call of InstallChart.
func (mr *MockChartInstallerMockRecorder) InstallChart(ctx, chart, ociURI, version, kubeconfigFilePath, values interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InstallChart", reflect.TypeOf((*MockChartInstaller)(nil).InstallChart), ctx, chart, ociURI, version, kubeconfigFilePath, values)
}
|
nicolasjinchereau/pizza-quest
|
source/game/RenderQueue.h
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) <NAME>. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
#pragma once
#include "Singleton.h"
#include "EnumBitmask.h"
#include <vector>
#include <algorithm>
using namespace std;
class Object;
enum class DrawLayer : int
{
Bottom = 0,
Background = 1000,
Tiles = 2000,
Characters = 3000,
Cars = 4000,
Props = 5000,
Structures = 6000,
Overlay = 7000,
UserInterface = 8000,
Top = 9000
};
ENUM_BITMASK(DrawLayer)
class RenderQueue : public Singleton<RenderQueue>
{
vector<Object*> _queue;
public:
static void Submit(Object *pObject);
static void Clear();
static void Sort();
static void Execute();
};
|
Will03/NVSM_pytorch
|
src/features/tokenize_bert.py
|
import pickle
from pytorch_pretrained_bert import BertTokenizer
from pathlib import Path
from tqdm import tqdm
from tokenize_documents import load_docs
def tokenize_docs(documents, tokenizer):
    """Tokenise every document in the given list.

    Displays a tqdm progress bar while the tokenizer runs, and returns one
    token list per input document (order preserved).
    """
    tokenized = []
    for doc in tqdm(documents, desc = 'Tokenizing documents'):
        tokenized.append(tokenizer.tokenize(doc))
    return tokenized
def create_features(source_folder, dest_data_folder, tokenizer):
    """Tokenize every document under source_folder and pickle the results.

    Each immediate subdirectory of source_folder is treated as a category;
    its files are loaded, tokenized, and recorded as dicts with keys
    'name', 'category' and 'tokens'. The combined list is serialized to
    dest_data_folder / 'tokenized_docs_bert.pkl'.
    (Original docstring also mentioned vocabulary/index dictionaries, which
    this function does not build — removed as inaccurate.)
    """
    all_documents = []
    for cat_folder in source_folder.iterdir():
        # Skip loose files at the top level; only category folders count.
        if not cat_folder.is_dir():
            continue
        category = cat_folder.name
        filepaths = list(cat_folder.iterdir())
        documents = load_docs(filepaths)
        tokenized_documents = tokenize_docs(documents, tokenizer)
        # Pair each file name with its token list, tagged with the category.
        category_documents = [
            {
                'name' : name,
                'category' : category,
                'tokens' : tokens
            }
            for name, tokens in zip(
                [path.name for path in filepaths],
                tokenized_documents
            )
        ]
        all_documents.extend(category_documents)
    with open(dest_data_folder / 'tokenized_docs_bert.pkl', 'wb') as tok_docs_file:
        pickle.dump(all_documents, tok_docs_file)
if __name__ == '__main__':
    # Tokenize the interim corpus with the stock uncased BERT vocabulary and
    # write the pickled result into the processed-data folder.
    tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
    create_features(
        Path('../../data/interim'),
        Path('../../data/processed'),
        tokenizer
    )
|
campingrider/cforum_ex
|
assets/js/notifications/notification-updates.js
|
import { t } from "../modules/i18n";
import { alertInfo } from "../modules/alerts";
import { updateTitleInfos, setNewFavicon } from "../title_infos";
// Live updates over the user's private channel: keeps the mail counter,
// score display, notification counter, favicon and page title in sync, and
// pops an info alert for new mails/notifications.
document.addEventListener("cf:userPrivate", event => {
  const channel = event.detail;

  channel.on("new_priv_message", data => {
    updateTitleInfos();
    const elem = document.getElementById("mails");
    if (elem) {
      elem.innerText = data.unread;
      // Fix: the template's placeholder is {count}, so the value must be
      // supplied under the `count` key (was `unread`, which left the
      // literal "{count}" in the title).
      elem.setAttribute("title", t("{count} unread mails", { count: data.unread }));
    }
    alertInfo(
      t("You've got a new mail from {sender}: {subject}", {
        sender: data.priv_message.sender_name,
        subject: data.priv_message.subject
      })
    );
  });

  channel.on("score-update", data => {
    const elem = document.querySelector("#user-info .score");
    if (elem) {
      // Format with the browser locale (thousands separators etc.).
      const intlScore = new Intl.NumberFormat(window.navigator.language).format(data.score);
      elem.innerText = `(${intlScore})`;
      elem.setAttribute("title", t("{score} points", { score: intlScore }));
    }
  });

  channel.on("new_notification", data => {
    updateTitleInfos();
    setNewFavicon();
    const elem = document.getElementById("notifications-display");
    if (elem) {
      elem.innerText = `${data.unread}`;
      // Fix: the {count} substitution object was missing entirely.
      elem.setAttribute("title", t("{count} new notifications", { count: data.unread }));
    }
    alertInfo(t("You've got a new notification: {subject}", { subject: data.notification.subject }));
  });

  channel.on("notification_count", data => {
    updateTitleInfos();
    const elem = document.getElementById("notifications-display");
    if (elem) {
      elem.innerText = `${data.unread}`;
      // Fix: the {count} substitution object was missing entirely.
      elem.setAttribute("title", t("{count} new notifications", { count: data.unread }));
    }
  });
});
|
shackra/godot
|
drivers/opus/celt/opus_custom_demo.c
|
<gh_stars>10-100
/* Copyright (c) 2007-2008 CSIRO
Copyright (c) 2007-2009 Xiph.Org Foundation
Written by <NAME> */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef OPUS_ENABLED
#include "opus/opus_config.h"
#endif
#include "opus/opus_custom.h"
#include "opus/celt/arch.h"
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>
#define MAX_PACKET 1275
/*
 * Round-trip test driver for the Opus custom (CELT) codec: encodes raw
 * 16-bit PCM from <input>, decodes it again, and writes the decoded PCM to
 * <output>. Optional arguments select encoder complexity and a simulated
 * random packet-loss rate. In RESYNTH builds the decoded signal is also
 * RMS-compared against the encoder's internal synthesis.
 */
int main(int argc, char *argv[])
{
int err;
char *inFile, *outFile;
FILE *fin, *fout;
OpusCustomMode *mode=NULL;
OpusCustomEncoder *enc;
OpusCustomDecoder *dec;
int len;
opus_int32 frame_size, channels, rate;
int bytes_per_packet;
unsigned char data[MAX_PACKET];
int complexity;
#if !(defined (OPUS_FIXED_POINT) && !defined(CUSTOM_MODES)) && defined(RESYNTH)
int i;
double rmsd = 0;
#endif
int count = 0;
opus_int32 skip;
opus_int16 *in, *out;
/* Accept 7-9 args: rate/channels/frame-size/bytes-per-packet are required,
   complexity and loss rate are optional, input/output are always last. */
if (argc != 9 && argc != 8 && argc != 7)
{
fprintf (stderr, "Usage: test_opus_custom <rate> <channels> <frame size> "
" <bytes per packet> [<complexity> [packet loss rate]] "
"<input> <output>\n");
return 1;
}
rate = (opus_int32)atol(argv[1]);
channels = atoi(argv[2]);
frame_size = atoi(argv[3]);
mode = opus_custom_mode_create(rate, frame_size, NULL);
if (mode == NULL)
{
fprintf(stderr, "failed to create a mode\n");
return 1;
}
bytes_per_packet = atoi(argv[4]);
if (bytes_per_packet < 0 || bytes_per_packet > MAX_PACKET)
{
fprintf (stderr, "bytes per packet must be between 0 and %d\n",
MAX_PACKET);
return 1;
}
/* Input/output files are always the last two arguments regardless of the
   optional ones in between. */
inFile = argv[argc-2];
fin = fopen(inFile, "rb");
if (!fin)
{
fprintf (stderr, "Could not open input file %s\n", argv[argc-2]);
return 1;
}
outFile = argv[argc-1];
fout = fopen(outFile, "wb+");
if (!fout)
{
fprintf (stderr, "Could not open output file %s\n", argv[argc-1]);
fclose(fin);
return 1;
}
enc = opus_custom_encoder_create(mode, channels, &err);
if (err != 0)
{
fprintf(stderr, "Failed to create the encoder: %s\n", opus_strerror(err));
fclose(fin);
fclose(fout);
return 1;
}
dec = opus_custom_decoder_create(mode, channels, &err);
if (err != 0)
{
fprintf(stderr, "Failed to create the decoder: %s\n", opus_strerror(err));
fclose(fin);
fclose(fout);
return 1;
}
/* skip = decoder lookahead samples; trimmed from the first output frame. */
opus_custom_decoder_ctl(dec, OPUS_GET_LOOKAHEAD(&skip));
if (argc>7)
{
complexity=atoi(argv[5]);
opus_custom_encoder_ctl(enc,OPUS_SET_COMPLEXITY(complexity));
}
in = (opus_int16*)malloc(frame_size*channels*sizeof(opus_int16));
out = (opus_int16*)malloc(frame_size*channels*sizeof(opus_int16));
/* Main encode/decode loop: one frame per iteration until EOF. */
while (!feof(fin))
{
int ret;
/* NOTE(review): fread's return (the sample count) is stored in err but
   never checked; a short final read is only caught by the feof test. */
err = fread(in, sizeof(short), frame_size*channels, fin);
if (feof(fin))
break;
len = opus_custom_encode(enc, in, frame_size, data, bytes_per_packet);
if (len <= 0)
fprintf (stderr, "opus_custom_encode() failed: %s\n", opus_strerror(len));
/* This is for simulating bit errors */
#if 0
int errors = 0;
int eid = 0;
/* This simulates random bit error */
for (i=0;i<len*8;i++)
{
if (rand()%atoi(argv[8])==0)
{
if (i<64)
{
errors++;
eid = i;
}
data[i/8] ^= 1<<(7-(i%8));
}
}
if (errors == 1)
data[eid/8] ^= 1<<(7-(eid%8));
else if (errors%2 == 1)
data[rand()%8] ^= 1<<rand()%8;
#endif
#if 1 /* Set to zero to use the encoder's output instead */
/* This is to simulate packet loss */
if (argc==9 && rand()%1000<atoi(argv[argc-3]))
/*if (errors && (errors%2==0))*/
ret = opus_custom_decode(dec, NULL, len, out, frame_size);
else
ret = opus_custom_decode(dec, data, len, out, frame_size);
if (ret < 0)
fprintf(stderr, "opus_custom_decode() failed: %s\n", opus_strerror(ret));
#else
for (i=0;i<ret*channels;i++)
out[i] = in[i];
#endif
#if !(defined (OPUS_FIXED_POINT) && !defined(CUSTOM_MODES)) && defined(RESYNTH)
/* Accumulate squared error between input and decoded output. */
for (i=0;i<ret*channels;i++)
{
rmsd += (in[i]-out[i])*1.0*(in[i]-out[i]);
/*out[i] -= in[i];*/
}
#endif
count++;
/* Drop the lookahead samples from the first frame only. */
fwrite(out+skip*channels, sizeof(short), (ret-skip)*channels, fout);
skip = 0;
}
PRINT_MIPS(stderr);
opus_custom_encoder_destroy(enc);
opus_custom_decoder_destroy(dec);
fclose(fin);
fclose(fout);
opus_custom_mode_destroy(mode);
free(in);
free(out);
#if !(defined (OPUS_FIXED_POINT) && !defined(CUSTOM_MODES)) && defined(RESYNTH)
if (rmsd > 0)
{
rmsd = sqrt(rmsd/(1.0*frame_size*channels*count));
fprintf (stderr, "Error: encoder doesn't match decoder\n");
fprintf (stderr, "RMS mismatch is %f\n", rmsd);
return 1;
} else {
fprintf (stderr, "Encoder matches decoder!!\n");
}
#endif
return 0;
}
|
riccardo-forina/enmasse
|
iot/iot-device-registry-infinispan/src/main/java/io/enmasse/iot/registry/infinispan/cache/AdapterCredentialsCacheProvider.java
|
<gh_stars>0
/*
* Copyright 2019, EnMasse authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.enmasse.iot.registry.infinispan.cache;
import org.infinispan.client.hotrod.RemoteCache;
import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.configuration.cache.Index;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import io.enmasse.iot.registry.infinispan.config.InfinispanProperties;
import io.enmasse.iot.registry.infinispan.device.data.CredentialKey;
@Component
public class AdapterCredentialsCacheProvider extends AbstractCacheProvider {

    /**
     * Creates the provider for the adapter-credentials cache.
     *
     * @param properties Infinispan connection/cache settings injected by Spring.
     * @throws Exception if the underlying cache provider cannot be initialized.
     */
    @Autowired
    public AdapterCredentialsCacheProvider(final InfinispanProperties properties) throws Exception {
        super(properties);
    }

    /**
     * Builds the configuration for the credentials cache: no indexing,
     * synchronous distribution, and a single owner per entry.
     */
    public org.infinispan.configuration.cache.Configuration buildConfiguration() {
        final org.infinispan.configuration.cache.ConfigurationBuilder builder =
                new org.infinispan.configuration.cache.ConfigurationBuilder();
        builder.indexing().index(Index.NONE);
        builder.clustering().cacheMode(CacheMode.DIST_SYNC).hash().numOwners(1);
        return builder.build();
    }

    /**
     * Returns (creating on first use) the remote cache holding adapter
     * credentials, keyed by {@link CredentialKey}.
     */
    public RemoteCache<CredentialKey, String> getAdapterCredentialsCache() {
        final org.infinispan.configuration.cache.Configuration configuration = buildConfiguration();
        return getOrCreateCache(properties.getAdapterCredentialsCacheName(), configuration);
    }
}
|
rmurray1717/graficas
|
src/client/components/Viewer/Extensions/Dynamic/Viewing.Extension.MetaProperties/MetaTreeView/MetaTreeDelegate.js
|
import ContextMenu from './MetaContextMenu'
import MetaTreeNode from './MetaTreeNode'
import { TreeDelegate } from 'TreeView'
import sortBy from 'lodash/sortBy'
// Tree delegate for the meta-properties tree view: wires the context
// menu (edit/delete) and double-click editing to property events, and
// builds/destroys the tree's root node.
export default class MetaTreeDelegate extends TreeDelegate {

  /// //////////////////////////////////////////////////////
  // Creates the delegate, its context menu, and hooks up
  // menu + double-click handlers.
  //
  /// //////////////////////////////////////////////////////
  constructor (menuContainer) {
    super()

    this.contextMenu = new ContextMenu({
      container: menuContainer
    })

    this.contextMenu.on(
      'context.property.delete', (node) => {
        this.onDeleteProperty(node)
      })

    this.contextMenu.on(
      'context.property.edit', (node) => {
        this.onEditProperty(node)
      })

    // Double-clicking a node opens the same edit flow as the menu.
    this.on('node.dblClick', (node) => {
      this.onEditProperty(node)
    })
  }

  /// //////////////////////////////////////////////////////
  // Asks listeners to edit the node's property; if a new
  // property is returned, requests the node be updated.
  // NOTE(review): this.emit is awaited, so the emitter base
  // appears to return the handler's result as a promise —
  // confirm against the TreeDelegate implementation.
  //
  /// //////////////////////////////////////////////////////
  async onEditProperty (node) {
    // No metaType on the node means this is a model (base) property
    // being overridden rather than a stored meta property.
    const isModelOverride = !node.props.metaType

    const newMetaProperty = await this.emit(
      'property.edit',
      node.toMetaProperty(),
      isModelOverride)

    if (newMetaProperty) {
      this.emit('node.update',
        newMetaProperty)
    }
  }

  /// //////////////////////////////////////////////////////
  // Asks listeners to delete the node's property; destroys
  // the node only when the deletion was confirmed.
  //
  /// //////////////////////////////////////////////////////
  async onDeleteProperty (node) {
    const isModelOverride = !node.props.metaType

    const deleted = await this.emit(
      'property.delete',
      node.toMetaProperty(), isModelOverride)

    if (deleted) {
      this.emit('node.destroy',
        node.id)
    }
  }

  /// //////////////////////////////////////////////////////
  // Builds the root node of the tree from component data.
  // The root is retained so destroy() can tear it down.
  //
  /// //////////////////////////////////////////////////////
  createRootNode (data) {
    this.rootNode = new MetaTreeNode({
      displayName: data.displayName,
      externalId: data.externalId,
      dbId: data.dbId.toString(),
      component: data.component,
      propsMap: data.propsMap,
      delegate: this,
      parent: null,
      type: 'root',
      group: true,
      id: 'root'
    })

    return this.rootNode
  }

  /// //////////////////////////////////////////////////////////
  // Tears down the whole tree via the root node.
  //
  /// //////////////////////////////////////////////////////////
  destroy () {
    this.rootNode.destroy()
  }

  /// //////////////////////////////////////////////////////////
  // Mounts a node's React/DOM content into the tree.
  // The node's dotted type is applied as CSS classes on the
  // parent element, plus 'click-trigger' so clicks register.
  //
  /// //////////////////////////////////////////////////////////
  createTreeNode (node, parentDomElement) {
    const container = document.createElement('div')

    parentDomElement.appendChild(container)

    node.type.split('.').forEach((cls) => {
      parentDomElement.classList.add(cls)
    })

    parentDomElement.classList.add(
      'click-trigger')

    node.mount(container)
  }

  /// //////////////////////////////////////////////////////
  // Only elements carrying the 'click-trigger' class count
  // as node clicks (filters out clicks on inner widgets).
  //
  /// //////////////////////////////////////////////////////
  nodeClickSelector (event) {
    const className = event.target.className

    return (
      className.toLowerCase().indexOf('click-trigger') > -1
    )
  }

  /// ////////////////////////////////////////////////////////////////
  // Right-click opens the context menu, but only on property nodes.
  //
  /// ////////////////////////////////////////////////////////////////
  onTreeNodeRightClick (tree, node, event) {
    if (node.type === 'property') {
      this.contextMenu.show(event, node)
    }
  }

  /// //////////////////////////////////////////////////////////
  // Children are added lazily: the node keeps the addChild
  // callback and invokes it itself when expanded.
  //
  /// //////////////////////////////////////////////////////////
  forEachChild (node, addChild) {
    node.addChild = addChild
  }

  /// //////////////////////////////////////////////////////////
  // Groups properties by displayCategory (missing category ->
  // 'Other'); categories starting with '__' are hidden.
  // Each category's properties are sorted by displayName.
  //
  /// //////////////////////////////////////////////////////////
  mapPropsByCategory (properties) {
    const propsMap = {}

    properties.forEach((prop) => {
      const category = prop.displayCategory
        ? prop.displayCategory
        : 'Other'

      if (category.indexOf('__') !== 0) {
        propsMap[category] = propsMap[category] || []
        propsMap[category].push(prop)
      }
    })

    // sort props by displayName in each category
    for (const category in propsMap) {
      propsMap[category] = sortBy(
        propsMap[category], (prop) => {
          return prop.displayName
        })
    }

    return propsMap
  }
}
|
ChainSQL/chainsqld
|
src/peersafe/protocol/STProposeSet.h
|
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2012, 2013 Ripple Labs Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#ifndef PEERSAFE_PROTOCOL_STPROPOSESET_H_INCLUDED
#define PEERSAFE_PROTOCOL_STPROPOSESET_H_INCLUDED
#include <ripple/protocol/UintTypes.h>
#include <ripple/protocol/STObject.h>
#include <ripple/protocol/PublicKey.h>
namespace ripple {
class RCLTxSet;
class Schema;
/** A peer's signed consensus proposal for a ledger round.
 *
 *  Wraps the proposal fields (sequence, position hash, previous ledger,
 *  close time, proposer identity/public key, current ledger sequence and
 *  view number) in an STObject so the proposal can be serialized,
 *  signed, and relayed to other peers.
 */
class STProposeSet final : public STObject, public CountedObject<STProposeSet>
{
public:
    //! Sequence value when a peer initially joins consensus
    static std::uint32_t const seqJoin = 0;

    //! Sequence number when a peer wants to bow out and leave consensus
    static std::uint32_t const seqLeave = 0xffffffff;

    static char const* getCountedObjectName()
    {
        return "STProposeSet";
    }

    using pointer = std::shared_ptr<STProposeSet>;
    using ref = const std::shared_ptr<STProposeSet>&;

    //! Deserializing constructor: reads the proposal fields from `sit`.
    STProposeSet(
        SerialIter& sit,
        NetClock::time_point now,
        NodeID const& nodeid,
        PublicKey const& publicKey);

    //! Constructs a proposal without ledger-sequence/view/tx-set details.
    STProposeSet(
        std::uint32_t proposeSeq,
        uint256 const& position,
        uint256 const& preLedgerHash,
        NetClock::time_point closetime,
        NetClock::time_point now,
        NodeID const& nodeid,
        PublicKey const& publicKey);

    //! Constructs a full proposal, including the proposed transaction set.
    STProposeSet(
        std::uint32_t proposeSeq,
        uint256 const& position,
        uint256 const& preLedgerHash,
        NetClock::time_point closetime,
        NetClock::time_point now,
        NodeID const& nodeid,
        PublicKey const& publicKey,
        std::uint32_t ledgerSeq,
        std::uint64_t view,
        RCLTxSet const& set);

    STBase*
    copy(std::size_t n, void* buf) const override
    {
        return emplace(n, buf, *this);
    }

    STBase*
    move(std::size_t n, void* buf) override
    {
        return emplace(n, buf, std::move(*this));
    }

    // Field accessors (all cheap, const).
    std::uint32_t proposeSeq() const { return proposeSeq_; }
    uint256 const& position() const { return position_; }
    uint256 const& prevLedger() const { return previousLedger_; }
    NetClock::time_point const& closeTime() const { return closeTime_; }
    NetClock::time_point const& seenTime() const { return time_; }
    NodeID const& nodeID() const { return nodeID_; }
    PublicKey const& getSignerPublic() const { return signerPublic_; }
    std::uint32_t const& curLedgerSeq() const { return curLedgerSeq_; }
    std::uint64_t const& view() const { return view_; }

    //! True if this is the peer's initial position (seqJoin).
    bool isInitial() const { return proposeSeq_ == seqJoin; }
    //! True if the peer is leaving consensus (seqLeave).
    bool isBowOut() const { return proposeSeq_ == seqLeave; }
    //! True if the proposal was last updated at or before `cutoff`.
    bool isStale(NetClock::time_point cutoff) const { return time_ <= cutoff; }

    //! Updates the position/close time and refreshes the seen time.
    void changePosition(
        uint256 const& newPosition,
        NetClock::time_point newCloseTime,
        NetClock::time_point now);

    //! Marks this proposal as a bow-out (leave consensus) at time `now`.
    void bowOut(NetClock::time_point now);

    Blob getSerialized() const;
    Json::Value getJson() const;

    std::shared_ptr<RCLTxSet>
    getTxSet(Schema& app) const;

private:
    //! The sequence number of these positions taken by this node
    std::uint32_t proposeSeq_;

    //! Unique identifier of the position this proposal is taking
    uint256 position_;

    //! Unique identifier of prior ledger this proposal is based on
    uint256 previousLedger_;

    //! The ledger close time this position is taking
    NetClock::time_point closeTime_;

    //! The time this position was last updated
    NetClock::time_point time_;

    //! The identifier of the node taking this position
    NodeID nodeID_;
    PublicKey signerPublic_;
    std::uint32_t curLedgerSeq_ = 0;
    std::uint64_t view_ = 0;

private:
    static SOTemplate const& getFormat();
};
} // ripple
#endif
|
Arsenal821/incubator-pegasus
|
java-client/src/main/java/org/apache/pegasus/client/HashKeyData.java
|
<reponame>Arsenal821/incubator-pegasus<gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pegasus.client;
import java.util.*;
import org.apache.commons.lang3.tuple.Pair;
/**
* @author qinzuoyan
* <p>Store data under the same hashKey.
*/
public class HashKeyData {
  /** Whether every entry under this hash key has been fetched. */
  public boolean allFetched = true;
  /** The hash key these values belong to; null if unset. */
  public byte[] hashKey = null;
  /** Pairs of (sortKey, value) under the hash key; null until first added. */
  public List<Pair<byte[], byte[]>> values = null; // List{sortKey, value}

  public HashKeyData() {}

  public HashKeyData(byte[] hashKey) {
    this.hashKey = hashKey;
  }

  public HashKeyData(byte[] hashKey, List<Pair<byte[], byte[]>> values) {
    this.hashKey = hashKey;
    this.values = values;
  }

  public HashKeyData(boolean allFetched, byte[] hashKey, List<Pair<byte[], byte[]>> values) {
    this.allFetched = allFetched;
    this.hashKey = hashKey;
    this.values = values;
  }

  /** Appends one (sortKey, value) pair, lazily creating the backing list. */
  public void addData(byte[] sortKey, byte[] value) {
    if (values == null) {
      values = new ArrayList<>();
    }
    values.add(Pair.of(sortKey, value));
  }
}
|
tienph91/Aspose.Slides-for-Java
|
Examples/src/main/java/com/aspose/slides/examples/presentations/opening/GetFileFormat.java
|
package com.aspose.slides.examples.presentations.opening;
import com.aspose.slides.IPresentationInfo;
import com.aspose.slides.LoadFormat;
import com.aspose.slides.PresentationFactory;
import com.aspose.slides.examples.RunExamples;
public class GetFileFormat
{
    public static void main(String[] args)
    {
        //ExStart:GetFileFormat
        // The path to the documents directory.
        String dataDir = RunExamples.getDataDir_PresentationOpening();

        // Probe the presentation without fully loading it.
        IPresentationInfo info = PresentationFactory.getInstance().getPresentationInfo(dataDir + "HelloWorld.pptx");

        // Branch on the detected load format.
        int loadFormat = info.getLoadFormat();
        if (loadFormat == LoadFormat.Pptx)
        {
            // Recognized as a PPTX presentation.
        }
        else if (loadFormat == LoadFormat.Unknown)
        {
            // The format could not be determined.
        }
        //ExEnd:GetFileFormat
    }
}
|
andre-becker/XLT
|
src/main/java/com/xceptance/xlt/report/util/XltChartTheme.java
|
<reponame>andre-becker/XLT
package com.xceptance.xlt.report.util;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.GraphicsEnvironment;
import java.awt.Paint;
import java.awt.Stroke;
import java.io.InputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.StandardChartTheme;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.block.LineBorder;
import org.jfree.chart.plot.Plot;
import org.jfree.chart.renderer.AbstractRenderer;
import org.jfree.chart.renderer.xy.StandardXYBarPainter;
import org.jfree.chart.renderer.xy.XYBarPainter;
import org.jfree.chart.title.LegendTitle;
import org.jfree.chart.title.TextTitle;
import org.jfree.chart.title.Title;
import org.jfree.chart.ui.RectangleInsets;
/**
* The standard XLT chart theme.
*/
public class XltChartTheme extends StandardChartTheme
{
    // Shared outline paint/stroke reused for plot outlines, legend borders
    // and axis tick marks so all chart chrome looks uniform.
    private static final Color _DEFAULT_OUTLINE_PAINT = new Color(0xBFBFBF);

    private static final Stroke _DEFAULT_OUTLINE_STROKE = new BasicStroke(1.0f);

    private static final boolean AXIS_LINE_VISIBLE = false;

    private static final Paint AXIS_TICK_MARK_PAINT = _DEFAULT_OUTLINE_PAINT;

    private static final Paint CHART_BACKGROUND_PAINT = new Color(0xFAFAFA);

    private static final RectangleInsets CHART_PADDING = new RectangleInsets(8, 0, 8, 2);

    // Fonts are assigned exactly once in the static initializer below
    // (loaded from bundled Roboto files, with SansSerif fall-backs).
    private static final Font FONT_EXTRA_LARGE;

    private static final Font FONT_LARGE;

    private static final Font FONT_REGULAR;

    private static final Font FONT_SMALL;

    private static final LineBorder LEGEND_BORDER = new LineBorder(_DEFAULT_OUTLINE_PAINT, _DEFAULT_OUTLINE_STROKE,
                                                                   new RectangleInsets(1, 1, 1, 1));

    private static final RectangleInsets LEGEND_PADDING = new RectangleInsets(1, 4, 1, 4);

    private static final Paint PLOT_BACKGROUND_PAINT = Color.WHITE;

    private static final Paint PLOT_GRID_LINE_PAINT = Color.LIGHT_GRAY;

    private static final Paint TEXT_PAINT = new Color(0x333333);

    private static final XYBarPainter YX_BAR_PAINTER = new StandardXYBarPainter();

    private static final Log LOG = LogFactory.getLog(XltChartTheme.class);

    // Chart titles are clipped to a single line and padded horizontally.
    private static final int TEXTTITLE_MAXLINES = 1;

    private static final RectangleInsets TEXTTITLE_PADDING = new RectangleInsets(1, 100, 1, 100);

    /*
     * Initialize the fonts.
     */
    static
    {
        Font extraLargeFont;
        Font largeFont;
        Font regularFont;
        Font smallFont;

        try
        {
            // load base fonts
            final Font boldFont = loadFont("Roboto-Medium.ttf");
            final Font plainFont = loadFont("Roboto-Regular.ttf");

            // derive fonts from base fonts
            extraLargeFont = boldFont.deriveFont(13f);
            largeFont = plainFont.deriveFont(12f);
            regularFont = plainFont.deriveFont(12f);
            smallFont = plainFont.deriveFont(10f);
        }
        catch (final Exception e)
        {
            // Keep chart generation working even if the bundled fonts are
            // missing or unreadable.
            LOG.error("Failed to load chart text font", e);

            // use fall-back fonts
            extraLargeFont = new Font("SansSerif", Font.BOLD, 12);
            largeFont = new Font("SansSerif", Font.PLAIN, 12);
            regularFont = new Font("SansSerif", Font.PLAIN, 12);
            smallFont = new Font("SansSerif", Font.PLAIN, 10);
        }

        // initialize font constants
        FONT_EXTRA_LARGE = extraLargeFont;
        FONT_LARGE = largeFont;
        FONT_REGULAR = regularFont;
        FONT_SMALL = smallFont;
    }

    /**
     * Loads the font with the given file name from the class path, relative to this class.
     *
     * @param fontFileName
     *            the name of the font file
     * @return the font
     * @throws Exception
     *             if anything goes wrong
     */
    private static Font loadFont(final String fontFileName) throws Exception
    {
        try (final InputStream fontStream = XltChartTheme.class.getResourceAsStream(fontFileName))
        {
            final Font font = Font.createFont(Font.TRUETYPE_FONT, fontStream);

            // Register with AWT so the font is available by name as well.
            GraphicsEnvironment.getLocalGraphicsEnvironment().registerFont(font);

            return font;
        }
    }

    /**
     * Constructor.
     */
    public XltChartTheme()
    {
        super("XLT");

        // text settings
        setExtraLargeFont(FONT_EXTRA_LARGE);
        setLargeFont(FONT_LARGE);
        setRegularFont(FONT_REGULAR);
        setSmallFont(FONT_SMALL);

        // chart settings
        setTitlePaint(TEXT_PAINT);
        setChartBackgroundPaint(CHART_BACKGROUND_PAINT);

        // plot settings
        setPlotBackgroundPaint(PLOT_BACKGROUND_PAINT);
        setPlotOutlinePaint(_DEFAULT_OUTLINE_PAINT);
        setAxisOffset(RectangleInsets.ZERO_INSETS);
        setDomainGridlinePaint(PLOT_GRID_LINE_PAINT);
        setRangeGridlinePaint(PLOT_GRID_LINE_PAINT);

        // bar plot settings
        setXYBarPainter(YX_BAR_PAINTER);

        // axis settings
        setAxisLabelPaint(TEXT_PAINT);
        setTickLabelPaint(TEXT_PAINT);

        // legend settings
        setLegendItemPaint(TEXT_PAINT);

        // annotations/renderers
        setItemLabelPaint(TEXT_PAINT);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void apply(final JFreeChart chart)
    {
        super.apply(chart);

        // Constrain the title so long names don't wrap onto multiple lines.
        TextTitle title = chart.getTitle();
        if (title != null)
        {
            title.setMaximumLinesToDisplay(TEXTTITLE_MAXLINES);
            title.setPadding(TEXTTITLE_PADDING);
        }

        // set some additional properties
        chart.setPadding(CHART_PADDING);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void applyToPlot(final Plot plot)
    {
        super.applyToPlot(plot);

        // set some additional properties
        plot.setOutlineStroke(_DEFAULT_OUTLINE_STROKE);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void applyToAbstractRenderer(final AbstractRenderer renderer)
    {
        // implemented empty -> we do not want the theme to overwrite our programmatically set colors and strokes
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void applyToTitle(final Title title)
    {
        super.applyToTitle(title);

        // set some additional properties
        if (title instanceof LegendTitle)
        {
            final LegendTitle legendTitle = (LegendTitle) title;

            legendTitle.setFrame(LEGEND_BORDER);
            legendTitle.setPadding(LEGEND_PADDING);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void applyToValueAxis(final ValueAxis axis)
    {
        super.applyToValueAxis(axis);

        // set some additional properties
        axis.setTickLabelFont(FONT_SMALL);
        axis.setTickMarkPaint(AXIS_TICK_MARK_PAINT);
        axis.setAxisLineVisible(AXIS_LINE_VISIBLE);
    }
}
|
eyantra/Visual_Programming_for_Firebird_V
|
Codes/Java Code/codeblocks/src/workspace/WorkspaceEvent.java
|
package workspace;
import codeblocks.Block;
import codeblocks.BlockLink;
/**
 * Describes a single change in the workspace: a page or block being
 * added/removed/renamed/moved, blocks (dis)connecting, or the workspace
 * finishing its load. Instances are created by the source of the change
 * and handed to workspace listeners.
 */
public class WorkspaceEvent {

    //workspace-wide events
    //affects layout and content of workspace and at least two or more blocks
    public final static int PAGE_ADDED = 1;
    public final static int PAGE_REMOVED = 2;
    public final static int BLOCK_ADDED = 3;
    public final static int BLOCK_REMOVED = 4;
    public final static int BLOCKS_CONNECTED = 5;
    public final static int BLOCKS_DISCONNECTED = 6;
    public final static int BLOCK_STACK_COMPILED = 7;

    //page specific events
    public final static int PAGE_RENAMED = 8;
    public final static int PAGE_RESIZED = 9;

    //block specific events
    public final static int BLOCK_RENAMED = 10;
    public final static int BLOCK_MOVED = 11;
    public final static int BLOCK_GENUS_CHANGED = 12;
    public final static int BLOCK_COMMENT_ADDED = 13;
    public final static int BLOCK_COMMENT_REMOVED = 14;
    public final static int BLOCK_COMMENT_MOVED = 15;
    public final static int BLOCK_COMMENT_RESIZED= 16;
    // NOTE: constant name misspells "VISIBILITY"; kept as-is because it is
    // part of the public API.
    public final static int BLOCK_COMMENT_VISBILITY_CHANGE = 17;
    public final static int BLOCK_COMMENT_CHANGED= 18;

    //workspace specific event
    public final static int WORKSPACE_FINISHED_LOADING = 100;

    // Source block of the event; Block.NULL for events that do not
    // originate from a single block (page events, connection events).
    private Long blockID = Block.NULL;
    private int eventType;
    private WorkspaceWidget widget = null;
    // Connection info; only set for connect/disconnect (and some move) events.
    private BlockLink link = null;
    // Previous name of the widget; only set for rename events.
    private String oldWidgetName = null;

    //If this is a user spawned event or not
    private boolean userSpawned = false;

    /**
     * Constructs a new WorkspaceEvent. This constructor should be used to report
     * page added, removed events. The WorkspaceWidget page parameter should
     * be an instance of Page.
     * @param page
     * @param eventType
     */
    public WorkspaceEvent(WorkspaceWidget page,int eventType){
        this.widget = page;
        this.eventType = eventType;
        this.blockID = Block.NULL;
    }

    public WorkspaceEvent(WorkspaceWidget page,int eventType, boolean userSpawned){
        this.widget = page;
        this.eventType = eventType;
        this.blockID = Block.NULL;
        this.userSpawned = userSpawned;
    }

    /**
     * Constructs a new WorkspaceEvent. This constructor should be used to report
     * page renamed events. The WorkspaceWidget page parameter should
     * be an instance of Page.
     * @param page
     * @param oldName the old String name of this page
     * @param eventType
     */
    public WorkspaceEvent(WorkspaceWidget page, String oldName, int eventType){
        this.widget = page;
        this.eventType = eventType;
        this.blockID = Block.NULL;
        this.oldWidgetName = oldName;
    }

    public WorkspaceEvent(WorkspaceWidget page, String oldName, int eventType, boolean userSpawned){
        this.widget = page;
        this.eventType = eventType;
        this.blockID = Block.NULL;
        this.oldWidgetName = oldName;
        this.userSpawned = userSpawned;
    }

    /**
     * Constructs a new WorkspaceEvent. This constructor should be used to report
     * the following: block added, removed, renamed, compiled, moved.
     * @param widget
     * @param blockID
     * @param eventType
     */
    public WorkspaceEvent(WorkspaceWidget widget, Long blockID, int eventType){
        this.widget = widget;
        this.eventType = eventType;
        this.blockID = blockID;
    }

    public WorkspaceEvent(WorkspaceWidget widget, Long blockID, int eventType, boolean userSpawned){
        this.widget = widget;
        this.eventType = eventType;
        this.blockID = blockID;
        this.userSpawned = userSpawned;
    }

    /**
     * Constructs a new WorkspaceEvent. This constructor should be used to report
     * block connected/disconnected events. The specified link contains the connection
     * information.
     * @param widget
     * @param link
     * @param eventType
     */
    public WorkspaceEvent(WorkspaceWidget widget, BlockLink link, int eventType){
        this.widget = widget;
        this.link = link;
        this.eventType = eventType;
        this.blockID = Block.NULL;
    }

    public WorkspaceEvent(WorkspaceWidget widget, BlockLink link, int eventType, boolean userSpawned){
        this.widget = widget;
        this.link = link;
        this.eventType = eventType;
        // Consistency fix: mirror the three-argument link constructor so the
        // block ID is explicitly Block.NULL for connection events (the field
        // initializer already guarantees this; stated here for clarity).
        this.blockID = Block.NULL;
        this.userSpawned = userSpawned;
    }

    /**
     * Tells if this event is a user spawned event or not
     * @return true if this event was spawned by a user
     */
    public boolean isUserEvent()
    {
        return userSpawned;
    }

    /**
     * Returns the WorkspaceWidget where this event occured.
     * @return the WorkspaceWidget where this event occured.
     */
    public WorkspaceWidget getSourceWidget(){
        return widget;
    }

    /**
     * Returns the Long ID of the Block where this event occured. For
     * block connection events, this id is Block.NULL since the event occurred
     * from two blocks.
     */
    public Long getSourceBlockID(){
        return blockID;
    }

    /**
     * Returns the int event type of this
     * @return the int event type of this
     */
    public int getEventType(){
        return eventType;
    }

    /**
     * Returns the BlockLink where this event originated, or null if the event type
     * of this is not block connected or disconnected.
     * @return the BlockLink where this event originated, or null if the event type
     * of this is not block connected or disconnected.
     */
    public BlockLink getSourceLink(){
        return link;
    }

    /**
     * Returns the original name of the source widget; null if the source widget's
     * name did not change.
     * @return the original name of the source widget; null if the source widget's
     * name did not change.
     */
    public String getOldNameOfSourceWidget(){
        return oldWidgetName;
    }

    /**
     * Debug-friendly description of the event. Comment-related event types
     * intentionally fall through to the numeric default case.
     */
    public String toString() {
        switch (eventType) {
        case PAGE_ADDED:
            return "WorkspaceEvent(PAGE_ADDED: " + widget + ")";
        case PAGE_REMOVED:
            return "WorkspaceEvent(PAGE_REMOVED: " + widget +")";
        case BLOCK_ADDED:
            return "WorkspaceEvent(BLOCK_ADDED: " + Block.getBlock(blockID) + ")";
        case BLOCK_REMOVED:
            return "WorkspaceEvent(BLOCK_REMOVED: " + Block.getBlock(blockID) + ")";
        case BLOCKS_CONNECTED:
            return "WorkspaceEvent(BLOCKS_CONNECTED: " + link + ")";
        case BLOCKS_DISCONNECTED:
            return "WorkspaceEvent(BLOCKS_DISCONNECTED: " + link + ")";
        case BLOCK_STACK_COMPILED:
            return "WorkspaceEvent(BLOCK_STACK_COMPILED: " + Block.getBlock(blockID) + ")";
        case PAGE_RENAMED:
            return "WorkspaceEvent(PAGE_RENAMED: " + widget + ")";
        case PAGE_RESIZED:
            return "WorkspaceEvent(PAGE_RESIZED: " + widget + ")";
        case BLOCK_RENAMED:
            return "WorkspaceEvent(BLOCK_RENAMED: " + Block.getBlock(blockID) + ")";
        case BLOCK_MOVED:
            if (link == null)
                return "WorkspaceEvent(BLOCK_MOVED: " + Block.getBlock(blockID) + ")";
            else
                return "WorkspaceEvent(BLOCK_MOVED: " + link + ")";
        case BLOCK_GENUS_CHANGED:
            return "WorkspaceEvent(BLOCK_GENUS_CHANGED: " + Block.getBlock(blockID) + ")";
        case WORKSPACE_FINISHED_LOADING:
            return "WorkspaceEvent(WORKSPACE_FINISHED_LOADING)";
        default:
            return "WorkspaceEvent(" + eventType + ")";
        }
    }
}
|
if-pan-zpp/mdk
|
mdk/src/utils/ContactType.cpp
|
<reponame>if-pan-zpp/mdk
#include "utils/ContactType.hpp"
#include <unordered_map>
using namespace mdk;
using namespace std;
// Implicit read-only view of the underlying enum code (no copy).
ContactType::operator ContactTypeIdx const&() const {
    return code;
}
// Parses a textual contact-type name into its enum code.
// Unknown names throw std::out_of_range (via unordered_map::at).
ContactType::ContactType(const std::string &name) {
    static const unordered_map<string, ContactTypeIdx> nameToCode = {
        { "NAT",    ContactTypeIdx::NAT },
        { "NAT_BB", ContactTypeIdx::NAT_BB },
        { "NAT_BS", ContactTypeIdx::NAT_BS },
        { "NAT_SB", ContactTypeIdx::NAT_SB },
        { "NAT_SS", ContactTypeIdx::NAT_SS },
        { "SSBOND", ContactTypeIdx::SSBOND }
    };
    code = nameToCode.at(name);
}
// Narrowing view of the contact type as its underlying 8-bit integer.
ContactType::operator int8_t() const {
    return static_cast<int8_t>(code);
}
// Renders the enum code back to its textual name (inverse of the
// string constructor). A code outside the table throws std::out_of_range.
ContactType::operator std::string() const {
    static const unordered_map<ContactTypeIdx, string> codeToName = {
        {ContactTypeIdx::NAT,    "NAT" },
        {ContactTypeIdx::NAT_BB, "NAT_BB" },
        {ContactTypeIdx::NAT_BS, "NAT_BS" },
        {ContactTypeIdx::NAT_SB, "NAT_SB" },
        {ContactTypeIdx::NAT_SS, "NAT_SS" },
        {ContactTypeIdx::SSBOND, "SSBOND" }
    };
    return codeToName.at(code);
}
|
antomy-gc/art-java
|
application-json/src/main/java/ru/art/json/descriptor/JsonEntityWriter.java
|
/*
* ART Java
*
* Copyright 2019 ART
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.art.json.descriptor;
import com.fasterxml.jackson.core.*;
import lombok.experimental.*;
import ru.art.core.checker.*;
import ru.art.entity.*;
import ru.art.entity.constants.*;
import ru.art.entity.constants.ValueType.*;
import ru.art.json.exception.*;
import static java.util.Objects.*;
import static ru.art.core.caster.Caster.*;
import static ru.art.core.constants.StringConstants.*;
import static ru.art.core.context.Context.*;
import static ru.art.core.extension.FileExtensions.*;
import static ru.art.core.extension.StringExtensions.*;
import static ru.art.entity.Value.*;
import static ru.art.json.constants.JsonMappingExceptionMessages.*;
import static ru.art.json.module.JsonModule.*;
import java.io.*;
import java.nio.file.*;
import java.util.*;
@UtilityClass
public class JsonEntityWriter {
    /**
     * Serializes the value to JSON and encodes it using the
     * context-configured charset.
     *
     * @param value the value to serialize (null serializes to the
     *              empty-object constant)
     * @return the JSON text as bytes
     */
    public static byte[] writeJsonToBytes(Value value) {
        return writeJson(value).getBytes(contextConfiguration().getCharset());
    }
public static void writeJson(Value value, OutputStream outputStream) {
if (isNull(outputStream)) {
return;
}
try {
outputStream.write(writeJson(value).getBytes(contextConfiguration().getCharset()));
} catch (IOException ioException) {
throw new JsonMappingException(ioException);
}
}
    /**
     * Serializes the value to JSON and writes it to the given file path.
     * NOTE(review): writeFileQuietly presumably suppresses I/O errors
     * (best effort) — confirm against FileExtensions.
     */
    public static void writeJson(Value value, Path path) {
        writeFileQuietly(path, writeJson(value));
    }
    /**
     * Serializes the value to a JSON string using the JSON module's shared
     * ObjectMapper factory, without pretty printing.
     */
    public static String writeJson(Value value) {
        return writeJson(jsonModule().getObjectMapper().getFactory(), value, false);
    }
    /**
     * Serializes the value to a JSON string with the given Jackson factory.
     *
     * Special cases: a null value yields the empty-object constant; an empty
     * collection yields the empty-array constant; other empty values yield
     * the empty-object constant. Scalar (primitive) values are returned as
     * their bare text form, not as quoted JSON — NOTE(review): presumably
     * intentional for scalar payloads, confirm with callers.
     *
     * @param jsonFactory  the Jackson factory to create the generator with
     * @param value        the value to serialize
     * @param prettyOutput whether to use Jackson's default pretty printer
     * @return the serialized text
     * @throws JsonMappingException if the factory is null or writing fails
     */
    public static String writeJson(JsonFactory jsonFactory, Value value, boolean prettyOutput) {
        if (isNull(jsonFactory)) throw new JsonMappingException(JSON_FACTORY_IS_NULL);
        if (isNull(value)) {
            return BRACES;
        }
        if (isEmpty(value)) {
            if (isCollection(value)) {
                return SQUARE_BRACES;
            }
            return BRACES;
        }
        StringWriter stringWriter = new StringWriter();
        // try-with-resources closes (and flushes) the generator before the
        // writer's contents are read below.
        try (JsonGenerator generator = jsonFactory.createGenerator(stringWriter)) {
            if (prettyOutput) {
                generator.useDefaultPrettyPrinter();
            }
            switch (value.getType()) {
                case ENTITY:
                    writeJsonEntity(generator, asEntity(value));
                    break;
                case COLLECTION:
                    writeArray(generator, asCollection(value));
                    break;
                // Scalars bypass the generator entirely and return raw text.
                case STRING:
                    return emptyIfNull(asPrimitive(value).getString());
                case LONG:
                    return emptyIfNull(asPrimitive(value).getLong());
                case DOUBLE:
                    return emptyIfNull(asPrimitive(value).getDouble());
                case FLOAT:
                    return emptyIfNull(asPrimitive(value).getFloat());
                case INT:
                    return emptyIfNull(asPrimitive(value).getInt());
                case BOOL:
                    return emptyIfNull(asPrimitive(value).getBool());
                case BYTE:
                    return emptyIfNull(asPrimitive(value).getByte());
                case STRING_PARAMETERS_MAP:
                    writeStringParameters(generator, asStringParametersMap(value));
                    break;
                case MAP:
                    writeJsonMap(generator, asMap(value));
            }
        } catch (IOException ioException) {
            throw new JsonMappingException(ioException);
        }
        return stringWriter.toString();
    }
    /**
     * Writes a map value's entries into the current JSON object.
     * Only entries with primitive keys are emitted — JSON field names must
     * be strings — so entries with complex keys are silently skipped.
     */
    private static void writeJsonMap(JsonGenerator generator, MapValue map) {
        if (isNull(map)) return;
        map.getElements()
                .entrySet()
                .stream()
                .filter(entry -> isPrimitive(entry.getKey()))
                .forEach(entry -> writeJsonMapEntry(generator, entry));
    }
    /**
     * Writes a map value as a named JSON object field, delegating entry
     * serialization to the unnamed overload.
     */
    private static void writeJsonMap(JsonGenerator jsonGenerator, String name, MapValue mapValue) {
        if (isNull(mapValue)) return;
        try {
            jsonGenerator.writeObjectFieldStart(name);
            writeJsonMap(jsonGenerator, mapValue);
            jsonGenerator.writeEndObject();
        } catch (IOException ioException) {
            throw new JsonMappingException(ioException);
        }
    }
    /**
     * Writes a single map entry as a JSON field.
     * NOTE(review): emptyIfNull(entry.getKey()) presumably renders the
     * primitive key as its string form — confirm the emptyIfNull overload
     * used for Value keys.
     */
    private static void writeJsonMapEntry(JsonGenerator generator, Map.Entry<? extends Value, ? extends Value> entry) {
        try {
            writeField(generator, emptyIfNull(entry.getKey()), entry.getValue());
        } catch (IOException ioException) {
            throw new JsonMappingException(ioException);
        }
    }
private static void writeJsonEntity(JsonGenerator generator, Entity entity) throws IOException {
if (isNull(entity)) return;
generator.writeStartObject();
Map<String, ? extends Value> fields = getEntityFields(entity);
for (String field : fields.keySet()) {
writeField(generator, field, fields.get(field));
}
generator.writeEndObject();
}
/**
 * Writes an entity as a named nested JSON object, one field per entity
 * field. No-op for a null entity.
 *
 * @param jsonGenerator open generator positioned inside an object context
 * @param name          the JSON field name for the nested object
 * @param entity        the entity to serialize; may be null
 * @throws IOException on generator write failure
 */
private static void writeJsonEntity(JsonGenerator jsonGenerator, String name, Entity entity) throws IOException {
    if (isNull(entity)) return;
    jsonGenerator.writeObjectFieldStart(name);
    // Iterate entries directly instead of keySet()+get() (one lookup per field).
    for (Map.Entry<String, ? extends Value> field : getEntityFields(entity).entrySet()) {
        writeField(jsonGenerator, field.getKey(), field.getValue());
    }
    jsonGenerator.writeEndObject();
}
/**
 * Writes a collection value as a named JSON array, dispatching on whether
 * the collection is backed by a primitive array or an object collection.
 * No-op for a null collection.
 */
private static void writeArray(JsonGenerator jsonGenerator, String fieldName, CollectionValue<?> array) throws IOException {
    if (isNull(array)) {
        return;
    }
    jsonGenerator.writeArrayFieldStart(fieldName);
    switch (array.getCollectionMode()) {
        case PRIMITIVE_ARRAY:
            writePrimitiveElements(jsonGenerator, array);
            break;
        case COLLECTION:
            writeCollectionElements(jsonGenerator, array);
            break;
        default:
            // Any other mode produces an empty JSON array.
            break;
    }
    jsonGenerator.writeEndArray();
}
/**
 * Writes a collection value as an anonymous JSON array, dispatching on
 * whether the collection is backed by a primitive array or an object
 * collection. No-op for a null collection.
 */
private static void writeArray(JsonGenerator jsonGenerator, CollectionValue<?> array) throws IOException {
    if (isNull(array)) {
        return;
    }
    jsonGenerator.writeStartArray();
    switch (array.getCollectionMode()) {
        case PRIMITIVE_ARRAY:
            writePrimitiveElements(jsonGenerator, array);
            break;
        case COLLECTION:
            writeCollectionElements(jsonGenerator, array);
            break;
        default:
            // Any other mode produces an empty JSON array.
            break;
    }
    jsonGenerator.writeEndArray();
}
/**
 * Writes a named JSON field whose rendering depends on the runtime type of
 * {@code value}. Null values are skipped entirely (the field is omitted).
 *
 * @param jsonGenerator open generator positioned inside an object context
 * @param name          the JSON field name
 * @param value         the value to serialize; may be null
 * @throws IOException on generator write failure
 */
private static void writeField(JsonGenerator jsonGenerator, String name, Value value) throws IOException {
    if (isNull(value)) return;
    switch (value.getType()) {
        case ENTITY:
            writeJsonEntity(jsonGenerator, name, asEntity(value));
            return;
        case COLLECTION:
            writeArray(jsonGenerator, name, asCollection(value));
            return;
        case MAP:
            writeJsonMap(jsonGenerator, name, asMap(value));
            return;
        case STRING_PARAMETERS_MAP:
            writeStringParameters(jsonGenerator, name, asStringParametersMap(value));
            return;
        // All primitive kinds share one handler; fall-through is deliberate.
        case STRING:
        case INT:
        case DOUBLE:
        case LONG:
        case BOOL:
        case BYTE:
        case FLOAT:
            writeField(jsonGenerator, name, asPrimitive(value));
    }
}
/**
 * Writes a named JSON field for a primitive value using the matching typed
 * generator call. Empty primitives (or primitives holding an empty payload)
 * are skipped so that the field is omitted from the output.
 *
 * @param jsonGenerator open generator positioned inside an object context
 * @param name          the JSON field name
 * @param value         the primitive to serialize; may be empty
 * @throws IOException on generator write failure
 */
private static void writeField(JsonGenerator jsonGenerator, String name, Primitive value) throws IOException {
    // Both the wrapper and its payload must be non-empty to emit the field.
    if (isEmpty(value) || CheckerForEmptiness.isEmpty(value.getValue())) return;
    switch (value.getType()) {
        case STRING:
            jsonGenerator.writeStringField(name, value.getString());
            return;
        case INT:
            jsonGenerator.writeNumberField(name, value.getInt());
            return;
        case DOUBLE:
            jsonGenerator.writeNumberField(name, value.getDouble());
            return;
        case LONG:
            jsonGenerator.writeNumberField(name, value.getLong());
            return;
        case BOOL:
            jsonGenerator.writeBooleanField(name, value.getBool());
            return;
        case BYTE:
            jsonGenerator.writeNumberField(name, value.getByte());
            return;
        case FLOAT:
            // Last case: no trailing return needed.
            jsonGenerator.writeNumberField(name, value.getFloat());
    }
}
/**
 * Writes one collection element as a raw JSON value (no field name),
 * dispatching on the collection's declared element type. The element is
 * cast according to {@code type}; null elements are skipped.
 *
 * @param jsonGenerator open generator positioned inside an array context
 * @param type          the declared element type of the owning collection
 * @param value         the element to serialize; may be null
 * @throws IOException on generator write failure
 */
private static void writeCollectionValue(JsonGenerator jsonGenerator, CollectionElementsType type, Object value) throws IOException {
    if (isNull(value)) return;
    switch (type) {
        case COLLECTION:
            writeArray(jsonGenerator, asCollection(cast(value)));
            return;
        case ENTITY:
            writeJsonEntity(jsonGenerator, asEntity(cast(value)));
            return;
        case STRING:
            jsonGenerator.writeString((String) value);
            return;
        case INT:
            jsonGenerator.writeNumber((Integer) value);
            return;
        case BOOL:
            jsonGenerator.writeBoolean((Boolean) value);
            return;
        case DOUBLE:
            jsonGenerator.writeNumber((Double) value);
            return;
        case LONG:
            jsonGenerator.writeNumber((Long) value);
            return;
        case BYTE:
            jsonGenerator.writeNumber((Byte) value);
            return;
        case FLOAT:
            jsonGenerator.writeNumber((Float) value);
            return;
        case VALUE:
            // Element is a wrapped Value: unwrap and dispatch again on its
            // own runtime type via the Value-typed overload.
            Value valueObject = (Value) value;
            writeCollectionValue(jsonGenerator, valueObject.getType(), valueObject);
            return;
        case STRING_PARAMETERS_MAP:
            writeStringParameters(jsonGenerator, asStringParametersMap(cast(value)));
    }
}
/**
 * Writes one {@link Value} collection element as a raw JSON value (no field
 * name), dispatching on its {@link ValueType}. Null values are skipped.
 *
 * @param jsonGenerator open generator positioned inside an array context
 * @param type          the runtime value type to dispatch on
 * @param value         the value to serialize; may be null
 * @throws IOException on generator write failure
 */
private static void writeCollectionValue(JsonGenerator jsonGenerator, ValueType type, Value value) throws IOException {
    if (isNull(value)) return;
    switch (type) {
        case COLLECTION:
            writeArray(jsonGenerator, asCollection(cast(value)));
            return;
        case ENTITY:
            writeJsonEntity(jsonGenerator, asEntity(cast(value)));
            return;
        case STRING:
            jsonGenerator.writeString(asPrimitive(value).getString());
            return;
        case INT:
            jsonGenerator.writeNumber(asPrimitive(value).getInt());
            return;
        case BOOL:
            jsonGenerator.writeBoolean(asPrimitive(value).getBool());
            return;
        case DOUBLE:
            jsonGenerator.writeNumber(asPrimitive(value).getDouble());
            return;
        case LONG:
            jsonGenerator.writeNumber(asPrimitive(value).getLong());
            return;
        case BYTE:
            jsonGenerator.writeNumber(asPrimitive(value).getByte());
            return;
        case FLOAT:
            jsonGenerator.writeNumber(asPrimitive(value).getFloat());
            // BUG FIX: this case previously fell through into
            // STRING_PARAMETERS_MAP, attempting to cast a float primitive to
            // a StringParametersMap after the number was already written.
            return;
        case STRING_PARAMETERS_MAP:
            writeStringParameters(jsonGenerator, asStringParametersMap(cast(value)));
    }
}
/** Thin accessor isolating how an entity exposes its field map. */
private static Map<String, ? extends Value> getEntityFields(Entity entity) {
    return entity.getFields();
}
/**
 * Writes every element of an object-backed collection into the currently
 * open JSON array, using the collection's declared element type to choose
 * the serialization path for each element.
 */
private static void writeCollectionElements(JsonGenerator jsonGenerator, CollectionValue<?> array) throws IOException {
    CollectionElementsType elementsType = array.getElementsType();
    for (Object element : array.getElements()) {
        writeCollectionValue(jsonGenerator, elementsType, element);
    }
}
/**
 * Streams the elements of a primitive-array-backed collection as raw JSON
 * values into an already-open JSON array context.
 *
 * @param jsonGenerator open generator positioned inside an array context
 * @param array         collection in PRIMITIVE_ARRAY mode
 * @throws IOException on generator write failure
 */
private static void writePrimitiveElements(JsonGenerator jsonGenerator, CollectionValue<?> array) throws IOException {
    CollectionElementsType valueType = array.getElementsType();
    switch (valueType) {
        case INT:
            for (int value : array.getIntArray()) {
                jsonGenerator.writeNumber(value);
            }
            return;
        case BOOL:
            for (boolean value : array.getBoolArray()) {
                jsonGenerator.writeBoolean(value);
            }
            return;
        case DOUBLE:
            for (double value : array.getDoubleArray()) {
                jsonGenerator.writeNumber(value);
            }
            return;
        case LONG:
            // BUG FIX: previously iterated getIntArray() for LONG collections,
            // mismatching the backing store like every other case here does.
            for (long value : array.getLongArray()) {
                jsonGenerator.writeNumber(value);
            }
            return;
        case BYTE:
            for (byte value : array.getByteArray()) {
                jsonGenerator.writeNumber(value);
            }
            return;
        case FLOAT:
            for (float value : array.getFloatArray()) {
                jsonGenerator.writeNumber(value);
            }
    }
}
/**
 * Writes a string-to-string parameters map as an anonymous flat JSON
 * object. No-op when the map is null.
 *
 * @throws IOException on generator write failure
 */
private static void writeStringParameters(JsonGenerator generator, StringParametersMap stringParametersMap) throws IOException {
    if (isNull(stringParametersMap)) return;
    generator.writeStartObject();
    // Iterate entries directly instead of keySet()+get() (one lookup per entry).
    for (Map.Entry<String, String> parameter : stringParametersMap.getParameters().entrySet()) {
        generator.writeStringField(parameter.getKey(), parameter.getValue());
    }
    generator.writeEndObject();
}
/**
 * Writes a string-to-string parameters map as a named flat JSON object.
 * No-op when the map is null.
 *
 * @throws IOException on generator write failure
 */
private static void writeStringParameters(JsonGenerator generator, String name, StringParametersMap stringParametersMap) throws IOException {
    if (isNull(stringParametersMap)) return;
    generator.writeObjectFieldStart(name);
    // Iterate entries directly instead of keySet()+get() (one lookup per entry).
    for (Map.Entry<String, String> parameter : stringParametersMap.getParameters().entrySet()) {
        generator.writeStringField(parameter.getKey(), parameter.getValue());
    }
    generator.writeEndObject();
}
}
|
galrosenthal/Football-Workshop
|
src/main/java/Domain/Users/AssociationRepresentative.java
|
package Domain.Users;
import Domain.EntityManager;
import Domain.Exceptions.ExistsAlreadyException;
import Domain.Exceptions.RoleExistsAlreadyException;
import Domain.Financials.AssociationFinancialRecordLog;
import Domain.Game.PointsPolicy;
import Domain.Game.SchedulingPolicy;
import Domain.Game.League;
import Domain.Game.Team;
import Domain.Game.Season;
import java.util.Date;
import java.util.List;
/**
 * Role held by a football association representative. Responsible for
 * administrative operations: creating leagues and teams, managing referees,
 * assigning teams/referees to seasons, and managing points/scheduling
 * policies.
 */
public class AssociationRepresentative extends Role {

    // Financial operations recorded against the association (audit log).
    List<AssociationFinancialRecordLog> logger;

    /**
     * Constructor
     *
     * @param systemUser - SystemUser - The system user to add the new role to
     * @param addToDB    - boolean - Whether to add the new role to the database
     */
    public AssociationRepresentative(SystemUser systemUser, boolean addToDB) {
        super(RoleTypes.ASSOCIATION_REPRESENTATIVE, systemUser);
        if (addToDB) {
            EntityManager.getInstance().addRole(this);
        }
    }

    /**
     * Creates a new League.
     *
     * @param leagueName - String - A unique league name
     * @return - boolean - True if a new league was created successfully, else false
     * @throws Exception - throws if a league already exists with the given leagueName
     */
    public boolean addLeague(String leagueName) throws Exception {
        // Verifies whether a league with the chosen name already exists.
        boolean doesLeagueExist = EntityManager.getInstance().doesLeagueExists(leagueName);
        if (doesLeagueExist) {
            throw new Exception("League with the same name already exists");
        }
        // The League constructor self-registers (addToDB flag is true); the
        // local reference is intentionally unused.
        League league = new League(leagueName, true);
        return true;
    }

    /**
     * Adds a new Referee role to a given user with the given training.
     * If the user is already a referee then throw exception.
     *
     * @param newRefereeUser - SystemUser - a user to add a referee role to.
     * @param training       - RefereeQualification - the training of the referee
     * @return - boolean - true if the referee role was added successfully.
     * @throws RoleExistsAlreadyException - if the user is already a referee.
     */
    public boolean addReferee(SystemUser newRefereeUser, RefereeQualification training) throws RoleExistsAlreadyException {
        if (newRefereeUser.getRole(RoleTypes.REFEREE) != null) { //Check: test this line to see if the re-creation from DB is sufficient
            throw new RoleExistsAlreadyException("Already a referee");
        }
        // The Referee constructor attaches itself to the user and persists
        // (addToDB flag is true); the local reference is intentionally unused.
        Referee refereeRole = new Referee(newRefereeUser, training, true);
        return true;
    }

    /**
     * Creates a new team.
     *
     * @param teamName         - String - A unique team name
     * @param newTeamOwnerUser - SystemUser - The user who is chosen to be the team owner of the new team.
     * @return - boolean - True if a new team was created successfully, else false
     */
    public boolean addTeam(String teamName, SystemUser newTeamOwnerUser) {
        // Reuse the existing TeamOwner role if the user already has one.
        Role newTeamOwnerRole = newTeamOwnerUser.getRole(RoleTypes.TEAM_OWNER);
        TeamOwner teamOwner;
        if (newTeamOwnerRole == null) {
            teamOwner = new TeamOwner(newTeamOwnerUser, true);
        } else {
            teamOwner = (TeamOwner) newTeamOwnerRole;
        }
        Team newTeam = createNewTeam(teamName, teamOwner);
        // Wire the ownership relation on both sides and record who appointed it.
        newTeam.addTeamOwner(teamOwner);
        teamOwner.addTeamToOwn(newTeam, this.getSystemUser());
        teamOwner.setAppointedOwner(newTeam, this.getSystemUser());
        return true;
    }

    /**
     * Creates a new team. Responsible only for creating and adding a new team, doesn't do any farther checks.
     *
     * @param teamName - String - the team's name.
     * @param to       - TeamOwner - The team's owner.
     * @return The new Team that was created.
     */
    private Team createNewTeam(String teamName, TeamOwner to) {
        Team team = new Team(teamName, to, true);
        EntityManager.getInstance().addTeam(team);
        return team;
    }

    /**
     * Removes the referee role from a given user. Removal is refused while
     * the referee still has future games assigned.
     *
     * @param chosenUser - SystemUser - a user with a Referee role to be removed.
     * @return - boolean - true if the Referee role was removed successfully, else false
     */
    public boolean removeReferee(SystemUser chosenUser) {
        Referee refereeRole = (Referee) chosenUser.getRole(RoleTypes.REFEREE);
        if (refereeRole != null) {
            if (!refereeRole.hasFutureGames()) {
                if (chosenUser.removeRole(refereeRole)) { //TODO: NEED TO UN ASSIGN FROM GAMES ALSO - PROBLEM
                    refereeRole.unAssignFromAllSeasons();
                    refereeRole.unAssignFromAllGames();
                }
                return true;
            }
        }
        return false;
    }

    /**
     * Assigns a given referee to a given season if the referee has not been previously assigned to the season.
     *
     * @param chosenSeason - Season - the season to assign the referee to
     * @param refereeRole  - Referee - the referee to be assigned
     * @throws Exception - throws if the referee has been previously assigned to the season,
     *                     or if the database update fails.
     */
    public void assignRefereeToSeason(Season chosenSeason, Referee refereeRole) throws Exception {
        if (chosenSeason.doesContainsReferee(refereeRole)) {
            throw new Exception("This referee is already assigned to the chosen season");
        } else {
            // Persist first; only mirror the assignment in memory on success.
            if (EntityManager.getInstance().assignRefereeToSeason(refereeRole, chosenSeason)) {
                chosenSeason.assignReferee(refereeRole);
                refereeRole.assignToSeason(chosenSeason);
            } else {
                throw new Exception("The operation failed to execute because of a DB failure");
            }
        }
    }

    /**
     * Assign teams to season
     *
     * @param chosenTeams The teams to assign.
     * @param season      The season to assign the teams to.
     * @return false if no teams were assigned to the season, else true.
     */
    public boolean assignTeamsToSeason(List<Team> chosenTeams, Season season) {
        if (chosenTeams == null || chosenTeams.isEmpty())
            return false;
        for (Team team : chosenTeams) {
            season.addTeam(team);
            team.addSeason(season);
        }
        return true;
    }

    /**
     * Remove teams from season
     *
     * @param chosenTeams The teams to remove.
     * @param season      The season to remove the teams from.
     * @return false if no teams were removed from the season, else true.
     */
    public boolean removeTeamsFromSeason(List<Team> chosenTeams, Season season) {
        if (chosenTeams == null || chosenTeams.isEmpty())
            return false;
        for (Team team : chosenTeams) {
            season.removeTeam(team);
            team.removeSeason(season);
        }
        return true;
    }

    /**
     * Adds a new points policy using the given parameters.
     * Adds only if the arguments are correct and if an identical policy doesn't exist yet.
     *
     * @param victoryPoints - int - the amount of points earned for a victory - positive integer
     * @param lossPoints    - int - the amount of points lost for a loss - negative integer or zero
     * @param tiePoints     - int - the amount of points earned/lost for a tie - integer
     * @throws Exception - IllegalArgumentException - if the wrong arguments were passed, ExistsAlreadyException - if the policy already exists
     */
    public void addPointsPolicy(int victoryPoints, int lossPoints, int tiePoints) throws Exception {
        if (victoryPoints <= 0) {
            // FIX: corrected "most be" typo in the user-facing message.
            throw new IllegalArgumentException("The victory points must be positive");
        } else if (lossPoints > 0) {
            throw new IllegalArgumentException("The loss points must be negative or zero");
        }
        if (EntityManager.getInstance().doesPointsPolicyExists(victoryPoints, lossPoints, tiePoints)) {
            throw new ExistsAlreadyException("This points policy already exists");
        }
        PointsPolicy newPointsPolicy = new PointsPolicy(victoryPoints, lossPoints, tiePoints);
        EntityManager.getInstance().addPointsPolicy(newPointsPolicy);
    }

    /**
     * Sets the points policy of the given season to be the points policy given.
     * Silently ignores null arguments.
     *
     * @param chosenSeason - Season - the season to changed its points policy
     * @param pointsPolicy - PointsPolicy - the new points policy
     */
    public void setPointsPolicy(Season chosenSeason, PointsPolicy pointsPolicy) {
        if (chosenSeason != null && pointsPolicy != null) {
            chosenSeason.setPointsPolicy(pointsPolicy);
        }
    }

    /**
     * Adds a new scheduling policy using the given parameters.
     * Adds only if the arguments are correct and if an identical policy doesn't exist yet.
     *
     * @param gamesPerSeason - int - The number of games for each team per season - positive integer
     * @param gamesPerDay    - int - The number of games on the same day - positive integer
     * @param minRest        - int - The minimum rest days between games - non-negative integer
     * @throws Exception - IllegalArgumentException - if the wrong arguments were passed,
     *                     ExistsAlreadyException - if the policy already exists
     */
    public void addSchedulingPolicy(int gamesPerSeason, int gamesPerDay, int minRest) throws Exception {
        if (gamesPerSeason <= 0) {
            throw new IllegalArgumentException("The number of games for each team per season must be positive integer");
        } else if (gamesPerDay <= 0) {
            throw new IllegalArgumentException("The number of games on the same day must be positive integer");
        } else if (minRest < 0) {
            throw new IllegalArgumentException("The minimum rest days between games must be non-negative integer");
        }
        if (EntityManager.getInstance().doesSchedulingPolicyExists(gamesPerSeason, gamesPerDay, minRest)) {
            throw new ExistsAlreadyException("This scheduling policy already exists");
        }
        SchedulingPolicy newSchedulingPolicy = new SchedulingPolicy(gamesPerSeason, gamesPerDay, minRest);
        EntityManager.getInstance().addSchedulingPolicy(newSchedulingPolicy);
    }

    /**
     * Schedules the given season's games according to the given scheduling
     * policy, starting from the given date.
     *
     * @param chosenSeason     - Season - the season whose games should be scheduled
     * @param schedulingPolicy - SchedulingPolicy - the policy to schedule by
     * @param startDate        - Date - the first possible game date
     * @throws Exception - if the season is already underway, or scheduling fails
     */
    public void activateSchedulingPolicy(Season chosenSeason, SchedulingPolicy schedulingPolicy, Date startDate) throws Exception {
        if (chosenSeason.getIsUnderway()) {
            throw new Exception("Activating a scheduling policy after a season has started is forbidden");
        }
        chosenSeason.scheduleGames(schedulingPolicy, startDate);
    }
}
|
ozywuli/ember-truth-helpers
|
app/helpers/or.js
|
<gh_stars>100-1000
// Standard ember-addon shim: re-export the `or` truth helper so the host
// app resolves it under `app/helpers/or`.
export { default, or } from 'ember-truth-helpers/helpers/or';
|
andrewisen/IFC.js-web-worker-example
|
examples/04/assets/js/indexed-db/indexed-db.js
|
/**
* Assume IndexedDB is supported
*/
/**
 * Minimal callback-based wrapper around IndexedDB used to cache Three.js
 * scene data. Assumes IndexedDB is supported by the browser.
 *
 * Usage: call init(cb) once before get/set.
 */
function IndexedDB() {
    const name = 'ifc-js';
    const version = 1;
    const transactionName = 'three-scene';
    let database;

    return {
        /**
         * Opens (and, on first use or version bump, upgrades) the database,
         * then invokes `cb` once the connection is ready.
         */
        init: function (cb) {
            const request = indexedDB.open(name, version);

            request.onupgradeneeded = function (event) {
                const db = event.target.result;
                // Create the object store only if it does not exist yet.
                if (db.objectStoreNames.contains(transactionName) === false) {
                    db.createObjectStore(transactionName);
                }
            };

            request.onsuccess = function (event) {
                database = event.target.result;
                cb();
            };

            request.onerror = function (event) {
                console.error('IndexedDB', event);
            };
        },

        /**
         * Reads the value stored under `key` and passes it to `cb`
         * (undefined when the key is absent).
         */
        get: function (key, cb) {
            // FIX: a pure read only needs a 'readonly' transaction;
            // 'readwrite' was taking an exclusive lock for no benefit.
            const transaction = database.transaction(transactionName, 'readonly');
            const objectStore = transaction.objectStore(transactionName);
            const request = objectStore.get(key);

            request.onsuccess = function (event) {
                cb(event.target.result);
            };

            request.onerror = function (event) {
                // FIX: read failures were previously silently dropped.
                console.error('IndexedDB get', event);
            };
        },

        /**
         * Stores `data` under the key `fileName`, overwriting any previous
         * value (fire-and-forget; no success callback is exposed).
         */
        set: function (data, fileName) {
            const transaction = database.transaction([transactionName], 'readwrite');
            const objectStore = transaction.objectStore(transactionName);
            const request = objectStore.put(data, fileName);

            request.onerror = function (event) {
                // FIX: write failures were previously silently dropped
                // (the original only installed an empty onsuccess handler).
                console.error('IndexedDB set', event);
            };
        }
    };
}
export { IndexedDB };
|
mooshak-dcc/mooshak-2
|
home/data/configs/checks/team/resources/comp_err.java
|
// Fixture for the Mooshak "team" compilation check: this class is
// INTENTIONALLY missing the trailing semicolon on the println statement so
// that it fails to compile. Do not "fix" it — the check relies on the
// compiler error being produced.
class teste {
public static void main(String args[]) {
System.out.println("ola mundo\n")
}
}
|
simonjj22/libsafecrypto
|
src/utils/crypto/mersenne_twister/mt19937ar.h
|
#pragma once
/* Mersenne Twister (MT19937) pseudo-random number generator: public API
 * for a reentrant, per-instance state variant. */
/* Period parameters */
/* NOTE(review): N and M are extremely short macro names defined in a public
 * header; they leak into every translation unit that includes this file and
 * can collide with user identifiers. Renaming (e.g. MT_N / MT_M) would also
 * require updating the implementation file — flagged rather than changed. */
#define N 624
#define M 397
/* State of one MT19937 generator instance. */
typedef struct mt_state_t
{
unsigned long mt[N]; /* the array for the state vector */
int mti; /* mti==N+1 means mt[N] is not initialized */
} mt_state_t;
/* Seeds the state vector from a single value s. */
extern void init_genrand(mt_state_t *ctx, unsigned long s);
/* Seeds the state from an array of key_length seed values. */
extern void init_by_array(mt_state_t *ctx, unsigned long init_key[],
int key_length);
/* Returns the next pseudo-random number on the [0, 0xffffffff] interval. */
extern unsigned long genrand_int32(mt_state_t *ctx);
|
socialsoftware/edition
|
microfrontend/src/microfrontends/about/pages/Articles_es.js
|
import React from 'react'
const Articles_es = () => {
return(
<div>
<h4>
<ul>
<li><a href="#Q01Q">A. Artículos sobre el <em>Archivo LdoD</em></a></li>
<li><a href="#Q02Q">B. Tesis sobre el <em>Archivo LdoD</em></a></li>
<li><a href="#Q03Q">C. Otros artículos producidos por el equipo del proyecto</a></li>
<li><a href="#Q04Q">D. Artículos seleccionados sobre el <em>Libro del desasosiego</em></a></li>
<li><a href="#Q05Q">E. Principales ediciones del <em>Libro del desasosiego</em> (1982-2017)</a></li>
<li><a href="#Q06Q">F. Recepción crítica del <em>Archivo LdoD</em></a></li>
</ul>
</h4>
<p>
<strong> </strong>
</p>
<h3 id="Q01Q" style={{paddingTop: "60px", marginTop: "-60px"}}>
A. Artículos sobre el <em>Archivo LdoD</em></h3>
<p><strong>2013</strong> </p>
<p>
<NAME> (2013). “Fragmentación y Edición en el <em>Libro del Desasosiego</em>.”
<em>MATLIT: Materialidades da Literatura</em> 1.1: 57-73. <br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_3" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_3</a> <br />
DOI: <a href="https://doi.org/10.14195/2182-8830_1-1_3" target="_new">
https://doi.org/10.14195/2182-8830_1-1_3</a> </p>
<p>
Portela, Manuel (2013). “Nenhum Problema Tem Solução: Um Arquivo Digital do <em>Livro do Desassossego</em>.”
<em>MATLIT: Materialidades da Literatura</em> 1.1: 9-33. <br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_1" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_1</a> <br />
DOI: <a href="https://doi.org/10.14195/2182-8830_1-1_1" target="_new">
https://doi.org/10.14195/2182-8830_1-1_1</a> </p>
<p>
Silva, Antó<NAME>, and <NAME> (2013). “Social Edition 4 <em>The Book of Disquiet:</em>
The Disquiet of Experts with Common Users.”
<em>ECSCW 2013: Adjunct Proceedings. The 13th European Conference on Computer-Supported Cooperative Work</em>.
Eds. <NAME>, <NAME>, and <NAME>.
Aarhus: Department of Computer Science, Aarhus University. 45-50. <br />
URL: <a href="https://tidsskrift.dk/daimipb/article/view/13587/11586" target="_new">
https://tidsskrift.dk/daimipb/article/view/13587/11586</a> <br />
DOI: <a href=" https://doi.org/10.7146/dpb.v40i596.13587" target="_new">
https://doi.org/10.7146/dpb.v40i596.13587</a> </p>
<p>
Silva, Antó<NAME>, and <NAME> (2013).
“TEI4<em>LdoD</em>: Textual Encoding and Social Editing in Web 2.0 Environments.”
<em>The Linked TEI: Text Encoding in the Web: TEI Conference and Members Meeting 2013, Book of Abstract</em>s.
Eds. <NAME> and <NAME>. Roma: DIGILAB/Università La Sapienza. 119-126. <br />
URL: <a href="https://tei-c.org/Vault/MembersMeetings/2013/wp-content/uploads/2013/09/book-abstracts.pdf" target="_new">
https://tei-c.org/Vault/MembersMeetings/2013/wp-content/uploads/2013/09/book-abstracts.pdf</a> </p>
<p> </p>
<p> <strong>2014</strong> </p>
<p>
Portela, Manuel, and Antó<NAME> (2014). “A Model for a Virtual <em>LdoD</em>.”
<em>Literary and Linguistic Computing </em>[Advance online publication: March, 5, 2014].
<em>Digital Scholarship in the Humanities </em>30.3 (2015): 354-370. <br />
URL: <a href="https://academic.oup.com/dsh/article-abstract/30/3/354/345465" target="_new">
https://academic.oup.com/dsh/article-abstract/30/3/354/345465</a> <br />
DOI: <a href="https://doi.org/10.1093/llc/fqu004" target="_new">
https://doi.org/10.1093/llc/fqu004</a> </p>
<p>
Silva, Antó<NAME>, and <NAME> (2014-2015).
“TEI4LdoD: Textual Encoding and Social Editing in Web 2.0 Environments.”
<em>Journal of the Text Encoding Initiative</em> 8. <br />
URL: <a href="https://journals.openedition.org/jtei/1171" target="_new">
https://journals.openedition.org/jtei/1171</a> <br />
DOI: <a href="https://doi.org/10.4000/jtei.1171" target="_new">
https://doi.org/10.4000/jtei.1171</a> </p>
<p> </p>
<p> <strong>2015</strong> </p>
<p>
Portela, Manuel, and <NAME> (2015).
“The Fragmentary Kinetics of Writing in the <em>Book of Disquiet</em>.”
<em>Textual Cultures</em> 9.2: 52-78. <br />
URL: <a href="https://scholarworks.iu.edu/journals/index.php/textual/article/view/12752/28916" target="_new">
https://scholarworks.iu.edu/journals/index.php/textual/article/view/12752/28916</a> <br />
DOI: <a href="https://doi.org/10.14434/tc.v9i2.12752" target="_new">
https://doi.org/10.14434/tc.v9i2.12752</a> </p>
<p>
<NAME>, e Antó<NAME> (2015). “A Dinâmica entre Arquivo e Edição no <em>Arquivo LdoD</em>.”
<em>Colóquio Letras</em> 188: 33-47. <br />
URI: <a href="https://estudogeral.sib.uc.pt/handle/10316/44734" target="_new">
https://estudogeral.sib.uc.pt/handle/10316/44734</a> </p>
<p>
<NAME> (2015). “O <em>Arquivo LdoD</em> e a Biblioteca Digital.”
<em>A Biblioteca da Universidade: Permanência e Metamorfose</em>s.
Eds. José <NAME>, <NAME>, and <NAME>.
Coimbra: Imprensa da Universidade de Coimbra. 239-258. <br />
DOI: <a href="https://dx.doi.org/10.14195/978-989-26-1045-0_14" target="_new">
https://dx.doi.org/10.14195/978-989-26-1045-0_14</a> </p>
<p> </p>
<p> <strong>2016</strong></p>
<p>
<NAME> (2016). “Variantes Textuais no <em>Livro do Desassossego</em>:
Edição, Codificação e Interpretação".
<em>Texto Digital</em> 12.1: 54-68. <br/>
URL: <a href="https://periodicos.ufsc.br/index.php/textodigital/article/download/1807-9288.2016v12n1p54/32358" target="_new">
https://periodicos.ufsc.br/index.php/textodigital/article/download/1807-9288.2016v12n1p54/32358</a> <br />
DOI: <a href="https://doi.org/10.5007/1807-9288.2016v12n1p54" target="_new">
https://doi.org/10.5007/1807-9288.2016v12n1p54</a> </p>
<p>
<NAME> (2016). “Writing the Archive: An Experiment in Literary Self-Consciousness.”
<em>Gramma | Γραμμα: Journal of Theory and Criticism</em> 23: 15-32. <br />
URL: <a href="https://ejournals.lib.auth.gr/gramma/article/download/5401/5295" target="_new">
https://ejournals.lib.auth.gr/gramma/article/download/5401/5295</a> <br />
DOI: <a href="https://doi.org/10.26262/gramma.v23i0.5401" target="_new">
https://doi.org/10.26262/gramma.v23i0.5401</a> </p>
<p>
<NAME> (2016). “A Simulação da Performatividade Literária no<em> Arquivo LdoD.”</em>
<em>Cultura e Digital em Portugal</em>. Eds. José Luí<NAME>, Joã<NAME> e <NAME>.
Porto: Afrontamento. 89-101. </p>
<p>
Portela, Manuel, and Antó<NAME> (2016). “<NAME>’s<em> Book of Disquiet</em> as a Dynamic Digital Archive.<em>”</em>
<em>Edizioni Critiche Digitali: Edizioni a confronto/ Digital Critical Editions: Comparing Editions</em>.
Eds. Paola Italia and <NAME><em>. </em>Roma: Sapienza Università Editrice. 37-50. <br />
URI: <a href="https://estudogeral.sib.uc.pt/handle/10316/43431" target="_new">
https://estudogeral.sib.uc.pt/handle/10316/43431</a> </p>
<p>
Portela, Manuel, and Antó<NAME> (2016). “Encoding, Visualizing and Generating Variation
in Fernando Pessoa’s <em>Livro do Desassossego</em>.”
<em>Variants</em> 12-13: 189-210. <br />
URL: <a href="https://journals.openedition.org/variants/356" target="_new">
https://journals.openedition.org/variants/356</a> <br />
DOI: <a href="https://doi.org/10.4000/variants.356" target="_new">
https://doi.org/10.4000/variants.356</a> </p>
<p> </p>
<p><strong>2017</strong></p>
<p>
<NAME> (2017). “The <em>Book of Disquiet</em> Archive as a Collaborative Textual Environment:
From Digital Archive to Digital Simulator.”
<em>The Writing Platform: Digital Knowledge for Writers</em>.
Brisbane: Queensland University of Technology. Web. <br/>
URL: <a href="https://thewritingplatform.com/2017/07/book-disquiet-archive-collaborative-textual-environment-digital-archive-digital-simulator/" target="_new">
https://thewritingplatform.com/2017/07/book-disquiet-archive-collaborative-textual-environment-digital-archive-digital-simulator/</a></p>
<p>
<NAME> (2017). “Atos de Escrita no <em>Livro do Desassossego</em>.”
<em>Congresso Internacional Fernando Pessoa 2017.</em> Lisboa: <NAME>. 224-239. <br/>
URI: <a href= "https://estudogeral.sib.uc.pt/handle/10316/44304" target="_new">
https://estudogeral.sib.uc.pt/handle/10316/44304</a></p>
<p> </p>
<p><strong>2018</strong></p>
<p>
<NAME> (2018). “La simulación de los procesos literarios en el <em>Archivo LdoD.</em>”
<em>Ilusión y materialidad: perspectivas sobre el archivo</em>.
Eds. Jeró<NAME> y <NAME>á: Ediciones Uniandes. 353-366.</p>
<p>
Pereira, Luí<NAME>, <NAME>, and Licí<NAME> (2018).
“<em>Machines of Disquiet</em>: Textual Experience in the <em>LdoD Archive.</em>”
<em>MATLIT: Materialidades da Literatura</em> 6.3: 59-71. <br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_6-3_5" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_6-3_5</a> <br />
DOI: <a href="https://doi.org/10.14195/2182-8830_6-3_5" target="_new">
https://doi.org/10.14195/2182-8830_6-3_5</a> </p>
<p> </p>
<p><strong>2019</strong></p>
<p>
<NAME>, Antó<NAME>, and <NAME> (2019).
“Social Media Aware Virtual Editions for the <em>Book of Disquiet</em>.”
<em>Digital Libraries at the Crossroads of Digital Information for the Future. ICADL 2019.</em>
Lecture Notes in Computer Science, vol 11853, Eds. <NAME>, <NAME>, and <NAME>. Berlin: Springer. 271-282.<br />
DOI: <a href="https://link.springer.com/chapter/10.1007%2F978-3-030-34058-2_25" target="_new">
https://link.springer.com/chapter/10.1007%2F978-3-030-34058-2_25</a> </p>
<p> </p>
<p><strong>2020</strong></p>
<p>
<NAME>, and <NAME> (2020). “Representação e Análise da Receção Crítica
do <em>Livro do Desassossego</em> no <em>Arquivo LdoD</em>.”
<em>Novos Estudos Pessoanos - Ponto de Situação</em>, Lisboa: Casa Fernando Pessoa. 5-11.</p>
<p>
Portela, Manuel, and Cecí<NAME>ães (2020). “The Book of Disquiet Digital Archive as a Role-playing Experiment.”
<em>Attention à la marche | MIND THE GAP! ELO 2018 </em>. Ed. <NAME>.
Montréal: Université du Québec à Montréal (UQAM).</p>
<p>
Marques, Gonç<NAME>ão, Antó<NAME>, and <NAME> (2020).
“Classification in the <em>LdoD Archive</em>: A Crowdsourcing and Gamification Approach.” Submitted.</p>
<p> </p>
<p><strong>2021</strong></p>
<p>
<NAME> (2021). “From Meta-Editing to Virtual Editing: The <em>LdoD Archive</em> as a Computer-Assisted Editorial Space.”
<em>MLA Approaches to Teaching Pessoa's The Book of Disquiet. </em>
Eds. <NAME> and Jeró<NAME>.
New York: Modern Language Association. [forthcoming].</p>
<p> </p>
<p> </p>
<h3 id="Q02Q" style={{paddingTop: "60px", marginTop: "-60px"}}>
B. Tesis sobre el <em>Archivo LdoD</em></h3>
<p> </p>
<p><strong>2015</strong> </p>
<p>
Santos, André (2015). <em>Arquivo Digital do Livro do Desassossego: Pesquisa e Recomendacões. </em>
Lisboa: Instituto Superior Técnico da Universidade de Lisboa. MA thesis in Information Systems and Computer Engineering. <br/>
URL: <a href= "https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/846778572211152" target="_new">
https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/846778572211152</a></p>
<p> </p>
<p> <strong>2017</strong> </p>
<p>
<NAME> (2017-). <em>O Arquivo para além do Arquivo: Criação de ontologias
a partir de etiquetas geradas pelos utilizadores</em>.
Lisboa: Instituto Superior Técnico da Universidade de Lisboa. MA thesis in Information Systems and Computer Engineering [in progress]. </p>
<p>
<NAME>ília (2017-). <em>Fragmentos em prática: interação, colaboração e
criatividade no </em>Arquivo LdoD. Coimbra: Universidade de Coimbra. PhD thesis in Materialities of Literature [in progress]. </p>
<p> </p>
<p><strong>2018</strong> </p>
<p>
<NAME> (2018). <em>Automatic Collection of Citations in the <em>LdoD Archive</em></em>.
Lisboa: Instituto Superior Técnico da Universidade de Lisboa. MA thesis in Information Systems and Computer Engineering. <br/>
URL: <a href= "https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/1972678479054227" target="_new">
https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/1972678479054227</a></p>
<p>
<NAME> (2018). <em>Virtual Editions in the <em>LdoD Archive</em> using Crowdsourcing and Gamification</em>.
Lisboa: Instituto Superior Técnico da Universidade de Lisboa. MA thesis in Information Systems and Computer Engineering. <br/>
URL: <a href= "https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/1972678479054238" target="_new">
https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/1972678479054238</a></p>
<p>
<NAME> (2018). <em>Reusable Framework for Digital Humanities: A Case Study with the <em>LdoD Archive</em></em>.
Lisboa: Instituto Superior Técnico da Universidade de Lisboa. MA thesis in Information Systems and Computer Engineering. <br/>
URL: <a href= "https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/283828618790314" target="_new">
https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/283828618790314</a></p>
<p> </p>
<p><strong>2019</strong> </p>
<p>
<NAME>; (2019). <em>LdoD Visualization: Reading the <em>Book of Disquiet</em> with Information Visualization Techniques</em>.
Lisboa: Instituto Superior Técnico da Universidade de Lisboa. MA thesis in Information Systems and Computer Engineering. <br/>
URL: <a href= "https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/283828618790395" target="_new">
https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/283828618790395</a></p>
<p>
Gonç<NAME> (2019). <em>A Product Family for Digital Humanities Repositories</em>. Lisboa: Instituto Superior Técnico da
Universidade de Lisboa. MA thesis in Information Systems and Computer Engineering.<br/>
URL: <a href= "https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/1691203502343425" target="_new">
https://fenix.tecnico.ulisboa.pt/cursos/meic-a/dissertacao/1691203502343425</a> </p>
<p> </p>
<p> </p>
<h3 id="Q03Q" style={{paddingTop: "60px", marginTop: "-60px"}}>C.
Otros artículos producidos por el equipo del proyecto</h3>
<p> </p>
<p><strong>2013</strong> </p>
<p>
<NAME> (2013). “Platão no Ribatejo: notas sobre as <em>Notas</em>.”
<em>MATLIT: </em><em>Materialidades da Literatura</em> 1.1: 113-123. <br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_6" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_6</a> <br/>
DOI: <a href="https://doi.org/10.14195/2182-8830_1-1_6 " target="_new">
https://doi.org/10.14195/2182-8830_1-1_6</a> </p>
<p>
<NAME> (2013). “El concepto de biblioteca digital como cuestión epistemológica.”
<em>La biblioteca de Occidente en contexto hispánico.</em> <NAME> Á<NAME>. Logroño:
Universidad Internacional de La Rioja. 73-84. </p>
<p>
<NAME> (2013). “Estranhamentos: materialidades da escrita na Viena de 1900.”
<em>MATLIT: </em><em>Materialidades da Literatura</em> 1.1: 135-147.<br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_8" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_8</a> <br/>
DOI: <a href="https://doi.org/10.14195/2182-8830_1-1_8" target="_new">
https://doi.org/10.14195/2182-8830_1-1_8</a> </p>
<p>
<NAME> (2013). “Arqueofonía y materialidades del libro.”
<em>Documentos electrónicos y textualidades digitales: nuevos lectores, nuevas lecturas, nuevos géneros</em>.
Eds. José <NAME>acute;n-García, <NAME>acute;mez-Díaz y <NAME>évalo.
Salamanca: Ediciones Universidad de Salamanca. 91-104. </p>
<p>
<NAME> (2013). “As únicas coisas nobres que a vida contém.”
<em>MATLIT: </em><em>Materialidades da Literatura</em> 1.1: 89-112. <br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_5" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_5</a> <br/>
DOI: <a href="https://doi.org/10.14195/2182-8830_1-1_5" target="_new">
https://doi.org/10.14195/2182-8830_1-1_5</a> </p>
<p> </p>
<p> <strong>2014</strong> </p>
<p>
<NAME> (2014). “Revoluciones en la mirada: El Modernismo y la literatura digital.”
<em>Más allá de las palabras: Difusión, recepción y didáctica de la literatura hispánica</em>.
Eds. <NAME>, <NAME>;, <NAME>, Jos&<NAME>acute;<NAME>.
Valencia: PUV/Universitat de València. 213-222. </p>
<p>
<NAME> (2014). “‘As vozes saem do ar, e não de gargantas’: Arqueofonias do <em>Livro do Desassossego </em>
e da <em>Copilaçam</em> de Gil Vicente.” <em>Central de Poesia: O Livro do Desassossego</em>.
Eds. Patrícia <NAME>, <NAME>, <NAME>.
Lisboa: CLEPUL/Faculdade de Letras da Universidade de Lisboa. 63-76. </p>
<p>
Silvestre, <NAME> (2014). “O Que Nos Ensinam os Novos Meios sobre o Livro no <em>Livro do Desassossego</em>.”
<em>MATLIT: </em><em>Materialidades da Literatura </em> 1.1: 79-98. <br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_2-1_4" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_2-1_4</a> <br/>
DOI: <a href="https://doi.org/10.14195/2182-8830_2-1_4" target="_new">
https://doi.org/10.14195/2182-8830_2-1_4</a> </p>
<p> </p>
<p> <strong>2015</strong> </p>
<p>
<NAME> (2015). “(Re)criando infraestruturas: o que é exatamente a e-Filologia?”
<em>Colóquio Letras</em> 188: 48-60. <br />
URL: <a href="http://coloquio.gulbenkian.pt/cat/sirius.exe/do?bibrecord&id=PT.FCG.RCL.9782&org=I&orgp=188" target="_new">
http://coloquio.gulbenkian.pt/cat/sirius.exe/do?bibrecord&id=PT.FCG.RCL.9782&org=I&orgp=188</a> </p>
<p>
<NAME> (2015). “Barroco digital: remediação, edição textual e arquivo.”
<em>Colóquio Letras</em> 188: 20-32.<br />
URL: <a href="http://coloquio.gulbenkian.pt/cat/sirius.exe/do?bibrecord&id=PT.FCG.RCL.9780&org=I&orgp=188" target="_new">
http://coloquio.gulbenkian.pt/cat/sirius.exe/do?bibrecord&id=PT.FCG.RCL.9780&org=I&orgp=188</a> </p>
<p>
<NAME> (2015). “Academia, Geopolítica das Humanidades Digitais e Pensamento Crítico.”
<em>MATLIT: Materialidades da Literatura</em> 3.1: 111-140.<br/>
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2517" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2517</a> <br/>
DOI: <a href="https://doi.org/10.14195/2182-8830_3-1_7" target="_new">
https://doi.org/10.14195/2182-8830_3-1_7</a> </p>
<p>
<NAME> (2015). “The Virtualization of Artists’ Books: The Rationale of Remediation.”
<em>Betwixt & Between</em> [‘The Artist’s Book and Materiality /
Het Kunstenaarsboek en Materialiteit’] 36.1: 37-50. </p>
<p>
Maduro, <NAME>ôrtes (2015). “‘Ser a Página de um Livro’:
Bernardo Soares como Personagem e Livro.”
<em>Revista de Estudos Literários </em>5: 491-527. <br />
URL: <a href="https://impactum-journals.uc.pt/rel/article/view/4308/3686" target="_new">
https://impactum-journals.uc.pt/rel/article/view/4308/3686</a><br />
DOI: <a href="https://doi.org/10.14195/2183-847X_5_20" target="_new">
https://doi.org/10.14195/2183-847X_5_20</a> </p>
<p> </p>
<p> <strong>2016</strong> </p>
<p>
<NAME> (2016). “El<em> Libro del desasosiego,</em> estética y materialidad de la sensación.” <em>Abriu</em> 5: 51-63. <br />
URL: <a href="https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.4/19981" target="_new">
https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.4/19981</a> <br />
DOI: <a href="https://doi.org/10.1344/abriu2016.5.4" target="_new">
https://doi.org/10.1344/abriu2016.5.4</a> </p>
<p> </p>
<p> <strong>2017</strong> </p>
<p>
<NAME> (2017). “Materialidade e Modernidade no <em>Livro do Desassossego</em>.”
<em>Terra Roxa e Outras Terras: Revista de Estudos Literários</em> 32: 8-20. <br/>
DOI: <a href="https://dx.doi.org/10.5433/1678-2054.2016v32p8" target="_new">
https://dx.doi.org/10.5433/1678-2054.2016v32p8</a> </p>
<p> </p>
<p> <strong>2020</strong> </p>
<p>
<NAME> (2020). “Problemas de Intertextualidade Filosófica no <em>Livro do Desassossego</em>.”
<em>Novos Estudos Pessoanos - Ponto de Situação</em>,
Lisboa: Casa Fernando Pessoa. 21-30.</p>
<p> </p>
<p> </p>
<h3 id="Q04Q" style={{paddingTop: "60px", marginTop: "-60px"}}>
D. Artículos seleccionados sobre el <em>Libro del desasosiego</em></h3>
<p> </p>
<p><strong>2013</strong> </p>
<p>
<NAME> (2013). “<em>Livro do Desassossego: </em>o romance possível (var.: impossível).”
<em>Comunicações do III Congresso Internacional Fernando Pessoa. </em>
Lisboa: Casa Fernando Pessoa. Web. <br />
URL: <a href="http://www.blogletras.com/2013/12/livro-do-desassossego-o-romance.html" target="_new">
http://www.blogletras.com/2013/12/livro-do-desassossego-o-romance.html</a></p>
<p>
Sepúlveda, Pedro (2013). “Listas do Desassossego.”
<em>MATLIT: Materialidades da Literatura</em> 1.1: 35-55. <br />
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_2" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_1-1_2</a> <br/>
DOI: <a href="https://doi.org/10.14195/2182-8830_1-1_2" target="_new">
https://doi.org/10.14195/2182-8830_1-1_2</a></p>
<p> </p>
<p><strong>2014</strong></p>
<p>
Sepúlveda, Pedro (2014). “Pessoas-livros: O Arquivo Bibliográfico de Fernando Pessoa.”
<em> MATLIT: Materialidades da Literatura</em> 2.1: 55-77. <br/>
URL: <a href="https://impactum-journals.uc.pt/matlit/article/view/2182-8830_2-1_3" target="_new">
https://impactum-journals.uc.pt/matlit/article/view/2182-8830_2-1_3</a> <br/>
DOI: <a href="https://doi.org/10.14195/2182-8830_2-1_3" target="_new">
https://doi.org/10.14195/2182-8830_2-1_3</a></p>
<p> </p>
<p> <strong>2016</strong> </p>
<p>
<NAME>, e <NAME> (2016). “El desassossec del traductor o com gosar poder fer parlar en català
B<NAME>.” <em>Abriu</em> 5: 27-33. <br />
URL: <a href="https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.2/19979" target="_new">
https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.2/19979</a> <br />
DOI: <a href="https://doi.org/10.1344/abriu2016.5.2" target="_new">
https://doi.org/10.1344/abriu2016.5.2</a> </p>
<p>
<NAME> (2016). “Livros do Desassossego – No Plural.”
<em>Abriu</em> 5: 79-93. <br />
URL: <a href="https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.6/20057" target="_new">
https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.6/20057</a> <br />
DOI: <a href="https://doi.org/10.1344/abriu2016.5.6" target="_new">
https://doi.org/10.1344/abriu2016.5.6</a> </p>
<p>
<NAME>acute;nimo (2016). “Os Muitos Desassossegos”.
<em>Revista do Centro de Estudos Portugueses </em>, Belo Horizonte, v.36, n.55: 11-27.<br />
URL: <a href="http://www.periodicos.letras.ufmg.br/index.php/cesp/article/view/11386/9886" target="_new">
http://www.periodicos.letras.ufmg.br/index.php/cesp/article/view/11386/9886</a><br />
DOI: <a href="http://dx.doi.org/10.17851/2359-0076.36.55.11-27 " target="_new">
http://dx.doi.org/10.17851/2359-0076.36.55.11-27</a></p>
<p>
<NAME>acute;nimo (2016). “Narciso ciego, iluminado por Lisboa (de Cesário Verde a <NAME>).”
<em>Abriu</em> 5: 27-33. <br />
URL: <a href="https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.3/19980" target="_new">
https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.3/19980</a> <br />
DOI: <a href="https://doi.org/10.1344/abriu2016.5.3" target="_new">
https://doi.org/10.1344/abriu2016.5.3</a> </p>
<p>
<NAME> (2016). “A Ansiedade da Unidade: Uma Teoria da Edição.”
<em> LEA Lingue e Letterature d’Oriente e d’Occidente</em> 5: 284-311. <br />
URL: <a href="https://oajournals.fupress.net/index.php/bsfm-lea/article/view/7716" target="_new">
https://oajournals.fupress.net/index.php/bsfm-lea/article/view/7716</a> <br />
DOI: <a href="https://doi.org/10.13128/LEA-1824-484x-20038" target="_new">
https://doi.org/10.13128/LEA-1824-484x-20038</a> </p>
<p>
<NAME> (2016). “Da bruma para a bruma — pela bruma:
Traduzir «esta espécie de não-livro».”
<em>Abriu</em> 5: 13-25. <br />
URL: <a href="https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.1/20058" target="_new">
https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.1/20058</a> <br />
DOI: <a href="https://doi.org/10.1344/abriu2016.5.1" target="_new">
https://doi.org/10.1344/abriu2016.5.1</a> </p>
<p>
<NAME> (2016). “<em>Livro do Desassossego:</em> translating, reading, and deciphering the text.”
<em>Abriu</em> 5: 65-77. <br />
URL: <a href="https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.5/19982" target="_new">
https://revistes.ub.edu/index.php/Abriu/article/download/abriu2016.5.5/19982</a> <br />
DOI: <a href="https://doi.org/10.1344/abriu2016.5.5" target="_new">
https://doi.org/10.1344/abriu2016.5.5</a> </p>
<p> </p>
<p> </p>
<h3 id="Q05Q" style={{paddingTop: "60px", marginTop: "-60px"}}>
E. Principales ediciones del <em>Libro del desasosiego</em> (1982-2017)
</h3>
<p>
<strong> </strong>
</p>
<p>
<table className="table-bordered">
<thead>
<tr>
<th>
<p style={{textAlign:"center"}}>
<strong>Fechas</strong>
</p>
</th>
<th><p style={{textAlign:"center"}}>
<strong><NAME>elho</strong>
</p></th>
<th><p style={{textAlign:"center"}}>
<strong><NAME></strong>
</p></th>
<th><p style={{textAlign:"center"}}>
<strong><NAME></strong>
</p></th>
<th><p style={{textAlign:"center"}}>
<strong>Jeró<NAME></strong>
</p></th>
<th><p style={{textAlign:"center"}}>
<strong><NAME></strong>
</p></th>
</tr>
</thead>
<tbody>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1982</strong>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>
Coelho. Compilación y transcripción de los textos por
<NAME> y <NAME>. Lisboa:
Edições Ática. <br/> [2 volumes] <br/>
<strong>1ª edición</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1990</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
Lisboa: Editorial Presença. <br/> [volume 1, por Vicente
Guedes] <br/> <strong>1ª edición</strong> <br/>
ISBN 972-23-1330-4
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1991</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
Lisboa: Editorial Presença. <br/> [volume 2, por
Bernardo Soares] <br/> <strong>1ª edición</strong>
<br/> ISBN 972-23-1375-4
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1994</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
Campinas, SP: Unicamp. <br/> [2 volumes]<br/> <strong>2ª
edición</strong> <br/> ISBN 85-268-0288-7
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1996</strong>
</p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
Campinas, SP: Unicamp. <br/> [2 volumes]<br/>
[reimpresión]<br/> ISBN 85-268-0288-7
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1997</strong>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>
Coelho. Compilación y transcripción de los textos por
<NAME> y <NAME>. Lisboa:
Edições Ática.<br/> [2 volumes] <br/>[reimpresión]
<br/> ISBN 972-617-069-9<br/> ISBN 972-617-131-8
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
Lisboa: Editorial Presença.<br/> [2 volumes]<br/> <strong>3ª
edición</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1998</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>1ª
edición</strong> <br/> ISBN 972-37-0476-5
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>1999</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
São Paulo: Companhia das Letras. <br/> <strong>2ª
edición</strong><br/> ISBN 85-7164-857-3
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2001</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>3ª
edición </strong><br/> ISBN 972-37-0496-X
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2003</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>4ª
edición </strong><br/> ISBN 972-37-0476-5
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2005</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>5ª
edición</strong> <br/> ISBN 972-37-0476-5
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2006</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Círculo de Leitores. <br/> <strong>6ª
edición</strong> <br/> ISBN 972-42-3806-7
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2007</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em> </em>
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>7ª
edición</strong> <br/> ISBN 978-972-37-0476-1
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2008</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
Lisboa: Relógio d’Água.<br/> <strong>4ª
edición</strong> <br/> ISBN 978-972-708-954-3
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2009</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>8ª
edición</strong> <br/> ISBN 978-972-37-0476-1
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2010</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desasocego</em>. Editado por Jeró<NAME>.
Lisboa: Imprensa Nacional-Casa da Moeda. <br/> [2 volumes]<br/>
<strong>1ª edición</strong> <br/> ISBN
978-972-27-1849-3
</p></td>
<td><p>
<em> </em>
</p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2011</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim.<br/> <strong>9ª
edición </strong><br/> ISBN 978-972-37-0476-1
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2012</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>10ª
edición </strong><br/> ISBN 978-972-0-78330-1<strong> </strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2013</strong>
</p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>.
Lisboa: Relógio d’Água.<br/> <strong>5ª
edición</strong> <br/> ISBN 978-972-641-397-2
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/>[reimpresión]<br/>
ISBN 978-972-37-1121-9
</p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por Jerónimo
Pizarro. Lisboa: Tinta-da-China. <br/> <strong>2ª
edición</strong> <br/> ISBN 978-989-671-207-5
</p></td>
<td><p>
<em> </em>
</p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2014</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/> <strong>11ª
edición</strong> <br/> ISBN 978-972-37-1787-7
</p></td>
<td><p>
<em>Livro do Desassossego.</em> Editado por Jerónimo
Pizarro. Lisboa: Tinta-da-China. <br/> <strong>3ª
edición </strong><br/> ISBN 978-989-671-180-1<strong></strong><br/>
</p></td>
<td><p>
<em> </em>
</p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2015</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego</em>. Editado por <NAME>. Lisboa:
Assírio & Alvim. <br/>[reimpresión] <br/>
ISBN 978-972-37-1787-7
</p></td>
<td><p> </p></td>
<td><p>
<em>Livro(s) do Desassossego</em>. Editado por <NAME>.
São Paulo: Global Editora.<br/> <strong>1ª
edición</strong><br/> ISBN 978-852-60-2206-5
</p></td>
</tr>
<tr>
<td><p style={{textAlign:"center"}}>
<strong>2017</strong>
</p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p> </p></td>
<td><p>
<em>Livro do Desassossego.</em> Editado por Jerónimo
Pizarro. Lisboa: Tinta-da-China. <br/> [reimpresión]<br/>ISBN
978-989-671-180-1<strong></strong>
</p></td>
<td><p> </p></td>
</tr>
</tbody>
</table>
<p>
<strong> </strong>
</p>
<h3 id="Q06Q" style={{paddingTop: "60px", marginTop: "-60px"}}>
F. Recepción crítica del <em>Archivo LdoD</em></h3>
<p> </p>
<p><strong>2017</strong></p>
<p>
Barbosa, Nicolás and <NAME> (2017). “The Website of Disquiet: The First Online Critical Edition of Fernando Pessoa.”
<em>Pessoa Plural: A Journal of Fernando Pessoa Studies.</em> 12: 725-732. <br />
URL: <a href="https://doi.org/10.7301/Z07S7KZD" target="_new">
https://doi.org/10.7301/Z07S7KZD</a></p>
<p>
<NAME> (2017). “O Arquivo digital do “Livro do Desassossego” já está online. E tem muitas histórias para contar.”
<em>Observador.</em> 13 de dezembro de 2017. <br />
URL: <a href="https://observador.pt/2017/12/13/o-arquivo-digital-do-livro-do-desassossego-ja-esta-online-e-tem-muitas-historias-para-contar" target="_new">
https://observador.pt/2017/12/13/o-arquivo-digital-do-livro-do-desassossego-ja-esta-online-e-tem-muitas-historias-para-contar</a></p>
<p>
Queirós, Luí<NAME> (2017). “Um arquivo digital do <em>Livro do Desassossego</em> para ler e mexer.”
<em>Público.</em> 14 de dezembro de 2017, p. 24. <br />
URL: <a href="https://www.publico.pt/2017/12/14/culturaipsilon/noticia/um-arquivo-digital-do-livro-do-desassossego-para-ler-e-mexer-1795920" target="_new">
https://www.publico.pt/2017/12/14/culturaipsilon/noticia/um-arquivo-digital-do-livro-do-desassossego-para-ler-e-mexer-1795920</a></p>
<p> </p>
<p><strong>2018</strong></p>
<p>
Aldabalde, <NAME>, and <NAME> (2018). “A trajetividade do Pessoa digital: contributos para uma história do espólio pessoano.”
<NAME>, <NAME>, and Marí<NAME> (eds.), <em>Património Cultural e Transformação Digital</em>.
Lisboa: Universidade Católica Editora. 102-130.</p>
<p> </p>
<p> </p>
<p>[actualización 10-08-2020]</p>
</div>
)
}
export default Articles_es
|
kkcookies99/UAST
|
Dataset/Leetcode/test/9/475.py
|
class Solution:
    def XXX(self, intx):
        """Return True if ``intx`` is a palindrome number, False otherwise.

        A negative number can never be a palindrome: its leading '-' sign
        has no mirrored counterpart at the end of the digit string.
        """
        if intx < 0:
            return False
        # Convert to a string once and compare against its reverse.
        digits = str(intx)
        return digits == digits[::-1]
|
mmienko/aws-cdk-scala
|
modules/appstream/src/main/scala/io/burkard/cdk/services/appstream/CfnImageBuilder.scala
|
<filename>modules/appstream/src/main/scala/io/burkard/cdk/services/appstream/CfnImageBuilder.scala
package io.burkard.cdk.services.appstream
import scala.collection.JavaConverters._
@scala.annotation.nowarn("cat=deprecation")
@SuppressWarnings(Array("org.wartremover.warts.DefaultArguments", "org.wartremover.warts.Null", "DisableSyntax.null"))
object CfnImageBuilder {
  // Builds an `AWS::AppStream::ImageBuilder` CloudFormation resource.
  //
  // Thin Scala facade over the generated CDK builder: required properties are
  // plain parameters; optional properties default to `None` and are forwarded
  // to the Java builder as `null` ("property not set") via `orNull`.
  //
  // internalResourceId — logical ID of the construct within the stack.
  // stackCtx           — implicit CDK stack that owns the construct.
  def apply(
    internalResourceId: String,
    instanceType: String,
    name: String,
    displayName: Option[String] = None,
    accessEndpoints: Option[List[_]] = None,
    domainJoinInfo: Option[software.amazon.awscdk.services.appstream.CfnImageBuilder.DomainJoinInfoProperty] = None,
    iamRoleArn: Option[String] = None,
    enableDefaultInternetAccess: Option[Boolean] = None,
    imageArn: Option[String] = None,
    description: Option[String] = None,
    tags: Option[List[_ <: software.amazon.awscdk.CfnTag]] = None,
    vpcConfig: Option[software.amazon.awscdk.services.appstream.CfnImageBuilder.VpcConfigProperty] = None,
    imageName: Option[String] = None,
    appstreamAgentVersion: Option[String] = None
  )(implicit stackCtx: software.amazon.awscdk.Stack): software.amazon.awscdk.services.appstream.CfnImageBuilder =
    software.amazon.awscdk.services.appstream.CfnImageBuilder.Builder
      .create(stackCtx, internalResourceId)
      .instanceType(instanceType)
      .name(name)
      .displayName(displayName.orNull)
      // Lists must be converted to java.util.List before reaching the builder.
      .accessEndpoints(accessEndpoints.map(_.asJava).orNull)
      .domainJoinInfo(domainJoinInfo.orNull)
      .iamRoleArn(iamRoleArn.orNull)
      // NOTE(review): unlike every other optional field (which uses `orNull`,
      // i.e. "property not set"), an absent enableDefaultInternetAccess is
      // coerced to java.lang.Boolean.FALSE — so None explicitly disables
      // internet access instead of leaving the CDK default. Confirm this is
      // intentional in the code generator.
      .enableDefaultInternetAccess(enableDefaultInternetAccess.map(Boolean.box).getOrElse(java.lang.Boolean.FALSE))
      .imageArn(imageArn.orNull)
      .description(description.orNull)
      .tags(tags.map(_.asJava).orNull)
      .vpcConfig(vpcConfig.orNull)
      .imageName(imageName.orNull)
      .appstreamAgentVersion(appstreamAgentVersion.orNull)
      .build()
}
|
blodstone/CCS590v2
|
collinsHead/fig/record/CommandProcessor.java
|
<reponame>blodstone/CCS590v2
package fig.record;
import java.util.*;
import fig.basic.*;
import java.rmi.*;
import java.rmi.server.*;
/**
* Responsible for sending some commands to the server.
*/
public class CommandProcessor {
private RecordServerInterface server;
private Receiver receiver;
private boolean isClient, firewall;
public CommandProcessor(RecordServerInterface server,
String baseTempDir, boolean isClient, boolean firewall) {
this.server = server;
this.receiver = new Receiver(baseTempDir, LogInfo.stdout, LogInfo.stderr);
this.isClient = isClient;
this.firewall = firewall;
}
public CommandProcessor(RecordServerInterface server,
Receiver receiver, boolean isClient, boolean firewall) {
this.server = server;
this.receiver = receiver;
this.isClient = isClient;
this.firewall = firewall;
}
public void processCommand(String line) {
try {
if(isClient) UnicastRemoteObject.exportObject(receiver);
ResultReceiver result = server.processCommand(line,
firewall ? null : receiver); // If using a firewall, no callbacks
if(result != null) // But whatever is returned is then added to the receiver
receiver.addResult(result);
if(isClient) UnicastRemoteObject.unexportObject(receiver, false);
} catch(RemoteException e) {
throw new RuntimeException(e);
}
}
public void processCommandFile(String path) {
for(String line : IOUtils.readProgramLinesHard(path))
processCommand(line);
}
public void processCommandFiles(List<String> paths) {
for(String path : paths)
processCommandFile(path);
}
}
|
radiovideo/content-validator-api
|
src/main/java/org/sitenv/contentvalidator/model/CCDAProcActProc.java
|
package org.sitenv.contentvalidator.model;
import org.apache.log4j.Logger;
import org.sitenv.contentvalidator.dto.ContentValidationResult;
import org.sitenv.contentvalidator.dto.enums.ContentValidationResultLevel;
import org.sitenv.contentvalidator.parsers.ParserUtilities;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
 * Model of a C-CDA "Procedure Activity Procedure" entry.
 *
 * <p>Holds the parsed codes, performers, service delivery locations, product
 * instance (device) details, UDIs, notes activities and author of a single
 * procedure entry, and provides the comparison logic used to validate a
 * submitted C-CDA document against a reference scenario.
 */
public class CCDAProcActProc {

    private static Logger log = Logger.getLogger(CCDAProcActProc.class.getName());

    private ArrayList<CCDAII> sectionTemplateId;
    private CCDACode procCode;
    private CCDACode procStatus;
    private CCDACode targetSiteCode;
    private ArrayList<CCDAAssignedEntity> performer;
    private ArrayList<CCDAServiceDeliveryLoc> sdLocs;
    private CCDAII piTemplateId;
    private ArrayList<CCDAUDI> udi;
    private CCDACode deviceCode;
    private CCDAII scopingEntityId;
    private ArrayList<CCDANotesActivity> notesActivity;
    private CCDAAuthor author;

    /**
     * Collects this procedure's notes activities into {@code results}, keyed
     * as determined by {@link ParserUtilities#populateNotesActiviteis}.
     */
    public void getAllNotesActivities(HashMap<String, CCDANotesActivity> results) {
        if(notesActivity != null && notesActivity.size() > 0) {
            log.info(" Found non-null notes activity ");
            ParserUtilities.populateNotesActiviteis(notesActivity, results);
        }
    }

    /**
     * Compares reference procedures against submitted ones. Adds an error for
     * every reference procedure missing from the submission, and one error if
     * the submission contains procedure data the scenario does not require.
     *
     * <p>Both maps may be {@code null}; a null reference map is treated as
     * "no procedures required" (previously this threw a NullPointerException
     * because the map was iterated before the null check).
     */
    public static void compareProcedures(HashMap<String, CCDAProcActProc> refProcs,
            HashMap<String, CCDAProcActProc> subProcs, ArrayList<ContentValidationResult> results) {

        log.info(" Start Procedure Acts ");

        // For each Procedure Activity Procedure in the Ref Model, check if it
        // is present in the subCCDA Model. Guard against null maps before
        // iterating/looking up (the old code dereferenced them first).
        if(refProcs != null) {
            for(Map.Entry<String, CCDAProcActProc> ent: refProcs.entrySet()) {

                if(subProcs != null && subProcs.containsKey(ent.getKey())) {

                    log.info("Comparing Procedure Acts ");
                    String context = "Procedure Act Procedure Entry corresponding to the code " + ent.getKey();
                    subProcs.get(ent.getKey()).compare(ent.getValue(), results, context);

                } else {
                    // Error: required by the scenario but absent from the submission.
                    String error = "The scenario contains Procedure Activity Procedure data with code " + ent.getKey() +
                            " , however there is no matching data in the submitted CCDA. ";
                    ContentValidationResult rs = new ContentValidationResult(error, ContentValidationResultLevel.ERROR, "/ClinicalDocument", "0" );
                    results.add(rs);
                }
            }
        }

        // Handle the case where Procedure data is not present in the
        // reference, yet the submitted document contains some.
        if( (refProcs == null || refProcs.size() == 0) && (subProcs != null && subProcs.size() > 0) ) {

            // Error
            String error = "The scenario does not require Procedure data " +
                    " , however there is Procedure data in the submitted CCDA. ";
            ContentValidationResult rs = new ContentValidationResult(error, ContentValidationResultLevel.ERROR, "/ClinicalDocument", "0" );
            results.add(rs);
        }
    }

    /**
     * Compares this (submitted) procedure against the reference procedure,
     * validating the procedure code and status code. Template id and
     * effective time comparison are intentionally disabled (kept commented
     * below as a record of the omitted checks).
     */
    public void compare(CCDAProcActProc refProc, ArrayList<ContentValidationResult> results , String context) {

        log.info("Comparing Procedure Activity Procedure ");

        // Handle Template Ids
        // ParserUtilities.compareTemplateIds(refProc.getSectionTemplateId(), sectionTemplateId, results, context);

        // Compare Effective Times
        // String elementNameTime = "Effective Time for " + context;
        // ParserUtilities.compareEffectiveTime(refResult.getMeasurementTime(), measurementTime, results, elementNameTime);

        // Compare procedure codes (full code comparison for the procedure
        // itself, code-only comparison for the status).
        String elementNameVal = "Procedure Act Procedure code element for " + context;
        ParserUtilities.compareCode(refProc.getProcCode(), procCode, results, elementNameVal);

        String statusCodeElem = "Procedure Act Procedure Status code element for " + context;
        ParserUtilities.justCompareCode(refProc.getProcStatus(), procStatus, results, statusCodeElem);
    }

    /** Writes the full state of this procedure entry to the log. */
    public void log() {

        if(procCode != null)
            log.info(" Procedure Code = " + procCode.getCode());

        if(procStatus != null)
            log.info(" Procedure status = " + procStatus.getCode());

        for(int j = 0; j < sectionTemplateId.size(); j++) {
            log.info(" Template Id [" + j + "] = " + sectionTemplateId.get(j).getRootValue());
            log.info(" Template Id Ext [" + j + "] = " + sectionTemplateId.get(j).getExtValue());
        }

        if(targetSiteCode != null)
            log.info(" Target Site Code = " + targetSiteCode.getCode());

        if(deviceCode != null)
            log.info(" Device Code = " + deviceCode.getCode());

        if(piTemplateId != null)
            log.info(" Tempalte Id = " + piTemplateId.getRootValue());

        if(scopingEntityId != null)
            log.info(" Scoping Entity Id = " + scopingEntityId.getRootValue());

        // These lists are never null: the constructor initializes them and
        // every setter ignores null arguments.
        for(CCDAAssignedEntity p : performer) {
            p.log();
        }

        for(CCDAServiceDeliveryLoc loc : sdLocs) {
            loc.log();
        }

        for(CCDAUDI u : udi) {
            u.log();
        }

        for(CCDANotesActivity na : notesActivity) {
            na.log();
        }

        if(author != null)
            author.log();
    }

    /** Initializes all list members to empty lists so they are never null. */
    public CCDAProcActProc()
    {
        sectionTemplateId = new ArrayList<CCDAII>();
        performer = new ArrayList<CCDAAssignedEntity>();
        sdLocs = new ArrayList<CCDAServiceDeliveryLoc>();
        udi = new ArrayList<CCDAUDI>();
        notesActivity = new ArrayList<CCDANotesActivity>();
    }

    public ArrayList<CCDAII> getSectionTemplateId() {
        return sectionTemplateId;
    }

    public void setSectionTemplateId(ArrayList<CCDAII> ids) {
        if(ids != null)
            this.sectionTemplateId = ids;
    }

    public CCDACode getProcCode() {
        return procCode;
    }

    public void setProcCode(CCDACode procCode) {
        this.procCode = procCode;
    }

    public CCDACode getProcStatus() {
        return procStatus;
    }

    public void setProcStatus(CCDACode procStatus) {
        this.procStatus = procStatus;
    }

    public CCDACode getTargetSiteCode() {
        return targetSiteCode;
    }

    public void setTargetSiteCode(CCDACode targetSiteCode) {
        this.targetSiteCode = targetSiteCode;
    }

    public ArrayList<CCDAAssignedEntity> getPerformer() {
        return performer;
    }

    public void setPerformer(ArrayList<CCDAAssignedEntity> ps) {
        if(ps != null)
            this.performer = ps;
    }

    public ArrayList<CCDAServiceDeliveryLoc> getSdLocs() {
        return sdLocs;
    }

    public void setSdLocs(ArrayList<CCDAServiceDeliveryLoc> sdl) {
        if(sdl != null)
            this.sdLocs = sdl;
    }

    public CCDAII getPiTemplateId() {
        return piTemplateId;
    }

    public void setPiTemplateId(CCDAII piTemplateId) {
        this.piTemplateId = piTemplateId;
    }

    public ArrayList<CCDAUDI> getUdi() {
        return udi;
    }

    public void setUdi(ArrayList<CCDAUDI> udis) {
        if(udis != null)
            this.udi = udis;
    }

    public CCDACode getDeviceCode() {
        return deviceCode;
    }

    public void setDeviceCode(CCDACode deviceCode) {
        this.deviceCode = deviceCode;
    }

    public CCDAII getScopingEntityId() {
        return scopingEntityId;
    }

    public void setScopingEntityId(CCDAII scopingEntityId) {
        this.scopingEntityId = scopingEntityId;
    }

    public void setPatientUDI(ArrayList<CCDAUDI> udis) {
        if(udis != null)
            this.udi = udis;
    }

    public ArrayList<CCDANotesActivity> getNotesActivity() {
        return notesActivity;
    }

    public void setNotesActivity(ArrayList<CCDANotesActivity> notesActivity) {
        // Ignore null for consistency with the other list setters, so log()
        // and getAllNotesActivities() can safely iterate the list.
        if(notesActivity != null)
            this.notesActivity = notesActivity;
    }

    public CCDAAuthor getAuthor() {
        return author;
    }

    public void setAuthor(CCDAAuthor author) {
        this.author = author;
    }
}
|
hsiangawang/whois
|
whois-nrtm/src/test/java/net/ripe/db/whois/nrtm/NrtmAclLimitHandlerTest.java
|
package net.ripe.db.whois.nrtm;
import net.ripe.db.whois.query.QueryMessages;
import net.ripe.db.whois.query.acl.AccessControlListManager;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelEvent;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelState;
import org.jboss.netty.channel.UpstreamChannelStateEvent;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.net.Inet4Address;
import java.net.InetSocketAddress;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class NrtmAclLimitHandlerTest {

    /** Remote IP used by every test; the port is arbitrary. */
    private static final String REMOTE_IP = "10.0.0.0";
    private static final int REMOTE_PORT = 43;

    @Mock private ChannelHandlerContext ctx;
    @Mock private Channel channel;
    @Mock private ChannelFuture channelFuture;
    @Mock private NrtmLog nrtmLog;
    @Mock private AccessControlListManager accessControlListManager;

    private NrtmAclLimitHandler subject;

    @Before
    public void setUp() {
        this.subject = new NrtmAclLimitHandler(accessControlListManager, nrtmLog);

        when(ctx.getChannel()).thenReturn(channel);
        when(channel.write(anyObject())).thenReturn(channelFuture);
    }

    /** A permanently banned address is rejected with the permanent-denial message. */
    @Test
    public void acl_permanently_blocked() throws Exception {
        final InetSocketAddress remoteAddress = stubRemoteAddress();
        when(accessControlListManager.isDenied(remoteAddress.getAddress())).thenReturn(true);

        subject.handleUpstream(ctx, channelOpenedEvent());

        expectConnectionRejected(QueryMessages.accessDeniedPermanently(remoteAddress.getAddress()));
    }

    /** An address over its personal-object quota is rejected with the temporary-denial message. */
    @Test
    public void acl_temporarily_blocked() throws Exception {
        final InetSocketAddress remoteAddress = stubRemoteAddress();
        when(accessControlListManager.isDenied(remoteAddress.getAddress())).thenReturn(false);
        when(accessControlListManager.canQueryPersonalObjects(remoteAddress.getAddress())).thenReturn(false);

        subject.handleUpstream(ctx, channelOpenedEvent());

        expectConnectionRejected(QueryMessages.accessDeniedTemporarily(remoteAddress.getAddress()));
    }

    /** An allowed address passes through: the event is propagated and the channel stays open. */
    @Test
    public void acl_limit_not_breached() throws Exception {
        final InetSocketAddress remoteAddress = stubRemoteAddress();
        when(accessControlListManager.isDenied(remoteAddress.getAddress())).thenReturn(false);
        when(accessControlListManager.canQueryPersonalObjects(remoteAddress.getAddress())).thenReturn(true);

        final ChannelEvent event = channelOpenedEvent();
        subject.handleUpstream(ctx, event);

        verify(ctx, times(1)).sendUpstream(event);
        verify(channel, never()).close();
        verify(channel, never()).write(anyObject());
        verify(channelFuture, never()).addListener(ChannelFutureListener.CLOSE);
    }

    /** Stubs the channel's remote address and returns it for further stubbing. */
    private InetSocketAddress stubRemoteAddress() {
        final InetSocketAddress remoteAddress = new InetSocketAddress(REMOTE_IP, REMOTE_PORT);
        when(channel.getRemoteAddress()).thenReturn(remoteAddress);
        return remoteAddress;
    }

    /** Channel OPEN upstream event, as fired by Netty when a client connects. */
    private ChannelEvent channelOpenedEvent() {
        return new UpstreamChannelStateEvent(channel, ChannelState.OPEN, Boolean.TRUE);
    }

    /** Asserts the ACL message was written, the channel scheduled to close, and the rejection logged. */
    private void expectConnectionRejected(final Object aclMessage) throws Exception {
        verify(channel, times(1)).write(eq(aclMessage));
        verify(channelFuture, times(1)).addListener(ChannelFutureListener.CLOSE);
        verify(nrtmLog).log(Inet4Address.getByName(REMOTE_IP), "REJECTED");
    }
}
|
rocious/omaha
|
third_party/lzma/v4_65/files/CPP/7zip/Archive/Common/InStreamWithCRC.h
|
<reponame>rocious/omaha<filename>third_party/lzma/v4_65/files/CPP/7zip/Archive/Common/InStreamWithCRC.h
// InStreamWithCRC.h
#ifndef __INSTREAMWITHCRC_H
#define __INSTREAMWITHCRC_H
#include "../../../Common/MyCom.h"
#include "../../IStream.h"
extern "C"
{
#include "../../../../C/7zCrc.h"
}
// Decorating sequential input stream that tracks a running CRC and the total
// number of bytes read through it. Read() is declared here and presumably
// implemented in InStreamWithCRC.cpp, where _size/_crc/_wasFinished are
// updated — TODO confirm against the .cpp.
class CSequentialInStreamWithCRC:
public ISequentialInStream,
public CMyUnknownImp
{
public:
MY_UNKNOWN_IMP
STDMETHOD(Read)(void *data, UInt32 size, UInt32 *processedSize);
private:
CMyComPtr<ISequentialInStream> _stream; // wrapped source stream
UInt64 _size; // byte counter, reset by Init()
UInt32 _crc; // running CRC state; finalize via CRC_GET_DIGEST
bool _wasFinished; // end-of-stream flag, reset by Init()
public:
void SetStream(ISequentialInStream *stream) { _stream = stream; }
// Resets counter, CRC state and the finished flag; call before each use.
void Init()
{
_size = 0;
_wasFinished = false;
_crc = CRC_INIT_VAL;
}
void ReleaseStream() { _stream.Release(); }
UInt32 GetCRC() const { return CRC_GET_DIGEST(_crc); }
UInt64 GetSize() const { return _size; }
bool WasFinished() const { return _wasFinished; }
};
// Seekable variant of the CRC-tracking stream wrapper: same bookkeeping as
// CSequentialInStreamWithCRC but over IInStream, adding Seek(). Read()/Seek()
// are presumably implemented in InStreamWithCRC.cpp — TODO confirm how Seek
// interacts with the running CRC there.
class CInStreamWithCRC:
public IInStream,
public CMyUnknownImp
{
public:
MY_UNKNOWN_IMP1(IInStream)
STDMETHOD(Read)(void *data, UInt32 size, UInt32 *processedSize);
STDMETHOD(Seek)(Int64 offset, UInt32 seekOrigin, UInt64 *newPosition);
private:
CMyComPtr<IInStream> _stream; // wrapped source stream
UInt64 _size; // byte counter, reset by Init()
UInt32 _crc; // running CRC state; finalize via CRC_GET_DIGEST
bool _wasFinished; // end-of-stream flag, reset by Init()
public:
void SetStream(IInStream *stream) { _stream = stream; }
// Resets counter, CRC state and the finished flag; call before each use.
void Init()
{
_size = 0;
_wasFinished = false;
_crc = CRC_INIT_VAL;
}
void ReleaseStream() { _stream.Release(); }
UInt32 GetCRC() const { return CRC_GET_DIGEST(_crc); }
UInt64 GetSize() const { return _size; }
bool WasFinished() const { return _wasFinished; }
};
#endif
|
voleking/ICPC
|
references/aoapc-book/BeginningAlgorithmContests/exercises/ch5/5.5.2/UVaOJ/457.cpp
|
#include<iostream>
#include<cstring>
using namespace std;
int main()
{
    // UVa 457: a 1-D cellular automaton on a 40-position dish, evolved for
    // 50 days. The 10-entry DNA table maps each neighbourhood density sum
    // (left + self + right, each cell holding 0..3) to the next-day density.
    const char glyph[4] = {' ', '.', 'x', 'W'};

    int cases;
    cin >> cases;
    for (int t = 1; t <= cases; t++) {
        int dnaRule[10] = {0};
        int today[42] = {0};     // indices 0 and 41 stay zero: fixed borders
        int snapshot[42] = {0};
        today[20] = 1;           // start with a single density-1 cell mid-dish

        for (int j = 0; j < 10; j++) {
            cin >> dnaRule[j];
        }

        for (int day = 1; day <= 50; day++) {
            // Print the current generation while copying it aside, so the
            // update below reads a stable snapshot of today's densities.
            for (int pos = 1; pos <= 40; pos++) {
                snapshot[pos] = today[pos];
                const int state = today[pos];
                if (state >= 0 && state <= 3) {
                    cout << glyph[state];
                }
            }
            cout << endl;
            for (int pos = 1; pos <= 40; pos++) {
                today[pos] = dnaRule[snapshot[pos - 1] + snapshot[pos] + snapshot[pos + 1]];
            }
        }
        if (t < cases) {
            cout << endl;   // blank line between consecutive test cases
        }
    }
    return 0;
}
/*
sample input:
1
0 1 2 0 1 3 3 2 3 0
sample output:
(false answer intended to help you read:)
bbbbbbbbbbbbbbbbbbb.bbbbbbbbbbbbbbbbbbbb
bbbbbbbbbbbbbbbbbb...bbbbbbbbbbbbbbbbbbb
bbbbbbbbbbbbbbbbb.xbx.bbbbbbbbbbbbbbbbbb
bbbbbbbbbbbbbbbb.bb.bb.bbbbbbbbbbbbbbbbb
bbbbbbbbbbbbbbb.........bbbbbbbbbbbbbbbb
bbbbbbbbbbbbbb.xbbbbbbbx.bbbbbbbbbbbbbbb
bbbbbbbbbbbbb.bbxbbbbbxbb.bbbbbbbbbbbbbb
bbbbbbbbbbbb...xxxbbbxxx...bbbbbbbbbbbbb
bbbbbbbbbbb.xb.WW.xbx.WW.bx.bbbbbbbbbbbb
bbbbbbbbbb.bbb.xxWb.bWxx.bbb.bbbbbbbbbbb
bbbbbbbbb...b.bWxW...WxWb.b...bbbbbbbbbb
bbbbbbbb.xbxx..WWWWbWWWW..xxbx.bbbbbbbbb
bbbbbbb.bb..W.WxbbWWWbbxW.W..bb.bbbbbbbb
bbbbbb....xWWxWWxbWbWbxWWxWWx....bbbbbbb
bbbbb.xbb.WWWWWWWWbWbWWWWWWWW.bbx.bbbbbb
bbbb.bbx..xbbbbbbWWbWWbbbbbbx..xbb.bbbbb
bbb...xb..bxbbbbbWWWWWbbbbbxb..bx...bbbb
bb.xb.bbxxbxxbbbbWbbbWbbbbxxbxxbb.bx.bbb
b.bbb..x.....xbbbbbbbbbbbx.....x..bbb.bb
...b.x...bbb.bxbbbbbbbbbxb.bbb...x.b...b
xbxxb..bx.b..bxxbbbbbbbxxb..b.xb..bxxbx.
x...bxxbbbxxxb..xbbbbbx..bxxxbbbxxb...bb
b.bxb..xbx.W.bx.bxbbbxb.xb.W.xbx..bxbx.b
..bxbx.b.bWW.bbbbxxbxxbbbb.WWb.b.xbx.bb.
xxbx.bbx..Wx..bbx.....xbb..xW..xbb.bb...
...bb.xb.WWW.x.xb.bbb.bx.x.WWW.bx....xbx
xbx..bbb.xbxW.Wbb..b..bbW.Wxbx.bb.bb.b.x
x.b.x.b.bb.WWx.b.xxxxx.b.xWW.bb......xbb
bbxb.bx....xWWbxbWWWWWbxbWWx...xbbbb.bxb
bxxb.bb.bb.WWWWxWWbbbWWxWWWW.b.bxbb..bxx
x..b.......xbbWWWWbbbWWWWbbx.x.bxx.xxb..
b.xxxbbbbb.bxbWbbWbbbWbbWbxbW.bb.WWW.bxx
.bWW.xbbb..bxWbbbbbbbbbbbWxW.....xbx.b..
..WxWbxb.xxbWWbbbbbbbbbbbWWWWbbb.b.bbxxx
xWWWWWxbbW.WWWbbbbbbbbbbbWbbWbb..x..x.W.
WWbbbWWxb.xxbWbbbbbbbbbbbbbbbb.x.....WW.
WWbbbWWWbbW.Wbbbbbbbbbbbbbbbb.b..bbbWxx.
WWbbbWbWbb.x.bbbbbbbbbbbbbbb..xxx.bbWxWb
WWbbbbWbb.b.b.bbbbbbbbbbbbb.x.WWWb.bWWWb
WWbbbbbb..x.x..bbbbbbbbbbb.b.WxbW...WbWb
WWbbbbb.x..W..x.bbbbbbbbb..x.WWW.WbW.Wbb
WWbbbb.b..WWW..b.bbbbbbb.x..Wxbxx.W.x.bb
WWbbb..xxWxbxWxx..bbbbb.b..WWW..WWWW.b.b
WWbb.x.WxxW.WxxW.x.bbb..xxWxbxWWxbbx.x..
WWb.b.WWxxWxWxxWW.b.b.x.WxxW.WWWWxxbW..x
WW..x.xWxxxWxxxWx.x.xb.WWxxWxxbbWx.W.W.b
WxW..WWxxWxxxWxxWW.Wbb.xWxxxx.xbWWWWxW..
WWWWWxWxxxxWxxxxWxx.b.bWxxWWWWbWWbbWWWWx
WbbbWWxxWWxxxWWxxxWbx..WxxWbbWWWWbbWbbWW
bbbbWWxxWWxWxWWxWxWWb.WWxxWbbWbbWbbbbbWW
' ' instead of 'b'(real answer):
.
...
.x x.
. . .
.........
.x x.
. x x .
...xxx xxx...
.x .WW.x x.WW. x.
. .xxW . Wxx. .
... . WxW...WxW . ...
.x xx..WWWW WWWW..xx x.
. ..W.Wx WWW xW.W.. .
....xWWxWWx W W xWWxWWx....
.x .WWWWWWWW W WWWWWWWW. x.
. x..x WW WW x..x .
...x .. x WWWWW x .. x...
.x . xx xx W W xx xx . x.
. ..x.....x x.....x.. .
... .x... . x x . ...x. ...
x xx .. x. .. xx xx .. .x .. xx x.
x... xx xxx ..x x.. xxx xx ...
. x ..x x.W. x. x x .x .W.x x.. x x.
.. x x. . WW. xx xx .WW . .x x. .
xx x. x..Wx.. x.....x ..xW..x . ...
... .x .WWW.x.x . . x.x.WWW. x....x x
x x.. .x xW.W .. .. W.Wx x. . . .x
x. .x. . .WWx. .xxxxx. .xWW. ......x
x . x....xWW x WWWWW x WWx...x . x
xx . . .WWWWxWW WWxWWWW. . x .. xx
x.. .......x WWWW WWWW x.x. xx.xx ..
.xxx . x W W W W x W. .WWW. xx
. WW.x .. xW WxW.....x x. ..
..WxW x .xx WW WWWW . . xxx
xWWWWWx W.WWW W W ..x..x.W.
WW WWx .xx W .x.....WW.
WW WWW W.W . .. Wxx.
WW W W .x. ..xxx. WxW
WW W . . . .x.WWW . WWW
WW ..x.x.. . .Wx W...W W
WW .x..W..x. ..x.WWW.W W.W
WW . ..WWW.. . .x..Wx xx.W.x.
WW ..xxWx xWxx.. . ..WWW..WWWW. .
WW .x.WxxW.WxxW.x. ..xxWx xWWx x.x..
WW . .WWxxWxWxxWW. . .x.WxxW.WWWWxx W..x
WW..x.xWxxxWxxxWx.x.x .WWxxWxx Wx.W.W.
WxW..WWxxWxxxWxxWW.W .xWxxxx.x WWWWxW..
WWWWWxWxxxxWxxxxWxx. . WxxWWWW WW WWWWx
W WWxxWWxxxWWxxxW x..WxxW WWWW W WW
WWxxWWxWxWWxWxWW .WWxxW W W WW
*/
|
techthiyanes/malaya-speech
|
malaya_speech/model/classification.py
|
import numpy as np
from malaya_speech.model.frame import Frame
from malaya_speech.utils.padding import (
sequence_nd as padding_sequence_nd,
sequence_1d,
)
from malaya_speech.utils.activation import softmax
from malaya_speech.model.abstract import Abstract
class Speakernet(Abstract):
def __init__(
self,
input_nodes,
output_nodes,
vectorizer,
sess,
model,
extra,
label,
name,
):
self._input_nodes = input_nodes
self._output_nodes = output_nodes
self._vectorizer = vectorizer
self._sess = sess
self._extra = extra
self.labels = label
self.__model__ = model
self.__name__ = name
def vectorize(self, inputs):
"""
Vectorize inputs.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: np.array
returned [B, D].
"""
inputs = [
input.array if isinstance(input, Frame) else input
for input in inputs
]
inputs = [self._vectorizer(input) for input in inputs]
inputs, lengths = padding_sequence_nd(
inputs, dim=0, return_len=True
)
r = self._execute(
inputs=[inputs, lengths],
input_labels=['Placeholder', 'Placeholder_1'],
output_labels=['logits'],
)
return r['logits']
def __call__(self, inputs):
return self.vectorize(inputs)
class Speaker2Vec(Abstract):
def __init__(
self,
input_nodes,
output_nodes,
vectorizer,
sess,
model,
extra,
label,
name,
):
self._input_nodes = input_nodes
self._output_nodes = output_nodes
self._vectorizer = vectorizer
self._sess = sess
self._extra = extra
self.labels = label
self.__model__ = model
self.__name__ = name
def vectorize(self, inputs):
"""
Vectorize inputs.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: np.array
returned [B, D].
"""
inputs = [
input.array if isinstance(input, Frame) else input
for input in inputs
]
inputs = [self._vectorizer(input, **self._extra) for input in inputs]
if self.__model__ == 'deep-speaker':
dim = 0
else:
dim = 1
inputs = padding_sequence_nd(inputs, dim=dim)
inputs = np.expand_dims(inputs, -1)
r = self._execute(
inputs=[inputs],
input_labels=['Placeholder'],
output_labels=['logits'],
)
return r['logits']
def __call__(self, inputs):
return self.vectorize(inputs)
class SpeakernetClassification(Abstract):
def __init__(
self,
input_nodes,
output_nodes,
vectorizer,
sess,
model,
extra,
label,
name,
):
self._input_nodes = input_nodes
self._output_nodes = output_nodes
self._vectorizer = vectorizer
self._sess = sess
self._extra = extra
self.labels = label
self.__model__ = model
self.__name__ = name
def predict_proba(self, inputs):
"""
Predict inputs, will return probability.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: np.array
returned [B, D].
"""
inputs = [
input.array if isinstance(input, Frame) else input
for input in inputs
]
inputs = [self._vectorizer(input) for input in inputs]
inputs, lengths = padding_sequence_nd(
inputs, dim=0, return_len=True
)
r = self._execute(
inputs=[inputs, lengths],
input_labels=['Placeholder', 'Placeholder_1'],
output_labels=['logits'],
)
return softmax(r['logits'], axis=-1)
def predict(self, inputs):
"""
Predict inputs, will return labels.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: List[str]
returned [B].
"""
probs = np.argmax(self.predict_proba(inputs), axis=1)
return [self.labels[p] for p in probs]
def __call__(self, input):
"""
Predict input, will return label.
Parameters
----------
inputs: np.array
np.array or malaya_speech.model.frame.Frame.
Returns
-------
result: str
"""
return self.predict([input])[0]
class Classification(Abstract):
def __init__(
self,
input_nodes,
output_nodes,
vectorizer,
sess,
model,
extra,
label,
name,
):
self._input_nodes = input_nodes
self._output_nodes = output_nodes
self._vectorizer = vectorizer
self._sess = sess
self._extra = extra
self.labels = label
self.__model__ = model
self.__name__ = name
def predict_proba(self, inputs):
"""
Predict inputs, will return probability.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: np.array
returned [B, D].
"""
inputs = [
input.array if isinstance(input, Frame) else input
for input in inputs
]
inputs = [self._vectorizer(input, **self._extra) for input in inputs]
if self.__model__ == 'deep-speaker':
dim = 0
else:
dim = 1
inputs = padding_sequence_nd(inputs, dim=dim)
inputs = np.expand_dims(inputs, -1)
r = self._execute(
inputs=[inputs],
input_labels=['Placeholder'],
output_labels=['logits'],
)
return softmax(r['logits'], axis=-1)
def predict(self, inputs):
"""
Predict inputs, will return labels.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: List[str]
returned [B].
"""
probs = np.argmax(self.predict_proba(inputs), axis=1)
return [self.labels[p] for p in probs]
def __call__(self, input):
"""
Predict input, will return label.
Parameters
----------
inputs: np.array
np.array or malaya_speech.model.frame.Frame.
Returns
-------
result: str
"""
return self.predict([input])[0]
class MarbleNetClassification(Abstract):
def __init__(
self,
input_nodes,
output_nodes,
vectorizer,
sess,
model,
extra,
label,
name,
):
self._input_nodes = input_nodes
self._output_nodes = output_nodes
self._vectorizer = vectorizer
self._sess = sess
self._extra = extra
self.labels = label
self.__model__ = model
self.__name__ = name
def predict_proba(self, inputs):
"""
Predict inputs, will return probability.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: np.array
returned [B, D].
"""
inputs = [
input.array if isinstance(input, Frame) else input
for input in inputs
]
inputs, lengths = sequence_1d(
inputs, return_len=True
)
r = self._execute(
inputs=[inputs, lengths],
input_labels=['X_placeholder', 'X_len_placeholder'],
output_labels=['logits'],
)
return softmax(r['logits'], axis=-1)
def predict(self, inputs):
"""
Predict inputs, will return labels.
Parameters
----------
inputs: List[np.array]
List[np.array] or List[malaya_speech.model.frame.Frame].
Returns
-------
result: List[str]
returned [B].
"""
probs = np.argmax(self.predict_proba(inputs), axis=1)
return [self.labels[p] for p in probs]
def __call__(self, input):
"""
Predict input, will return label.
Parameters
----------
inputs: np.array
np.array or malaya_speech.model.frame.Frame.
Returns
-------
result: str
"""
return self.predict([input])[0]
|
EditApp/TaggingApp
|
out/shared/Component/Component.js
|
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var react_1 = __importDefault(require("react"));
var styled_components_1 = __importDefault(require("styled-components"));
// FIX: this line previously read
//   styled_components_1.default.section < ComponentStyles > "\n \n"
// i.e. a TypeScript generic call (`styled.section<ComponentStyles>`...``)
// that was emitted as two comparison operators plus a reference to an
// undefined `ComponentStyles` binding, throwing a ReferenceError at require
// time. Invoke the tagged-template function directly with its strings array.
exports.ComponentStyled = styled_components_1.default.section(["\n \n"]);
// Thin wrapper that renders its children inside the styled <section>,
// merging the caller-supplied className with the "component" class.
var Component = function (_a) {
    var children = _a.children, className = _a.className;
    return react_1.default.createElement(exports.ComponentStyled, { className: "component " + className }, children);
};
exports.default = Component;
|
PNBenfica/Tipsters
|
src/js/components/sports/tipsOnThisEvent/ChatHeader.js
|
import React from "react";
export default class ChatHeader extends React.Component {
render() {
return (
<div class="panel-heading">
<h3 class="panel-title"><i class="fa fa-comments fa-fw"></i> Tips on this event</h3>
</div>
);
}
}
|
CDFN/SpongeAPI
|
src/main/java/org/spongepowered/api/scheduler/ScheduledUpdateList.java
|
<filename>src/main/java/org/spongepowered/api/scheduler/ScheduledUpdateList.java
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.scheduler;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.fluid.FluidType;
import org.spongepowered.api.registry.DefaultedRegistryReference;
import org.spongepowered.math.vector.Vector3i;
import java.time.Duration;
import java.time.temporal.TemporalUnit;
import java.util.Collection;
/**
 * A tick based priority scheduled list targeting specific types of
 * objects that need to be ticked. In common cases, there's either
 * a {@link BlockType} or {@link FluidType} being ticked.
 *
 * @param <T> The type of update objects that are being scheduled
 */
public interface ScheduledUpdateList<T> {
/**
 * Schedules a new update at the desired position with the provided delay,
 * using {@code TaskPriorities.NORMAL} priority.
 *
 * @param pos The position
 * @param target The target
 * @param delay The delay
 * @param temporalUnit The unit of the delay
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(Vector3i pos, T target, int delay, TemporalUnit temporalUnit) {
return this.schedule(pos.getX(), pos.getY(), pos.getZ(), target, delay, temporalUnit, TaskPriorities.NORMAL.get());
}
/**
 * Schedules a new update with the given {@code T object} for a desired
 * {@link Duration}, using {@code TaskPriorities.NORMAL} priority.
 *
 * @param pos The position
 * @param target The target
 * @param delay The delay with a duration
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(Vector3i pos, T target, Duration delay) {
return this.schedule(pos.getX(), pos.getY(), pos.getZ(), target, delay, TaskPriorities.NORMAL);
}
/**
 * Schedules a new update for the desired target at the given block
 * coordinates, using {@code TaskPriorities.NORMAL} priority.
 *
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @param target The target object
 * @param delay The delay
 * @param temporalUnit The unit of time
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(int x, int y, int z, T target, int delay, TemporalUnit temporalUnit) {
return this.schedule(x, y, z, target, delay, temporalUnit, TaskPriorities.NORMAL.get());
}
/**
 * Schedules a new update at the given block coordinates with the provided
 * delay, using {@code TaskPriorities.NORMAL} priority.
 *
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @param target The target
 * @param delay The delay
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(int x, int y, int z, T target, Duration delay) {
return this.schedule(x, y, z, target, delay, TaskPriorities.NORMAL);
}
/**
 * Schedules a new update at the desired position with the provided delay
 * and priority.
 *
 * @param pos The position
 * @param target The target
 * @param delay The delay
 * @param temporalUnit The unit of time
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(Vector3i pos, T target, int delay, TemporalUnit temporalUnit, TaskPriority priority) {
return this.schedule(pos.getX(), pos.getY(), pos.getZ(), target, Duration.of(delay, temporalUnit), priority);
}
/**
 * Schedules a new update at the desired position with the provided delay
 * and registered priority.
 *
 * @param pos The position
 * @param target The target
 * @param delay The delay
 * @param temporalUnit The unit of time
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(Vector3i pos, T target, int delay, TemporalUnit temporalUnit, DefaultedRegistryReference<? extends TaskPriority> priority) {
return this.schedule(pos.getX(), pos.getY(), pos.getZ(), target, Duration.of(delay, temporalUnit), priority.get());
}
/**
 * Schedules a new update at the desired position with the provided delay
 * and priority.
 *
 * @param pos The position
 * @param target The target
 * @param delay The delay
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(Vector3i pos, T target, Duration delay, TaskPriority priority) {
return this.schedule(pos.getX(), pos.getY(), pos.getZ(), target, delay, priority);
}
/**
 * Schedules a new update at the desired position with the provided delay
 * and registered priority.
 *
 * @param pos The position
 * @param target The target
 * @param delay The delay
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(Vector3i pos, T target, Duration delay, DefaultedRegistryReference<? extends TaskPriority> priority) {
return this.schedule(pos.getX(), pos.getY(), pos.getZ(), target, delay, priority.get());
}
/**
 * Schedules a new update at the given block coordinates with the provided
 * delay and priority.
 *
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @param target The target
 * @param delay The delay
 * @param temporalUnit The unit of time
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(int x, int y, int z, T target, int delay, TemporalUnit temporalUnit, TaskPriority priority) {
return this.schedule(x, y, z, target, Duration.of(delay, temporalUnit), priority);
}
/**
 * Schedules a new update at the given block coordinates with the provided
 * delay and registered priority.
 *
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @param target The target
 * @param delay The delay
 * @param temporalUnit The unit of time
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(int x, int y, int z, T target, int delay, TemporalUnit temporalUnit, DefaultedRegistryReference<? extends TaskPriority> priority) {
return this.schedule(x, y, z, target, Duration.of(delay, temporalUnit), priority.get());
}
/**
 * Schedules a new update at the given block coordinates with the provided
 * delay and priority.
 *
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @param target The target
 * @param delay The delay
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
ScheduledUpdate<T> schedule(int x, int y, int z, T target, Duration delay, TaskPriority priority);
/**
 * Schedules a new update at the given block coordinates with the provided
 * delay and registered priority.
 *
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @param target The target
 * @param delay The delay
 * @param priority The priority of the scheduled update
 * @return The scheduled update
 */
default ScheduledUpdate<T> schedule(int x, int y, int z, T target, Duration delay, DefaultedRegistryReference<? extends TaskPriority> priority) {
return this.schedule(x, y, z, target, delay, priority.get());
}
/**
 * Gets whether there's a scheduled update at the desired position with the provided target.
 *
 * @param pos The position
 * @param target The target
 * @return True if there's an update scheduled
 */
default boolean isScheduled(Vector3i pos, T target) {
return this.isScheduled(pos.getX(), pos.getY(), pos.getZ(), target);
}
/**
 * Gets whether there's a scheduled update at the given block coordinates
 * with the provided target.
 *
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @param target The target
 * @return True if there's an update scheduled
 */
boolean isScheduled(int x, int y, int z, T target);
/**
 * Gets a collection of scheduled updates at the desired position.
 *
 * @param pos The position
 * @return The collection of scheduled updates at the desired position
 */
default Collection<? extends ScheduledUpdate<T>> getScheduledAt(Vector3i pos) {
return this.getScheduledAt(pos.getX(), pos.getY(), pos.getZ());
}
/**
 * Gets a collection of scheduled updates at the desired position. There is
 * no guarantee on the ordering of the returned updates.
 * @param x The x coordinate
 * @param y The y coordinate
 * @param z The z coordinate
 * @return The collection of scheduled updates at the desired position
 */
Collection<? extends ScheduledUpdate<T>> getScheduledAt(int x, int y, int z);
}
|
duchoang/aws-scala
|
core/src/test/scala/io/atlassian/aws/spec/ScalazProperties.scala
|
package io.atlassian.aws.spec
import org.scalacheck.Prop.forAll
import org.scalacheck.{ Arbitrary, Cogen, Properties }
import scalaz._
/**
* Scalacheck properties that should hold for instances of type classes defined in Scalaz Core.
* Taken from scalaz-scalacheck-binding which currently is behind on scalacheck version.
*/
object ScalazProperties {
private def newProperties(name: String)(f: Properties => Unit): Properties = {
val p = new Properties(name)
f(p)
p
}
object equal {
def commutativity[A](implicit A: Equal[A], arb: Arbitrary[A]) = forAll(A.equalLaw.commutative _)
def reflexive[A](implicit A: Equal[A], arb: Arbitrary[A]) = forAll(A.equalLaw.reflexive _)
def transitive[A](implicit A: Equal[A], arb: Arbitrary[A]) = forAll(A.equalLaw.transitive _)
def naturality[A](implicit A: Equal[A], arb: Arbitrary[A]) = forAll(A.equalLaw.naturality _)
def laws[A](implicit A: Equal[A], arb: Arbitrary[A]): Properties =
newProperties("equal") { p =>
p.property("commutativity") = commutativity[A]
p.property("reflexive") = reflexive[A]
p.property("transitive") = transitive[A]
p.property("naturality") = naturality[A]
()
}
}
object semigroup {
def associative[A](implicit A: Semigroup[A], eqa: Equal[A], arb: Arbitrary[A]) = forAll(A.semigroupLaw.associative _)
def laws[A](implicit A: Semigroup[A], eqa: Equal[A], arb: Arbitrary[A]): Properties =
newProperties("semigroup") { p =>
p.property("associative") = associative[A]
()
}
}
object monoid {
def leftIdentity[A](implicit A: Monoid[A], eqa: Equal[A], arb: Arbitrary[A]) = forAll(A.monoidLaw.leftIdentity _)
def rightIdentity[A](implicit A: Monoid[A], eqa: Equal[A], arb: Arbitrary[A]) = forAll(A.monoidLaw.rightIdentity _)
def laws[A](implicit A: Monoid[A], eqa: Equal[A], arb: Arbitrary[A]): Properties =
newProperties("monoid") { p =>
p.include(semigroup.laws[A])
p.property("left identity") = leftIdentity[A]
p.property("right identity") = rightIdentity[A]
()
}
}
object plus {
def associative[F[_], X](implicit f: Plus[F], afx: Arbitrary[F[X]], ef: Equal[F[X]]) =
forAll(f.plusLaw.associative[X] _)
def laws[F[_]](implicit F: Plus[F], afx: Arbitrary[F[Int]], ef: Equal[F[Int]]): Properties =
newProperties("plus") { p =>
p.include(semigroup.laws[F[Int]](F.semigroup[Int], implicitly, implicitly))
p.property("associative") = associative[F, Int]
()
}
}
object plusEmpty {
def leftPlusIdentity[F[_], X](implicit f: PlusEmpty[F], afx: Arbitrary[F[X]], ef: Equal[F[X]]) =
forAll(f.plusEmptyLaw.leftPlusIdentity[X] _)
def rightPlusIdentity[F[_], X](implicit f: PlusEmpty[F], afx: Arbitrary[F[X]], ef: Equal[F[X]]) =
forAll(f.plusEmptyLaw.rightPlusIdentity[X] _)
def laws[F[_]](implicit F: PlusEmpty[F], afx: Arbitrary[F[Int]], af: Arbitrary[Int => Int], ef: Equal[F[Int]]): Properties =
newProperties("plusEmpty") { p =>
p.include(plus.laws[F])
p.include(monoid.laws[F[Int]](F.monoid[Int], implicitly, implicitly))
p.property("left plus identity") = leftPlusIdentity[F, Int]
p.property("right plus identity") = rightPlusIdentity[F, Int]
()
}
}
object invariantFunctor {
def identity[F[_], X](implicit F: InvariantFunctor[F], afx: Arbitrary[F[X]], ef: Equal[F[X]]) =
forAll(F.invariantFunctorLaw.invariantIdentity[X] _)
def composite[F[_], X, Y, Z](implicit F: InvariantFunctor[F], af: Arbitrary[F[X]], axy: Arbitrary[(X => Y)],
ayz: Arbitrary[(Y => Z)], ayx: Arbitrary[(Y => X)], azy: Arbitrary[(Z => Y)], ef: Equal[F[Z]]) =
forAll(F.invariantFunctorLaw.invariantComposite[X, Y, Z] _)
def laws[F[_]](implicit F: InvariantFunctor[F], af: Arbitrary[F[Int]], axy: Arbitrary[(Int => Int)],
ef: Equal[F[Int]]): Properties =
newProperties("invariantFunctor") { p =>
p.property("identity") = identity[F, Int]
p.property("composite") = composite[F, Int, Int, Int]
()
}
}
object functor {
def identity[F[_], X](implicit F: Functor[F], afx: Arbitrary[F[X]], ef: Equal[F[X]]) =
forAll(F.functorLaw.identity[X] _)
def composite[F[_], X, Y, Z](implicit F: Functor[F], af: Arbitrary[F[X]], axy: Arbitrary[(X => Y)],
ayz: Arbitrary[(Y => Z)], ef: Equal[F[Z]]) =
forAll(F.functorLaw.composite[X, Y, Z] _)
def laws[F[_]](implicit F: Functor[F], af: Arbitrary[F[Int]], axy: Arbitrary[(Int => Int)],
ef: Equal[F[Int]]): Properties =
newProperties("functor") { p =>
p.include(invariantFunctor.laws[F])
p.property("identity") = identity[F, Int]
p.property("composite") = composite[F, Int, Int, Int]
()
}
}
object apply { self =>
def composition[F[_], X, Y, Z](implicit ap: Apply[F], afx: Arbitrary[F[X]], au: Arbitrary[F[Y => Z]],
av: Arbitrary[F[X => Y]], e: Equal[F[Z]]) = forAll(ap.applyLaw.composition[X, Y, Z] _)
def laws[F[_]](implicit F: Apply[F], af: Arbitrary[F[Int]],
aff: Arbitrary[F[Int => Int]], e: Equal[F[Int]]): Properties =
newProperties("apply") { p =>
p.include(functor.laws[F])
p.property("composition") = self.composition[F, Int, Int, Int]
()
}
}
object applicative {
def identity[F[_], X](implicit f: Applicative[F], afx: Arbitrary[F[X]], ef: Equal[F[X]]) =
forAll(f.applicativeLaw.identityAp[X] _)
def homomorphism[F[_], X, Y](implicit ap: Applicative[F], ax: Arbitrary[X], af: Arbitrary[X => Y], e: Equal[F[Y]]) =
forAll(ap.applicativeLaw.homomorphism[X, Y] _)
def interchange[F[_], X, Y](implicit ap: Applicative[F], ax: Arbitrary[X], afx: Arbitrary[F[X => Y]], e: Equal[F[Y]]) =
forAll(ap.applicativeLaw.interchange[X, Y] _)
def mapApConsistency[F[_], X, Y](implicit ap: Applicative[F], ax: Arbitrary[F[X]], afx: Arbitrary[X => Y], e: Equal[F[Y]]) =
forAll(ap.applicativeLaw.mapLikeDerived[X, Y] _)
def laws[F[_]](implicit F: Applicative[F], af: Arbitrary[F[Int]],
aff: Arbitrary[F[Int => Int]], e: Equal[F[Int]]): Properties =
newProperties("applicative") { p =>
p.include(ScalazProperties.apply.laws[F])
p.property("identity") = applicative.identity[F, Int]
p.property("homomorphism") = applicative.homomorphism[F, Int, Int]
p.property("interchange") = applicative.interchange[F, Int, Int]
p.property("map consistent with ap") = applicative.mapApConsistency[F, Int, Int]
()
}
}
object bind {
def associativity[M[_], X, Y, Z](implicit M: Bind[M], amx: Arbitrary[M[X]], af: Arbitrary[(X => M[Y])],
ag: Arbitrary[(Y => M[Z])], emz: Equal[M[Z]]) =
forAll(M.bindLaw.associativeBind[X, Y, Z] _)
def bindApConsistency[M[_], X, Y](implicit M: Bind[M], amx: Arbitrary[M[X]],
af: Arbitrary[M[X => Y]], emy: Equal[M[Y]]) =
forAll(M.bindLaw.apLikeDerived[X, Y] _)
def laws[M[_]](implicit a: Bind[M], am: Arbitrary[M[Int]],
af: Arbitrary[Int => M[Int]], ag: Arbitrary[M[Int => Int]], e: Equal[M[Int]]): Properties =
newProperties("bind") { p =>
p.include(ScalazProperties.apply.laws[M])
p.property("associativity") = bind.associativity[M, Int, Int, Int]
p.property("ap consistent with bind") = bind.bindApConsistency[M, Int, Int]
()
}
}
object monad {
def rightIdentity[M[_], X](implicit M: Monad[M], e: Equal[M[X]], a: Arbitrary[M[X]]) =
forAll(M.monadLaw.rightIdentity[X] _)
def leftIdentity[M[_], X, Y](implicit am: Monad[M], emy: Equal[M[Y]], ax: Arbitrary[X], af: Arbitrary[(X => M[Y])]) =
forAll(am.monadLaw.leftIdentity[X, Y] _)
def laws[M[_]](implicit a: Monad[M], am: Arbitrary[M[Int]],
af: Arbitrary[Int => M[Int]], ag: Arbitrary[M[Int => Int]], e: Equal[M[Int]]): Properties =
newProperties("monad") { p =>
p.include(applicative.laws[M])
p.include(bind.laws[M])
p.property("right identity") = monad.rightIdentity[M, Int]
p.property("left identity") = monad.leftIdentity[M, Int, Int]
()
}
}
  object monadPlus {
    /** Law: mapping a function over `empty` must yield `empty`. */
    def emptyMap[F[_], X](implicit f: MonadPlus[F], afx: Arbitrary[X => X], ef: Equal[F[X]]) =
      forAll(f.monadPlusLaw.emptyMap[X] _)

    /** Law: binding a function to `empty` must yield `empty`. */
    def leftZero[F[_], X](implicit F: MonadPlus[F], afx: Arbitrary[X => F[X]], ef: Equal[F[X]]) =
      forAll(F.monadPlusLaw.leftZero[X] _)

    /** Right-zero law; part of `strongMonadPlusLaw`, so it is kept out of
      * `laws` and only checked via `strongLaws` below. */
    def rightZero[F[_], X](implicit F: MonadPlus[F], afx: Arbitrary[F[X]], ef: Equal[F[X]]) =
      forAll(F.strongMonadPlusLaw.rightZero[X] _)

    /** Monad and PlusEmpty laws plus the weak MonadPlus laws. */
    def laws[F[_]](implicit F: MonadPlus[F], afx: Arbitrary[F[Int]], afy: Arbitrary[F[Int => Int]], ef: Equal[F[Int]]): Properties =
      newProperties("monad plus") { p =>
        p.include(monad.laws[F])
        p.include(plusEmpty.laws[F])
        p.property("empty map") = emptyMap[F, Int]
        p.property("left zero") = leftZero[F, Int]
        ()
      }

    /** Everything in `laws` plus the strong right-zero law. */
    def strongLaws[F[_]](implicit F: MonadPlus[F], afx: Arbitrary[F[Int]], afy: Arbitrary[F[Int => Int]], ef: Equal[F[Int]]) =
      newProperties("monad plus") { p =>
        p.include(laws[F])
        p.property("right zero") = rightZero[F, Int]
        ()
      }
  }
  object monadError {
    /** Checks `monadErrorLaw.raisedErrorsHandled`: a raised error is observed by `handleError`. */
    def raisedErrorsHandled[F[_], E, A](implicit me: MonadError[F, E], eq: Equal[F[A]], ae: Arbitrary[E], afea: Arbitrary[E => F[A]]) =
      forAll(me.monadErrorLaw.raisedErrorsHandled[A] _)

    /** Checks `monadErrorLaw.errorsRaised` for arbitrary errors and values. */
    def errorsRaised[F[_], E, A](implicit me: MonadError[F, E], eq: Equal[F[A]], ae: Arbitrary[E], aa: Arbitrary[A]) =
      forAll(me.monadErrorLaw.errorsRaised[A] _)

    /** Checks `monadErrorLaw.errorsStopComputation`: raising short-circuits. */
    def errorsStopComputation[F[_], E, A](implicit me: MonadError[F, E], eq: Equal[F[A]], ae: Arbitrary[E], aa: Arbitrary[A]) =
      forAll(me.monadErrorLaw.errorsStopComputation[A] _)

    /** Monad laws plus the MonadError-specific laws above. */
    def laws[F[_], E](implicit me: MonadError[F, E], am: Arbitrary[F[Int]], afap: Arbitrary[F[Int => Int]], aeq: Equal[F[Int]], ae: Arbitrary[E], ce: Cogen[E]): Properties =
      newProperties("monad error") { p =>
        p.include(monad.laws[F])
        p.property("raisedErrorsHandled") = raisedErrorsHandled[F, E, Int]
        p.property("errorsRaised") = errorsRaised[F, E, Int]
        p.property("errorsStopComputation") = errorsStopComputation[F, E, Int]
        ()
      }
  }
}
|
WhiteWolf21/fp-growth
|
mlxtend/mlxtend/data/tests/test_multiplexer.py
|
# <NAME> 2014-2020
# mlxtend Machine Learning Library Extensions
#
# Author: <NAME> <<EMAIL>>
#
# License: BSD 3 clause
import numpy as np
import sys
from mlxtend.data import make_multiplexer_dataset
from mlxtend.utils import assert_raises
def test_defaults():
    """Default call yields 100 samples, 6 integer features, integer labels."""
    X, y = make_multiplexer_dataset()
    assert X.shape == (100, 6), X.shape
    # `np.int` (a deprecated alias of the builtin `int`) was removed in
    # NumPy 1.24; check for any integer dtype instead.
    assert np.issubdtype(X.dtype, np.integer), X.dtype
    assert y.shape == (100, ), y.shape
    assert np.issubdtype(y.dtype, np.integer), y.dtype
def test_invalid_address_bits():
    """address_bits must be a positive integer; anything else raises."""
    expected_type_msg = "address_bits must be an integer. Got <class 'float'>."
    # Python 2.7 spells type reprs differently ('<type ...>' vs '<class ...>').
    if sys.version_info[0] == 2:
        expected_type_msg = expected_type_msg.replace('<class', '<type')
    assert_raises(AttributeError,
                  expected_type_msg,
                  make_multiplexer_dataset,
                  1.2)

    expected_value_msg = "Number of address_bits must be greater than 0. Got -1."
    assert_raises(AttributeError,
                  expected_value_msg,
                  make_multiplexer_dataset,
                  -1)
def test_imbalance():
    """positive_class_ratio controls the negative/positive label counts."""
    X, y = make_multiplexer_dataset(address_bits=2, sample_size=1000,
                                    positive_class_ratio=0.3)
    # The original comparison discarded its result (missing `assert`);
    # make the label-distribution check explicit.
    assert np.array_equal(np.bincount(y), [700, 300]), np.bincount(y)
    X, y = make_multiplexer_dataset(address_bits=2, sample_size=1000,
                                    positive_class_ratio=0.7)
    assert np.array_equal(np.bincount(y), [300, 700]), np.bincount(y)
def test_address_bits():
    # 3 address bits select among 2**3 register bits -> 3 + 8 = 11 features.
    X, y = make_multiplexer_dataset(address_bits=3, sample_size=100)
    assert X.shape == (100, 11)
def test_class_labels():
    """With a fixed seed the unshuffled dataset is fully deterministic."""
    X, y = make_multiplexer_dataset(address_bits=2, sample_size=10,
                                    random_seed=0)
    # Labels come grouped: all positives first, then all negatives.
    assert np.array_equal(y, np.array([1, 1, 1, 1, 1, 0, 0, 0, 0, 0]))
    features = np.array([[0, 1, 0, 1, 1, 0],
                         [0, 1, 1, 1, 1, 0],
                         [1, 0, 1, 0, 1, 1],
                         [0, 1, 1, 1, 1, 1],
                         [0, 1, 0, 1, 1, 1],
                         [0, 1, 1, 0, 1, 1],
                         [1, 1, 1, 1, 1, 0],
                         [0, 1, 0, 0, 0, 0],
                         [0, 1, 1, 0, 0, 1],
                         [1, 0, 1, 0, 0, 1]])
    # Leftover debug print removed; show X in the assertion message instead.
    assert np.array_equal(X, features), X
def test_class_labels_shuffle():
    """shuffle=True interleaves the classes deterministically per seed."""
    X, y = make_multiplexer_dataset(address_bits=2, sample_size=10,
                                    random_seed=0, shuffle=True)
    # Leftover debug print removed; show y in the assertion message instead.
    assert np.array_equal(y, np.array([0, 1, 0, 0, 1, 0, 1, 0, 1, 1])), y
    features = np.array([[0, 1, 1, 0, 1, 1],
                         [0, 1, 1, 1, 1, 0],
                         [1, 0, 1, 0, 0, 1],
                         [1, 1, 1, 1, 1, 0],
                         [0, 1, 0, 1, 1, 0],
                         [0, 1, 1, 0, 0, 1],
                         [0, 1, 0, 1, 1, 1],
                         [0, 1, 0, 0, 0, 0],
                         [1, 0, 1, 0, 1, 1],
                         [0, 1, 1, 1, 1, 1]])
    assert np.array_equal(X, features), X
|
ZhuangweiKang/tods
|
tods/sk_interface/detection_algorithm/LSTMODetector_skinterface.py
|
<reponame>ZhuangweiKang/tods
import numpy as np
from ..base import BaseSKI
from tods.detection_algorithm.LSTMODetect import LSTMODetectorPrimitive
class LSTMODetectorSKI(BaseSKI):
    """Scikit-learn style interface (SKI) wrapper around the TODS
    ``LSTMODetectorPrimitive`` outlier-detection primitive.
    """

    def __init__(self, **hyperparams):
        # Forward all hyperparameters straight to the wrapped primitive.
        super().__init__(primitive=LSTMODetectorPrimitive, **hyperparams)
        # Capability flags consumed by BaseSKI: this detector exposes
        # fit/predict but no separate produce step.
        self.fit_available = True
        self.predict_available = True
        self.produce_available = False
|
aws/cluster-api-provider-cloudstack
|
pkg/cloud/user_credentials.go
|
<gh_stars>1-10
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cloud
import (
"strings"
"github.com/pkg/errors"
)
const (
	// rootDomain is the name of the CloudStack root domain; domain paths
	// are normalized to start with it.
	rootDomain = "ROOT"
	// domainDelimiter separates the components of a domain path.
	domainDelimiter = "/"
)

// UserCredIFace is the interface for resolving CloudStack domains,
// accounts, users, and user API credentials.
type UserCredIFace interface {
	ResolveDomain(*Domain) error
	ResolveAccount(*Account) error
	ResolveUser(*User) error
	ResolveUserKeys(*User) error
	GetUserWithKeys(*User) (bool, error)
}

// Domain contains specifications that identify a domain.
type Domain struct {
	Name string // short name (last component of Path)
	Path string // full path, normalized to begin with "ROOT"
	ID   string // CloudStack domain ID
}

// Account contains specifications that identify an account.
type Account struct {
	Name   string
	Domain Domain // domain the account is scoped to
	ID     string
}

// User contains information uniquely identifying and scoping a user.
type User struct {
	ID        string
	Name      string
	APIKey    string // filled in by ResolveUserKeys
	SecretKey string // filled in by ResolveUserKeys
	Account          // embedded account (and, transitively, domain) scope
}
// ResolveDomain resolves a domain's information. A domain can be specified
// by ID, Name, and/or Path; whichever fields are missing are back-filled
// from the CloudStack API response on success.
func (c *client) ResolveDomain(domain *Domain) error {
	// Parse path and use it to set name if not present.
	tokens := []string{}
	if domain.Path != "" {
		// Split path and get name from its last component.
		tokens = strings.Split(domain.Path, domainDelimiter)
		if domain.Name == "" {
			domain.Name = tokens[len(tokens)-1]
		}
		// Ensure the path begins with ROOT (normalizing its casing).
		if !strings.EqualFold(tokens[0], rootDomain) {
			tokens = append([]string{rootDomain}, tokens...)
		} else {
			tokens[0] = rootDomain
		}
		domain.Path = strings.Join(tokens, domainDelimiter)
	}

	// Set present search/list parameters.
	p := c.cs.Domain.NewListDomainsParams()
	p.SetListall(true)
	setIfNotEmpty(domain.Name, p.SetName)
	setIfNotEmpty(domain.ID, p.SetId)

	// If a path was provided, also use its depth to narrow the search.
	if level := len(tokens) - 1; level >= 0 {
		p.SetLevel(level)
	}

	resp, retErr := c.cs.Domain.ListDomains(p)
	if retErr != nil {
		return retErr
	}

	// If the ID was provided, it must match exactly one domain, and any
	// provided path must agree with that domain's path.
	if domain.ID != "" {
		if resp.Count != 1 {
			return errors.Errorf("domain ID %s provided, expected exactly one domain, got %d", domain.ID, resp.Count)
		}
		if domain.Path != "" && !strings.EqualFold(resp.Domains[0].Path, domain.Path) {
			return errors.Errorf("domain Path %s did not match domain ID %s", domain.Path, domain.ID)
		}
		domain.Path = resp.Domains[0].Path
		domain.Name = resp.Domains[0].Name
		return nil
	}

	// Consider the case where only the domain name is provided.
	if domain.Path == "" && domain.Name != "" {
		if resp.Count != 1 {
			return errors.Errorf(
				"only domain name: %s provided, expected exactly one domain, got %d", domain.Name, resp.Count)
		}
		// Back-fill ID and Path from the single match and return. Previously
		// execution fell through to the path comparison below with an empty
		// Path, which could never succeed for a name-only lookup.
		domain.ID = resp.Domains[0].Id
		domain.Path = resp.Domains[0].Path
		return nil
	}

	// Finally, search for the domain by Path.
	for _, possibleDomain := range resp.Domains {
		if possibleDomain.Path == domain.Path {
			domain.ID = possibleDomain.Id
			return nil
		}
	}
	return errors.Errorf("domain not found for domain path %s", domain.Path)
}
// ResolveAccount resolves an account's information.
func (c *client) ResolveAccount(account *Account) error {
	// Account lookups are scoped by domain, so resolve that first.
	if err := c.ResolveDomain(&account.Domain); err != nil {
		return errors.Wrapf(err, "resolving domain %s details", account.Domain.Name)
	}

	params := c.cs.Account.NewListAccountsParams()
	params.SetDomainid(account.Domain.ID)
	setIfNotEmpty(account.ID, params.SetId)
	setIfNotEmpty(account.Name, params.SetName)

	resp, err := c.cs.Account.ListAccounts(params)
	if err != nil {
		return err
	}
	if resp.Count == 0 {
		return errors.Errorf("could not find account %s", account.Name)
	}
	if resp.Count != 1 {
		return errors.Errorf("expected 1 Account with account name %s in domain ID %s, but got %d",
			account.Name, account.Domain.ID, resp.Count)
	}

	// Exactly one match: back-fill the account's identifying fields.
	account.ID = resp.Accounts[0].Id
	account.Name = resp.Accounts[0].Name
	return nil
}
// ResolveUser resolves a user's information.
func (c *client) ResolveUser(user *User) error {
	// Users are looked up within an account, so resolve the account first.
	if err := c.ResolveAccount(&user.Account); err != nil {
		return errors.Wrapf(err, "resolving account %s details", user.Account.Name)
	}

	params := c.cs.User.NewListUsersParams()
	params.SetAccount(user.Account.Name)
	params.SetDomainid(user.Domain.ID)
	params.SetListall(true)

	resp, listErr := c.cs.User.ListUsers(params)
	if listErr != nil {
		return listErr
	}
	if resp.Count != 1 {
		return errors.Errorf("expected 1 User with username %s but got %d", user.Name, resp.Count)
	}

	// Exactly one match: back-fill the user's identifying fields.
	user.ID = resp.Users[0].Id
	user.Name = resp.Users[0].Username
	return nil
}
// ResolveUserKeys resolves a user's api keys.
func (c *client) ResolveUserKeys(user *User) error {
	// Keys are fetched by user ID, so the user must be resolved first.
	if err := c.ResolveUser(user); err != nil {
		return errors.Wrap(err, "error encountered when resolving user details")
	}
	p := c.cs.User.NewGetUserKeysParams(user.ID)
	resp, err := c.cs.User.GetUserKeys(p)
	if err != nil {
		// Wrap instead of Errorf so the underlying API error is preserved
		// in the chain (the original dropped the cause entirely).
		return errors.Wrapf(err,
			"error encountered when resolving user api keys for user %s", user.Name)
	}
	user.APIKey = resp.Apikey
	user.SecretKey = resp.Secretkey
	return nil
}
// GetUserWithKeys will search a domain and account for the first user that has api keys.
// Returns true if a user is found and false otherwise.
func (c *client) GetUserWithKeys(user *User) (bool, error) {
	// Resolve account prior to any user resolution activity.
	if err := c.ResolveAccount(&user.Account); err != nil {
		return false, errors.Wrapf(err, "resolving account %s details", user.Account.Name)
	}

	// List users and take first user that has already has api keys.
	p := c.cs.User.NewListUsersParams()
	p.SetAccount(user.Account.Name)
	setIfNotEmpty(user.Account.Domain.ID, p.SetDomainid)
	p.SetListall(true)
	resp, err := c.cs.User.ListUsers(p)
	if err != nil {
		return false, err
	}

	// Return first user with keys. A ResolveUserKeys failure is treated as
	// "this user has no usable keys"; move on to the next candidate.
	for _, possibleUser := range resp.Users {
		user.ID = possibleUser.Id
		if err := c.ResolveUserKeys(user); err == nil {
			return true, nil
		}
	}
	// No candidate had keys: clear the last candidate ID left on the struct.
	user.ID = ""
	return false, nil
}
|
nanolith/rcpr
|
src/fiber/common/fiber_scheduler_resource_handle.c
|
/**
* \file fiber/common/fiber_scheduler_resource_handle.c
*
* \brief Get the resource handle for a fiber_scheduler.
*
* \copyright 2021 <NAME>. Please see license.txt in this
* distribution for the license terms under which this software is distributed.
*/
#include <rcpr/model_assert.h>
#include "fiber_internal.h"
RCPR_IMPORT_fiber;
/**
* \brief Given a \ref fiber_scheduler instance, return the resource handle for
* this \ref fiber_scheduler instance.
*
* \param sched The \ref fiber_scheduler instance from which the
* resource handle is returned.
*
* \returns the \ref resource handle for this \ref fiber_scheduler instance.
*/
RCPR_SYM(resource)* RCPR_SYM(fiber_scheduler_resource_handle)(
    RCPR_SYM(fiber_scheduler)* sched)
{
    /* model-checking precondition: sched must be a valid scheduler. */
    RCPR_MODEL_ASSERT(prop_fiber_scheduler_valid(sched));

    /* the scheduler embeds its resource header (`hdr`); hand back its
     * address so the scheduler can be managed as a generic resource. */
    return &(sched->hdr);
}
|
yungtiec/bkp
|
client/scenes/profile/components/ProfileHeader.js
|
import React, { Component } from "react";
import { Link, withRouter } from "react-router-dom";
import Avatar from "react-avatar";
import { PublicProfileNavbar, UserSettingsNavbar } from "./index";
export default ({
name,
profileContext,
isMyProfile,
avatarUrl,
getParams
}) => {
return (
<div className="profile-header profile-header--with-nav">
<div className="profile-banner">
<div className="app-container">
<div className="profile-avatar__container">
<Avatar
className="profile-avatar"
name={name && name.trim() ? name : "?"}
size={190}
src={
avatarUrl ||
"/assets/blank-avatar.png"
}
color={"#459DF9"}
fgColor={"#ffffff"}
/>
</div>
</div>
</div>
{profileContext === "settings" ? (
<UserSettingsNavbar getParams={getParams} />
) : (
<PublicProfileNavbar isMyProfile={isMyProfile} getParams={getParams} />
)}
</div>
);
};
|
Archiit19/The-JavaScript-Workshop
|
Lesson05/exercise-01/index.js
|
<reponame>Archiit19/The-JavaScript-Workshop<gh_stars>0
// Demo of `const` / `var` / `let` scoping rules.
const i = 10;
console.log(i); // -> 10
// `var` inside a function is function-scoped: it shadows the outer `i`.
const f = function() {
  var i = 20;
  console.log(i);
};
f(); // -> 20
console.log(i); // -> 10
// `let` inside a block is block-scoped: the shadow ends at the brace.
if (true) {
  let i = 15;
  console.log(i);
} // -> 15
console.log(i); // -> 10
// Intentional failure: `var i` below conflicts with the top-level `const i`,
// so the whole script is rejected at parse time (before `i = 10` ever runs).
i = 10;
if (true) {
  var i = 15;
  console.log(i);
}
// -> Uncaught SyntaxError: Identifier 'i' has already been declared
|
RomuloPy/Winc_Academy
|
JS/Opdrachten/opdracht_026/script.js
|
// Three equivalent implementations of (a^2 + b^2)^2: a function declaration,
// a function expression, and an arrow function. The originals leaked their
// temporaries as implicit globals; they are now proper local constants.

function squareAdd(num1, num2) {
  const sumOfSquares = (num1 * num1) + (num2 * num2);
  return sumOfSquares * sumOfSquares;
}

console.log(squareAdd(2, 3)); // -> 169

const squareAdd1 = function(num3, num4) {
  const sumOfSquares = (num3 * num3) + (num4 * num4);
  return sumOfSquares * sumOfSquares;
};

console.log(squareAdd1(2, 3)); // -> 169

const squareAdd2 = (num5, num6) => {
  const sumOfSquares = (num5 * num5) + (num6 * num6);
  return sumOfSquares * sumOfSquares;
};

console.log(squareAdd2(2, 3)); // -> 169
|
penehyba/thorntail-test-suite
|
microprofile/microprofile-openapi-1.0/src/main/java/org/wildfly/swarm/ts/microprofile/openapi/v10/MyResponse.java
|
<reponame>penehyba/thorntail-test-suite<gh_stars>1-10
package org.wildfly.swarm.ts.microprofile.openapi.v10;
import java.io.Serializable;
/**
 * Simple immutable, serializable DTO returned by the OpenAPI test endpoint.
 */
public class MyResponse implements Serializable {

    // Explicit version id keeps the serialized form stable across recompiles
    // (a Serializable class without one gets a compiler-dependent default).
    private static final long serialVersionUID = 1L;

    private final String attribute1;
    private final String attribute2;

    /**
     * @param attribute1 value of the first attribute
     * @param attribute2 value of the second attribute
     */
    public MyResponse(String attribute1, String attribute2) {
        this.attribute1 = attribute1;
        this.attribute2 = attribute2;
    }

    /** @return the first attribute value */
    public String getAttribute1() {
        return attribute1;
    }

    /** @return the second attribute value */
    public String getAttribute2() {
        return attribute2;
    }
}
|
leeeastwood/Haiway
|
framework/tests/test_array_reverse.js
|
<gh_stars>100-1000
// Fixture exercising Array.prototype.reverse on dense and sparse arrays;
// all outcomes are folded into the single boolean `result`.
a = [1,2,3].reverse();
b = [1,2,3,4].reverse();
// Sparse array: only index 3 is set, so reversing moves 42 to index 0
// and leaves three holes behind (length stays 4).
c = [];
c[3] = 42;
c.reverse();
// Two set slots among holes; reverse must mirror their positions.
d = [];
d[7] = "a";
d[3] = "b";
d.reverse();
// Serialize as "length:joined" — holes stringify as empty slots.
av = a.length+":"+a.toString();
bv = b.length+":"+b.toString();
cv = c.length+":"+c.toString();
dv = d.length+":"+d.toString();
result = av=="3:3,2,1" && bv=="4:4,3,2,1" && cv=="4:42,,," && dv=="8:a,,,,b,,,";
|
hvpaiva/java-padroes-projeto
|
prototype/src/main/java/com/hvpaiva/prototype/App.java
|
<filename>prototype/src/main/java/com/hvpaiva/prototype/App.java<gh_stars>0
package com.hvpaiva.prototype;
import lombok.extern.java.Log;
/**
 * Prototype-pattern demo: a {@link Pedido} factory produces new objects by
 * cloning the prototype instances handed to its constructor.
 */
@Log
final class App {

    /** Non-instantiable entry-point class. */
    private App() {
    }

    /**
     * Program entry point.
     *
     * @param args Program arguments (unused).
     */
    public static void main(String[] args) {
        // First order: clone from a pizza + juice prototype pair.
        PedidoFactory pedido = new Pedido(new Pizza("Calabreza"), new Suco("Laranja"));
        Lanche lanche = pedido.criarLanche();
        Bebida bebida = pedido.criarBebida();
        log.info(lanche.toString()); // Pizza Calabreza
        log.info(bebida.toString()); // Suco de Laranja

        // Second order: clone from a sandwich + soda prototype pair.
        pedido = new Pedido(new Sanduiche("X-Bacon"), new Refrigerante("Fanta Laranja"));
        lanche = pedido.criarLanche();
        bebida = pedido.criarBebida();
        log.info(lanche.toString()); // Sanduíche X-Bacon
        log.info(bebida.toString()); // Refrigerante Fanta Laranja
    }
}
|
SvenWollinger/SnipSniper
|
src/main/java/net/snipsniper/utils/FileUtils.java
|
<reponame>SvenWollinger/SnipSniper
package net.snipsniper.utils;
import net.snipsniper.LogManager;
import net.snipsniper.utils.enums.LogLevel;
import org.capturecoop.ccutils.utils.StringUtils;
import java.awt.*;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
/**
 * Static file-system helpers: recursive deletion, directory creation,
 * listing, existence checks, and jar-resource extraction. Failures are
 * logged via {@link LogManager} and reported through return values.
 */
public class FileUtils {

    /** Recursively deletes the folder (or file) named by {@code folder}. */
    public static boolean deleteRecursively(String folder) {
        return deleteRecursively(new File(folder));
    }

    /**
     * Recursively deletes {@code folder} and everything below it; a plain
     * file argument is simply deleted. Always returns true (individual
     * delete failures are logged, matching the original contract).
     */
    public static boolean deleteRecursively(File folder) {
        if(!folder.isDirectory()) {
            FileUtils.delete(folder);
            return true;
        }
        File[] children = FileUtils.listFiles(folder);
        if(children != null) { // listFiles() returns null on I/O error
            for(File file : children) {
                if(file.isDirectory()) {
                    deleteRecursively(file);
                }
                FileUtils.delete(file);
            }
        }
        FileUtils.delete(folder);
        return true;
    }

    /** Deletes the file named by {@code file}; true if it is gone after. */
    public static boolean delete(String file) {
        return delete(new File(file));
    }

    /** Deletes {@code file}; returns true if it no longer exists. */
    public static boolean delete(File file) {
        if(!file.exists()) return true;
        if(!file.delete()) {
            LogManager.log("File (%c) could not be deleted!", LogLevel.WARNING, file.getAbsolutePath());
            return false;
        }
        return true;
    }

    /** Creates a single directory if missing; true on success/exists. */
    public static boolean mkdir(String file) {
        return mkdir(new File(file));
    }

    /** Creates a single directory if missing; true on success/exists. */
    public static boolean mkdir(File folder) {
        if(folder.exists()) return true;
        if(!folder.mkdir()) {
            LogManager.log("Folder (%c) could not be created.", LogLevel.WARNING, folder.getAbsolutePath());
            return false;
        }
        return true;
    }

    /** Creates a directory and any missing parents; true on success. */
    public static boolean mkdirs(String file) {
        return mkdirs(new File(file));
    }

    /** Creates a directory and any missing parents; true on success. */
    public static boolean mkdirs(File folder) {
        if(folder.exists()) return true;
        if(!folder.mkdirs()) {
            // Log the absolute path, consistent with mkdir()/delete() above.
            LogManager.log("Folders (%c) could not be created.", LogLevel.WARNING, folder.getAbsolutePath());
            return false;
        }
        return true;
    }

    /** Creates several directory trees; true only if all succeed. */
    public static boolean mkdirs(String... folders) {
        File[] array = new File[folders.length];
        for(int i = 0; i < folders.length; i++)
            array[i] = new File(folders[i]);
        return mkdirs(array);
    }

    /** Creates several directory trees; true only if all succeed. */
    public static boolean mkdirs(File... folders) {
        boolean success = true;
        for(File folder : folders)
            if(!mkdirs(folder))
                success = false;
        return success;
    }

    /** Lists entries of a folder; may return null (see File#listFiles). */
    public static File[] listFiles(String folder) {
        return listFiles(new File(folder));
    }

    /** Lists entries of a folder; may return null (see File#listFiles). */
    public static File[] listFiles(File folder) {
        return folder.listFiles();
    }

    /**
     * Recursively collects the slash-corrected absolute paths of all plain
     * files under {@code path}.
     */
    public static ArrayList<String> getFilesInFolders(String path) {
        ArrayList<String> result = new ArrayList<>();
        File[] files = FileUtils.listFiles(path);
        if(files == null) return result; // not a directory or I/O error
        for(File file : files) {
            if(file.isDirectory()) {
                result.addAll(getFilesInFolders(file.getAbsolutePath()));
            } else {
                result.add(StringUtils.correctSlashes(file.getAbsolutePath()));
            }
        }
        return result;
    }

    /** True only if every named file exists. */
    public static boolean exists(String... files) {
        File[] f = new File[files.length];
        for(int i = 0; i < files.length; i++)
            f[i] = new File(files[i]);
        return exists(f);
    }

    /** True only if every given file exists. */
    public static boolean exists(File... files) {
        boolean allExist = true;
        for(File file : files) {
            if(!file.exists())
                allExist = false;
        }
        return allExist;
    }

    /** Opens {@code path} in the platform file manager; errors are logged. */
    public static void openFolder(String path) {
        try {
            Desktop.getDesktop().open(new File(path));
        } catch (IOException ioException) {
            LogManager.log("Could not open folder \"%c\"!", LogLevel.ERROR, path);
            LogManager.logStacktrace(ioException, LogLevel.ERROR);
        }
    }

    /** Writes {@code text} to {@code filename}, overwriting it. */
    public static void printFile(String filename, String text) {
        // try-with-resources closes the writer even on failure (the original
        // leaked it when an exception occurred before close()).
        try (PrintWriter out = new PrintWriter(filename)) {
            out.print(text);
        } catch (FileNotFoundException fileNotFoundException) {
            LogManager.log("Could not write to file \"%c\"!", LogLevel.ERROR, filename);
            LogManager.logStacktrace(fileNotFoundException, LogLevel.ERROR);
        }
    }

    /** Returns the file extension including the leading dot. */
    public static String getFileExtension(File file) {
        return getFileExtension(file, true);
    }

    /**
     * Returns the file extension of {@code file}; empty string when there is
     * none. {@code dot} controls whether the leading '.' is included.
     */
    public static String getFileExtension(File file, boolean dot) {
        String name = file.getName();
        int lastIndexOf = name.lastIndexOf(".");
        if (lastIndexOf == -1) {
            return ""; // empty extension
        }
        if(!dot)
            lastIndexOf++;
        return name.substring(lastIndexOf);
    }

    /** Canonicalizes {@code path}; null when resolution fails (logged). */
    public static String getCanonicalPath(String path) {
        try {
            return new File(path).getCanonicalPath();
        } catch (IOException ioException) {
            LogManager.log("Could not get path for \"%c\"!", LogLevel.ERROR, path);
            LogManager.logStacktrace(ioException, LogLevel.ERROR);
        }
        return null;
    }

    /**
     * Copies a classpath resource out of the jar to {@code path}, replacing
     * any existing file. Returns true on success.
     */
    public static boolean copyFromJar(String jarPath, String path) {
        if(jarPath.startsWith("\\") || jarPath.startsWith("//"))
            LogManager.log("jarPath is starting with slashes, this generally does not work inside the jar!", LogLevel.WARNING);
        if(FileUtils.exists(path))
            FileUtils.delete(path);
        InputStream inputStream = ClassLoader.getSystemResourceAsStream(jarPath);
        if(inputStream == null) {
            LogManager.log("InputStream is null! Copying failed! jarPath: %c, path: %c", LogLevel.ERROR, jarPath, path);
            return false;
        }
        try {
            Files.copy(inputStream, new File(path).getCanonicalFile().toPath(), StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException ioException) {
            LogManager.log("Issue copying from jar!", LogLevel.ERROR);
            LogManager.logStacktrace(ioException, LogLevel.ERROR);
            return false;
        } finally {
            // The original never closed the stream, leaking it on every call.
            try { inputStream.close(); } catch (IOException ignored) { }
        }
        return true;
    }

    /**
     * Reads a UTF-8 text resource from net/snipsniper/resources/ inside the
     * jar, concatenating its lines (without separators). Returns null when
     * the resource is missing.
     */
    public static String loadFileFromJar(String file) {
        StringBuilder content = new StringBuilder();
        String path = "net/snipsniper/resources/" + file;
        InputStream inputStream = ClassLoader.getSystemResourceAsStream(path);
        if(inputStream == null) {
            LogManager.log(StringUtils.format("Could not load file %c from jar!", path), LogLevel.ERROR);
            return null;
        }
        // try-with-resources closes both readers (and the stream) even when
        // reading fails; the original leaked them on exception.
        try (InputStreamReader streamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
             BufferedReader in = new BufferedReader(streamReader)) {
            for (String line; (line = in.readLine()) != null;)
                content.append(line);
        } catch (IOException ioException) {
            LogManager.log("Could not load file: " + file, LogLevel.ERROR);
            LogManager.logStacktrace(ioException, LogLevel.ERROR);
        }
        return content.toString();
    }
}
|
rackerlabs/teeth-overlord
|
teeth_overlord/jobs/base.py
|
<filename>teeth_overlord/jobs/base.py
"""
Copyright 2013 Rackspace, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import abc
import signal
import threading
import uuid
from stevedore import driver
import structlog
from teeth_overlord import agent_client
from teeth_overlord.images import base as images_base
from teeth_overlord import locks
from teeth_overlord import marconi
from teeth_overlord import models
from teeth_overlord.networks import base as networks_base
from teeth_overlord.oob import base as oob_base
from teeth_overlord import scheduler
from teeth_overlord import service
from teeth_overlord import stats
from teeth_overlord import util
# Name of the Marconi queue jobs are submitted to, and the stevedore
# namespace used to look up job implementations by type.
JOB_QUEUE_NAME = 'teeth_jobs'
JOB_DRIVER_NAMESPACE = 'teeth_overlord.jobs'

# Use a very high TTL on Marconi messages - we never really want them to
# expire. If we give up on a message, we'll expire it ourselves.
JOB_TTL = 60 * 60 * 24 * 14  # 14 days, in seconds

# Claim duration: a balance between how long a message can become "stuck"
# if we die while processing it, and not requiring overly frequent updates.
# If updates are required frequently (say, every few seconds), it is easy
# to miss one and lose our claim to someone else.
# NOTE(review): the original comment said "2 minutes" but the value is 60
# seconds -- confirm which was intended.
CLAIM_TTL = 60

# When a temporal job failure occurs, we back off exponentially.
INITIAL_RETRY_DELAY = 60
MAX_RETRY_DELAY = 3600
BACKOFF_FACTOR = 1.5
JITTER = .2

# Failing to process a job should return it to the queue with as long a grace
# period as we can manage.
CLAIM_GRACE = 60 * 60 * 12

# Poll frequently. Help keep build times low.
BASE_POLLING_INTERVAL = 0.1
MAX_POLLING_INTERVAL = 30
class JobExecutor(service.SynchronousTeethService):
    """A service which executes job requests from a queue."""

    def __init__(self, config):
        # Wire up every collaborator the worker threads need to claim and
        # run jobs.
        super(JobExecutor, self).__init__(config)
        self.config = config
        self.log = structlog.get_logger()
        self.agent_client = agent_client.get_agent_client(config)
        # Backoff timer used when polling comes up empty or errors out.
        self.interval_timer = util.IntervalTimer(BASE_POLLING_INTERVAL,
                                                 MAX_POLLING_INTERVAL)
        self.job_client = JobClient(config)
        self.image_provider = images_base.get_image_provider(config)
        self.oob_provider = oob_base.get_oob_provider(config)
        self.network_provider = networks_base.get_network_provider(config)
        self.scheduler = scheduler.TeethInstanceScheduler()
        # Serializes claim attempts across worker threads.
        self.claim_lock = threading.Lock()
        self.queue = marconi.MarconiClient(base_url=config.MARCONI_URL)
        self.stats_client = stats.get_stats_client(config, 'jobs')
        self.concurrent_jobs_gauge = stats.ConcurrencyGauge(self.stats_client,
                                                            'concurrent_jobs')
        # Cache of job_type -> stevedore DriverManager lookups.
        self._job_type_cache = {}

    def _get_job_class(self, job_type):
        """Return the Job implementation registered for ``job_type``.

        Lookups go through stevedore and are cached for the executor's
        lifetime.
        """
        if job_type not in self._job_type_cache:
            self._job_type_cache[job_type] = driver.DriverManager(
                namespace=JOB_DRIVER_NAMESPACE,
                name=job_type,
            )
        return self._job_type_cache[job_type].driver

    def _process_next_message(self):
        """Claim at most one message from the job queue and execute it.

        Only the claim itself is serialized by ``claim_lock``; execution
        happens after the lock is released so workers can run in parallel.
        """
        with self.claim_lock:
            # Now that we actually have the lock, bail out early if we're
            # supposed to be stopping
            if self.stopping.isSet():
                return
            try:
                message = self.queue.claim_message(JOB_QUEUE_NAME,
                                                   CLAIM_TTL,
                                                   CLAIM_GRACE)
            except Exception as e:
                self.log.error('error claiming message', exception=e)
                self.interval_timer.wait(event=self.stopping, error=True)
                return
            if not message:
                # Wait up to BASE_POLLING_INTERVAL seconds before releasing the
                # lock, but bail out early if the stopping flag gets set.
                self.interval_timer.wait(event=self.stopping)
                return
        job_request_id = message.body['job_request_id']
        try:
            job_request = models.JobRequest.objects.get(id=job_request_id)
        except models.JobRequest.DoesNotExist:
            # Stale message: its JobRequest is gone, so drop it.
            self.log.info('removing message corresponding to non-existent'
                          ' JobRequest',
                          message_href=message.href,
                          job_request_id=job_request_id)
            self.queue.delete_message(message)
            return
        with self.concurrent_jobs_gauge:
            cls = self._get_job_class(job_request.job_type)
            job = cls(self, job_request, message, self.config)
            job.execute()

    def _process_messages(self):
        """Worker loop: process messages until asked to stop."""
        while not self.stopping.isSet():
            self._process_next_message()

    def run(self):
        """Start processing jobs."""
        super(JobExecutor, self).run()
        # NOTE: `xrange` -- this module targets Python 2.
        threads = [threading.Thread(target=self._process_messages)
                   for i in xrange(0, self.config.JOB_EXECUTION_THREADS)]
        for thread in threads:
            thread.start()
        # Block the main thread until a signal arrives, then drain workers.
        signal.pause()
        for thread in threads:
            thread.join()
class JobClient(object):
    """A client for submitting job requests."""

    def __init__(self, config):
        self.config = config
        self.queue = marconi.MarconiClient(base_url=config.MARCONI_URL)

    def submit_job(self, job_type, **params):
        """Persist a JobRequest and enqueue a message referencing it.

        :param job_type: registered type name of the desired job.
        :param params: string-to-string job parameters.
        :returns: the result of pushing the queue message.
        """
        request = models.JobRequest(job_type=job_type, params=params)
        request.save()
        message_body = {'job_request_id': str(request.id)}
        return self.queue.push_message(JOB_QUEUE_NAME, message_body, JOB_TTL)
class Job(object):
    """Abstract base class for defining jobs. Implementations must
    override `_execute` and be registered as a stevedore plugin
    under the `teeth_overlord.jobs` namespace.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, executor, request, message, config):
        # executor: the JobExecutor that claimed the message.
        # request:  the models.JobRequest being executed.
        # message:  the claimed queue message referencing the request.
        # config:   the global configuration object.
        self.executor = executor
        # XXX this is a bit hacky, may want to refactor in the future
        self.stats_client = executor.stats_client
        self.params = request.params
        self.request = request
        self.message = message
        self.config = config
        self.lock_manager = locks.get_lock_manager(config)
        self.log = structlog.get_logger(request_id=str(self.request.id),
                                        attempt_id=str(uuid.uuid4()),
                                        job_type=request.job_type)

    @abc.abstractmethod
    def _execute(self):
        # Subclasses implement the actual work here.
        raise NotImplementedError()

    def _save_request(self):
        """Persist the JobRequest; failures are logged, never raised."""
        try:
            self.request.save()
            self._mark_assets()
        except Exception as e:
            self.log.error('error saving JobRequest, ignoring', exception=e)

    def _update_claim(self, ttl=CLAIM_TTL):
        """Extend our claim on the queue message; errors are logged only."""
        try:
            self.executor.queue.update_claim(self.message, ttl)
        except Exception as e:
            self.log.error('error updating claim on message, ignoring',
                           exception=e)

    def _delete_message(self):
        """Remove the message from the queue; errors are logged only."""
        try:
            self.executor.queue.delete_message(self.message)
        except Exception as e:
            self.log.error('error deleting message, ignoring', exception=e)

    def _reset_request(self):
        """Record a failed attempt: requeue the request, or fail it for
        good once the retry limit is exceeded.

        NOTE(review): relies on ``self.max_retries``, which is not defined
        in this base class -- presumably set by subclasses; confirm.
        """
        self.request.reset()
        if self.request.failed_attempts >= self.max_retries:
            self.log.info('job request exceeded retry limit',
                          max_retries=self.max_retries)
            self.request.fail()
            self._save_request()
            self._delete_message()
        else:
            self._save_request()
            # Shorten the claim so the message becomes claimable again soon.
            self._update_claim(ttl=INITIAL_RETRY_DELAY)

    @abc.abstractmethod
    def _mark_assets(self):
        # Subclasses mark whatever assets they touch when state is saved.
        raise NotImplementedError()

    def execute(self):
        """Called to execute a job. Marks the request `RUNNING` in the
        database, and periodically updates it until the task either
        completes or fails.
        """
        if self.request.state in (models.JobRequestState.FAILED,
                                  models.JobRequestState.COMPLETED):
            # Terminal state: nothing to do but drop the stale message.
            self.log.info('job request no longer valid, not executing',
                          state=self.request.state)
            self._delete_message()
            return
        if self.request.state == models.JobRequestState.RUNNING:
            # A RUNNING request whose message became claimable again means a
            # previous executor died mid-job; count it as a failed attempt.
            self.log.info('job request was found in RUNNING state, assuming'
                          ' it failed')
            self._reset_request()
            return
        self.log.info('executing job request')
        self.request.start()
        self._save_request()
        try:
            self._execute()
        except Exception as e:
            self.log.error('error executing job', exception=e)
            self._reset_request()
        else:
            self.log.info('successfully executed job request')
            self.request.complete()
            self._save_request()
            self._delete_message()
|
netomi/uom
|
src/main/java/com/github/netomi/uom/unit/Prefixes.java
|
<filename>src/main/java/com/github/netomi/uom/unit/Prefixes.java
/*
* Copyright (c) 2020 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.netomi.uom.unit;
import com.github.netomi.uom.Prefix;
import com.github.netomi.uom.Quantity;
import com.github.netomi.uom.Unit;
import com.github.netomi.uom.UnitConverter;
import com.github.netomi.uom.function.UnitConverters;
import com.github.netomi.uom.math.Fraction;
import com.github.netomi.uom.util.ObjectPrinter;
/**
* A utility class to provide access to different {@link Prefix} implementations.
*
* @author <NAME>
*/
public final class Prefixes {

    // Hide utility class.
    private Prefixes() {}

    /**
     * Provides support for the 20 prefixes used in the metric system.
     *
     * @see <a href="http://en.wikipedia.org/wiki/Metric_prefix">Wikipedia: Metric Prefix</a>
     */
    public enum Metric implements Prefix {
        /** Prefix for 10<sup>24</sup>. */
        YOTTA("Y", 24),
        /** Prefix for 10<sup>21</sup>. */
        ZETTA("Z", 21),
        /** Prefix for 10<sup>18</sup>. */
        EXA("E", 18),
        /** Prefix for 10<sup>15</sup>. */
        PETA("P", 15),
        /** Prefix for 10<sup>12</sup>. */
        TERA("T", 12),
        /** Prefix for 10<sup>9</sup>. */
        GIGA("G", 9),
        /** Prefix for 10<sup>6</sup>. */
        MEGA("M", 6),
        /** Prefix for 10<sup>3</sup>. */
        KILO("k", 3),
        /** Prefix for 10<sup>2</sup>. */
        HECTO("h", 2),
        /** Prefix for 10<sup>1</sup>. */
        DEKA("da", 1),
        /** Prefix for 10<sup>-1</sup>. */
        DECI("d", -1),
        /** Prefix for 10<sup>-2</sup>. */
        CENTI("c", -2),
        /** Prefix for 10<sup>-3</sup>. */
        MILLI("m", -3),
        /** Prefix for 10<sup>-6</sup>. */
        MICRO("\u00b5", -6),
        /** Prefix for 10<sup>-9</sup>. */
        NANO("n", -9),
        /** Prefix for 10<sup>-12</sup>. */
        PICO("p", -12),
        /** Prefix for 10<sup>-15</sup>. */
        FEMTO("f", -15),
        /** Prefix for 10<sup>-18</sup>. */
        ATTO("a", -18),
        /** Prefix for 10<sup>-21</sup>. */
        ZEPTO("z", -21),
        /** Prefix for 10<sup>-24</sup>. */
        YOCTO("y", -24);

        /** The symbol prepended to unit symbols, e.g. "k" for kilo. */
        private final String symbol;
        /** The power of ten this prefix represents. */
        private final int exponent;

        Metric(String symbol, int exponent) {
            this.symbol = symbol;
            this.exponent = exponent;
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>24</sup></code>.
         * @return {@code unit.multiply(1e24)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> YOTTA(Unit<Q> unit) {
            return unit.withPrefix(YOTTA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>21</sup></code>.
         * @return {@code unit.multiply(1e21)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> ZETTA(Unit<Q> unit) {
            return unit.withPrefix(ZETTA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>18</sup></code>.
         * @return {@code unit.multiply(1e18)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> EXA(Unit<Q> unit) {
            return unit.withPrefix(EXA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>15</sup></code>.
         * @return {@code unit.multiply(1e15)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> PETA(Unit<Q> unit) {
            return unit.withPrefix(PETA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>12</sup></code>.
         * @return {@code unit.multiply(1e12)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> TERA(Unit<Q> unit) {
            return unit.withPrefix(TERA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>9</sup></code>.
         * @return {@code unit.multiply(1e9)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> GIGA(Unit<Q> unit) {
            return unit.withPrefix(GIGA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>6</sup></code>.
         * @return {@code unit.multiply(1e6)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> MEGA(Unit<Q> unit) {
            return unit.withPrefix(MEGA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>3</sup></code>.
         * @return {@code unit.multiply(1e3)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> KILO(Unit<Q> unit) {
            return unit.withPrefix(KILO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>2</sup></code>.
         * @return {@code unit.multiply(1e2)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> HECTO(Unit<Q> unit) {
            return unit.withPrefix(HECTO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>1</sup></code>.
         * @return {@code unit.multiply(1e1)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> DEKA(Unit<Q> unit) {
            return unit.withPrefix(DEKA);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-1</sup></code>.
         * @return {@code unit.multiply(1e-1)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> DECI(Unit<Q> unit) {
            return unit.withPrefix(DECI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-2</sup></code>.
         * @return {@code unit.multiply(1e-2)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> CENTI(Unit<Q> unit) {
            return unit.withPrefix(CENTI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-3</sup></code>.
         * @return {@code unit.multiply(1e-3)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> MILLI(Unit<Q> unit) {
            return unit.withPrefix(MILLI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-6</sup></code>.
         * @return {@code unit.multiply(1e-6)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> MICRO(Unit<Q> unit) {
            return unit.withPrefix(MICRO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-9</sup></code>.
         * @return {@code unit.multiply(1e-9)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> NANO(Unit<Q> unit) {
            return unit.withPrefix(NANO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-12</sup></code>.
         * @return {@code unit.multiply(1e-12)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> PICO(Unit<Q> unit) {
            return unit.withPrefix(PICO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-15</sup></code>.
         * @return {@code unit.multiply(1e-15)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> FEMTO(Unit<Q> unit) {
            return unit.withPrefix(FEMTO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-18</sup></code>.
         * @return {@code unit.multiply(1e-18)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> ATTO(Unit<Q> unit) {
            return unit.withPrefix(ATTO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-21</sup></code>.
         * @return {@code unit.multiply(1e-21)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> ZEPTO(Unit<Q> unit) {
            return unit.withPrefix(ZEPTO);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>10<sup>-24</sup></code>.
         * @return {@code unit.multiply(1e-24)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> YOCTO(Unit<Q> unit) {
            return unit.withPrefix(YOCTO);
        }

        @Override
        public String getSymbol() {
            return symbol;
        }

        @Override
        public int getBase() {
            return 10;
        }

        @Override
        public int getExponent() {
            return exponent;
        }

        @Override
        public String getName() {
            return name();
        }

        @Override
        public Prefix withExponent(int exponent) {
            // Prefer the named constant when one exists for the requested exponent;
            // otherwise fall back to an anonymous base-10 prefix.
            for (Metric m : values()) {
                if (m.getExponent() == exponent) {
                    return m;
                }
            }
            return new GenericPrefix(this, getBase(), exponent);
        }

        @Override
        public UnitConverter getUnitConverter() {
            return UnitConverters.pow(getBase(), getExponent());
        }
    }

    /**
     * Provides support for common binary prefixes to be used by units.
     *
     * @see <a href="https://en.wikipedia.org/wiki/Binary_prefix">Wikipedia: Binary Prefix</a>
     */
    public enum Binary implements Prefix {
        /** Prefix for 1024. */
        KIBI("Ki", 1),
        /** Prefix for 1024<sup>2</sup>. */
        MEBI("Mi", 2),
        /** Prefix for 1024<sup>3</sup>. */
        GIBI("Gi", 3),
        /** Prefix for 1024<sup>4</sup>. */
        TEBI("Ti", 4),
        /** Prefix for 1024<sup>5</sup>. */
        PEBI("Pi", 5),
        /** Prefix for 1024<sup>6</sup>. */
        EXBI("Ei", 6),
        /** Prefix for 1024<sup>7</sup>. */
        ZEBI("Zi", 7),
        /** Prefix for 1024<sup>8</sup>. */
        YOBI("Yi", 8);

        /** The symbol prepended to unit symbols, e.g. "Ki" for kibi. */
        private final String symbol;
        /** The power of 1024 this prefix represents. */
        private final int exponent;

        Binary(String symbol, int exponent) {
            this.symbol = symbol;
            this.exponent = exponent;
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024</code> (binary prefix).
         * @return {@code unit.multiply(1024)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> KIBI(Unit<Q> unit) {
            return unit.withPrefix(KIBI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024<sup>2</sup></code> (binary prefix).
         * @return {@code unit.multiply(1024^2)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> MEBI(Unit<Q> unit) {
            return unit.withPrefix(MEBI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024<sup>3</sup></code> (binary prefix).
         * @return {@code unit.multiply(1024^3)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> GIBI(Unit<Q> unit) {
            return unit.withPrefix(GIBI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024<sup>4</sup></code> (binary prefix).
         * @return {@code unit.multiply(1024^4)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> TEBI(Unit<Q> unit) {
            return unit.withPrefix(TEBI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024<sup>5</sup></code> (binary prefix).
         * @return {@code unit.multiply(1024^5)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> PEBI(Unit<Q> unit) {
            return unit.withPrefix(PEBI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024<sup>6</sup></code> (binary prefix).
         * @return {@code unit.multiply(1024^6)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> EXBI(Unit<Q> unit) {
            return unit.withPrefix(EXBI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024<sup>7</sup></code> (binary prefix).
         * @return {@code unit.multiply(1024^7)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> ZEBI(Unit<Q> unit) {
            return unit.withPrefix(ZEBI);
        }

        /**
         * Returns the specified unit multiplied by the factor <code>1024<sup>8</sup></code> (binary prefix).
         * @return {@code unit.multiply(1024^8)}.
         */
        public static <Q extends Quantity<Q>> Unit<Q> YOBI(Unit<Q> unit) {
            return unit.withPrefix(YOBI);
        }

        @Override
        public String getSymbol() {
            return symbol;
        }

        @Override
        public int getBase() {
            return 1024;
        }

        @Override
        public int getExponent() {
            return exponent;
        }

        @Override
        public String getName() {
            return name();
        }

        @Override
        public Prefix withExponent(int exponent) {
            // Prefer the named constant when one exists for the requested exponent;
            // otherwise fall back to an anonymous base-1024 prefix.
            for (Binary m : values()) {
                if (m.getExponent() == exponent) {
                    return m;
                }
            }
            return new GenericPrefix(this, getBase(), exponent);
        }

        @Override
        public UnitConverter getUnitConverter() {
            return UnitConverters.pow(getBase(), getExponent());
        }
    }

    /**
     * A prefix for base/exponent combinations that have no named enum constant.
     *
     * <p>Instances are value objects fully described by their base and exponent;
     * the {@code prototype} is only kept so that {@link #withExponent(int)} can
     * route back to the named constants of the originating prefix family.
     */
    private static class GenericPrefix implements Prefix {

        /** The prefix family (e.g. {@link Metric}) this instance was derived from. */
        private final Prefix prototype;
        private final int base;
        private final int exponent;

        GenericPrefix(Prefix prototype, int base, int exponent) {
            this.prototype = prototype;
            this.base = base;
            this.exponent = exponent;
        }

        @Override
        public String getName() {
            // Generic prefixes have no spelled-out name; reuse the symbol.
            return getSymbol();
        }

        @Override
        public String getSymbol() {
            return String.format("%d%s", base, ObjectPrinter.instance().print(Fraction.of(exponent)));
        }

        @Override
        public int getBase() {
            return base;
        }

        @Override
        public int getExponent() {
            return exponent;
        }

        @Override
        public Prefix withExponent(int exponent) {
            // Delegate so a named constant is returned when the family has one.
            return prototype.withExponent(exponent);
        }

        @Override
        public UnitConverter getUnitConverter() {
            return UnitConverters.pow(base, exponent);
        }

        // Fix: GenericPrefix is handed out by withExponent(..) as a value type,
        // but previously used identity equality, so two prefixes with the same
        // base and exponent compared unequal. Base + exponent fully determine
        // the conversion factor and symbol, so they define equality.
        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof GenericPrefix)) {
                return false;
            }
            GenericPrefix other = (GenericPrefix) o;
            return base == other.base && exponent == other.exponent;
        }

        @Override
        public int hashCode() {
            return 31 * base + exponent;
        }

        @Override
        public String toString() {
            return getSymbol();
        }
    }
}
|
6543-forks/jx
|
pkg/cmd/get/get_stream.go
|
package get
import (
"strings"
"github.com/jenkins-x/jx/v2/pkg/versionstream"
"github.com/jenkins-x/jx/v2/pkg/cmd/helper"
"github.com/jenkins-x/jx/v2/pkg/log"
"github.com/jenkins-x/jx/v2/pkg/util"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/jenkins-x/jx/v2/pkg/cmd/opts"
"github.com/jenkins-x/jx/v2/pkg/cmd/templates"
)
// GetStreamOptions the command line options
type GetStreamOptions struct {
	GetOptions

	// Kind selects which version-stream section to query (docker, charts, ...).
	Kind string
	// VersionsRepository optionally overrides the versions Git repository URL.
	VersionsRepository string
	// VersionsGitRef optionally pins the versions repository to a tag/branch/sha.
	VersionsGitRef string
}
// Help text shown by `jx get stream --help`.
var (
	getStreamLong = templates.LongDesc(`
		Displays the version of a chart, package or docker image from the Version Stream

		For more information see: [https://jenkins-x.io/docs/concepts/version-stream/](https://jenkins-x.io/docs/concepts/version-stream/)
`)

	getStreamExample = templates.Examples(`
		# List the version of a docker image
		jx get stream -k docker gcr.io/jenkinsxio/builder-jx

		# List the version of a chart
		jx get stream -k charts jenkins-x/tekton
	`)
)
// NewCmdGetStream creates the `jx get stream` command, wiring the common
// options plus the stream-specific flags (--kind, --repo, --versions-ref).
func NewCmdGetStream(commonOpts *opts.CommonOptions) *cobra.Command {
	options := &GetStreamOptions{
		GetOptions: GetOptions{
			CommonOptions: commonOpts,
		},
	}

	cmd := &cobra.Command{
		Use:     "stream",
		Short:   "Displays the version of a chart, package or docker image from the Version Stream",
		Long:    getStreamLong,
		Example: getStreamExample,
		Aliases: []string{"url"},
		Run: func(cmd *cobra.Command, args []string) {
			options.Cmd = cmd
			options.Args = args
			err := options.Run()
			helper.CheckErr(err)
		},
	}
	cmd.Flags().StringVarP(&options.Kind, "kind", "k", "docker", "The kind of version. Possible values: "+strings.Join(versionstream.KindStrings, ", "))
	cmd.Flags().StringVarP(&options.VersionsRepository, "repo", "r", "", "Jenkins X versions Git repo")
	cmd.Flags().StringVarP(&options.VersionsGitRef, "versions-ref", "", "", "Jenkins X versions Git repository reference (tag, branch, sha etc)")
	return cmd
}
// Run implements this command: it resolves the named chart, package or docker
// image against the version stream and logs the pinned version.
func (o *GetStreamOptions) Run() error {
	resolver, err := o.CreateVersionResolver(o.VersionsRepository, o.VersionsGitRef)
	if err != nil {
		return errors.Wrap(err, "failed to create the VersionResolver")
	}

	if len(o.Args) == 0 {
		return util.MissingArgument("name")
	}
	name := o.Args[0]

	kind := versionstream.VersionKind(o.Kind)
	switch kind {
	case versionstream.KindDocker:
		// Docker images resolve to a fully qualified image:tag reference.
		image, err := resolver.ResolveDockerImage(name)
		if err != nil {
			return errors.Wrapf(err, "failed to resolve docker image %s", name)
		}
		log.Logger().Infof("resolved image %s to %s", util.ColorInfo(name), util.ColorInfo(image))
		return nil
	default:
		// Charts and packages resolve to a plain version number.
		version, err := resolver.StableVersionNumber(kind, name)
		if err != nil {
			return errors.Wrapf(err, "failed to resolve %s version of %s", o.Kind, name)
		}
		log.Logger().Infof("resolved %s %s to version: %s", util.ColorInfo(name), util.ColorInfo(o.Kind), util.ColorInfo(version))
		return nil
	}
}
|
ContractI9/Conqueror
|
src/buildings/Stable.java
|
package buildings;
import exceptions.BuildingInCoolDownException;
import exceptions.MaxLevelException;
import exceptions.MaxRecruitedException;
import units.Cavalry;
import units.Unit;
/**
 * A military building that produces cavalry units.
 *
 * <p>Base construction cost is 2500, upgrade cost 1500 and recruitment cost 600;
 * recruited cavalry stats scale with the stable's level.</p>
 */
public class Stable extends MilitaryBuilding {

    /** Builds a level-1 stable with the default cost/upgrade/recruitment values. */
    public Stable() {
        super(2500, 1500, 600);
    }

    /**
     * Recruits one cavalry unit whose level matches this stable's level.
     *
     * @throws BuildingInCoolDownException if the building is still cooling down
     * @throws MaxRecruitedException       if this turn's recruitment cap is reached
     */
    @Override
    public Unit recruit() throws BuildingInCoolDownException, MaxRecruitedException {
        if (isCoolDown()) {
            throw new BuildingInCoolDownException("Building is cooling down, please wait for the next turn");
        }
        if (getCurrentRecruit() == getMaxRecruit()) {
            throw new MaxRecruitedException("Max recruited units reached, please wait till next turn. ");
        }

        setCurrentRecruit(getCurrentRecruit() + 1);
        switch (getLevel()) {
            case 1:
                return new Cavalry(1, 40, 0.6, 0.7, 0.75);
            case 2:
                // NOTE(review): level-2 cavalry carries the same stats as level 1
                // except for the unit level itself — presumably per game spec.
                return new Cavalry(2, 40, 0.6, 0.7, 0.75);
            default:
                return new Cavalry(3, 60, 0.7, 0.8, 0.9);
        }
    }

    /**
     * Raises the stable's level and adjusts the follow-up costs.
     * The cooldown/max-level rules are enforced by {@code super.upgrade()}.
     */
    @Override
    public void upgrade() throws BuildingInCoolDownException, MaxLevelException {
        super.upgrade();
        switch (getLevel()) {
            case 1:
                setLevel(2);
                setUpgradeCost(2000);
                setRecruitmentCost(650);
                break;
            case 2:
                setLevel(3);
                setRecruitmentCost(700);
                break;
            default:
                break;
        }
    }
}
|
TizzyTTT/Item_Cabs
|
src/main/java/com/gm/wj/New_All/utils/MaxUse.java
|
package com.gm.wj.New_All.utils;
/**
 * Simple key/value bean pairing a name with a numeric amount
 * (e.g. a chemical name with its maximum usage).
 *
 * <p>Mutable JavaBean with a default constructor; not thread-safe.
 * The commented-out duplicate fields ({@code chemicalname}/{@code sum_use})
 * that previously lived here were dead code and have been removed.</p>
 */
public class MaxUse {

    // NOTE(review): fields intentionally keep their original (package-private)
    // access in case sibling classes touch them directly.
    String key;
    double value;

    /** @return the key, or {@code null} if never set */
    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    /** @return the value; defaults to {@code 0.0} if never set */
    public double getValue() {
        return value;
    }

    public void setValue(double value) {
        this.value = value;
    }
}
|
WinsonZhou/winson-for-spring
|
winson-spring-bean-validator/src/main/java/com/winson/spring/bean/validator/SpringValidationDemo.java
|
package com.winson.spring.bean.validator;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.stereotype.Component;
import org.springframework.validation.annotation.Validated;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
/**
* @author winson
* @date 2021/10/3
**/
/**
 * Demonstrates Spring's method-level bean validation: {@code @Validated} on a
 * component plus {@code @Valid} on a parameter makes Spring validate the
 * argument's constraints ({@code @NotNull} on {@link User#name}) before the
 * method body runs.
 */
public class SpringValidationDemo {

    // NOTE(review): this instance field is shadowed by the local in main() and
    // never read — kept only to avoid changing the class's visible shape.
    String path = "classpath:/META-INF/bean-validation-context.xml";

    public static void main(String[] args) {
        String path = "classpath:/META-INF/bean-validation-context.xml";
        ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(path);
        UserProcess userProcess = context.getBean(UserProcess.class);
        System.out.println(userProcess);
        // User has a null name, so this call is expected to trigger a
        // constraint violation from the validation interceptor.
        userProcess.process(new User());
        System.out.println("end ");
    }

    /** Component whose method arguments are validated by Spring. */
    @Component
    @Validated
    public static class UserProcess {

        /** Processes a user; {@code @Valid} enforces User's constraints first. */
        public void process(@Valid User user) {
            System.out.println("process user");
        }
    }

    /** Bean carrying a single constrained property. */
    static class User {

        // Must be non-null for validation to pass.
        @NotNull
        private String name;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        @Override
        public String toString() {
            return "User{" +
                    "name='" + name + '\'' +
                    '}';
        }
    }
}
|
AdrianaDinca/training
|
ejb/src/main/java/victor/training/ejb/container/security/CustomSecurityAspect.java
|
package victor.training.ejb.container.security;
import java.lang.reflect.Method;
import javax.interceptor.AroundInvoke;
import javax.interceptor.InvocationContext;
/**
 * EJB interceptor that applies a custom security check around every
 * intercepted business-method invocation.
 */
public class CustomSecurityAspect {

    /**
     * Lets the invocation proceed only when {@link #isCallAllowed(Method)}
     * permits the target method; otherwise aborts with an error.
     *
     * @param context the intercepted invocation
     * @return whatever the intercepted method returns
     * @throws IllegalAccessError if the call is not allowed
     */
    @AroundInvoke
    public Object customSecurityConcern(InvocationContext context) throws Exception {
        Method invoked = context.getMethod();
        if (!isCallAllowed(invoked)) {
            throw new IllegalAccessError();
        }
        return context.proceed();
    }

    /** Authorization hook — currently permits every call. */
    private boolean isCallAllowed(Method method) {
        ///
        return true;
    }
}
|
jiangxin/git-repo-go
|
cmd/test-sshinfo.go
|
// Copyright © 2019 Alibaba Co. Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"fmt"
"github.com/alibaba/git-repo-go/helper"
"github.com/spf13/cobra"
)
// testSSHInfoCommand implements the `test sshinfo` debugging subcommand.
type testSSHInfoCommand struct {
	// cmd caches the cobra command built lazily by Command().
	cmd *cobra.Command
}
// Command lazily builds (and memoizes) the cobra command for `test sshinfo`.
func (v *testSSHInfoCommand) Command() *cobra.Command {
	if v.cmd != nil {
		return v.cmd
	}
	v.cmd = &cobra.Command{
		Use:   "sshinfo <connection>",
		Short: "test sshinfo",
		RunE: func(cmd *cobra.Command, args []string) error {
			return v.Execute(args)
		},
	}
	return v.cmd
}
// Execute runs an ssh_info query against the single HTTP/SSH connection URL
// given in args and prints the decoded result.
//
// Exactly one connection argument must be supplied. (The previous version
// wrapped both argument checks in a redundant `len(args) != 1` guard.)
func (v testSSHInfoCommand) Execute(args []string) error {
	if len(args) == 0 {
		return fmt.Errorf("connection is not given in args")
	}
	if len(args) > 1 {
		return fmt.Errorf("only one connection (HTTP/SSH) should be given")
	}

	// Empty name selects the default ssh_info query.
	query := helper.NewSSHInfoQuery("")
	sshInfo, err := query.GetSSHInfo(args[0], false)
	if err != nil {
		return err
	}
	fmt.Printf("ssh_info: %#v\n", sshInfo)
	return nil
}
// Package-level singleton registered under the parent `test` command.
var testSSHInfoCmd = testSSHInfoCommand{}

func init() {
	testCmd.Command().AddCommand(testSSHInfoCmd.Command())
}
|
nistefan/cmssw
|
RecoVertex/BeamSpotProducer/test/classes.h
|
#include "RecoVertex/BeamSpotProducer/test/NtupleHelper.h"
namespace RecoVertex_BeamSpotProducer_test {
  // Empty aggregate used by the CMSSW build to trigger dictionary generation
  // for this test package; presumably members are added here when new types
  // need ROOT dictionaries — TODO confirm against the package's BuildFile.
  struct dictionary {
  };
}
|
arccode/factory
|
py/utils/log_utils.py
|
<reponame>arccode/factory
# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for logging."""
import logging
import os
import sys
import time
from . import file_utils
# Default line format used by FileLogger when the caller supplies none.
DEFAULT_LOG_FORMAT = '[%(levelname)s] %(message)s'

# Guards against double initialization; flipped to True by InitLogging().
_inited_logging = False
def InitLogging(prefix=None, verbose=False):
  """Initializes logging.

  May only be called once per process, and only if nothing else (e.g.
  autotest's logging_config) has already configured the root logger.

  Args:
    prefix: A prefix to display for each log line, e.g., the program name.
    verbose: True for debug logging, false for info logging.
  """
  global _inited_logging  # pylint: disable=global-statement
  assert not _inited_logging, 'May only call log_utils.InitLogging one time.'
  _inited_logging = True

  # Default the prefix to the program name.
  log_prefix = prefix or os.path.basename(sys.argv[0])

  # Make sure that nothing else has initialized logging yet (e.g.,
  # autotest, whose logging_config does basicConfig).
  assert not logging.getLogger().handlers, (
      'Logging has already been initialized')

  logging.basicConfig(
      format=('[%(levelname)s] ' + log_prefix +
              ' %(filename)s:%(lineno)d %(asctime)s.%(msecs)03d %(message)s'),
      level=logging.DEBUG if verbose else logging.INFO,
      datefmt='%Y-%m-%d %H:%M:%S')

  logging.debug('Logging initialized.')
def FileLogger(logger, log_path, log_prefix=None, log_format=None, level=None):
  """Creates a logger storing logs in file.

  The folder containing ``log_path`` is created on demand and the log file is
  opened in append mode.  To defer creation (for example when building the
  logger at module import time), wrap this call in ``type_utils.LazyObject``.

  Args:
    logger: A string as name of logger, for example 'console'.
    log_path: A string for path to output file.
    log_prefix: If specified, prefix this in all log messages with colon.
    log_format: A format string to override DEFAULT_LOG_FORMAT.
    level: An integer for controlling verbosity (as logging.level).

  Returns:
    A logger instance (see `logging` module for more information).
  """
  fmt = DEFAULT_LOG_FORMAT if log_format is None else log_format
  if log_prefix:
    fmt = log_prefix + ': ' + fmt

  file_utils.TryMakeDirs(os.path.dirname(log_path))
  handler = logging.FileHandler(log_path, 'a')
  handler.setFormatter(logging.Formatter(fmt))

  new_logger = logging.getLogger(logger)
  new_logger.addHandler(handler)
  new_logger.setLevel(logging.INFO if level is None else level)
  return new_logger
class NoisyLogger:
  """Rate-limits log output for messages that repeat without change.

  Tests that retry (or poll periodically) easily emit the same message many
  times; this wrapper forwards a message to the real logger only when it
  changes, when the suppression count limit is exhausted, or when the
  suppression window times out.
  """

  def __init__(self, logger, suppress_limit=None, suppress_timeout=None,
               suppress_logger=None, all_suppress_logger=None):
    """Constructor.

    Args:
      logger: A logger function, for example logging.info.
      suppress_limit: An integer for limit of times to suppress, or None to
        suppress until message is changed.
      suppress_timeout: A timeout in seconds, or None to not time out.
      suppress_logger: A logger function invoked the first time a message is
        suppressed. None to use default (``_DefaultSuppressLogger``).
      all_suppress_logger: A secondary logger function for every suppressed
        message, for example logging.debug. None to use default
        (``_DefaultAllSuppressLogger``).
    """
    self._logger = logger
    self._suppress_logger = (
        self._DefaultSuppressLogger if suppress_logger is None
        else suppress_logger)
    self._all_suppress_logger = (
        self._DefaultAllSuppressLogger if all_suppress_logger is None
        else all_suppress_logger)
    self._suppress_limit = suppress_limit
    self._suppress_timeout = suppress_timeout
    # Suppression state: how many repeats were swallowed, when the current
    # run started, and the last message actually seen.
    self._suppress_count = 0
    self._suppress_start = time.time()
    self._last_message = None

  def _DefaultSuppressLogger(self, message, *args, **kargs):
    """Logs (at INFO) the first time a message starts being suppressed."""
    del args, kargs  # Unused
    logging.info('Suppressed repeating message(s): %s', message)

  def _DefaultAllSuppressLogger(self, message, *args, **kargs):
    """Logs (at DEBUG) every suppressed occurrence."""
    del args, kargs  # Unused
    logging.debug('Suppressed repeating message(s): %s', message)

  def ShouldSuppress(self, message):
    """Returns if the new message should be suppressed or not."""
    if message != self._last_message:
      return False
    if self._suppress_timeout is not None:
      if time.time() - self._suppress_start >= self._suppress_timeout:
        return False
    return (self._suppress_limit is None or
            self._suppress_count < self._suppress_limit)

  def Log(self, message, *args, **kargs):
    """Logs the new message, suppressing unchanged repeats.

    Args:
      message: An object to be logged.
      args: Extra arguments sent to logger.
      kargs: Keyword arguments sent to logger.
    """
    if not self.ShouldSuppress(message):
      self._last_message = message
      self._logger(message, *args, **kargs)
      self._suppress_count = 0
      if self._suppress_timeout is not None:
        self._suppress_start = time.time()
      return

    # Suppressed: announce the suppression once, record every occurrence.
    if self._suppress_count == 0:
      self._suppress_logger(message, *args, **kargs)
    self._all_suppress_logger(message, *args, **kargs)
    self._suppress_count += 1
|
halcyoona/youtube-api
|
node_modules/must/lib/es6.js
|
<reponame>halcyoona/youtube-api<gh_stars>0
// Object.setPrototypeOf polyfill: on engines predating ES2015 fall back to
// assigning the non-standard __proto__ slot directly.
exports.setPrototypeOf = Object.setPrototypeOf || function(obj, prototype) {
  /* eslint no-proto: 0 */
  obj.__proto__ = prototype
  return obj
}

// String.prototype.startsWith exposed as a two-argument helper
// (haystack, needle); lastIndexOf(needle, 0) can only match at index 0.
exports.startsWith = String.prototype.startsWith ?
  Function.call.bind(String.prototype.startsWith) :
  function(haystack, needle) {
    return haystack.lastIndexOf(needle, 0) === 0
  }

// String.prototype.endsWith exposed as (haystack, needle): checks for the
// needle at the position where a suffix would have to start.
exports.endsWith = String.prototype.endsWith ?
  Function.call.bind(String.prototype.endsWith) :
  function(haystack, needle) {
    return haystack.indexOf(needle, haystack.length - needle.length) >= 0
  }
|
KravchenkoAS/agrest
|
agrest-base/src/test/java/io/agrest/it/fixture/T2.java
|
<filename>agrest-base/src/test/java/io/agrest/it/fixture/T2.java
package io.agrest.it.fixture;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
 * Test fixture bean exposing three collection-typed properties, used to
 * exercise serialization of booleans, integers and strings.
 */
public class T2 {

    /** Property name of {@link #getBooleans()}. */
    public static final String P_BOOLEANS = "booleans";
    /** Property name of {@link #getIntegers()}. */
    public static final String P_INTEGERS = "integers";
    /** Property name of {@link #getStrings()}. */
    public static final String P_STRINGS = "strings";

    private Collection<Boolean> booleans;
    private List<Integer> integers;
    private Set<String> strings;

    /** @return the boolean collection, or {@code null} if never set */
    public Collection<Boolean> getBooleans() {
        return booleans;
    }

    public void setBooleans(Collection<Boolean> booleans) {
        this.booleans = booleans;
    }

    /** @return the integer list, or {@code null} if never set */
    public List<Integer> getIntegers() {
        return integers;
    }

    public void setIntegers(List<Integer> integers) {
        this.integers = integers;
    }

    /** @return the string set, or {@code null} if never set */
    public Set<String> getStrings() {
        return strings;
    }

    public void setStrings(Set<String> strings) {
        this.strings = strings;
    }
}
|
songbinwei1/zstack
|
utils/src/main/java/org/zstack/utils/SHAUtils.java
|
package org.zstack.utils;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* Created by lining on 2019/9/4.
*/
/**
 * Utility for hashing strings with {@link MessageDigest} algorithms.
 */
public class SHAUtils {

    /**
     * Hashes {@code input} (UTF-8 encoded) with the given algorithm (e.g.
     * "SHA-512") and returns the digest as a lowercase hex string.
     *
     * <p>The result length is always twice the digest size, so leading zero
     * bytes are preserved. The previous implementation formatted through
     * {@code String.format("%0128x", new BigInteger(1, digest))}, which
     * zero-padded <em>every</em> result to 128 characters — correct only for
     * SHA-512 and wrong for shorter digests such as SHA-256.</p>
     *
     * @param input     the text to hash
     * @param algorithm a {@link MessageDigest} algorithm name
     * @return lowercase hex encoding of the digest
     * @throws RuntimeException wrapping {@link NoSuchAlgorithmException}
     *         if the algorithm is unknown
     */
    public static String encrypt(String input, String algorithm) {
        try {
            MessageDigest md = MessageDigest.getInstance(algorithm);
            md.reset();
            byte[] digest = md.digest(input.getBytes(StandardCharsets.UTF_8));
            StringBuilder hex = new StringBuilder(digest.length * 2);
            for (byte b : digest) {
                hex.append(Character.forDigit((b >> 4) & 0xF, 16));
                hex.append(Character.forDigit(b & 0xF, 16));
            }
            return hex.toString();
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
    }
}
|
MikeThomsen/nifi-date-parser-bundle
|
chrono-java/src/test/java/com/wanasit/chrono/parser/jp/JPDateAgoFormatParserTest.java
|
package com.wanasit.chrono.parser.jp;
import static org.junit.Assert.*;
import org.junit.Test;
import com.wanasit.chrono.Chrono;
import com.wanasit.chrono.ParserTestAbstract;
/**
 * Tests parsing of Japanese relative-past ("ago") expressions such as
 * "49分前" (49 minutes ago). Each case parses a single expression against a
 * fixed reference date and checks the resolved start date.
 *
 * <p>NOTE(review): {@code refDate} and {@code results} are presumably fields
 * inherited from {@code ParserTestAbstract} — confirm in the base class.</p>
 */
public class JPDateAgoFormatParserTest extends ParserTestAbstract {

    @Test
    public void testWithSingleDateExpression() {
        // "49分前" = 49 minutes before the reference time.
        refDate = createDate(2012, 6, 5, 12, 0);
        results = Chrono.Parse("49分前", refDate);
        assertEquals(1, results.size());
        assertEquals(0, results.get(0).index);
        assertEquals("49分前", results.get(0).text);
        assertNotNull(results.get(0).start);
        assertDateEquals(createDate(2012, 6, 5, 11, 11), results.get(0).start);
        // "ago" expressions are points in time, so no end date is produced.
        assertNull(results.get(0).end);

        // "2時間前" = 2 hours before the reference time.
        refDate = createDate(2012, 6, 5, 12, 0);
        results = Chrono.Parse("2時間前", refDate);
        assertEquals(1, results.size());
        assertEquals(0, results.get(0).index);
        assertEquals("2時間前", results.get(0).text);
        assertNotNull(results.get(0).start);
        assertDateEquals(createDate(2012, 6, 5, 10, 0), results.get(0).start);
        assertNull(results.get(0).end);

        // "2日前" = 2 days before the reference time.
        refDate = createDate(2012, 6, 5, 12, 0);
        results = Chrono.Parse("2日前", refDate);
        assertEquals(1, results.size());
        assertEquals(0, results.get(0).index);
        assertEquals("2日前", results.get(0).text);
        assertNotNull(results.get(0).start);
        assertDateEquals(createDate(2012, 6, 3, 12, 0), results.get(0).start);
        assertNull(results.get(0).end);
    }
}
|
dolsem/tilt
|
internal/k8s/ip.go
|
package k8s
import (
"context"
"fmt"
"os/exec"
"strings"
"github.com/pkg/errors"
)
// Some K8s environments expose a single IP for the whole cluster.
type NodeIP string
// DetectNodeIP returns the single node IP exposed by the cluster. Only
// Minikube environments report one; every other environment yields an empty
// NodeIP with no error.
func DetectNodeIP(ctx context.Context, env Env) (NodeIP, error) {
	if env != EnvMinikube {
		return "", nil
	}

	// TODO(nick): Should this be part of MinikubeClient?
	out, err := exec.CommandContext(ctx, "minikube", "ip").Output()
	if err == nil {
		return NodeIP(strings.TrimSpace(string(out))), nil
	}

	if exitErr, ok := err.(*exec.ExitError); ok {
		// TODO(nick): Maybe we should automatically run minikube start?
		return "", fmt.Errorf("Could not read node IP from minikube.\n"+
			"Did you forget to run `minikube start`?\n%s", string(exitErr.Stderr))
	}
	return "", errors.Wrap(err, "Could not read node IP from minikube")
}
|
UPBIoT/renode-iot
|
src/Infrastructure/src/Emulator/Cores/tlib/tcg/additional.c
|
/*
* Copyright (c) 2010-2015 Antmicro Ltd <www.antmicro.com>
* Copyright (c) 2011-2015 Realtime Embedded AB <www.rte.se>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <stdint.h>
#include <string.h>
#include "tcg.h"
/* Global TCG context shared by the translation code. */
tcg_t *tcg;

/* Host-provided allocator callbacks. The embedding runtime (presumably the
 * C# host — TODO confirm) installs these via the attach_* functions before
 * any TCG_* allocation helper is called. */
void *(*_TCG_malloc)(size_t);

/* Registers the host's malloc-compatible callback. */
void attach_malloc(void *malloc_callback)
{
    _TCG_malloc = malloc_callback;
}

/* Allocates memory through the attached callback. */
void *TCG_malloc(size_t size)
{
    return _TCG_malloc(size);
}

void *(*_TCG_realloc)(void *, size_t);

/* Registers the host's realloc-compatible callback. */
void attach_realloc(void *reall)
{
    _TCG_realloc = reall;
}

/* Reallocates memory through the attached callback. */
void *TCG_realloc(void *ptr, size_t size)
{
    return _TCG_realloc(ptr, size);
}

void (*_TCG_free)(void *ptr);

/* Registers the host's free-compatible callback. */
void attach_free(void *free_callback)
{
    _TCG_free = free_callback;
}

/* Releases memory through the attached callback. */
void TCG_free(void *ptr)
{
    _TCG_free(ptr);
}
/* Copies at most buf_size-1 characters of str into buf and always
 * NUL-terminates the result (unless buf_size <= 0, in which case buf is left
 * untouched). Truncates silently when str is too long. */
void TCG_pstrcpy(char *buf, int buf_size, const char *str)
{
    int i;

    if (buf_size <= 0) {
        return;
    }
    for (i = 0; i < buf_size - 1 && str[i] != '\0'; i++) {
        buf[i] = str[i];
    }
    buf[i] = '\0';
}
/* Appends s to the NUL-terminated string in buf, truncating so the total
 * (including the terminator) never exceeds buf_size. Returns buf. */
char *TCG_pstrcat(char *buf, int buf_size, const char *s)
{
    int used = strlen(buf);

    if (used < buf_size) {
        TCG_pstrcpy(buf + used, buf_size - used, s);
    }
    return buf;
}
/* Layout information about the target CPU state, pushed in by the host at
 * startup so the generated code can address TLB structures without compiling
 * against the target's CPUState definition. Index i presumably corresponds to
 * the i-th MMU mode — TODO confirm against the callers. */
unsigned int temp_buf_offset;
unsigned int tlb_table_n_0[7];
unsigned int tlb_table_n_0_addr_read[7];
unsigned int tlb_table_n_0_addr_write[7];
unsigned int tlb_table_n_0_addend[7];
unsigned int tlb_entry_addr_read;
unsigned int tlb_entry_addr_write;
unsigned int tlb_entry_addend;
unsigned int sizeof_CPUTLBEntry;
int TARGET_PAGE_BITS;

/* Sets the target's page-size shift (log2 of the page size). */
void set_TARGET_PAGE_BITS(int val)
{
    TARGET_PAGE_BITS = val;
}

/* Records sizeof(CPUTLBEntry) for the current target. */
void set_sizeof_CPUTLBEntry(unsigned int sz)
{
    sizeof_CPUTLBEntry = sz;
}

/* Records the offset of the temporary buffer inside the CPU state. */
void set_temp_buf_offset(unsigned int offset)
{
    temp_buf_offset = offset;
}

/* Records the read/write/addend field offsets within one TLB entry. */
void set_tlb_entry_addr_rwu(unsigned int read, unsigned int write, unsigned int addend)
{
    tlb_entry_addr_read = read;
    tlb_entry_addr_write = write;
    tlb_entry_addend = addend;
}

/* Records the offset of TLB table i inside the CPU state. */
void set_tlb_table_n_0(int i, unsigned int offset)
{
    tlb_table_n_0[i] = offset;
}

/* Records per-table read/write/addend offsets for TLB table i. */
void set_tlb_table_n_0_rwa(int i, unsigned int read, unsigned int write, unsigned int addend)
{
    tlb_table_n_0_addr_read[i] = read;
    tlb_table_n_0_addr_write[i] = write;
    tlb_table_n_0_addend[i] = addend;
}
|
wromansky/incubator-heron
|
heron/instance/tests/java/org/apache/heron/network/AbstractNetworkTest.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.network;
import java.io.Closeable;
import java.io.IOException;
import java.net.SocketException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import com.google.protobuf.Message;
import org.junit.After;
import org.junit.Before;
import org.apache.heron.common.basics.Communicator;
import org.apache.heron.common.basics.NIOLooper;
import org.apache.heron.common.basics.SingletonRegistry;
import org.apache.heron.common.basics.SysUtils;
import org.apache.heron.common.config.SystemConfig;
import org.apache.heron.common.network.HeronSocketOptions;
import org.apache.heron.common.network.IncomingPacket;
import org.apache.heron.common.testhelpers.HeronServerTester;
import org.apache.heron.instance.CommunicatorTester;
import org.apache.heron.instance.InstanceControlMsg;
import org.apache.heron.metrics.GatewayMetrics;
import org.apache.heron.resource.UnitTestHelper;
import static org.junit.Assert.assertEquals;
/**
 * Common superclass to share setup required for network tests.
 *
 * <p>Provides a client/server test harness: offer latches for the in-control/in-stream
 * queues, a single-thread executor that runs the stream manager client loop, and
 * helpers for raw socket interactions.
 */
public abstract class AbstractNetworkTest {
  static final String HOST = "127.0.0.1";

  private int serverPort;
  private StreamManagerClient streamManagerClient;
  private CommunicatorTester communicatorTester;
  private GatewayMetrics gatewayMetrics;
  private ExecutorService threadPool;
  private CountDownLatch inControlQueueOfferLatch;
  private CountDownLatch inStreamQueueOfferLatch;

  /** Best-effort close; IOExceptions during cleanup are deliberately ignored. */
  static void close(Closeable closeable) {
    if (closeable != null) {
      try {
        closeable.close();
      } catch (IOException ignored) {
        // cleanup only; nothing useful to do on failure
      }
    }
  }

  /**
   * Accepts a single connection on the given server channel, configures the accepted
   * socket (TCP_NODELAY, non-blocking) and closes the server channel afterwards, since
   * the tests only ever need one client connection.
   */
  static SocketChannel acceptSocketChannel(
      ServerSocketChannel serverSocketChannel) throws IOException {
    SocketChannel socketChannel = serverSocketChannel.accept();
    configure(socketChannel);
    socketChannel.configureBlocking(false);
    close(serverSocketChannel);
    return socketChannel;
  }

  private static void configure(SocketChannel socketChannel) throws SocketException {
    socketChannel.socket().setTcpNoDelay(true);
  }

  @Before
  public void before() throws Exception {
    inControlQueueOfferLatch = new CountDownLatch(1);
    inStreamQueueOfferLatch = new CountDownLatch(1);
    communicatorTester = new CommunicatorTester(inControlQueueOfferLatch, inStreamQueueOfferLatch);
    gatewayMetrics = new GatewayMetrics();
    threadPool = Executors.newSingleThreadExecutor();

    // Get an available port
    serverPort = SysUtils.getFreePort();
  }

  @After
  public void after() throws NoSuchFieldException, IllegalAccessException {
    if (communicatorTester != null) {
      communicatorTester.stop();
    }
    if (streamManagerClient != null) {
      streamManagerClient.stop();
    }
    if (threadPool != null) {
      threadPool.shutdownNow();
    }
  }

  protected int getServerPort() {
    return serverPort;
  }

  protected ExecutorService getThreadPool() {
    return threadPool;
  }

  protected NIOLooper getNIOLooper() {
    return (NIOLooper) communicatorTester.getTestLooper();
  }

  protected Communicator<InstanceControlMsg> getInControlQueue() {
    return communicatorTester.getInControlQueue();
  }

  protected Communicator<Message> getInStreamQueue() {
    return communicatorTester.getInStreamQueue();
  }

  public CountDownLatch getInControlQueueOfferLatch() {
    return inControlQueueOfferLatch;
  }

  public CountDownLatch getInStreamQueueOfferLatch() {
    return inStreamQueueOfferLatch;
  }

  /**
   * Reads one incoming packet from the channel, waiting (bounded by
   * {@link HeronServerTester#RESPONSE_RECEIVED_TIMEOUT}) for readability. The packet's
   * leading type-name string is unpacked and discarded here because unpacking order
   * matters but no caller needs the value.
   */
  IncomingPacket readIncomingPacket(SocketChannel socketChannel) throws IOException {
    // Receive request
    IncomingPacket incomingPacket = new IncomingPacket();
    Selector readSelector = Selector.open();
    try {
      socketChannel.register(readSelector, SelectionKey.OP_READ);
      readSelector.select(HeronServerTester.RESPONSE_RECEIVED_TIMEOUT.toMillis());

      // reading might not return the full payload in one shot. It could take 2 due to the header
      // and the data read
      if (incomingPacket.readFromChannel(socketChannel) != 0) {
        readSelector.select(HeronServerTester.RESPONSE_RECEIVED_TIMEOUT.toMillis());
        assertEquals(0, incomingPacket.readFromChannel(socketChannel));
      }
    } finally {
      // The selector used to be leaked here. Closing it also cancels the channel's
      // registration, so repeated calls don't exhaust file descriptors or leave
      // stale selection keys on the channel.
      close(readSelector);
    }

    // Though we do not use typeName, we need to unpack it first, since the order is required.
    // doing this as a convenience since none of the callers of this method need this.
    incomingPacket.unpackString();
    return incomingPacket;
  }

  /**
   * Starts a {@link StreamManagerClient} driven by the shared NIOLooper on the test
   * thread pool and returns it. The client is stopped again in {@link #after()}.
   */
  StreamManagerClient runStreamManagerClient() {
    SystemConfig systemConfig =
        (SystemConfig) SingletonRegistry.INSTANCE.getSingleton(SystemConfig.HERON_SYSTEM_CONFIG);

    HeronSocketOptions socketOptions = new HeronSocketOptions(
        systemConfig.getInstanceNetworkWriteBatchSize(),
        systemConfig.getInstanceNetworkWriteBatchTime(),
        systemConfig.getInstanceNetworkReadBatchSize(),
        systemConfig.getInstanceNetworkReadBatchTime(),
        systemConfig.getInstanceNetworkOptionsSocketSendBufferSize(),
        systemConfig.getInstanceNetworkOptionsSocketReceivedBufferSize(),
        systemConfig.getInstanceNetworkOptionsMaximumPacketSize());

    final NIOLooper nioLooper = (NIOLooper) communicatorTester.getTestLooper();
    streamManagerClient = new StreamManagerClient(nioLooper, HOST, serverPort,
        "topology-name", "topologyId", UnitTestHelper.getInstance("bolt-id"),
        communicatorTester.getInStreamQueue(), communicatorTester.getOutStreamQueue(),
        communicatorTester.getInControlQueue(), socketOptions, gatewayMetrics);

    Runnable r = new Runnable() {
      @Override
      public void run() {
        try {
          streamManagerClient.start();
          nioLooper.loop();
        } finally {
          streamManagerClient.stop();
          nioLooper.exitLoop();
        }
      }
    };
    threadPool.execute(r);

    return streamManagerClient;
  }
}
|
DanGodfreyjr/FoxitWacomNotes
|
samples/docs/API_Reference/html/search/enums_8.js
|
var searchData=
[
['markup_5fannotation_5fstate',['MARKUP_ANNOTATION_STATE',['../group___annot_consts_p_d_f.html#gac511125eeb5ba47ccc2c14528af098fe',1,'MARKUP_ANNOTATION_STATE.js']]],
['mouseeventobjecttype',['MouseEventObjectType',['../group___viewer_constants.html#ga7f03b6b4873ad9b13aef3434e345de31',1,'mouseEventObjectType.js']]]
];
|
Saber-Team/SOI
|
lib/plugin/uglifier.js
|
/**
* The MIT License (MIT)
* Copyright (c) 2016 Saber-Team
*
* @file uglifier插件, js压缩, 监听task的`compiled-resource`事件
* @author AceMood
*/
'use strict';
const UglifyJS = require('uglify-js');
// default uglify config
let defaultOptions = require('./uglifier.json');
/**
 * Uglifier plugin: subscribes to the host task's `compiled-resource` event and
 * minifies every non-permanent js resource in place with uglify-js
 * (parse -> compress -> mangle -> print).
 */
class Uglifier {
  /**
   * @param {?Object} options uglify-js compressor options, merged over the
   *     defaults loaded from uglifier.json; `options.ignore` is a predicate
   *     deciding which resource paths to skip.
   */
  constructor(options) {
    options = options || {};
    this.options = soi.util.merge({}, defaultOptions, options);
    this.ignore = options.ignore || soi.fn.FALSE;
  }
  /**
   * Attaches this plugin to a task.
   * @param {Object} task host task emitting `compiled-resource`
   */
  init(task) {
    this.host = task;
    // bind once so the exact same reference can be removed in uninstall()
    this.exec = this.exec.bind(this);
    task.on('compiled-resource', this.exec);
  }
  /**
   * Compresses a single compiled resource. A parse failure aborts the whole
   * build via process.exit(1) by design.
   * @param {Object} resource resource exposing getContent()/setContent()
   */
  exec(resource) {
    if ((resource.type === 'js')
      && !this.ignore(resource.path)
      && !resource.isPermanent) {
      let code = resource.getContent();
      let ast;
      try {
        ast = UglifyJS.parse(code);
      } catch (e) {
        soi.log.error(e);
        process.exit(1);
      }
      // compressor needs figure_out_scope too
      ast.figure_out_scope();
      let compressor = UglifyJS.Compressor(this.options);
      ast = ast.transform(compressor);
      // need to figure out scope again so mangler works optimally
      ast.figure_out_scope();
      ast.compute_char_frequency();
      ast.mangle_names();
      // get the compressed code back out of the AST
      code = ast.print_to_string();
      resource.setContent(code);
    }
  }
  /** Detaches the listener registered in init(). */
  uninstall() {
    this.host.removeListener('compiled-resource', this.exec);
  }
}
module.exports = Uglifier;
|
wittyGIrl/form
|
src/components/form/options/mixins/dataHidden.js
|
<reponame>wittyGIrl/form<gh_stars>0
import config from '../../../../config';
// Initial reactive state for the "hidden" form option.
// NOTE(review): `config` is imported but not referenced in this mixin — confirm
// it is needed before removing the import.
const data = {
  hidden: '',
};
// Per-option metadata; intentionally empty for the hidden option.
const dataOptions = {
};
export {data, dataOptions};
|
abrander/imagemeta
|
bmff/bmff.go
|
<filename>bmff/bmff.go<gh_stars>10-100
/*
Copyright 2021 <NAME>
Copyright 2018 The go4 Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package bmff reads ISOBMFF boxes, as used by HEIF, AVIF, CR3, etc. and other riff based files
package bmff
import (
"bufio"
"io"
"github.com/pkg/errors"
)
// Common Errors
var (
ErrBrandNotSupported = errors.New("error brand not supported")
ErrWrongBoxType = errors.New("error wrong box type")
ErrNoMoreBoxes = errors.New("no more boxes to be parsed")
)
// brandCount is the number of compatible brands supported.
const brandCount = 8
// NewReader returns a new bmff.Reader
//
// r is wrapped in a *bufio.Reader unless it already is one. The initial read
// budget (remain: 8) presumably covers the first 8-byte box header — confirm
// against bufReader's accounting.
func NewReader(r io.Reader) Reader {
	br, ok := r.(*bufio.Reader)
	if !ok {
		br = bufio.NewReader(r)
	}
	return Reader{br: bufReader{Reader: br, remain: 8}}
}
// Reader is a BMFF reader
type Reader struct {
	br bufReader // buffered source with per-box read accounting
	brand Brand // major brand recorded by ReadFtypBox; gates Meta/Moov reads
	noMoreBoxes bool // a box with size 0 (the final box) was seen
}
// ReadAndParseBox wraps the ReadBox method, ensuring that the read box is of type typ
// and parses successfully. It returns the parsed box.
func (r *Reader) ReadAndParseBox(typ BoxType) (Box, error) {
	box, err := r.readBox()
	if err != nil {
		// Wrapf keeps the underlying error (and its stack) in the chain instead
		// of flattening it into the message the way Errorf("... %v", err) did.
		return nil, errors.Wrapf(err, "error reading %q box", typ)
	}
	if box.Type() != typ {
		return nil, errors.Errorf("error reading %q box: got box type %q instead", typ, box.Type())
	}
	pbox, err := box.Parse()
	if err != nil {
		return nil, errors.Wrapf(err, "error parsing read %q box", typ)
	}
	return pbox, nil
}
// ReadFtypBox reads an 'ftyp' box from a BMFF file.
//
// This should be the first read function called.
// It records the file's major brand and the current box offset on the reader,
// which later reads (ReadMetaBox/ReadMoovBox) depend on.
// NOTE(review): ftyp and the parse error are returned together — callers must
// check err even though a (possibly partial) FileTypeBox is returned.
func (r *Reader) ReadFtypBox() (FileTypeBox, error) {
	b, err := r.readBox()
	if err != nil {
		return FileTypeBox{}, errors.Wrapf(err, "ReadFtypBox")
	}
	ftyp, err := b.parseFileTypeBox()
	r.brand = ftyp.MajorBrand
	r.br.offset = b.offset
	return ftyp, err
}
// ReadMetaBox reads a 'meta' box from a BMFF file.
//
// This should be called in order. First call ReadFtypBox
// (ReadFtypBox records the brand checked below; an unrecognized brand yields
// ErrBrandNotSupported, and a previously seen terminal box yields ErrNoMoreBoxes).
func (r *Reader) ReadMetaBox() (mb MetaBox, err error) {
	if r.brand == brandUnknown {
		return mb, ErrBrandNotSupported
	}
	if r.noMoreBoxes {
		return mb, ErrNoMoreBoxes
	}
	b, err := r.readBox()
	if err != nil {
		err = errors.Wrapf(err, "ReadMetaBox")
		return mb, err
	}
	return parseMetaBox(&b)
}
// ReadMoovBox reads a 'moov' box from a BMFF file.
//
// This should be called in order. First call ReadFtypBox
// (which records the brand checked below).
func (r *Reader) ReadMoovBox() (moov MoovBox, err error) {
	if r.brand == brandUnknown {
		return moov, ErrBrandNotSupported
	}
	if r.noMoreBoxes {
		return moov, ErrNoMoreBoxes
	}
	b, err := r.readBox()
	if err != nil {
		// was wrapped as "ReadMetaBox" (copy/paste); label the actual caller
		err = errors.Wrapf(err, "ReadMoovBox")
		return moov, err
	}
	return b.parseMoovBox()
}
// ReadBox reads a box and returns it
// (thin wrapper over the buffered reader's inner-box parser).
func (r *Reader) readBox() (b box, err error) {
	return r.br.readInnerBox()
}
|
tkgwJeff/Clairvoyance
|
front-end/app/components/LoadingBlock/index.js
|
<reponame>tkgwJeff/Clairvoyance
import CSSModules from 'react-css-modules'
import styles from './styles.css'
import Loading from 'components/Loading'
class LoadingBlock extends React.Component {
render() {
const {user} = this.props
if (user.status === 'loading') {
return (
<div styleName='logining'>
<Loading/>
</div>
)
}
}
}
export default CSSModules(LoadingBlock, styles)
|
mutilin/cpachecker-ldv
|
src/org/sosy_lab/cpachecker/cpa/value/symbolic/ConstraintsStrengthenOperator.java
|
<filename>src/org/sosy_lab/cpachecker/cpa/value/symbolic/ConstraintsStrengthenOperator.java<gh_stars>1-10
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2015 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.value.symbolic;
import java.io.PrintStream;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import javax.annotation.Nullable;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.configuration.Option;
import org.sosy_lab.common.configuration.Options;
import org.sosy_lab.common.time.Timer;
import org.sosy_lab.cpachecker.cfa.model.CFAEdge;
import org.sosy_lab.cpachecker.cfa.model.CFAEdgeType;
import org.sosy_lab.cpachecker.cfa.model.CFANode;
import org.sosy_lab.cpachecker.cfa.types.Type;
import org.sosy_lab.cpachecker.core.CPAcheckerResult.Result;
import org.sosy_lab.cpachecker.core.interfaces.Statistics;
import org.sosy_lab.cpachecker.core.reachedset.ReachedSet;
import org.sosy_lab.cpachecker.cpa.constraints.ConstraintsCPA;
import org.sosy_lab.cpachecker.cpa.constraints.constraint.Constraint;
import org.sosy_lab.cpachecker.cpa.constraints.constraint.IdentifierAssignment;
import org.sosy_lab.cpachecker.cpa.constraints.domain.ConstraintsState;
import org.sosy_lab.cpachecker.cpa.value.ValueAnalysisState;
import org.sosy_lab.cpachecker.cpa.value.symbolic.type.ConstantSymbolicExpression;
import org.sosy_lab.cpachecker.cpa.value.symbolic.type.SymbolicIdentifier;
import org.sosy_lab.cpachecker.cpa.value.symbolic.type.SymbolicValue;
import org.sosy_lab.cpachecker.cpa.value.symbolic.type.SymbolicValueFactory;
import org.sosy_lab.cpachecker.cpa.value.symbolic.util.SymbolicValues;
import org.sosy_lab.cpachecker.cpa.value.type.Value;
import org.sosy_lab.cpachecker.util.CFAUtils;
import org.sosy_lab.cpachecker.util.states.MemoryLocation;
/**
* Strengthener for ValueAnalysis with {@link ConstraintsCPA}.
*/
@Options(prefix = "cpa.value.symbolic")
public class ConstraintsStrengthenOperator implements Statistics {
@Option(description = "Whether to simplify symbolic expressions, if possible.")
private boolean simplifySymbolics = true;
@Option(description = "Whether to adopt definite assignments computed by the ConstraintsCPA")
private boolean adoptDefinites = true;
// statistics
private final Timer totalTime = new Timer();
private int replacedSymbolicExpressions = 0;
public ConstraintsStrengthenOperator(final Configuration pConfig)
throws InvalidConfigurationException {
pConfig.inject(this);
}
/**
* Strengthen the given {@link org.sosy_lab.cpachecker.cpa.value.ValueAnalysisState} with the given {@link org.sosy_lab.cpachecker.cpa.constraints.domain.ConstraintsState}.
*
* <p>The returned <code>Collection</code> contains all reachable states after strengthening.
* A returned empty <code>Collection</code> represents 'bottom', a returned <code>null</code>
* represents that no changes were made to the given <code>ValueAnalysisState</code>.</p>
*
*
* @param pStateToStrengthen the state to strengthen
* @param pStrengtheningState the state to strengthen the first state with
* @return <code>null</code> if no changes were made to the given <code>ValueAnalysisState</code>,
* an empty <code>Collection</code>, if the resulting state is not reachable and
* a <code>Collection</code> containing all reachable states, otherwise
*/
public Collection<ValueAnalysisState> strengthen(
final ValueAnalysisState pStateToStrengthen,
final ConstraintsState pStrengtheningState,
final CFAEdge pEdge
) {
totalTime.start();
try {
ValueAnalysisState newState;
if (adoptDefinites) {
newState =
evaluateAssignment(pStrengtheningState.getDefiniteAssignment(), pStateToStrengthen);
} else {
newState = pStateToStrengthen;
}
if (simplifySymbolics) {
newState = simplifySymbolicValues(newState, pStrengtheningState, pEdge);
}
if (!newState.equals(pStateToStrengthen)) {
return Collections.singleton(newState);
} else {
return null;
}
} finally {
totalTime.stop();
}
}
private ValueAnalysisState evaluateAssignment(
final IdentifierAssignment pAssignment,
final ValueAnalysisState pValueState
) {
ValueAnalysisState newElement = ValueAnalysisState.copyOf(pValueState);
for (Map.Entry<? extends SymbolicIdentifier, Value> onlyValidAssignment : pAssignment.entrySet()) {
final SymbolicIdentifier identifierToReplace = onlyValidAssignment.getKey();
final Value newIdentifierValue = onlyValidAssignment.getValue();
newElement.assignConstant(identifierToReplace, newIdentifierValue);
}
return newElement;
}
// replaces symbolic expressions that are not used anywhere yet with a new symbolic identifier.
// this method does not copy the given value analysis state, but works directly with it
private ValueAnalysisState simplifySymbolicValues(
final ValueAnalysisState pValueState,
final ConstraintsState pConstraints,
final CFAEdge pEdge
) {
// we only simplify symbolic values if one of the possible next edges is an assume edge.
// otherwise, symbolic values won't be used in one of the next steps and we don't have to
// simplify.
// If the current edge is an assume edge, simplification doesn't work reliable since
// a constraint added at this edge is not yet in the strengthening ConstraintsState.
// For strengthening, unstrengthened states are used, always.
if (!couldNextEdgeUseValues(pEdge) && pEdge.getEdgeType() != CFAEdgeType.AssumeEdge) {
return pValueState;
}
final SymbolicValueFactory factory = SymbolicValueFactory.getInstance();
for (Map.Entry<MemoryLocation, Value> e : pValueState.getConstantsMapView().entrySet()) {
Value currV = e.getValue();
if (!(currV instanceof SymbolicValue) || isSimpleSymbolicValue((SymbolicValue) currV)) {
continue;
}
SymbolicValue castVal = (SymbolicValue) currV;
MemoryLocation currLoc = e.getKey();
if (isIndependentInValueState(castVal, currLoc, pValueState)
&& doesNotAppearInConstraints(castVal, pConstraints)) {
Type valueType = pValueState.getTypeForMemoryLocation(currLoc);
SymbolicValue newIdentifier = factory.asConstant(factory.newIdentifier(), valueType);
pValueState.assignConstant(currLoc, newIdentifier, valueType);
replacedSymbolicExpressions++;
}
}
return pValueState;
}
private boolean couldNextEdgeUseValues(CFAEdge pEdge) {
final CFANode nextNode = pEdge.getSuccessor();
for (CFAEdge currEdge : CFAUtils.leavingEdges(nextNode)) {
if (usesValues(currEdge)) {
return true;
}
}
return false;
}
private boolean usesValues(CFAEdge pCurrEdge) {
return !pCurrEdge.getEdgeType().equals(CFAEdgeType.BlankEdge);
}
private boolean doesNotAppearInConstraints(
final SymbolicValue pValue,
final ConstraintsState pConstraints
) {
Collection<SymbolicIdentifier> identifiersInValue =
SymbolicValues.getContainedSymbolicIdentifiers(pValue);
Collection<SymbolicIdentifier> identifiersInConstraints =
SymbolicValues.getContainedSymbolicIdentifiers(pConstraints);
return containsAnyOf(identifiersInConstraints, identifiersInValue);
}
private boolean isSimpleSymbolicValue(final SymbolicValue pValue) {
return pValue instanceof SymbolicIdentifier || pValue instanceof ConstantSymbolicExpression
|| pValue instanceof Constraint;
}
private boolean isIndependentInValueState(
final SymbolicValue pValue,
final MemoryLocation pMemLoc,
final ValueAnalysisState pState
) {
ValueAnalysisState stateWithoutValue = ValueAnalysisState.copyOf(pState);
stateWithoutValue.forget(pMemLoc);
Collection<SymbolicIdentifier> identifiersInValue =
SymbolicValues.getContainedSymbolicIdentifiers(pValue);
Collection<SymbolicIdentifier> identifiersInState = getIdentifiersInState(pState);
return !containsAnyOf(identifiersInState, identifiersInValue);
}
private boolean containsAnyOf(
final Collection<SymbolicIdentifier> pContainer,
final Collection<SymbolicIdentifier> pSelection
) {
Collection<SymbolicIdentifier> smallerCollection;
Collection<SymbolicIdentifier> biggerCollection;
if (pContainer.size() <= pSelection.size()) {
smallerCollection = pContainer;
biggerCollection = pSelection;
} else {
smallerCollection = pSelection;
biggerCollection = pContainer;
}
for (SymbolicIdentifier i : smallerCollection) {
if (biggerCollection.contains(i)) {
return true;
}
}
return false;
}
private Collection<SymbolicIdentifier> getIdentifiersInState(final ValueAnalysisState pState) {
Collection<SymbolicIdentifier> ret = new HashSet<>();
for (Value v : pState.getConstantsMapView().values()) {
if (v instanceof SymbolicValue) {
ret.addAll(SymbolicValues.getContainedSymbolicIdentifiers((SymbolicValue) v));
}
}
return ret;
}
@Override
public void printStatistics(PrintStream out, Result result, ReachedSet reached) {
out.println("Total time for strengthening by ConstraintsCPA: " + totalTime);
out.println("Replaced symbolic expressions: " + replacedSymbolicExpressions);
}
@Nullable
@Override
public String getName() {
return ConstraintsStrengthenOperator.this.getClass().getSimpleName();
}
}
|
amichard/tfrs
|
frontend/src/admin/compliance_reporting/DefaultCarbonIntensityDetailContainer.js
|
/*
* Container component
* All data handling & manipulation should be handled here.
*/
import React, { Component } from 'react';
import { connect } from 'react-redux';
import PropTypes from 'prop-types';
import { Tab, Tabs } from 'react-bootstrap';
import { defaultCarbonIntensities } from '../../actions/defaultCarbonIntensities';
import Loading from '../../app/components/Loading';
import PastAndFutureValuesTable from './components/PastAndFutureValuesTable';
import CarbonIntensityDetails from './components/CarbonIntensityDetails';
import CREDIT_CALCULATIONS from '../../constants/routes/CreditCalculations';
// Detail view for one default carbon intensity record: fetches it by the route
// id on mount and renders "Current" / "Past And Future" tabs once loaded.
class DefaultCarbonIntensityDetailContainer extends Component {
  constructor (props) {
    super(props);
    // No local state is used yet; kept for future extension.
    this.state = {
    };
  }
  componentDidMount () {
    // Load the record identified by the route parameter.
    this.props.getDefaultCarbonIntensity(this.props.match.params.id);
  }
  render () {
    const { item, isFetching, success } = this.props.defaultCarbonIntensity;
    // Render the tabs only after the fetch completed successfully.
    if (success && !isFetching && item) {
      return (
        <Tabs defaultActiveKey="details" id="citabs">
          <Tab eventKey="details" title="Current">
            <CarbonIntensityDetails
              editUrl={CREDIT_CALCULATIONS.DEFAULT_CARBON_INTENSITIES_EDIT}
              item={item}
              loggedInUser={this.props.loggedInUser}
              title="Default Carbon Intensity Details"
            />
          </Tab>
          <Tab eventKey="allValues" title="Past And Future">
            <h1>Past and Future Values</h1>
            <PastAndFutureValuesTable
              items={item.allValues}
              includeLimit
            />
          </Tab>
        </Tabs>
      );
    }
    // Still fetching (or failed): show the shared loading indicator.
    return <Loading />;
  }
}
// No defaults needed yet; declared for consistency with sibling containers.
DefaultCarbonIntensityDetailContainer.defaultProps = {
};
DefaultCarbonIntensityDetailContainer.propTypes = {
  defaultCarbonIntensity: PropTypes.shape({
    isFetching: PropTypes.bool,
    item: PropTypes.shape(),
    success: PropTypes.bool
  }).isRequired,
  getDefaultCarbonIntensity: PropTypes.func.isRequired,
  loggedInUser: PropTypes.shape().isRequired,
  match: PropTypes.shape({
    params: PropTypes.shape({
      id: PropTypes.string.isRequired
    }).isRequired
  }).isRequired
};
// Maps the defaultCarbonIntensities slice of the store onto this container's props.
const mapStateToProps = state => ({
  defaultCarbonIntensity: {
    isFetching: state.rootReducer.defaultCarbonIntensities.isGetting,
    item: state.rootReducer.defaultCarbonIntensities.item,
    success: state.rootReducer.defaultCarbonIntensities.success
  },
  loggedInUser: state.rootReducer.userRequest.loggedInUser
});
const mapDispatchToProps = {
  getDefaultCarbonIntensity: defaultCarbonIntensities.get
};
export default connect(mapStateToProps, mapDispatchToProps)(DefaultCarbonIntensityDetailContainer);
|
PhamKien100398/react_client
|
client/src/service/user/question/questionAction.js
|
import * as QT from './questionType'
import axios from 'axios'
// Thunk action creator: fetches the questions of exam `id` from the API,
// dispatching request/success/failure actions around the call. The JWT is read
// from localStorage and sent as a Bearer token.
// NOTE(review): failures are swallowed — failse_question receives no error
// details; confirm whether the error should be surfaced.
export const getExamQuestion = (id)=>{
    return async dispatch =>{
        const token = localStorage.getItem("jwtToken")
        const AuthStr = 'Bearer ' + token;
        dispatch(request_exam_question());
        await axios.get("http://localhost:8082/api/exams/"+id+"/questions", {
            headers : {
                'Authorization': AuthStr
            }
        })
        .then((res)=>{
            dispatch(success_question(res.data));
        })
        .catch((error) =>{
            dispatch(failse_question());
        })
    }
}
// Signals that the exam-question request has been started.
const request_exam_question = () => ({
    type : QT.REQUEST_EXAM_QUESTION
})
// Wraps the fetched question payload in a success action.
const success_question = (user) => ({
    type: QT.SUCCESS_QUESTION,
    payload: user
})
// Signals that the request failed; the payload resets the question list.
// (The misspelled name is kept: it is referenced by getExamQuestion above.)
const failse_question = () => ({
    type: QT.FALSE_QUESTION,
    payload: []
})
|
learningtcc/dubbox
|
dubbo-solr/dubbo-solr-api/src/main/java/com/frank/search/transmitData/Request.java
|
package com.frank.search.transmitData;
import java.io.Serializable;
/**
 * A generic, cloneable, serializable request envelope carrying a typed payload
 * plus trace-id and source metadata.
 *
 * Created with IntelliJ IDEA.
 * User: frank
 * Date: 15-6-18
 * Time: 7:14 PM
 * To change this template use File | Settings | File Templates.
 */
public interface Request<T> extends Cloneable, Serializable {
    /**
     * Returns the trace id used to correlate this request across services.
     * @return the trace id
     */
    String getTraceId();
    /**
     * Returns the payload as the concrete type indicated by the given class.
     *
     * @param c the expected payload class
     * @return the payload
     */
    public T getContent(Class<?> c);
    /**
     * Returns the raw payload of this request.
     * @return the payload
     */
    public T getContent();
    /**
     * Sets the payload carried by this request.
     * @param content the payload
     */
    public void setContent(T content);
    /**
     * Sets the trace id.
     * @param traceId the trace id to use
     */
    public void setTraceId(String traceId);
    /**
     * Sets the source (originating system) of this request.
     * @param source the source identifier
     */
    public void setSource(String source);
    /**
     * Returns the source (originating system) of this request.
     * @return the source identifier
     */
    public String getSource();
}
|
jiri-filipovic/KTT
|
source/compute_engine/opencl/opencl_buffer.h
|
#pragma once
#include <algorithm>
#include <cstring>
#include <memory>
#include <stdexcept>
#include <string>
#include <vector>
#include "CL/cl.h"
#include "opencl_event.h"
#include "opencl_utility.h"
#include "kernel_argument/kernel_argument.h"
#include "enum/argument_access_type.h"
#include "enum/argument_data_type.h"
#include "enum/argument_memory_location.h"
namespace ktt
{
// RAII wrapper around a cl_mem object backing one kernel argument.
// Host-resident arguments get either pinned memory (CL_MEM_ALLOC_HOST_PTR) or
// zero-copy over the argument's own storage (CL_MEM_USE_HOST_PTR).
class OpenCLBuffer
{
public:
    explicit OpenCLBuffer(const cl_context context, KernelArgument& kernelArgument, const bool zeroCopy) :
        context(context),
        kernelArgumentId(kernelArgument.getId()),
        bufferSize(kernelArgument.getDataSizeInBytes()),
        elementSize(kernelArgument.getElementSizeInBytes()),
        dataType(kernelArgument.getDataType()),
        memoryLocation(kernelArgument.getMemoryLocation()),
        accessType(kernelArgument.getAccessType()),
        openclMemoryFlag(getOpenCLMemoryType(accessType)),
        hostPointer(nullptr),
        zeroCopy(zeroCopy)
    {
        if (memoryLocation == ArgumentMemoryLocation::Host)
        {
            if (!zeroCopy)
            {
                openclMemoryFlag = openclMemoryFlag | CL_MEM_ALLOC_HOST_PTR;
            }
            else
            {
                // zero-copy: the buffer aliases the kernel argument's own data
                openclMemoryFlag = openclMemoryFlag | CL_MEM_USE_HOST_PTR;
                hostPointer = kernelArgument.getData();
            }
        }

        cl_int result;
        buffer = clCreateBuffer(context, openclMemoryFlag, bufferSize, hostPointer, &result);
        checkOpenCLError(result, "clCreateBuffer");
    }

    ~OpenCLBuffer()
    {
        checkOpenCLError(clReleaseMemObject(buffer), "clReleaseMemObject");
    }

    // The class owns a raw cl_mem handle; a compiler-generated copy would lead to a
    // double clReleaseMemObject in the destructor, so copying and moving are disabled.
    OpenCLBuffer(const OpenCLBuffer&) = delete;
    OpenCLBuffer& operator=(const OpenCLBuffer&) = delete;
    OpenCLBuffer(OpenCLBuffer&&) = delete;
    OpenCLBuffer& operator=(OpenCLBuffer&&) = delete;

    // Reallocates the buffer to newBufferSize, optionally copying the old contents over.
    // Not available for zero-copy buffers, whose storage belongs to the kernel argument.
    void resize(cl_command_queue queue, const size_t newBufferSize, const bool preserveData)
    {
        if (zeroCopy)
        {
            throw std::runtime_error("Cannot resize buffer with CL_MEM_USE_HOST_PTR flag");
        }

        if (bufferSize == newBufferSize)
        {
            return;
        }

        if (!preserveData)
        {
            checkOpenCLError(clReleaseMemObject(buffer), "clReleaseMemObject");
            cl_int result;
            buffer = clCreateBuffer(context, openclMemoryFlag, newBufferSize, hostPointer, &result);
            checkOpenCLError(result, "clCreateBuffer");
        }
        else
        {
            // device-side copy into a fresh buffer, then swap handles
            cl_mem newBuffer;
            cl_int result;
            auto event = std::make_unique<OpenCLEvent>(0, true);
            newBuffer = clCreateBuffer(context, openclMemoryFlag, newBufferSize, hostPointer, &result);
            checkOpenCLError(result, "clCreateBuffer");
            result = clEnqueueCopyBuffer(queue, buffer, newBuffer, 0, 0, std::min(bufferSize, newBufferSize), 0, nullptr, event->getEvent());
            checkOpenCLError(result, "clEnqueueCopyBuffer");
            event->setReleaseFlag();
            checkOpenCLError(clWaitForEvents(1, event->getEvent()), "clWaitForEvents");
            checkOpenCLError(clReleaseMemObject(buffer), "clReleaseMemObject");
            buffer = newBuffer;
        }

        bufferSize = newBufferSize;
    }

    // Copies dataSize bytes from host memory into the buffer, growing it if needed.
    // With a null recordingEvent the transfer is blocking; otherwise asynchronous.
    void uploadData(cl_command_queue queue, const void* source, const size_t dataSize, cl_event* recordingEvent)
    {
        if (bufferSize < dataSize)
        {
            resize(queue, dataSize, false);
        }

        if (memoryLocation == ArgumentMemoryLocation::Device)
        {
            if (recordingEvent == nullptr)
            {
                cl_int result = clEnqueueWriteBuffer(queue, buffer, CL_TRUE, 0, dataSize, source, 0, nullptr, nullptr);
                checkOpenCLError(result, "clEnqueueWriteBuffer");
            }
            else
            {
                cl_int result = clEnqueueWriteBuffer(queue, buffer, CL_FALSE, 0, dataSize, source, 0, nullptr, recordingEvent);
                checkOpenCLError(result, "clEnqueueWriteBuffer");
            }
        }
        else
        {
            // Asynchronous buffer operations on mapped memory are currently not supported
            cl_int result;
            void* destination = clEnqueueMapBuffer(queue, buffer, CL_TRUE, CL_MAP_WRITE, 0, dataSize, 0, nullptr, nullptr, &result);
            checkOpenCLError(result, "clEnqueueMapBuffer");
            std::memcpy(destination, source, dataSize);
            checkOpenCLError(clEnqueueUnmapMemObject(queue, buffer, destination, 0, nullptr, recordingEvent), "clEnqueueUnmapMemObject");
        }
    }

    // Device-to-device copy from another cl_mem into this buffer, growing it if needed.
    void uploadData(cl_command_queue queue, const cl_mem source, const size_t dataSize, cl_event* recordingEvent)
    {
        if (bufferSize < dataSize)
        {
            resize(queue, dataSize, false);
        }

        if (recordingEvent == nullptr)
        {
            throw std::runtime_error("Recording event for buffer copying operation cannot be null");
        }

        cl_int result = clEnqueueCopyBuffer(queue, source, buffer, 0, 0, dataSize, 0, nullptr, recordingEvent);
        checkOpenCLError(result, "clEnqueueCopyBuffer");
    }

    // Copies dataSize bytes from the buffer into host memory.
    // With a null recordingEvent the transfer is blocking; otherwise asynchronous.
    void downloadData(cl_command_queue queue, void* destination, const size_t dataSize, cl_event* recordingEvent) const
    {
        if (bufferSize < dataSize)
        {
            throw std::runtime_error("Size of data to download is larger than size of buffer");
        }

        if (memoryLocation == ArgumentMemoryLocation::Device)
        {
            if (recordingEvent == nullptr)
            {
                cl_int result = clEnqueueReadBuffer(queue, buffer, CL_TRUE, 0, dataSize, destination, 0, nullptr, nullptr);
                checkOpenCLError(result, "clEnqueueReadBuffer");
            }
            else
            {
                cl_int result = clEnqueueReadBuffer(queue, buffer, CL_FALSE, 0, dataSize, destination, 0, nullptr, recordingEvent);
                checkOpenCLError(result, "clEnqueueReadBuffer");
            }
        }
        else
        {
            // Asynchronous buffer operations on mapped memory are currently not supported
            cl_int result;
            void* source = clEnqueueMapBuffer(queue, buffer, CL_TRUE, CL_MAP_READ, 0, dataSize, 0, nullptr, nullptr, &result);
            checkOpenCLError(result, "clEnqueueMapBuffer");
            std::memcpy(destination, source, dataSize);
            checkOpenCLError(clEnqueueUnmapMemObject(queue, buffer, source, 0, nullptr, recordingEvent), "clEnqueueUnmapMemObject");
        }
    }

    cl_context getContext() const
    {
        return context;
    }

    ArgumentId getKernelArgumentId() const
    {
        return kernelArgumentId;
    }

    size_t getBufferSize() const
    {
        return bufferSize;
    }

    size_t getElementSize() const
    {
        return elementSize;
    }

    ArgumentDataType getDataType() const
    {
        return dataType;
    }

    ArgumentMemoryLocation getMemoryLocation() const
    {
        return memoryLocation;
    }

    ArgumentAccessType getAccessType() const
    {
        return accessType;
    }

    cl_mem_flags getOpenclMemoryFlag() const
    {
        return openclMemoryFlag;
    }

    cl_mem getBuffer() const
    {
        return buffer;
    }

private:
    cl_context context;
    ArgumentId kernelArgumentId;
    size_t bufferSize;
    size_t elementSize;
    ArgumentDataType dataType;
    ArgumentMemoryLocation memoryLocation;
    ArgumentAccessType accessType;
    cl_mem_flags openclMemoryFlag;
    cl_mem buffer;
    void* hostPointer;
    bool zeroCopy;
};
} // namespace ktt
|
IIiys/zadig
|
pkg/microservice/reaper/core/service/reaper/jmeter.go
|
<gh_stars>0
/*
Copyright 2021 The KodeRover Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package reaper
import (
"encoding/csv"
"fmt"
"io/ioutil"
"os"
"path"
"path/filepath"
"sort"
"github.com/koderover/zadig/pkg/tool/log"
)
// JmeterTestResults copies the contents (header plus all rows) of the first
// well-formed JMeter CSV result found in testResultPath into
// testUploadPath/testResultFile.
//
// Candidates are visited oldest-first (sorted by mtime); the first .csv whose
// header has the 11 columns of a JMeter result file is copied and the search
// stops. Only an empty testResultPath is an error; per-file problems are
// logged and the next candidate is tried.
func JmeterTestResults(testResultFile, testResultPath, testUploadPath string) error {
	if len(testResultPath) == 0 {
		return nil
	}

	files, err := ioutil.ReadDir(testResultPath)
	if err != nil || len(files) == 0 {
		return fmt.Errorf("test result files not found in path %s", testResultPath)
	}

	// sort and process csv files by modified time
	sort.SliceStable(files, func(i, j int) bool {
		return files[i].ModTime().Before(files[j].ModTime())
	})

	for _, file := range files {
		if filepath.Ext(file.Name()) != ".csv" {
			continue
		}
		filePath := path.Join(testResultPath, file.Name())
		log.Infof("name %s mod time: %v", file.Name(), file.ModTime())

		// Each candidate is handled by a helper so its deferred Closes run per
		// file; the previous code deferred inside the loop, keeping every
		// opened handle alive until the function returned.
		if copyJmeterCsv(filePath, path.Join(testUploadPath, testResultFile)) {
			break
		}
	}

	log.Infof("performance test results files %s succeeded", testResultFile)
	return nil
}

// copyJmeterCsv copies the candidate JMeter CSV at src to dst. It reports true
// when src was a valid 11-column result file and the copy succeeded; any
// problem is logged and reported as false so the caller can try the next file.
func copyJmeterCsv(src, dst string) bool {
	csvFileReader, err := os.Open(src)
	if err != nil {
		log.Warningf("open file [%s], error: %v", src, err)
		return false
	}
	defer csvFileReader.Close()

	csvReader := csv.NewReader(csvFileReader)
	row, err := csvReader.Read()
	if err != nil {
		log.Warningf("Read file [%s] first row, error: %v", src, err)
		return false
	}
	// JMeter result CSVs have exactly 11 columns; skip anything else.
	// (The old log call passed arguments without format verbs — a go vet error.)
	if len(row) != 11 {
		log.Warningf("csv file [%s] type match error", src)
		return false
	}

	csvFileWrite, err := os.Create(dst)
	if err != nil {
		log.Warningf("write file [%s], error: %v", dst, err)
		return false
	}
	defer csvFileWrite.Close()

	csvWriter := csv.NewWriter(csvFileWrite)
	if err = csvWriter.Write(row); err != nil {
		log.Warningf("write file [%s] first row, error: %v", dst, err)
		return false
	}
	rows, err := csvReader.ReadAll()
	if err != nil {
		log.Warningf("Read file [%s], error: %v", src, err)
		return false
	}
	if err = csvWriter.WriteAll(rows); err != nil {
		log.Warningf("write file [%s] all other row, error: %v", dst, err)
		return false
	}
	csvWriter.Flush()
	return true
}
|
yassinedoghri/marvel-jarvig
|
src/components/FlexSection.js
|
<reponame>yassinedoghri/marvel-jarvig
import styled from "styled-components";
import media from "utils/media";
// Responsive flex-column page section: capped at 90em wide, with horizontal
// padding taken from the theme (`lg` by default, widened to `xl` on desktop
// via the `media.desktop` template helper).
const FlexSection = styled.section`
  display: flex;
  flex-direction: column;
  padding: 0 ${props => props.theme.spaces.lg}em;
  max-width: 90em;

  ${media.desktop`
    padding: 0 ${props => props.theme.spaces.xl}em;
  `};
`;

export default FlexSection;
|
Fresher001/Competitive-Programming-2
|
Infoarena/Concurs.cpp
|
<gh_stars>10-100
#include <bits/stdc++.h>
using namespace std;
// Limits: up to ~32k nodes and 5e5 queries.
const int Nmax = 32e3 + 1;
const int Mmax = 5e5 + 1;

// Adjacency list of the rooted tree (edges stored parent -> child).
vector <int> G[Nmax];
// father/size/depth: rooted-tree data filled by DFS; vis marks nodes that
// have an incoming edge (i.e. non-roots); value holds each node's score.
int father[Nmax], size[Nmax], depth[Nmax], vis[Nmax], value[Nmax];
// Heavy-path decomposition bookkeeping: each node's path id and position in
// that path, plus per-path head node and length.
int start_node[Nmax], pos_in_path[Nmax], length_path[Nmax], path[Nmax];
int N, M, NumberPaths;
// Rooted DFS: fills father/depth/size and assigns each node to a heavy path.
// A node joins the path of its heaviest (largest-subtree) child; a leaf
// opens a brand-new path. father[x] == 0 doubles as "not yet visited"
// (the root has father[root] = root set before the call).
void DFS( int node )
{
    int hson = 0;  // heaviest child so far; 0 means none (size[0] == 0)
    size[node] = 1;
    for ( auto x: G[node] )
    {
        if ( father[x] == 0 )
        {
            father[x] = node;
            depth[x] = depth[node] + 1;
            DFS( x );
            size[node] += size[x];
            if ( size[x] > size[hson] )
                hson = x;
        }
    }
    if ( hson == 0 )
        path[node] = NumberPaths++;   // leaf: start a new path
    else
        path[node] = path[hson];      // continue the heavy child's path
    // Position within the path, counted bottom-up for now (flipped later
    // in build_hpd so 0 is the end nearest the root).
    pos_in_path[node] = length_path[ path[node] ]++;
}
// Builds the heavy-path decomposition: finds the root (the only node without
// an incoming edge), runs the DFS, then flips each node's path position so
// that pos_in_path == 0 identifies the path's head (closest to the root).
void build_hpd()
{
    int root = 0;
    for ( int i = 1; i <= N; ++i )
        if ( !vis[i] )
            root = i;
    father[root] = root;  // sentinel: makes the root look "visited" to DFS
    depth[root] = 0;
    DFS( root );
    for ( int i = 1; i <= N; ++i )
    {
        // Reverse positions: DFS numbered them from the leaf end upward.
        pos_in_path[i] = length_path[ path[i] ] - pos_in_path[i] - 1;
        if ( pos_in_path[i] == 0 )
            start_node[ path[i] ] = i;
    }
}
// Lowest common ancestor via heavy paths: while x and y sit on different
// paths, climb from the one whose path head is deeper to that head's parent.
// Once they share a path, the one closer to the head (smaller pos_in_path)
// is the LCA.
int lca( int x, int y )
{
    while ( path[x] != path[y] )
    {
        if ( depth[ start_node[ path[x] ] ] < depth[ start_node[ path[y] ] ] )
            y = father[ start_node[ path[y] ] ];
        else
            x = father[ start_node[ path[x] ] ];
    }
    return pos_in_path[x] < pos_in_path[y] ? x : y;
}
// Reads the tree and M queries from concurs.in. For each query (x, y) the
// score is value[lca(x, y)]; prints the maximum score and, among queries
// achieving it, the lexicographically smallest (x, y) pair.
int main()
{
    ifstream in("concurs.in");
    ofstream out("concurs.out");
    in.sync_with_stdio( false );

    in >> N >> M;
    for ( int i = 1; i <= N; ++i )
        in >> value[i];
    // N-1 directed edges, parent -> child; vis marks nodes with a parent so
    // build_hpd can identify the root.
    for ( int i = 1, a, b; i < N; ++i )
    {
        in >> a >> b;
        G[a].push_back( b );
        vis[b] = 1;
    }
    build_hpd();

    int minx = 1e9, miny = 1e9, ans = -1e9;
    for ( int i = 1, x, y; i <= M; ++i )
    {
        in >> x >> y;
        int L = lca( x, y );
        // Track the best value; ties broken by smaller x, then smaller y.
        if ( value[L] > ans )
        {
            ans = value[L];
            minx = x;
            miny = y;
        }
        else
        {
            if ( value[L] == ans )
            {
                if ( x < minx )
                {
                    minx = x;
                    miny = y;
                }
                else
                {
                    if ( x == minx )
                    {
                        if ( y < miny )
                            miny = y;
                    }
                }
            }
        }
    }
    out << ans << " " << minx << " " << miny << "\n";
    return 0;
}
|
jforge/vaadin
|
uitest/src/main/java/com/vaadin/tests/layouts/gridlayout/GridLayoutWidthChange.java
|
package com.vaadin.tests.layouts.gridlayout;
import com.vaadin.server.VaadinRequest;
import com.vaadin.tests.components.AbstractReindeerTestUI;
import com.vaadin.ui.Button;
import com.vaadin.ui.CustomComponent;
import com.vaadin.ui.GridLayout;
import com.vaadin.ui.NativeButton;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.v7.ui.TextField;
/**
 * UI test for GridLayout resize behaviour: a 100%-wide GridLayout inside a
 * fixed-width CustomComponent whose width can be shrunk with a button.
 */
public class GridLayoutWidthChange extends AbstractReindeerTestUI {

    /**
     * Builds the 3x2 test layout: a tall button spanning both rows of the
     * first column and a big button in the bottom-right cell.
     */
    private GridLayout generateLayout() {
        VerticalLayout fields1 = new VerticalLayout();
        fields1.setMargin(false);
        fields1.setSpacing(false);

        NativeButton nb = new NativeButton("A button");
        nb.setHeight("300px");
        fields1.addComponent(nb);

        // NOTE: a second VerticalLayout ("fields3") used to be built here but
        // was never added to the layout — removed as dead code.

        NativeButton b = new NativeButton("A big button");
        b.setWidth("200px");
        b.setHeight("200px");

        GridLayout layout = new GridLayout(3, 2);
        layout.setHideEmptyRowsAndColumns(true);
        layout.setWidth("100%");
        // fields1 occupies column 0, spanning rows 0..1.
        layout.addComponent(fields1, 0, 0, 0, 1);
        layout.addComponent(b, 2, 1);

        return layout;
    }

    @Override
    protected void setup(VaadinRequest request) {
        final GridLayout layout1 = generateLayout();
        final CustomComponent cc = new CustomComponent(layout1);
        cc.setWidth("500px");
        addComponent(cc);

        // Each click narrows the wrapper by 100px to exercise relayout.
        Button testButton = new Button("Reduce GridLayout parent width",
                event -> cc.setWidth((cc.getWidth() - 100) + "px"));
        addComponent(testButton);
    }

    @Override
    protected String getTestDescription() {
        return "A 100% wide GridLayout is wrapped inside a CustomComponent. When the width of the CustomComponent is reduced, the size of the GridLayout should be reduced accordingly. The Buttons should stay in place vertically and just move closer to each other horizontally.";
    }

    @Override
    protected Integer getTicketNumber() {
        // TODO Auto-generated method stub
        return null;
    }
}
|
Masmayer/nominal
|
client/src/main/java/common/agreement/Agreement.java
|
/*
Nominal Application
Agreement
*/
package common.agreement;
import common.NominalMaster;
import common.NominalObject;
import java.sql.Timestamp;
import java.util.ArrayList;
/**
 * Data object representing an agreement together with its quotation groups,
 * category groups, salaries and antiquities. Plain holder: all state is set
 * through the constructors and exposed via getters only.
 */
public class Agreement extends NominalObject {

    // IDENTIFIER
    protected int id;                 // unique agreement id
    protected Timestamp lastUpdated;  // moment of the last modification
    protected String name;            // human-readable agreement name

    // QUOTATION GROUPS
    protected ArrayList<Quotation> quotations;

    // CATEGORY GROUPS
    protected ArrayList<Category> categories;

    // SALARIES
    protected ArrayList<Salary> salaries;

    // ANTIQUITY
    protected ArrayList<Antiquity> antiquities;

    /** No-arg constructor; leaves every field at its default value. */
    public Agreement() {

    }

    /**
     * Full constructor.
     *
     * @param id agreement identifier
     * @param lastUpdated date of last update
     * @param name agreement name
     * @param quotations agreement quotations
     * @param categories agreement categories
     * @param salaries agreement salaries
     * @param antiquities agreement antiquities
     */
    public Agreement(int id, Timestamp lastUpdated, String name, ArrayList<Quotation> quotations, ArrayList<Category> categories, ArrayList<Salary> salaries, ArrayList<Antiquity> antiquities) {
        this.id = id;
        this.lastUpdated = lastUpdated;
        this.name = name;
        this.quotations = quotations;
        this.categories = categories;
        this.salaries = salaries;
        this.antiquities = antiquities;
    }

    // GETTERS

    @Override
    public int getId() {
        return id;
    }

    public Timestamp getLastUpdated() {
        return lastUpdated;
    }

    public String getName() {
        return name;
    }

    public ArrayList<Quotation> getQuotations() {
        return quotations;
    }

    public ArrayList<Category> getCategories() {
        return categories;
    }

    public ArrayList<Salary> getSalaries() {
        return salaries;
    }

    public ArrayList<Antiquity> getAntiquities() {
        return antiquities;
    }
}
|
Wadjetz/innovalys-social-network
|
client/user/UsersApi.js
|
<filename>client/user/UsersApi.js
import Qajax from 'qajax';
import { BASE_URL } from '../conf';
/**
* Users Service who use server API for users
*/
/**
 * Users Service who use server API for users
 */
class UsersApi {
  /**
   * Constructor
   * @param {string} url Api Base Url
   */
  constructor(url) {
    this.url = url;
  }

  /**
   * Get all users
   * @return {promise} List of users
   */
  getAll() {
    return Qajax({
      url: `${this.url}/users`,
      method: 'GET'
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Get me
   * @return {promise} User object
   */
  me() {
    return Qajax({
      url: `${this.url}/users/me`,
      method: 'GET'
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Get user profil
   * @param {number} id User id
   * @return {promise} User object
   */
  getProfil(id) {
    return Qajax({
      url: `${this.url}/users/profil/${id}`,
      method: 'GET'
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Get user roles
   * @return {promise} List of user roles
   */
  roles() {
    return Qajax({
      url: `${this.url}/users/roles`,
      method: 'GET'
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Create new user
   * @param {User} user User object
   * @return {promise} Result
   */
  create(user) {
    return Qajax({
      url: `${this.url}/users/signup`,
      method: 'POST',
      data: user
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Login
   * @param {Login} user Login object
   * @return {promise} Result
   */
  login(user) {
    return Qajax({
      url: `${this.url}/users/login`,
      method: 'POST',
      data: user
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Get all users.
   * Kept for backward compatibility; identical to getAll(), so it now
   * simply delegates instead of duplicating the request code.
   * @return {promise} List of users
   */
  getAllUsers() {
    return this.getAll();
  }

  /**
   * Change password
   * @param {string} currentPassword Current password
   * @param {string} newPassword New password
   * @return {promise} Result
   */
  changePassword(currentPassword, newPassword) {
    return Qajax({
      url: `${this.url}/users/password`,
      method: 'PUT',
      data: {
        // Fixed: these fields previously held invalid `<PASSWORD>`
        // placeholders (a syntax error); send the method arguments.
        current_password: currentPassword,
        new_password: newPassword
      }
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Get user by id
   * @param {number} id User id
   * @return {promise} User
   */
  getOneById(id) {
    return Qajax({
      url: `${this.url}/users/${id}`,
      method: 'GET'
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Delete user
   * @param {number} id User id
   * @return {promise} Result
   */
  delete(id) {
    return Qajax({
      url: `${this.url}/users/${id}`,
      method: 'DELETE'
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }

  /**
   * Update user
   * @param {number} id User id
   * @param {User} user User object
   * @return {promise} Updated User
   */
  update(id, user) {
    return Qajax({
      url: `${this.url}/users/${id}`,
      method: 'PUT',
      data: user
    })
      .then(Qajax.filterSuccess)
      .then(Qajax.toJSON);
  }
}

export default new UsersApi(BASE_URL);
|
ishine/neural_sp
|
neural_sp/models/modules/mocha/mocha_test.py
|
<gh_stars>100-1000
# Copyright 2021 Kyoto University (<NAME>)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""Chunkwise attention in MoChA at test time."""
import logging
import numpy as np
import torch
logger = logging.getLogger(__name__)
def hard_chunkwise_attention(alpha, u, mask, chunk_size, H_ca,
sharpening_factor, share_chunkwise_attention):
"""Chunkwise attention in MoChA at test time.
Args:
alpha (FloatTensor): `[B, H_ma, qlen, klen]`
u (FloatTensor): `[B, (H_ma*)H_ca, qlen, klen]`
mask (ByteTensor): `[B, qlen, klen]`
chunk_size (int): window size for chunkwise attention
H_ca (int): number of chunkwise attention heads
sharpening_factor (float): sharping factor for beta calculation
share_chunkwise_attention (int): share CA heads among MA heads
Returns:
beta (FloatTensor): `[B, H_ma * H_ca, qlen, klen]`
"""
bs, H_ma, qlen, klen = alpha.size()
assert (u.size(2) == qlen) and (u.size(3) == klen), (u.size(), alpha.size())
alpha = alpha.unsqueeze(2) # `[B, H_ma, 1, qlen, klen]`
u = u.unsqueeze(1) # `[B, 1, (H_ma*)H_ca, qlen, klen]`
if H_ca > 1:
alpha = alpha.repeat([1, 1, H_ca, 1, 1])
if H_ma > 1:
if share_chunkwise_attention:
u = u.repeat([1, H_ma, 1, 1, 1])
else:
u = u.view(bs, H_ma, H_ca, qlen, klen)
mask = alpha.clone().byte() # `[B, H_ma, H_ca, qlen, klen]`
for b in range(bs):
for h in range(H_ma):
if alpha[b, h, 0, 0].sum() > 0:
boundary = alpha[b, h, 0, 0].nonzero()[:, -1].min().item()
if chunk_size == -1:
# infinite lookback attention
mask[b, h, :, 0, 0:boundary + 1] = 1
else:
mask[b, h, :, 0, max(0, boundary - chunk_size + 1):boundary + 1] = 1
NEG_INF = float(np.finfo(torch.tensor(0, dtype=u.dtype).numpy().dtype).min)
u = u.masked_fill(mask == 0, NEG_INF)
beta = torch.softmax(u, dim=-1)
return beta.view(bs, -1, qlen, klen)
|
reels-research/iOS-Private-Frameworks
|
ITMLKit.framework/IKCSSDeclarationList.h
|
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/ITMLKit.framework/ITMLKit
*/
// Ordered list of CSS declarations plus an associated range.
// NOTE(review): presumably _range is the source-text range the list was
// parsed from — confirm against the parser that sets it.
@interface IKCSSDeclarationList : NSObject <NSCopying> {
    IKMutableArray * _declarations;   // backing storage for `declarations`
    struct _NSRange {
        unsigned long long location;
        unsigned long long length;
    } _range;
}

@property (nonatomic, readonly) IKMutableArray *declarations;
@property (nonatomic) struct _NSRange { unsigned long long x1; unsigned long long x2; } range;

- (void).cxx_destruct;
// Appends a declaration to the end of the list.
- (void)addDeclaration:(id)arg1;
- (id)copyWithZone:(struct _NSZone { }*)arg1;
// Number of declarations currently held.
- (unsigned long long)count;
- (id)declarationAtIndex:(unsigned long long)arg1;
- (id)declarations;
- (id)description;
- (id)firstDeclaration;
- (id)init;
- (id)lastDeclaration;
- (struct _NSRange { unsigned long long x1; unsigned long long x2; })range;
- (void)setRange:(struct _NSRange { unsigned long long x1; unsigned long long x2; })arg1;

@end
|
phunt/flume
|
src/java/com/cloudera/flume/master/MemoryBackedConfigStore.java
|
<reponame>phunt/flume
/**
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.flume.master;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import com.cloudera.flume.conf.thrift.FlumeConfigData;
import com.cloudera.util.Clock;
import com.google.common.base.Preconditions;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
/**
* Simple config store that doesn't persist or do anything fancy.
*
* This is is not thread safe.
*/
/**
 * Simple config store that doesn't persist or do anything fancy.
 *
 * This is not thread safe.
 */
public class MemoryBackedConfigStore extends ConfigStore {

  /** Host name -> configuration for that host. */
  final Map<String, FlumeConfigData> cfgs = new HashMap<String, FlumeConfigData>();

  /**
   * Returns the configuration for the given host, or null when none is set.
   */
  @Override
  public FlumeConfigData getConfig(String host) {
    // Single map lookup; Map.get already returns null for absent keys,
    // so the previous containsKey + get double lookup was redundant.
    return cfgs.get(host);
  }

  /**
   * Stores a configuration for a host. All arguments must be non-null.
   */
  @Override
  public void setConfig(String host, String flowid, String source, String sink)
      throws IOException {
    Preconditions.checkArgument(host != null,
        "Attempted to set config but missing host name!");
    Preconditions.checkArgument(flowid != null, "Attempted to set config "
        + host + " but missing flowid!");
    Preconditions.checkArgument(source != null, "Attempted to set config "
        + host + " but missing source!");
    Preconditions.checkArgument(sink != null, "Attempted to set config " + host
        + " but missing sink");
    long time = Clock.unixTime();
    cfgs.put(host, new FlumeConfigData(time, source, sink, time, time, flowid));
  }

  /**
   * Returns an unmodifiable view of all host configurations.
   */
  @Override
  public Map<String, FlumeConfigData> getConfigs() {
    return Collections.unmodifiableMap(cfgs);
  }

  // physnode to logicalNode
  final ListMultimap<String, String> nodeMap = ArrayListMultimap
      .<String, String> create();

  /**
   * Maps a logical node onto a physical node; duplicates are ignored.
   */
  public void addLogicalNode(String physNode, String logicNode) {
    if (nodeMap.containsEntry(physNode, logicNode)) {
      // already present.
      return;
    }
    nodeMap.put(physNode, logicNode);
  }

  /**
   * Returns an unmodifiable list of the logical nodes on a physical node.
   */
  public List<String> getLogicalNodes(String physNode) {
    return Collections.unmodifiableList(nodeMap.get(physNode));
  }

  @Override
  public Multimap<String, String> getLogicalNodeMap() {
    return Multimaps.unmodifiableListMultimap(nodeMap);
  }

  /**
   * Stores every configuration in the given map via setConfig.
   */
  @Override
  public void bulkSetConfig(Map<String, FlumeConfigData> configs)
      throws IOException {
    for (Entry<String, FlumeConfigData> e : configs.entrySet()) {
      FlumeConfigData f = e.getValue();
      setConfig(e.getKey(), f.getFlowID(), f.getSourceConfig(), f
          .getSinkConfig());
    }
  }

  /**
   * Removes the mapping of physNode to a particular logicalNode
   */
  @Override
  public void removeLogicalNode(String logicNode) {
    cfgs.remove(logicNode);
  }

  /**
   * Remove a logical node from the logical node data flow mapping.
   */
  @Override
  public void unmapLogicalNode(String physNode, String logicNode) {
    nodeMap.remove(physNode, logicNode);
  }

  @Override
  public void init() throws IOException, InterruptedException {
    // Nothing to do here
  }

  @Override
  public void shutdown() throws IOException {
    // Nothing to do here
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void unmapAllLogicalNodes() {
    // this method should be called relatively rarely.
    // Iterate over a snapshot because unmapLogicalNode mutates nodeMap.
    ListMultimap<String, String> clone = ArrayListMultimap.create(nodeMap);

    for (Entry<String, String> e : clone.entries()) {
      // reject removing a logical node named the same thing as
      // the physical node.
      if (e.getKey().equals(e.getValue())) {
        continue;
      }
      unmapLogicalNode(e.getKey(), e.getValue());
    }
  }
}
|
thinkful-ei-narwhal/penny-thoughts-client
|
src/components/ErrorPage/ErrorPage.js
|
<filename>src/components/ErrorPage/ErrorPage.js<gh_stars>1-10
import React, { Component } from 'react';
import {Link} from 'react-router-dom';
export default class ErrorPage extends Component {
state = {error: null};
static getDerivedStateFormError(error) {
return (error)
}
render() {
if (this.state.error) {
return (
<div className="not-found-container">
<p className="error-404">App Error</p>
<div className="penny-not-found"></div>
<p className="error-message">Oops! Something went wrong with the application. Click the button below to go back while I figure out what's wrong.</p>
<Link to='/home' className="go-back-btn">Go Back</Link>
</div>
)
}
//otherwise render the children
return this.props.children
}
}
|
wangsenyuan/learn-go
|
src/codechef/easy/section10/section10/beausub/solution_test.go
|
package main
import "testing"
// runSample runs solve on a single case and reports any mismatch with the
// expected answer through the testing framework.
func runSample(t *testing.T, n int, k int, A []int, expect int) {
	if got := solve(n, k, A); got != expect {
		t.Errorf("Sample expect %d, but got %d", expect, got)
	}
}
// Sample 1: short array, k = 1.
func TestSample1(t *testing.T) {
	n, k := 4, 1
	A := []int{1, 1, 2, 3}
	expect := 3
	runSample(t, n, k, A, expect)
}

// Sample 2: longer array with repeats, k = 2.
func TestSample2(t *testing.T) {
	n, k := 9, 2
	A := []int{1, 2, 3, 2, 4, 5, 6, 7, 5}
	expect := 5
	runSample(t, n, k, A, expect)
}

// Sample 3: all elements equal, k = n.
func TestSample3(t *testing.T) {
	n, k := 5, 5
	A := []int{1, 1, 1, 1, 1}
	expect := 5
	runSample(t, n, k, A, expect)
}

// Sample 4: alternating values, k = 1.
func TestSample4(t *testing.T) {
	n, k := 10, 1
	A := []int{1, 2, 1, 2, 1, 2, 1, 2, 1, 2}
	expect := 6
	runSample(t, n, k, A, expect)
}
|
zhoujiagen/giant-data-analysis
|
data-management-infrastructure/infrastructure-apache-zookeeper/src/main/java/com/spike/giantdataanalysis/zookeeper/support/ZooKeepers.java
|
package com.spike.giantdataanalysis.zookeeper.support;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.ACL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
/**
 * Helper utilities for a local three-node ZooKeeper quorum: starting and
 * stopping the ensemble via shell scripts, plus dumping the znode tree.
 * Also collects the constants (connection strings, ACLs, create modes and
 * znode path names) shared by the master/worker examples.
 */
public class ZooKeepers {
  private static final Logger LOG = LoggerFactory.getLogger(ZooKeepers.class);

  // Working directory containing the quorum start/kill shell scripts.
  private static final File QUORUM_WORK_DIR =
      new File("src/main/resources/zookeeper-3nodes-quorum");

  /** Root path. */
  public static String ROOT_PATH = "/";

  private static String NEWLINE = System.lineSeparator();

  public static void main(String[] args) throws Exception {
    START_QUORUM();
    // STOP_QUORUM();
  }

  // ======================================== properties

  // connection strings
  public static final String DEFAULT_CONN_STRING = "127.0.0.1:2181";
  public static final String DEFAULT_QUORUM_CONN_STRING =
      "127.0.0.1:2181,127.0.0.1:2182,127.0.0.1:2183";

  // acls
  public static final List<ACL> DEFAULT_ACLS = ZooDefs.Ids.OPEN_ACL_UNSAFE;

  // znode modes
  public static final CreateMode NODE_MODE_PERSISTENT = CreateMode.PERSISTENT;
  public static final CreateMode NODE_MODE_EPHEMERAL = CreateMode.EPHEMERAL;
  public static final CreateMode NODE_MODE_PERSISTENT_SEQUENTIAL = CreateMode.PERSISTENT_SEQUENTIAL;
  public static final CreateMode NODE_MODE_EPHEMERAL_SEQUENTIAL = CreateMode.EPHEMERAL_SEQUENTIAL;

  // watcher
  /** Default watcher: simply logs every event it receives. */
  public static final Watcher DEFAULT_WATCHER = new Watcher() {
    @Override
    public void process(WatchedEvent event) {
      LOG.info("default watcher watched event: " + event);
    }
  };

  // znode paths (master / workers / tasks / status / assign layout)
  public static final String MASTER_ZNODE_PATH = "/master";

  public static final String WORKER_PARENT_ZNODE_PATH = "/workers";
  public static final String WORKER_PARENT_ZNODE_PATH_PREFIX = "/workers/";
  public static final String WORKER_ZNODE_PATH_PREFIX = "/workers/worker-";

  public static final String TASK_PARENT_ZNODE_PATH = "/tasks";
  public static final String TASK_PARENT_ZNODE_PATH_PREFIX = "/tasks/";
  public static final String TASK_ZNODE_PATH_PREFIX = "/tasks/task-";

  public static final String STATUS_PARENT_ZNODE_PATH = "/status";
  public static final String STATUS_PARENT_ZNODE_PATH_PREFIX = "/status/";
  public static final String STATUS_TASK_ZNODE_PATH_PREFIX = "/status/task-";

  public static final String ASSIGN_PARENT_ZNODE_PATH = "/assign";
  public static final String ASSIGN_PARENT_ZNODE_PATH_PREFIX = "/assign/";
  public static final String ASSIGN_WORKER_ZNONE_PATH_PREFIX = "/assign/worker-";

  // ======================================== methods

  /**
   * Starts the three-node quorum (runs start_all.sh and prints its exit code).
   * @throws Exception
   */
  public static void START_QUORUM() throws Exception {
    Process process =
        Runtime.getRuntime().exec(new String[] { "./start_all.sh" }, new String[] {},
          QUORUM_WORK_DIR);
    process.waitFor();
    System.out.println(process.exitValue());
  }

  /**
   * Stops the three-node quorum (runs kill_all.sh and prints its exit code).
   * @throws Exception
   */
  public static void STOP_QUORUM() throws Exception {
    Process process =
        Runtime.getRuntime().exec(new String[] { "./kill_all.sh" }, new String[] {},
          QUORUM_WORK_DIR);
    process.waitFor();
    System.out.println(process.exitValue());
  }

  /**
   * Displays the znode tree, sorted, one path per line on stdout.
   * @param connectionString ZooKeeper connection string
   * @param root root path to start from
   * @param depth maximum depth to descend (must be positive)
   */
  public static void listTree(String connectionString, String root, int depth) {
    Preconditions.checkNotNull(connectionString);
    Preconditions.checkNotNull(root);
    Preconditions.checkState(depth > 0);

    if (!root.startsWith(ROOT_PATH)) {
      root = ROOT_PATH + root;
    }

    StringBuilder sb = new StringBuilder();
    sb.append(root + NEWLINE);

    try {
      // 1 init handle
      ZooKeeper zooKeeper = new ZooKeeper(connectionString, 15000, DEFAULT_WATCHER);

      // 2 do process
      List<String> rootChildren = zooKeeper.getChildren(root, false);
      if (CollectionUtils.isNotEmpty(rootChildren)) {
        for (String rootChild : rootChildren) {
          sb.append(doListTree(zooKeeper, preAppendPath(root, rootChild), depth - 1));
        }
      }
      // System.out.println(sb.toString());
      String[] records = sb.toString().split(NEWLINE);
      List<String> recordList = Arrays.asList(records);
      Collections.sort(recordList);
      for (String record : recordList) {
        System.out.println(record);
      }

      // 3 close handle
      zooKeeper.close();
    } catch (Exception e) {
      LOG.error("查看树目录结构失败", e);
    }
  }

  // Recursive helper: collects the subtree rooted at `root` (one path per
  // line) down to the remaining `depth` levels.
  private static String doListTree(ZooKeeper zooKeeper, String root, int depth)
      throws KeeperException, InterruptedException {
    StringBuilder sb = new StringBuilder();

    if (depth == 0) {
      sb.append(root + NEWLINE);
    } else {
      List<String> rootChildren = zooKeeper.getChildren(root, false);
      if (CollectionUtils.isNotEmpty(rootChildren)) {
        sb.append(root + NEWLINE);
        for (String rootChild : rootChildren) {
          sb.append(doListTree(zooKeeper, preAppendPath(root, rootChild), depth - 1));
        }
      } else {
        sb.append(root + NEWLINE);
      }
    }

    return sb.toString();
  }

  // Joins a parent path and a child name, avoiding a double slash when the
  // parent is the root. Returns null for blank input.
  private static String preAppendPath(String root, String child) {
    if (StringUtils.isBlank(root) || StringUtils.isBlank(child)) {
      return null;
    }

    if (ROOT_PATH.equals(root)) {
      return root + child;
    } else {
      return root + ROOT_PATH + child;
    }
  }

  // ======================================== classes

}
|
Andreas237/AndroidPolicyAutomation
|
ExtractedJars/Ibotta_com.ibotta.android/javafiles/com/google/android/gms/auth/account/WorkAccount.java
|
<reponame>Andreas237/AndroidPolicyAutomation
// Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) annotate safe
package com.google.android.gms.auth.account;
import android.app.Activity;
import android.content.Context;
import com.google.android.gms.common.api.Api;
import com.google.android.gms.internal.auth.zzk;
// Referenced classes of package com.google.android.gms.auth.account:
// zzi, WorkAccountClient, WorkAccountApi
/**
 * Decompiled (Jad) entry point of the Play Services work-account API.
 * Provides static getClient factories and the legacy static API handles.
 * The numbered comments below are the decompiler's bytecode listing and are
 * kept as-is.
 */
public class WorkAccount
{

    // Private constructor: this class is a static factory holder only.
    private WorkAccount()
    {
    //    0    0:aload_0
    //    1    1:invokespecial   #46  <Method void Object()>
    //    2    4:return
    }

    public static WorkAccountClient getClient(Activity activity)
    {
        return new WorkAccountClient(activity);
    //    0    0:new             #51  <Class WorkAccountClient>
    //    1    3:dup
    //    2    4:aload_0
    //    3    5:invokespecial   #54  <Method void WorkAccountClient(Activity)>
    //    4    8:areturn
    }

    public static WorkAccountClient getClient(Context context)
    {
        return new WorkAccountClient(context);
    //    0    0:new             #51  <Class WorkAccountClient>
    //    1    3:dup
    //    2    4:aload_0
    //    3    5:invokespecial   #59  <Method void WorkAccountClient(Context)>
    //    4    8:areturn
    }

    public static final Api API;
    private static final com.google.android.gms.common.api.Api.AbstractClientBuilder CLIENT_BUILDER;
    private static final com.google.android.gms.common.api.Api.ClientKey CLIENT_KEY;
    public static final WorkAccountApi WorkAccountApi = new zzk();

    // Static initializer: wires CLIENT_KEY + CLIENT_BUILDER into the API handle.
    static
    {
        CLIENT_KEY = new com.google.android.gms.common.api.Api.ClientKey();
    //    0    0:new             #20  <Class com.google.android.gms.common.api.Api$ClientKey>
    //    1    3:dup
    //    2    4:invokespecial   #23  <Method void com.google.android.gms.common.api.Api$ClientKey()>
    //    3    7:putstatic       #25  <Field com.google.android.gms.common.api.Api$ClientKey CLIENT_KEY>
        CLIENT_BUILDER = ((com.google.android.gms.common.api.Api.AbstractClientBuilder) (new zzi()));
    //    4   10:new             #27  <Class zzi>
    //    5   13:dup
    //    6   14:invokespecial   #28  <Method void zzi()>
    //    7   17:putstatic       #30  <Field com.google.android.gms.common.api.Api$AbstractClientBuilder CLIENT_BUILDER>
        API = new Api("WorkAccount.API", CLIENT_BUILDER, CLIENT_KEY);
    //    8   20:new             #32  <Class Api>
    //    9   23:dup
    //   10   24:ldc1            #34  <String "WorkAccount.API">
    //   11   26:getstatic       #30  <Field com.google.android.gms.common.api.Api$AbstractClientBuilder CLIENT_BUILDER>
    //   12   29:getstatic       #25  <Field com.google.android.gms.common.api.Api$ClientKey CLIENT_KEY>
    //   13   32:invokespecial   #37  <Method void Api(String, com.google.android.gms.common.api.Api$AbstractClientBuilder, com.google.android.gms.common.api.Api$ClientKey)>
    //   14   35:putstatic       #39  <Field Api API>
    //   15   38:new             #41  <Class zzk>
    //   16   41:dup
    //   17   42:invokespecial   #42  <Method void zzk()>
    //   18   45:putstatic       #44  <Field WorkAccountApi WorkAccountApi>
    //*  19   48:return
    }
}
|
cwi-dis/igor
|
igor/std-plugins/philips/scripts/philips.py
|
<gh_stars>1-10
#!/usr/bin/env python3
import socket
import struct
import select
import json
import urllib.request, urllib.parse, urllib.error
import sys
DEBUG = False

# Candidate subnet prefixes.
# NOTE(review): not referenced anywhere in this file — possibly intended for
# a directed scan; confirm before removing.
ORDER = [
    ('192', '168', '1'),
    ('10', '0', '1'),
    ('10', '0', '2')
]

JOINTSPACE_PORT = 1925   # HTTP control API port
VOODOO_PORT = 2323       # UDP discovery port
VOODOO_VERSION = 0x03010401
VPMT_DISCOVER = 1        # discovery message type

# Prebuilt UDP discovery packet.
# Fixed for Python 3: struct.pack's "s" fields require bytes, not str; the
# original str arguments raised struct.error at import time.
VOODOO_DISCOVER = struct.pack('<l28xll16s96s96s96s', VOODOO_VERSION, VPMT_DISCOVER, 0,
                              b'1234567890123456', b'Python Control', b'Jack', b'Philips.py')
class JointSpaceRemote:
    """Remote control for a Philips TV speaking the JointSpace HTTP API.

    The TV is located on the LAN with the proprietary "voodoo" UDP discovery
    broadcast; all subsequent control goes over HTTP on port 1925.
    """

    def __init__(self, ipaddr=None):
        # NOTE(review): ipaddr is accepted but ignored; discovery always runs
        # in connect()/findTV() — confirm whether it should seed findTV().
        self.tv = None  # IP address of the TV once discovered, else None

    def connect(self):
        """Discover the TV; return True when found, False otherwise."""
        while not self.tv:
            self.tv = self.findTV()
            if self.tv:
                break
            if DEBUG: print("TV not found, is it turned on?'")
            return False
        return True

    def findTV(self, ipaddr=None):
        """Send a voodoo discovery packet (broadcast, or directed when ipaddr
        is given) and return the responding TV's IP address, or None after a
        5-second timeout."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        sock.bind(('', VOODOO_PORT))
        if ipaddr:
            sock.sendto(VOODOO_DISCOVER, (ipaddr, VOODOO_PORT))
        else:
            sock.sendto(VOODOO_DISCOVER, ('<broadcast>', VOODOO_PORT))
        while True:
            result = select.select([sock], [], [], 5)
            if sock in result[0]:
                msg, sender = sock.recvfrom(2000)
                if DEBUG: print('Got message from', sender[0])
                # Ignore our own broadcast echoing back from this host.
                myHostName = socket.gethostname()
                if not '.' in myHostName:
                    myHostName = myHostName + '.local'
                if not sender[0] in socket.gethostbyname_ex(myHostName)[2]:
                    # It is not our own message. It must be the Philips TV.
                    return sender[0]
            else:
                break
        return None

    def getData(self, path):
        """HTTP GET ``/1/<path>`` on the TV and return the decoded JSON."""
        assert self.tv
        url = 'http://%s:1925/1/%s' % (self.tv, path)
        if DEBUG: print('GET', url)
        data = urllib.request.urlopen(url).read()
        ##print 'RAW', data
        data = json.loads(data)
        ##print 'DECODED', data
        return data

    def putData(self, path, data):
        """HTTP POST ``data`` (JSON-encoded) to ``/1/<path>`` on the TV."""
        assert self.tv
        url = 'http://%s:1925/1/%s' % (self.tv, path)
        data = json.dumps(data)
        if DEBUG: print('POST %s DATA %s' % (url, data))
        data = urllib.request.urlopen(url, data).read()
        if data:
            if DEBUG: print('PUTDATA RETURNED', data)

    def curWatching(self):
        """Return (source_id, human-readable name) for what is on screen."""
        assert self.tv
        data = self.getData('sources/current')
        source = data['id']
        if source == 'tv':
            # On the tuner: resolve the current channel's display name.
            chanID = self.getData('channels/current')['id']
            chanInfo = self.getData('channels/%s' % chanID)
            name = chanInfo['name']
        else:
            names = self.getData('sources')
            name = names[source]['name']
        return source, name

    def cmd_sources(self):
        """List available input sources"""
        assert self.tv
        data = self.getData('sources')
        for source, descr in list(data.items()):
            print('%s\t%s' % (source, descr['name']))

    def cmd_channels(self):
        """List available TV channels"""
        assert self.tv
        data = self.getData('channels')
        all = []
        for fingerprint, descr in list(data.items()):
            all.append((int(descr['preset']), descr['name']))
        all.sort()
        for preset, name in all:
            print('%s\t%s' % (preset, name))

    def cmd_source(self, source=None):
        """Set to the given input source (or print current source)"""
        assert self.tv
        if source:
            self.putData('sources/current', {'id' : source })
        else:
            data = self.getData('sources/current')
            print(data['id'])

    def cmd_channel(self, channel=None):
        """Set to the given TV channel, by name, number or ID (or list current channel)"""
        assert self.tv
        if channel:
            data = self.getData('channels')
            # Accept the channel's internal ID, preset number, or name.
            for chID, chDescr in list(data.items()):
                if chID == channel or chDescr['preset'] == channel or chDescr['name'] == channel:
                    self.putData('channels/current', { 'id' : chID })
                    self.putData('sources/current', {'id' : 'tv' })
                    return
            print('No such channel: %s' % channel, file=sys.stderr)
        else:
            data = self.getData('channels/current')
            chID = data['id']
            data = self.getData('channels')
            print('%s\t%s' % (data[chID]['preset'], data[chID]['name']))

    def cmd_volume(self, volume=None):
        """Change volume on the TV"""
        assert self.tv
        if volume is None:
            data = self.getData('audio/volume')
            muted = ' (muted)' if data['muted'] else ''
            print('%d%s' % (data['current'], muted))
        else:
            # Setting a volume also unmutes.
            volume = int(volume)
            self.putData('audio/volume', { 'muted' : False, 'current' : volume })

    def cmd_json(self, data=None):
        """Return all data as a JSON object"""
        if data is None:
            data = {}
            data['volume'] = volumeData['current']
            data['muted'] = volumeData['muted']
            data['source'] = self.getData('sources/current')['id']
            data['power'] = True
            data['ip-address'] = self.tv
            data['url'] = 'http://%s:1925/1/' % (self.tv)
        else:
            # NOTE(review): writing state back from JSON is unimplemented;
            # the assert below aborts deliberately.
            jData = json.loads(data)
            assert 0
        print(json.dumps(data))

    def cmd_help(self):
        """List available commands"""
        # Enumerate cmd_* methods and show their docstrings.
        for name in dir(self):
            if name[:4] == 'cmd_':
                method = getattr(self, name)
                doc = method.__doc__
                print('%s\t%s' % (name[4:], doc))
def main():
    """CLI entry point: optional ``-d`` debug flag, then dispatch the first
    argument to the matching ``cmd_*`` method (default: print what's on)."""
    if len(sys.argv) > 1 and sys.argv[1] == '-d':
        global DEBUG
        DEBUG=True
        del sys.argv[1]
    tv = JointSpaceRemote()
    if not tv.connect():
        # Special case: `json` output must stay machine-readable even when
        # the TV is off/unreachable.
        if len(sys.argv) == 2 and sys.argv[1] == 'json':
            print('{"power":false}')
            sys.exit(0)
        print("TV not found, is it turned on?", file=sys.stderr)
        sys.exit(1)
    if len(sys.argv) <= 1:
        print(tv.curWatching())
    else:
        cmdName = 'cmd_' + sys.argv[1]
        if not hasattr(tv, cmdName):
            print('Unknown command: %s. Use help for help' % sys.argv[1], file=sys.stderr)
            sys.exit(2)
        cmd = getattr(tv, cmdName)
        # Remaining CLI arguments become the command's positional arguments.
        cmd(*sys.argv[2:])

if __name__ == '__main__':
    main()
|
cffbots/ESMValTool
|
esmvaltool/diag_scripts/radiation_budget/seasonal_radiation_budget.py
|
"""Write the global climatological seasonal radiation budget to a text file."""
import csv
import logging
import os
import iris
from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
SEASONS = {0: "djf", 1: "mam", 2: "jja", 3: "son"}


def organise_seasonal_data(model_data):
    """Return the seasonal data from the cubes.

    Parameters
    ----------
    model_data : :class:`iris.cube.CubeList`
        The cubes containing seasonal data.

    Returns
    -------
    list of lists
        The seasonal data in the form ``[[<long_name + season>, value], ...]``.
    """
    rows = []
    for cube in model_data:
        label = cube.long_name
        # One row per season slice, labelled with the season abbreviation.
        for seasonal_slice in cube.slices_over("season_number"):
            number = seasonal_slice.coord("season_number").points[0]
            rows.append(
                [f"{label} {SEASONS[number]}", str(seasonal_slice.data)])
        # Trailing row: the annual mean over all seasons in the cube.
        rows.append([f"{label} ann", str(cube.data.mean())])
    return rows
def write_seasonal_data_output(output_dir, model_dataset, seasonal_data):
    """Write seasonal data to CSV file.

    The CSV file will have the name ``<model_dataset>_metrics.csv`` and can be
    used for the normalised metric assessment plot.

    Parameters
    ----------
    output_dir : string
        The full path to the directory in which the CSV file will be written.
    model_dataset : string
        The model name.
    seasonal_data : list of lists
        The seasonal data to write.
    """
    file_path = os.path.join(output_dir, f"{model_dataset}_metrics.csv")
    # newline="" lets the csv module control line endings itself.
    with open(file_path, "w", newline="") as csv_file:
        csv.writer(csv_file).writerows(seasonal_data)
def main(config):
    """Seasonal radiation budget comparison for models defined in the
    radiation_budget recipe file.

    Parameters
    ----------
    config : dict
        The ESMValTool configuration.
    """
    logger = logging.getLogger(__name__)
    datasets = group_metadata(config["input_data"].values(), "dataset")
    for model_dataset, group in datasets.items():
        # 'model_dataset' is the model name; 'group' is a list of
        # metadata dictionaries, one per input file.
        logger.info("Processing data for %s", model_dataset)
        model_data = iris.load([item["filename"] for item in group])
        seasonal_data = organise_seasonal_data(model_data)
        write_seasonal_data_output(config["work_dir"], model_dataset,
                                   seasonal_data)
if __name__ == "__main__":
    # run_diagnostic() supplies the ESMValTool configuration and handles
    # logging/provenance setup and teardown around the diagnostic run.
    with run_diagnostic() as CONFIG:
        main(CONFIG)
|
CavalryXD/APN6_Repo
|
net-next/drivers/net/dsa/sja1105/sja1105_spi.c
|
// SPDX-License-Identifier: BSD-3-Clause
/* Copyright (c) 2016-2018, NXP Semiconductors
* Copyright (c) 2018, Sensor-Technik Wiedemann GmbH
* Copyright (c) 2018-2019, <NAME> <<EMAIL>>
*/
#include <linux/spi/spi.h>
#include <linux/packing.h>
#include "sja1105.h"
#define SJA1105_SIZE_RESET_CMD 4
#define SJA1105_SIZE_SPI_MSG_HEADER 4
#define SJA1105_SIZE_SPI_MSG_MAXLEN (64 * 4)
/* One chunk of a larger SPI transfer: a window of @len bytes at @buf,
 * to be transferred at switch register address @reg_addr. Transfers are
 * split into chunks of at most SJA1105_SIZE_SPI_MSG_MAXLEN bytes.
 */
struct sja1105_chunk {
	u8 *buf;
	size_t len;
	u64 reg_addr;
};
/* Pack the 4-byte SPI control word preceding each payload: the access
 * type (read/write) at bit 31, the read word count at bits 30:25 (reads
 * only) and the register address at bits 24:4.
 */
static void
sja1105_spi_message_pack(void *buf, const struct sja1105_spi_message *msg)
{
	const int size = SJA1105_SIZE_SPI_MSG_HEADER;

	memset(buf, 0, size);

	sja1105_pack(buf, &msg->access, 31, 31, size);
	sja1105_pack(buf, &msg->read_count, 30, 25, size);
	sja1105_pack(buf, &msg->address, 24, 4, size);
}
/* The spi_transfer array is laid out pairwise: chunk i's header transfer
 * sits at index 2*i, and its payload transfer immediately after, at
 * index 2*i + 1.
 */
#define sja1105_hdr_xfer(xfers, chunk) \
	((xfers) + 2 * (chunk))
#define sja1105_chunk_xfer(xfers, chunk) \
	((xfers) + 2 * (chunk) + 1)
/* Each chunk's packed 4-byte header lives in one contiguous array. */
#define sja1105_hdr_buf(hdr_bufs, chunk) \
	((hdr_bufs) + (chunk) * SJA1105_SIZE_SPI_MSG_HEADER)
/* If @rw is:
 * - SPI_WRITE: creates and sends an SPI write message at absolute
 *   address reg_addr, taking @len bytes from *buf
 * - SPI_READ: creates and sends an SPI read message from absolute
 *   address reg_addr, writing @len bytes into *buf
 *
 * The transfer is split into chunks of at most
 * SJA1105_SIZE_SPI_MSG_MAXLEN bytes, each preceded by a packed 4-byte
 * header; all header+payload transfer pairs are submitted in a single
 * spi_sync_transfer() call. @ptp_sts, if non-NULL, requests a software
 * timestamp around the relevant transfer (see comment in the loop).
 *
 * Returns 0 on success or a negative errno from the SPI core.
 */
static int sja1105_xfer(const struct sja1105_private *priv,
			sja1105_spi_rw_mode_t rw, u64 reg_addr, u8 *buf,
			size_t len, struct ptp_system_timestamp *ptp_sts)
{
	struct sja1105_chunk chunk = {
		.len = min_t(size_t, len, SJA1105_SIZE_SPI_MSG_MAXLEN),
		.reg_addr = reg_addr,
		.buf = buf,
	};
	struct spi_device *spi = priv->spidev;
	struct spi_transfer *xfers;
	int num_chunks;
	int rc, i = 0;
	u8 *hdr_bufs;

	num_chunks = DIV_ROUND_UP(len, SJA1105_SIZE_SPI_MSG_MAXLEN);

	/* One transfer for each message header, one for each message
	 * payload (chunk).
	 */
	xfers = kcalloc(2 * num_chunks, sizeof(struct spi_transfer),
			GFP_KERNEL);
	if (!xfers)
		return -ENOMEM;

	/* Packed buffers for the num_chunks SPI message headers,
	 * stored as a contiguous array
	 */
	hdr_bufs = kcalloc(num_chunks, SJA1105_SIZE_SPI_MSG_HEADER,
			   GFP_KERNEL);
	if (!hdr_bufs) {
		kfree(xfers);
		return -ENOMEM;
	}

	for (i = 0; i < num_chunks; i++) {
		struct spi_transfer *chunk_xfer = sja1105_chunk_xfer(xfers, i);
		struct spi_transfer *hdr_xfer = sja1105_hdr_xfer(xfers, i);
		u8 *hdr_buf = sja1105_hdr_buf(hdr_bufs, i);
		struct spi_transfer *ptp_sts_xfer;
		struct sja1105_spi_message msg;

		/* Populate the transfer's header buffer. The read count
		 * and the address advance are expressed in 4-byte words,
		 * so chunk lengths are assumed to be multiples of 4.
		 */
		msg.address = chunk.reg_addr;
		msg.access = rw;
		if (rw == SPI_READ)
			msg.read_count = chunk.len / 4;
		else
			/* Ignored */
			msg.read_count = 0;
		sja1105_spi_message_pack(hdr_buf, &msg);
		hdr_xfer->tx_buf = hdr_buf;
		hdr_xfer->len = SJA1105_SIZE_SPI_MSG_HEADER;

		/* Populate the transfer's data buffer */
		if (rw == SPI_READ)
			chunk_xfer->rx_buf = chunk.buf;
		else
			chunk_xfer->tx_buf = chunk.buf;
		chunk_xfer->len = chunk.len;

		/* Request timestamping for the transfer. Instead of letting
		 * callers specify which byte they want to timestamp, we can
		 * make certain assumptions:
		 * - A read operation will request a software timestamp when
		 *   what's being read is the PTP time. That is snapshotted by
		 *   the switch hardware at the end of the command portion
		 *   (hdr_xfer).
		 * - A write operation will request a software timestamp on
		 *   actions that modify the PTP time. Taking clock stepping as
		 *   an example, the switch writes the PTP time at the end of
		 *   the data portion (chunk_xfer).
		 */
		if (rw == SPI_READ)
			ptp_sts_xfer = hdr_xfer;
		else
			ptp_sts_xfer = chunk_xfer;
		/* Timestamp around the last byte of the chosen transfer. */
		ptp_sts_xfer->ptp_sts_word_pre = ptp_sts_xfer->len - 1;
		ptp_sts_xfer->ptp_sts_word_post = ptp_sts_xfer->len - 1;
		ptp_sts_xfer->ptp_sts = ptp_sts;

		/* Calculate next chunk */
		chunk.buf += chunk.len;
		chunk.reg_addr += chunk.len / 4;
		chunk.len = min_t(size_t, (ptrdiff_t)(buf + len - chunk.buf),
				  SJA1105_SIZE_SPI_MSG_MAXLEN);

		/* De-assert the chip select after each chunk, but not
		 * after the final one (chunk.len is 0 past the end).
		 */
		if (chunk.len)
			chunk_xfer->cs_change = 1;
	}

	rc = spi_sync_transfer(spi, xfers, 2 * num_chunks);
	if (rc < 0)
		dev_err(&spi->dev, "SPI transfer failed: %d\n", rc);

	kfree(hdr_bufs);
	kfree(xfers);

	return rc;
}
/* Convenience wrapper around sja1105_xfer() for transfers that do not
 * need a PTP system timestamp.
 */
int sja1105_xfer_buf(const struct sja1105_private *priv,
		     sja1105_spi_rw_mode_t rw, u64 reg_addr,
		     u8 *buf, size_t len)
{
	return sja1105_xfer(priv, rw, reg_addr, buf, len, NULL);
}
/* Read (SPI_READ) or write (SPI_WRITE) a single 64-bit register at
 * absolute address @reg_addr.
 *
 * *value is kept unpacked, i.e. in native CPU endianness and directly
 * usable by software running on the core: writes pack it into wire
 * format first, reads unpack the received bytes back into it.
 */
int sja1105_xfer_u64(const struct sja1105_private *priv,
		     sja1105_spi_rw_mode_t rw, u64 reg_addr, u64 *value,
		     struct ptp_system_timestamp *ptp_sts)
{
	u8 packed_buf[8];
	const int size = sizeof(packed_buf);
	int ret;

	if (rw == SPI_WRITE)
		sja1105_pack(packed_buf, value, 63, 0, size);

	ret = sja1105_xfer(priv, rw, reg_addr, packed_buf, size, ptp_sts);

	if (rw == SPI_READ)
		sja1105_unpack(packed_buf, value, 63, 0, size);

	return ret;
}
/* Same as sja1105_xfer_u64, but transfers only a 4 byte word */
int sja1105_xfer_u32(const struct sja1105_private *priv,
		     sja1105_spi_rw_mode_t rw, u64 reg_addr, u32 *value,
		     struct ptp_system_timestamp *ptp_sts)
{
	u8 packed_buf[4];
	u64 widened;
	int ret;

	/* The packing API only supports u64 as the CPU word size, so the
	 * u32 value goes through a widened temporary in both directions.
	 */
	if (rw == SPI_WRITE) {
		widened = *value;
		sja1105_pack(packed_buf, &widened, 31, 0, 4);
	}

	ret = sja1105_xfer(priv, rw, reg_addr, packed_buf, 4, ptp_sts);

	if (rw == SPI_READ) {
		sja1105_unpack(packed_buf, &widened, 31, 0, 4);
		*value = widened;
	}

	return ret;
}
/* Issue a cold reset on first-generation (E/T) switches: the cold reset
 * bit of the RGU control word sits at bit position 3 on this family.
 */
static int sja1105et_reset_cmd(struct dsa_switch *ds)
{
	struct sja1105_private *priv = ds->priv;
	const struct sja1105_regs *regs = priv->info->regs;
	u8 packed_buf[SJA1105_SIZE_RESET_CMD] = {0};
	u64 cold_rst = 1;

	sja1105_pack(packed_buf, &cold_rst, 3, 3, SJA1105_SIZE_RESET_CMD);

	return sja1105_xfer_buf(priv, SPI_WRITE, regs->rgu, packed_buf,
				SJA1105_SIZE_RESET_CMD);
}
/* Issue a cold reset on second-generation (P/Q/R/S) switches: the cold
 * reset bit of the RGU control word sits at bit position 2 on this family.
 */
static int sja1105pqrs_reset_cmd(struct dsa_switch *ds)
{
	struct sja1105_private *priv = ds->priv;
	const struct sja1105_regs *regs = priv->info->regs;
	u8 packed_buf[SJA1105_SIZE_RESET_CMD] = {0};
	u64 cold_rst = 1;

	sja1105_pack(packed_buf, &cold_rst, 2, 2, SJA1105_SIZE_RESET_CMD);

	return sja1105_xfer_buf(priv, SPI_WRITE, regs->rgu, packed_buf,
				SJA1105_SIZE_RESET_CMD);
}
/* Read-modify-write of the port control register: set (@tx_inhibited ==
 * true) or clear the Tx-inhibit bits for the ports in @port_bitmap,
 * leaving all other ports' bits untouched.
 */
int sja1105_inhibit_tx(const struct sja1105_private *priv,
		       unsigned long port_bitmap, bool tx_inhibited)
{
	const struct sja1105_regs *regs = priv->info->regs;
	u32 inhibit_cmd;
	int ret;

	ret = sja1105_xfer_u32(priv, SPI_READ, regs->port_control,
			       &inhibit_cmd, NULL);
	if (ret < 0)
		return ret;

	inhibit_cmd = tx_inhibited ? (inhibit_cmd | port_bitmap)
				   : (inhibit_cmd & ~port_bitmap);

	return sja1105_xfer_u32(priv, SPI_WRITE, regs->port_control,
				&inhibit_cmd, NULL);
}
/* Relevant flags from the switch's General Status area, used to verify
 * a static config upload (see sja1105_status_unpack for bit positions).
 */
struct sja1105_status {
	u64 configs;
	u64 crcchkl;
	u64 ids;
	u64 crcchkg;
};
/* This is not reading the entire General Status area, which is also
 * divergent between E/T and P/Q/R/S, but only the relevant bits for
 * ensuring that the static config upload procedure was successful.
 */
static void sja1105_status_unpack(void *buf, struct sja1105_status *status)
{
	/* So that addition translates to 4 bytes */
	u32 *p = buf;

	/* device_id is missing from the buffer, but we don't
	 * want to diverge from the manual definition of the
	 * register addresses, so we'll back off one step with
	 * the register pointer, and never access p[0].
	 */
	p--;
	/* All four flags live in the same 4-byte word at offset 0x1. */
	sja1105_unpack(p + 0x1, &status->configs, 31, 31, 4);
	sja1105_unpack(p + 0x1, &status->crcchkl, 30, 30, 4);
	sja1105_unpack(p + 0x1, &status->ids, 29, 29, 4);
	sja1105_unpack(p + 0x1, &status->crcchkg, 28, 28, 4);
}
/* Read the status register over SPI and unpack the upload-verification
 * flags into @status. Returns 0 on success, negative errno on failure.
 */
static int sja1105_status_get(struct sja1105_private *priv,
			      struct sja1105_status *status)
{
	u8 packed_buf[4];
	int ret;

	ret = sja1105_xfer_buf(priv, SPI_READ, priv->info->regs->status,
			       packed_buf, sizeof(packed_buf));
	if (ret < 0)
		return ret;

	sja1105_status_unpack(packed_buf, status);

	return 0;
}
/* Not const because unpacking priv->static_config into buffers and preparing
 * for upload requires the recalculation of table CRCs and updating the
 * structures with these.
 *
 * Validates the static config, packs it into @config_buf (@buf_len bytes)
 * and patches the final table header's CRC, which the packing step leaves
 * as the 0xDEADBEEF placeholder. Returns 0 on success, -EINVAL if the
 * config fails validation.
 */
static int
static_config_buf_prepare_for_upload(struct sja1105_private *priv,
				     void *config_buf, int buf_len)
{
	struct sja1105_static_config *config = &priv->static_config;
	struct sja1105_table_header final_header;
	sja1105_config_valid_t valid;
	char *final_header_ptr;
	int crc_len;

	valid = sja1105_static_config_check_valid(config);
	if (valid != SJA1105_CONFIG_OK) {
		/* Use an explicit "%s" format: passing the table-driven
		 * message as the format string itself is a
		 * -Wformat-security hazard (the message could contain
		 * conversion specifiers).
		 */
		dev_err(&priv->spidev->dev, "%s\n",
			sja1105_static_config_error_msg[valid]);
		return -EINVAL;
	}

	/* Write Device ID and config tables to config_buf */
	sja1105_static_config_pack(config_buf, config);
	/* Recalculate CRC of the last header (right now 0xDEADBEEF).
	 * Don't include the CRC field itself.
	 */
	crc_len = buf_len - 4;
	/* Read the whole table header */
	final_header_ptr = config_buf + buf_len - SJA1105_SIZE_TABLE_HEADER;
	sja1105_table_header_packing(final_header_ptr, &final_header, UNPACK);
	/* Modify */
	final_header.crc = sja1105_crc32(config_buf, crc_len);
	/* Rewrite */
	sja1105_table_header_packing(final_header_ptr, &final_header, PACK);

	return 0;
}
/* Number of reset/upload/verify attempts before giving up. */
#define RETRIES 10

/* Upload the static configuration to the switch: inhibit egress on all
 * ports, cold-reset the chip into programming mode, stream the packed
 * config over SPI and verify the switch's status flags, retrying up to
 * RETRIES times. Returns 0 on success or a negative errno.
 */
int sja1105_static_config_upload(struct sja1105_private *priv)
{
	unsigned long port_bitmap = GENMASK_ULL(SJA1105_NUM_PORTS - 1, 0);
	struct sja1105_static_config *config = &priv->static_config;
	const struct sja1105_regs *regs = priv->info->regs;
	struct device *dev = &priv->spidev->dev;
	struct sja1105_status status;
	int rc, retries = RETRIES;
	u8 *config_buf;
	int buf_len;

	buf_len = sja1105_static_config_get_length(config);
	config_buf = kcalloc(buf_len, sizeof(char), GFP_KERNEL);
	if (!config_buf)
		return -ENOMEM;

	rc = static_config_buf_prepare_for_upload(priv, config_buf, buf_len);
	if (rc < 0) {
		dev_err(dev, "Invalid config, cannot upload\n");
		rc = -EINVAL;
		goto out;
	}
	/* Prevent PHY jabbering during switch reset by inhibiting
	 * Tx on all ports and waiting for current packet to drain.
	 * Otherwise, the PHY will see an unterminated Ethernet packet.
	 */
	rc = sja1105_inhibit_tx(priv, port_bitmap, true);
	if (rc < 0) {
		dev_err(dev, "Failed to inhibit Tx on ports\n");
		rc = -ENXIO;
		goto out;
	}
	/* Wait for an eventual egress packet to finish transmission
	 * (reach IFG). It is guaranteed that a second one will not
	 * follow, and that switch cold reset is thus safe
	 */
	usleep_range(500, 1000);
	do {
		/* Put the SJA1105 in programming mode */
		rc = priv->info->reset_cmd(priv->ds);
		if (rc < 0) {
			dev_err(dev, "Failed to reset switch, retrying...\n");
			continue;
		}
		/* Wait for the switch to come out of reset */
		usleep_range(1000, 5000);
		/* Upload the static config to the device */
		rc = sja1105_xfer_buf(priv, SPI_WRITE, regs->config,
				      config_buf, buf_len);
		if (rc < 0) {
			dev_err(dev, "Failed to upload config, retrying...\n");
			continue;
		}
		/* Check that SJA1105 responded well to the config upload */
		rc = sja1105_status_get(priv, &status);
		if (rc < 0)
			continue;

		if (status.ids == 1) {
			dev_err(dev, "Mismatch between hardware and static config "
				"device id. Wrote 0x%llx, wants 0x%llx\n",
				config->device_id, priv->info->device_id);
			continue;
		}
		if (status.crcchkl == 1) {
			dev_err(dev, "Switch reported invalid local CRC on "
				"the uploaded config, retrying...\n");
			continue;
		}
		if (status.crcchkg == 1) {
			dev_err(dev, "Switch reported invalid global CRC on "
				"the uploaded config, retrying...\n");
			continue;
		}
		if (status.configs == 0) {
			dev_err(dev, "Switch reported that configuration is "
				"invalid, retrying...\n");
			continue;
		}
		/* Success! */
		break;
	} while (--retries);

	if (!retries) {
		rc = -EIO;
		dev_err(dev, "Failed to upload config to device, giving up\n");
		goto out;
	} else if (retries != RETRIES) {
		/* Message fixed: was "Succeeded after %d tried" */
		dev_info(dev, "Succeeded after %d tries\n", RETRIES - retries);
	}

out:
	kfree(config_buf);
	return rc;
}
/* Register address map for the first-generation SJA1105E/T switches.
 * Per-port entries are arrays of 5 (one per switch port).
 */
static struct sja1105_regs sja1105et_regs = {
	.device_id = 0x0,
	.prod_id = 0x100BC3,
	.status = 0x1,
	.port_control = 0x11,
	.config = 0x020000,
	.rgu = 0x100440,
	/* UM10944.pdf, Table 86, ACU Register overview */
	.pad_mii_tx = {0x100800, 0x100802, 0x100804, 0x100806, 0x100808},
	.pad_mii_rx = {0x100801, 0x100803, 0x100805, 0x100807, 0x100809},
	.rmii_pll1 = 0x10000A,
	.cgu_idiv = {0x10000B, 0x10000C, 0x10000D, 0x10000E, 0x10000F},
	.mac = {0x200, 0x202, 0x204, 0x206, 0x208},
	.mac_hl1 = {0x400, 0x410, 0x420, 0x430, 0x440},
	.mac_hl2 = {0x600, 0x610, 0x620, 0x630, 0x640},
	/* UM10944.pdf, Table 78, CGU Register overview */
	.mii_tx_clk = {0x100013, 0x10001A, 0x100021, 0x100028, 0x10002F},
	.mii_rx_clk = {0x100014, 0x10001B, 0x100022, 0x100029, 0x100030},
	.mii_ext_tx_clk = {0x100018, 0x10001F, 0x100026, 0x10002D, 0x100034},
	.mii_ext_rx_clk = {0x100019, 0x100020, 0x100027, 0x10002E, 0x100035},
	.rgmii_tx_clk = {0x100016, 0x10001D, 0x100024, 0x10002B, 0x100032},
	.rmii_ref_clk = {0x100015, 0x10001C, 0x100023, 0x10002A, 0x100031},
	.rmii_ext_tx_clk = {0x100018, 0x10001F, 0x100026, 0x10002D, 0x100034},
	.ptpegr_ts = {0xC0, 0xC2, 0xC4, 0xC6, 0xC8},
	.ptpschtm = 0x12, /* Spans 0x12 to 0x13 */
	.ptppinst = 0x14,
	.ptppindur = 0x16,
	.ptp_control = 0x17,
	.ptpclkval = 0x18, /* Spans 0x18 to 0x19 */
	.ptpclkrate = 0x1A,
	.ptpclkcorp = 0x1D,
};
/* Register address map for the second-generation SJA1105P/Q/R/S
 * switches. Note .port_control and the PTP registers differ from the
 * E/T map, and this family adds SGMII, MII pad delay (pad_mii_id),
 * Ethernet statistics and queue-level registers.
 */
static struct sja1105_regs sja1105pqrs_regs = {
	.device_id = 0x0,
	.prod_id = 0x100BC3,
	.status = 0x1,
	.port_control = 0x12,
	.config = 0x020000,
	.rgu = 0x100440,
	/* UM10944.pdf, Table 86, ACU Register overview */
	.pad_mii_tx = {0x100800, 0x100802, 0x100804, 0x100806, 0x100808},
	.pad_mii_rx = {0x100801, 0x100803, 0x100805, 0x100807, 0x100809},
	.pad_mii_id = {0x100810, 0x100811, 0x100812, 0x100813, 0x100814},
	.sgmii = 0x1F0000,
	.rmii_pll1 = 0x10000A,
	.cgu_idiv = {0x10000B, 0x10000C, 0x10000D, 0x10000E, 0x10000F},
	.mac = {0x200, 0x202, 0x204, 0x206, 0x208},
	.mac_hl1 = {0x400, 0x410, 0x420, 0x430, 0x440},
	.mac_hl2 = {0x600, 0x610, 0x620, 0x630, 0x640},
	.ether_stats = {0x1400, 0x1418, 0x1430, 0x1448, 0x1460},
	/* UM11040.pdf, Table 114 */
	.mii_tx_clk = {0x100013, 0x100019, 0x10001F, 0x100025, 0x10002B},
	.mii_rx_clk = {0x100014, 0x10001A, 0x100020, 0x100026, 0x10002C},
	.mii_ext_tx_clk = {0x100017, 0x10001D, 0x100023, 0x100029, 0x10002F},
	.mii_ext_rx_clk = {0x100018, 0x10001E, 0x100024, 0x10002A, 0x100030},
	.rgmii_tx_clk = {0x100016, 0x10001C, 0x100022, 0x100028, 0x10002E},
	.rmii_ref_clk = {0x100015, 0x10001B, 0x100021, 0x100027, 0x10002D},
	.rmii_ext_tx_clk = {0x100017, 0x10001D, 0x100023, 0x100029, 0x10002F},
	.qlevel = {0x604, 0x614, 0x624, 0x634, 0x644},
	.ptpegr_ts = {0xC0, 0xC4, 0xC8, 0xCC, 0xD0},
	.ptpschtm = 0x13, /* Spans 0x13 to 0x14 */
	.ptppinst = 0x15,
	.ptppindur = 0x17,
	.ptp_control = 0x18,
	.ptpclkval = 0x19,
	.ptpclkrate = 0x1B,
	.ptpclkcorp = 0x1E,
	.ptpsyncts = 0x1F,
};
/* Per-chip descriptors tying together device ID, table/dynamic-config
 * ops, PTP properties and the register map. E/T parts use 24-bit PTP
 * timestamps in 4-byte egress registers; P/Q/R/S parts use 32-bit
 * timestamps in 8-byte registers and provide an RGMII delay setup hook.
 */
struct sja1105_info sja1105e_info = {
	.device_id = SJA1105E_DEVICE_ID,
	.part_no = SJA1105ET_PART_NO,
	.static_ops = sja1105e_table_ops,
	.dyn_ops = sja1105et_dyn_ops,
	.ptp_ts_bits = 24,
	.ptpegr_ts_bytes = 4,
	.reset_cmd = sja1105et_reset_cmd,
	.fdb_add_cmd = sja1105et_fdb_add,
	.fdb_del_cmd = sja1105et_fdb_del,
	.ptp_cmd_packing = sja1105et_ptp_cmd_packing,
	.regs = &sja1105et_regs,
	.name = "SJA1105E",
};

struct sja1105_info sja1105t_info = {
	.device_id = SJA1105T_DEVICE_ID,
	.part_no = SJA1105ET_PART_NO,
	.static_ops = sja1105t_table_ops,
	.dyn_ops = sja1105et_dyn_ops,
	.ptp_ts_bits = 24,
	.ptpegr_ts_bytes = 4,
	.reset_cmd = sja1105et_reset_cmd,
	.fdb_add_cmd = sja1105et_fdb_add,
	.fdb_del_cmd = sja1105et_fdb_del,
	.ptp_cmd_packing = sja1105et_ptp_cmd_packing,
	.regs = &sja1105et_regs,
	.name = "SJA1105T",
};

struct sja1105_info sja1105p_info = {
	.device_id = SJA1105PR_DEVICE_ID,
	.part_no = SJA1105P_PART_NO,
	.static_ops = sja1105p_table_ops,
	.dyn_ops = sja1105pqrs_dyn_ops,
	.ptp_ts_bits = 32,
	.ptpegr_ts_bytes = 8,
	.setup_rgmii_delay = sja1105pqrs_setup_rgmii_delay,
	.reset_cmd = sja1105pqrs_reset_cmd,
	.fdb_add_cmd = sja1105pqrs_fdb_add,
	.fdb_del_cmd = sja1105pqrs_fdb_del,
	.ptp_cmd_packing = sja1105pqrs_ptp_cmd_packing,
	.regs = &sja1105pqrs_regs,
	.name = "SJA1105P",
};

struct sja1105_info sja1105q_info = {
	.device_id = SJA1105QS_DEVICE_ID,
	.part_no = SJA1105Q_PART_NO,
	.static_ops = sja1105q_table_ops,
	.dyn_ops = sja1105pqrs_dyn_ops,
	.ptp_ts_bits = 32,
	.ptpegr_ts_bytes = 8,
	.setup_rgmii_delay = sja1105pqrs_setup_rgmii_delay,
	.reset_cmd = sja1105pqrs_reset_cmd,
	.fdb_add_cmd = sja1105pqrs_fdb_add,
	.fdb_del_cmd = sja1105pqrs_fdb_del,
	.ptp_cmd_packing = sja1105pqrs_ptp_cmd_packing,
	.regs = &sja1105pqrs_regs,
	.name = "SJA1105Q",
};

struct sja1105_info sja1105r_info = {
	.device_id = SJA1105PR_DEVICE_ID,
	.part_no = SJA1105R_PART_NO,
	.static_ops = sja1105r_table_ops,
	.dyn_ops = sja1105pqrs_dyn_ops,
	.ptp_ts_bits = 32,
	.ptpegr_ts_bytes = 8,
	.setup_rgmii_delay = sja1105pqrs_setup_rgmii_delay,
	.reset_cmd = sja1105pqrs_reset_cmd,
	.fdb_add_cmd = sja1105pqrs_fdb_add,
	.fdb_del_cmd = sja1105pqrs_fdb_del,
	.ptp_cmd_packing = sja1105pqrs_ptp_cmd_packing,
	.regs = &sja1105pqrs_regs,
	.name = "SJA1105R",
};

struct sja1105_info sja1105s_info = {
	.device_id = SJA1105QS_DEVICE_ID,
	.part_no = SJA1105S_PART_NO,
	.static_ops = sja1105s_table_ops,
	.dyn_ops = sja1105pqrs_dyn_ops,
	.regs = &sja1105pqrs_regs,
	.ptp_ts_bits = 32,
	.ptpegr_ts_bytes = 8,
	.setup_rgmii_delay = sja1105pqrs_setup_rgmii_delay,
	.reset_cmd = sja1105pqrs_reset_cmd,
	.fdb_add_cmd = sja1105pqrs_fdb_add,
	.fdb_del_cmd = sja1105pqrs_fdb_del,
	.ptp_cmd_packing = sja1105pqrs_ptp_cmd_packing,
	.name = "SJA1105S",
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.