text
stringlengths 1
1.05M
|
|---|
<gh_stars>0
package itunes
// Picture wraps the raw bytes of an iTunes artwork image.
// NOTE(review): RawData is declared elsewhere in this package; presumably it
// carries the image payload — confirm against its definition.
type Picture struct {
	RawData
}
|
<filename>easy_logger/config.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Default logging configuration values for easy_logger."""
import logging

# Default severity threshold applied when the caller does not specify one.
DEFAULT_LEVEL = logging.INFO
# Default record layout: timestamp, severity, logger name, message.
DEFAULT_FORMAT = '%(asctime)s - %(levelname)s - %(name)s - %(message)s'
|
<filename>back/src/main/java/com/java110/things/kafka/IotServiceKafka.java<gh_stars>0
package com.java110.things.kafka;
import com.alibaba.fastjson.JSONObject;
import com.java110.things.entity.app.AppDto;
import com.java110.things.factory.ApplicationContextFactory;
import com.java110.things.factory.AuthenticationFactory;
import com.java110.things.factory.KafkaFactory;
import com.java110.things.factory.MappingCacheFactory;
import com.java110.things.service.app.IAppService;
import com.java110.things.util.Assert;
import com.java110.things.util.DateUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.web.client.RestTemplate;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Kafka listener: consumes IoT service requests, authenticates the calling
 * app (refreshing its token when invalid), forwards the "data" payload to the
 * IoT HTTP endpoint named by "action", and publishes the response back onto
 * the HC topic.
 * Created by wuxw on 2018/4/15.
 */
public class IotServiceKafka {

    private final static Logger logger = LoggerFactory.getLogger(IotServiceKafka.class);

    /** Topic the downstream response is published back to. */
    public static final String HC_TOPIC = "iot-topic";

    private IAppService appServiceImpl;

    /**
     * 协议
     * {
     * appId:"",
     * action:"",
     * sign:"",
     * data:{
     * },
     * }
     *
     * @param record consumed Kafka record whose value is the JSON request above
     * @throws Exception when app lookup, token handling or the HTTP call fails
     */
    @KafkaListener(topics = {"${kafka.topic}"})
    public void listen(ConsumerRecord<?, ?> record) throws Exception {
        // Parameterized logging avoids building the message when the level is off.
        logger.info("kafka的key: {}", record.key());
        logger.info("kafka的value: {}", record.value());

        String data = record.value().toString();
        JSONObject param = JSONObject.parseObject(data);
        String appId = param.getString("appId");

        // Look up the application record for the caller; exactly one must exist.
        AppDto appDto = new AppDto();
        appDto.setAppId(appId);
        appServiceImpl = ApplicationContextFactory.getBean("appServiceImpl", IAppService.class);
        List<AppDto> appDtos = appServiceImpl.getApp(appDto);
        Assert.listOnlyOne(appDtos, "应用不存在");

        // Reuse the stored token if still valid; otherwise mint a new one and
        // persist it on the app record.
        String accessToken = appDtos.get(0).getAccessToken();
        try {
            AuthenticationFactory.verifyToken(accessToken);
        } catch (Exception e) {
            Map<String, String> info = new HashMap<>(); // diamond instead of raw HashMap
            info.put("appId", appId);
            info.put(AuthenticationFactory.LOGIN_USER_ID, appId);
            accessToken = AuthenticationFactory.createAndSaveToken(info);
            appDto.setAccessToken(accessToken);
            appDto.setUpdateTime(DateUtil.getNow(DateUtil.DATE_FORMATE_STRING_A));
            appServiceImpl.updateApp(appDto);
        }

        // Forward the "data" payload to the IoT endpoint selected by "action".
        RestTemplate restTemplate = ApplicationContextFactory.getBean("restTemplate", RestTemplate.class);
        String url = MappingCacheFactory.getValue("IOT_URL") + "/" + param.getString("action");
        HttpHeaders httpHeaders = new HttpHeaders();
        httpHeaders.add("access_token", accessToken);
        HttpEntity<String> httpEntity = new HttpEntity<>(JSONObject.toJSONString(param.get("data")), httpHeaders);
        ResponseEntity<String> paramOut = restTemplate.exchange(url, HttpMethod.POST, httpEntity, String.class);

        // Replace the request payload with the response and publish it back.
        JSONObject paramObj = JSONObject.parseObject(paramOut.getBody());
        param.put("data", paramObj);
        KafkaFactory.sendKafkaMessage(HC_TOPIC, "", param.toJSONString());
    }
}
|
import { RowNode } from "@ag-grid-community/core";
// doing _.removeFromArray() multiple times on a large list can be a bottleneck.
// when doing large deletes (eg removing 1,000 rows) then we would be calling _.removeFromArray()
// a thousands of times, in particular RootNode.allGroupChildren could be a large list, and
// 1,000 removes is time consuming as each one requires traversing the full list.
// to get around this, we do all the removes in a batch. this class manages the batch.
//
// This problem was brought to light by a client (AG-2879), with dataset of 20,000
// in 10,000 groups (2 items per group), then deleting all rows with transaction,
// it took about 20 seconds to delete. with the BathRemoved, the reduced to less than 1 second.
// Per-parent bookkeeping: each map records (by child row id) which children
// are scheduled for removal from the corresponding list when flush() runs.
interface RemoveDetails {
    removeFromChildrenAfterGroup: { [id: string]: boolean; };
    removeFromAllLeafChildren: { [id: string]: boolean; };
}
export class BatchRemover {
    // Pending removals, keyed by parent row id, plus the parents themselves in
    // registration order so flush() can visit each exactly once.
    private allSets: { [parentId: string]: RemoveDetails; } = {};
    private allParents: RowNode[] = [];

    /** Schedule `child` for removal from `parent.childrenAfterGroup`. */
    public removeFromChildrenAfterGroup(parent: RowNode, child: RowNode): void {
        this.getSet(parent).removeFromChildrenAfterGroup[child.id!] = true;
    }

    /** Schedule `child` for removal from `parent.allLeafChildren`. */
    public removeFromAllLeafChildren(parent: RowNode, child: RowNode): void {
        this.getSet(parent).removeFromAllLeafChildren[child.id!] = true;
    }

    // Lazily create the bookkeeping record for a parent on first use.
    private getSet(parent: RowNode): RemoveDetails {
        const parentId = parent.id!;
        let details = this.allSets[parentId];
        if (!details) {
            details = {
                removeFromAllLeafChildren: {},
                removeFromChildrenAfterGroup: {}
            };
            this.allSets[parentId] = details;
            this.allParents.push(parent);
        }
        return details;
    }

    /**
     * Apply every scheduled removal in one filter pass per parent list, then
     * reset the batch. One pass per list keeps a 1,000-row delete O(n) instead
     * of O(n^2) repeated single removals.
     */
    public flush(): void {
        for (const parent of this.allParents) {
            const details = this.allSets[parent.id!];
            const keepAfterGroup = (row: RowNode) => !details.removeFromChildrenAfterGroup[row.id!];
            const keepLeaf = (row: RowNode) => !details.removeFromAllLeafChildren[row.id!];
            parent.childrenAfterGroup = parent.childrenAfterGroup!.filter(keepAfterGroup);
            parent.allLeafChildren = parent.allLeafChildren.filter(keepLeaf);
            parent.updateHasChildren();
        }
        this.allSets = {};
        this.allParents.length = 0;
    }
}
|
import { mapGetters, mapActions } from 'vuex'
import PerfectScrollbar from 'perfect-scrollbar'
import QuestionMenu from 'components/atoms/Application/QuestionMenu/QuestionMenu'
import QuestionView from 'components/atoms/Application/QuestionView/QuestionView'
import TabView from 'components/atoms/Application/TabView/TabView'
import AttachmentsView from 'components/atoms/Application/Attachments/Attachments'
import _ from 'lodash'
export default {
  name: 'ApplicationForm',

  components: {
    QuestionMenu,
    QuestionView,
    TabView,
    AttachmentsView
  },

  data () {
    return {
      isAttachment: false, // true once at least one attachment has been added
      questionsData: [], // local deep copy of the vuex `questions` getter
      data: {}, // question group currently shown in the main view
      ps: null, // PerfectScrollbar instance bound to the container
      container: null, // the '.question-content' scroll container element
      cardIndex: 0, // index of the active card within the current group
      currentIndex: 0, // index of the active question group
      options: {
        acceptedFileTypes: [''],
        clickable: false,
        adapterOptions: {
          url: './upload.php'
        }
      },
      enableSubmit: false
    }
  },

  filters: {
    // Pretty-print a value as 2-space indented JSON.
    json (value) {
      return JSON.stringify(value, null, 2)
    }
  },

  computed: {
    ...mapGetters([
      'isCompleted',
      'applicationId',
      'questions',
      'tabs',
      'activeRoute'
    ])
  },

  watch: {
    // Reload the form whenever we navigate back to it.
    '$route' () {
      if (this.$route.path === '/home/form') {
        this.loadData()
      }
    }
  },

  mounted () {
    this.loadData()
    // Defer until the uploader child component has rendered.
    setTimeout(() => {
      this.checkFiles()
      this.$refs.attachmentUploader.$on('added-file', () => {
        this.isAttachment = true
        this.checkFiles()
      })
      this.$refs.attachmentUploader.$on('removed-file', () => {
        this.checkFiles()
      })
    })
  },

  destroyed () {
    // Release the scrollbar and DOM references.
    this.container = null
    this.ps.destroy()
    this.ps = null
  },

  methods: {
    ...mapActions([
      'activeCard',
      'updateQuestions',
      'addApplicationForm',
      'updateApplicationForm',
      'deleteApplicationForm'
    ]),

    // Open the uploader's file-browse dialog.
    triggerBrowse () {
      this.$refs.attachmentUploader.triggerBrowseFiles()
    },

    removeFile (file) {
      this.$refs.attachmentUploader.removeFile(file)
    },

    // Mark the attachments question (group 5) valid iff at least one file is present.
    checkFiles () {
      const len = this.$refs.attachmentUploader.files.length
      if (this.questionsData.length > 5) {
        this.questionsData[5].content[0].valid = len > 0
        this.updateQuestions(this.questionsData)
      }
    },

    loadData () {
      this.$refs.attachmentUploader.removeAllFiles(true)
      // Deep copy so local edits do not mutate the vuex store directly.
      this.questionsData = _.cloneDeep(this.questions)
      if (this.questionsData.length > 0) {
        this.data = this.questionsData[this.currentIndex]
      } else {
        this.$router.replace('/home')
      }
      this.setData()
    },

    setData () {
      this.container = document.querySelector('.question-content')
      this.ps = new PerfectScrollbar(this.container)
      this.container.scrollTop = 0
      this.container.scrollYMarginOffset = 100
      this.activeCard([0, 0])
      // Both scroll directions need the exact same bookkeeping; the two
      // previously duplicated listeners now share a single handler.
      const onScroll = () => this.syncActiveCardWithScroll()
      this.container.addEventListener('ps-scroll-up', onScroll)
      this.container.addEventListener('ps-scroll-down', onScroll)
    },

    // Walk the rendered cards top-to-bottom, accumulating their heights, and
    // activate the card lying under the current scroll offset.
    syncActiveCardWithScroll () {
      let offset = 0
      for (let i = 0; i < this.questionsData.length; i++) {
        let found = false
        const limit = this.questionsData[i].content.length
        for (let j = 0; j < limit; j++) {
          if (i == 5) {
            // Group 5 is the attachments uploader, measured as a single element.
            offset += this.$refs.attachmentUploader.$children[0].$el.clientHeight
          } else {
            offset += this.$refs[`question_${j}`][i].$el.clientHeight
          }
          if (this.container.scrollTop < offset) {
            this.cardIndex = j
            if (i !== this.currentIndex) {
              this.currentIndex = i
              this.data = this.questionsData[this.currentIndex]
            }
            found = true
            break
          }
          if (j === limit - 1) {
            offset += 100 // trailing gap after the last card of a group
          }
        }
        if (found) {
          this.activeCard([this.currentIndex, this.cardIndex])
          break
        }
      }
    },

    isActive (parentIndex, index) {
      return this.activeRoute[0] === parentIndex && this.activeRoute[1] === index
    },

    // Move one card up, crossing into the previous group when needed.
    cardUp () {
      if (this.cardIndex === 0) {
        if (this.currentIndex === 0) {
          return
        }
        if (this.currentIndex > 0) {
          this.currentIndex -= 1
          this.cardIndex = this.questionsData[this.currentIndex].content.length
          this.data = this.questionsData[this.currentIndex]
        }
      }
      let offset = 0
      for (let i = 0; i <= this.currentIndex; i++) {
        let j = 0
        const limit = this.questionsData[i].content.length
        for (j = 0; j < limit; j++) {
          if (i === this.currentIndex && j >= this.cardIndex - 1) {
            break
          }
          if (i == 5) {
            offset += this.$refs.attachmentUploader.$children[0].$el.clientHeight + 24
          } else {
            offset += this.$refs[`question_${j}`][i].$el.clientHeight + 24
          }
          if (j === limit - 1) {
            offset += 220
          }
        }
        if (i === this.currentIndex && j === this.cardIndex) {
          break
        }
      }
      this.container.scrollTop = offset
      this.activeCard([this.currentIndex, this.cardIndex - 1])
    },

    // Move one card down, crossing into the next group when needed.
    cardDown () {
      if (this.cardIndex === this.questionsData[this.currentIndex].content.length - 1) {
        if (this.currentIndex === this.questionsData.length - 1) {
          return
        }
        this.currentIndex += 1
        this.cardIndex = -1
        this.data = this.questionsData[this.currentIndex]
      }
      let offset = 0
      for (let i = 0; i <= this.currentIndex; i++) {
        let j = 0
        const limit = this.questionsData[i].content.length
        for (j = 0; j < limit; j++) {
          if (i === this.currentIndex && j > this.cardIndex) {
            break
          }
          if (i == 5) {
            offset += this.$refs.attachmentUploader.$children[0].$el.clientHeight + 24
          } else {
            offset += this.$refs[`question_${j}`][i].$el.clientHeight + 24
          }
          if (j === limit - 1) {
            offset += 100
          }
        }
        if (i === this.currentIndex && j === this.cardIndex) {
          break
        }
      }
      this.container.scrollTop = offset
      this.activeCard([this.currentIndex, this.cardIndex + 1])
    },

    cardActivate () {
      // NOTE(review): `onActiveCard`, `parentOrder` and `order` are not defined
      // on this component, so this method throws if invoked. It looks copied
      // from a child card component — confirm before deleting or fixing.
      this.onActiveCard([this.parentOrder, this.order])
    },

    // Jump directly to the selected tab's question group.
    selectTab (index) {
      this.currentIndex = index
      this.data = this.questionsData[this.currentIndex]
      let offset = 0
      for (let i = 0; i < index; i++) {
        const element = document.getElementById(`ques_${i}`)
        offset += element.clientHeight + 24
      }
      this.container.scrollTop = offset
    },

    onDelete () {
      if (window.confirm("Are you sure you want to delete this application?")) {
        if (this.applicationId != null) {
          this.deleteApplicationForm(this.applicationId)
        }
        this.$router.replace('/home')
      }
    },

    // Persist the form: create on first save, update afterwards.
    saveForLater () {
      if (this.applicationId == null) {
        this.addApplicationForm(this.questionsData)
      } else {
        this.updateApplicationForm({ 'applications': this.questionsData, 'applicationId': _.cloneDeep(this.applicationId) })
      }
      this.$router.replace('/home')
    },

    onSubmit () {
      this.$router.replace('/home/submitted')
    }
  }
}
|
<form>
<label>Name</label><input type="text" name="name"><br>
<label>Email</label><input type="email" name="email"><br>
<input type="submit" value="Submit">
</form>
|
import * as activity from '@temporalio/activity';
import { Connection } from '@temporalio/client';
import { ActivityExecuteInput, ActivityInboundCallsInterceptor, Next } from '@temporalio/worker';
/**
 * Activity inbound interceptor that stashes a shared client Connection on the
 * activity Context before each execution, so activity code can retrieve it
 * later via getContext().
 */
export class ConnectionInjectorInterceptor implements ActivityInboundCallsInterceptor {
  constructor(public readonly connection: Connection) {}
  async execute(input: ActivityExecuteInput, next: Next<ActivityInboundCallsInterceptor, 'execute'>): Promise<unknown> {
    // Double cast widens Context to our extended shape; the field is read back
    // through the same cast in getContext().
    (activity.Context.current() as unknown as ContextWithConnection).connection = this.connection;
    return next(input);
  }
}
/**
* Extend the basic Context with injected client connection
*/
export interface ContextWithConnection extends activity.Context {
connection: Connection;
}
/**
* Type "safe" helper to get a context with connection
*/
export function getContext(): ContextWithConnection {
return activity.Context.current() as unknown as ContextWithConnection;
}
|
import * as stringModule from '../src/string'
import * as orThrowModule from '../src/orThrow'
import stringOrThrow from '../src/stringOrThrow'

// Spy on the default exports so the composition can be asserted without
// re-testing the underlying implementations.
jest.spyOn(stringModule, 'default')
jest.spyOn(orThrowModule, 'default')

describe('.stringOrThrow()', () => {
  it('calls the appropriate functions', () => {
    const key = 'foo'
    // Seed the env var so the lookup succeeds and nothing throws.
    process.env[key] = 'bar'
    stringOrThrow(key)
    // stringOrThrow should delegate: orThrow wraps the string lookup.
    expect(orThrowModule.default).toHaveBeenCalled()
    expect(stringModule.default).toHaveBeenCalledWith(key)
  })
})
|
<reponame>HamidMohammadi/iso_8583
// Supported masking formats, keyed by pattern: a digit means "keep that many
// characters visible", '*' marks the masked segment. `input`/`example`
// document the expected transformation for a 16-digit PAN.
const formats = {
  '4*4': {
    input: '4756065863847844',
    example: '4756********7844'
  },
  '**4': {
    input: '4756065863847844',
    example: '************7844'
  },
  '4**': {
    input: '4756065863847844',
    example: '4756************'
  },
  '*4*': {
    input: '4756065863847844',
    example: '****06586384****'
  },
};
/**
 * Masks a PAN (card number) according to one of the supported formats.
 *
 * @param {string} pan card number to mask
 * @param {string} format one of '4*4', '**4', '4**', '*4*'
 * @param {string} [masker='*'] character used for the masked segment
 * @returns {string|{error: string}} masked PAN, or an error object for an
 *   unknown format or a PAN shorter than the digits the format keeps visible
 */
module.exports = function (pan, format, masker) {
  let p = pan;
  const m = masker || '*';
  if (!formats[format]) return { error: 'unknown pan masking format'};
  // parseInt yields NaN for '*', which the truthiness checks below treat as "masked".
  const pre = parseInt(format[0], 10);
  const mid = parseInt(format[1], 10);
  const post = parseInt(format[2], 10);
  // Guard: String.prototype.repeat throws a RangeError on a negative count,
  // so reject PANs shorter than the visible digits the format requires.
  if (p.length < (pre || 0) + (mid || 0) + (post || 0)) {
    return { error: 'pan too short for masking format'};
  }
  let fill;
  if (pre && !mid && post) {
    // keep head and tail, mask the middle
    fill = m.repeat(p.length - (pre + post));
    p = p.slice(0, pre) + fill + p.slice(p.length - post, p.length);
  } else if (!pre && !mid && post) {
    // mask everything but the tail
    fill = m.repeat(p.length - post);
    p = fill + p.slice(p.length - post, p.length);
  } else if (pre && !mid && !post) {
    // mask everything but the head
    fill = m.repeat(p.length - pre);
    p = p.slice(0, pre) + fill;
  } else if (!pre && mid && !post) {
    // keep the middle, mask both ends. Math.floor instead of parseInt:
    // parseInt on a number round-trips through a string and is misleading
    // (and slower) for numeric truncation.
    const lu = Math.floor((p.length - mid) / 2);
    fill = m.repeat(lu);
    p = fill + p.slice(lu, p.length - lu) + fill;
  } else return { error: 'wrong pan configurations passed'};
  return p;
};
|
#!/bin/bash
# reiserfs scantool regression driver: builds/scans a series of filesystem
# images and verifies the number of unique stat_ items the scanner reports.
fs="reiserfs"

source ${src_root}/tests/test_common.sh
source ${src_root}/tests/fs_common.sh
source ${src_root}/tests/fsck.sh

# check_statcount IMGNAME EXPECTED
# Count unique "stat_" lines in the scan output for IMGNAME and abort the run
# when the count differs from EXPECTED. NOTE: only non-deleted nodes count.
check_statcount()
{
	local imgname="$1"
	local expected="$2"
	local statcount
	statcount=$(grep "stat_" "${src_root}/tests/scantool-${fs}/${imgname}.out" | sort | uniq | wc -l)
	if [ "$statcount" -ne "$expected" ]; then
		echo "Expected stat items = ${expected}. Got stat items = ${statcount}"
		exit 1   # 'exit -1' is non-portable; any non-zero status marks failure
	fi
}

imgname=$fs-depth.img
fs_scan_startup_img $fs $imgname
check_statcount "$imgname" 302

imgname=$fs-many.img
fs_scan_startup_img $fs $imgname
check_statcount "$imgname" 10202

imgname=$fs-postmark.img
fs_scan_startup_img $fs $imgname
check_statcount "$imgname" 5007

imgname=$fs-many-blockfile.img
fs_scan_startup_img $fs $imgname

imgname=$(fs_reloc_img_name $fs $fs-postmark.img "")
fs_reloc_startup_img $fs "$fs"-postmark.img
fsck_img $imgname
fs_scan_startup_img $fs $imgname

#imgname=$fs-relocate-problem.img
#cp ${src_root}/img/$fs-postmark.img ${src_root}/img/$imgname
#fs_reloc_img $fs $imgname "${src_root}/tests/reloc.problem.pic"
#fs_scan_startup_img $fs $imgname

imgname=$fs-scatter.img
cp ${src_root}/img/$fs-postmark.img ${src_root}/img/$imgname
fs_scatter_startup_img $fs $imgname
fsck_img $imgname
fs_scan_startup_img $fs $imgname
check_statcount "$imgname" 5007

imgname=$fs-thrashcopy.img
cp ${src_root}/img/$fs-scatter.img ${src_root}/img/$imgname
fs_thrashcopy_startup_img $fs $imgname
fsck_img $imgname
fs_scan_startup_img $fs $imgname
check_statcount "$imgname" 5007

imgname=$fs-defrag.img
cp ${src_root}/img/$fs-scatter.img ${src_root}/img/$imgname
fs_defrag_startup_img $fs $imgname
fsck_img $imgname
fs_scan_startup_img $fs $imgname

imgname=$fs-smush.img
cp ${src_root}/img/$fs-defrag.img ${src_root}/img/$imgname
fs_smush_startup_img $fs $imgname
fsck_img $imgname
fs_scan_startup_img $fs $imgname
|
<filename>main.go
package main
import (
"context"
"flag"
"net/url"
"os"
"os/signal"
"strings"
"syscall"
"github.com/hekmon/vigixporter/hubeau"
"github.com/hekmon/vigixporter/watcher"
"github.com/hekmon/hllogger"
systemd "github.com/iguanesolutions/go-systemd/v5"
sysdnotify "github.com/iguanesolutions/go-systemd/v5/notify"
)
const (
	// Environment variable names used for runtime configuration.
	confEnvarStations = "VIGIXPORTER_STATIONS"
	confEnvarVMURL    = "VIGIXPORTER_VMURL"
	confEnvarVMUser   = "VIGIXPORTER_VMUSER"
	confEnvarVMPass   = "VIGIXPORTER_VMPASS"
)

var (
	logger        *hllogger.HlLogger
	core          *watcher.Controller
	mainLock      chan struct{}   // closed by handleSignals to let main() return
	mainCtx       context.Context // root context cancelled on shutdown
	mainCtxCancel func()
)
// main wires configuration from flags and environment, starts the watcher and
// then parks until handleSignals releases mainLock.
func main() {
	// Parse flags
	logLevelFlag := flag.String("loglevel", "info", "Set loglevel: debug, info, warning, error, fatal. Default info.")
	flag.Parse()
	// Init logger: map the flag to a hllogger level, defaulting to info.
	var logLevel hllogger.LogLevel
	switch strings.ToLower(*logLevelFlag) {
	case "debug":
		logLevel = hllogger.Debug
	case "info":
		logLevel = hllogger.Info
	case "warning":
		logLevel = hllogger.Warning
	case "error":
		logLevel = hllogger.Error
	case "fatal":
		logLevel = hllogger.Fatal
	default:
		logLevel = hllogger.Info
	}
	// Under systemd, journald adds its own timestamps: drop the std log flags.
	_, systemdStarted := systemd.GetInvocationID()
	var logFlags int
	if !systemdStarted {
		logFlags = hllogger.LstdFlags
	}
	logger = hllogger.New(os.Stdout, &hllogger.Config{
		LogLevel:              logLevel,
		LoggerFlags:           logFlags,
		SystemdJournaldCompat: systemdStarted,
	})
	// Get stations to follow from env
	var (
		stationsraw string
		stations    []string
	)
	if stationsraw = os.Getenv(confEnvarStations); stationsraw == "" {
		logger.Fatalf(1, "[Main] conf: no stations set: use '%s' env var to set the stations to track. For example to follow Paris, Alfortville and Créteil: %s='%s,%s,%s'",
			confEnvarStations, confEnvarStations, hubeau.StationParis, hubeau.StationAlfortville, hubeau.StationCreteil)
	}
	stations = strings.Split(stationsraw, ",")
	logger.Infof("[Main] conf: %d station(s) declared: %s", len(stations), strings.Join(stations, ", "))
	// Get victoria metrics infos from env
	var (
		vmURL  string
		vmUser string
		vmPass string
	)
	if vmURL = os.Getenv(confEnvarVMURL); vmURL == "" {
		// Fixed copy/paste: this message previously said "set the stations to
		// track" although it concerns the victoria metrics import URL.
		logger.Fatalf(2, "[Main] conf: no victoria metrics JSON line import URL set: use '%s' env var to set it. For example: %s='http://destination-victoriametrics:8428/api/v1/import'",
			confEnvarVMURL, confEnvarVMURL)
	}
	// Parse only to validate; the watcher receives the raw string.
	parsedURL, err := url.Parse(vmURL)
	if err != nil {
		logger.Fatalf(2, "[Main] conf: victoria metrics JSON line import URL is invalid: %s", err)
	}
	logger.Infof("[Main] conf: victoria metrics target url set to: %s", parsedURL)
	vmUser = os.Getenv(confEnvarVMUser)
	vmPass = os.Getenv(confEnvarVMPass)
	if vmUser != "" && vmPass != "" {
		logger.Info("[Main] conf: basic auth set")
	} else {
		logger.Debug("[Main] conf: basic auth NOT set")
	}
	// Prepare main context for broadcasting the stop signal
	mainCtx, mainCtxCancel = context.WithCancel(context.Background())
	// Launch the watcher
	if core, err = watcher.New(mainCtx, watcher.Config{
		Stations: stations,
		VMURL:    vmURL,
		VMUser:   vmUser,
		VMPass:   vmPass,
		Logger:   logger,
	}); err != nil {
		logger.Fatalf(3, "[Main] failed to instantiate the watcher: %s", err)
	}
	logger.Info("[Main] watcher started")
	// Everything is ready, listen to signals to know when to stop
	mainLock = make(chan struct{})
	go handleSignals()
	// Signal systemd we are ready if needed
	if err = sysdnotify.Ready(); err != nil {
		logger.Errorf("[Main] failed to notify systemd with ready signal: %s", err)
	}
	// Let's go to sleep while others do their work
	<-mainLock
}
// handleSignals blocks on SIGTERM/SIGINT and orchestrates a clean shutdown:
// notify systemd, cancel the main context, wait for the watcher to stop, then
// release the main goroutine by closing mainLock.
func handleSignals() {
	// Whatever happens below, let main() return on exit.
	defer close(mainLock)
	// Subscribe to the termination signals we care about.
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, syscall.SIGTERM, syscall.SIGINT)
	// Process incoming signals until a termination request arrives.
	for sig := range signals {
		switch sig {
		case syscall.SIGTERM, syscall.SIGINT:
			logger.Infof("[Main] signal '%v' caught: cleaning up before exiting", sig)
			if err := sysdnotify.Stopping(); err != nil {
				logger.Errorf("[Main] can't send systemd stopping notification: %v", err)
			}
			// Cancel main ctx & wait for core to end
			mainCtxCancel()
			core.WaitStopped()
			logger.Debugf("[Main] signal '%v' caught: watcher stopped: unlocking main goroutine to exit", sig)
			return
		default:
			logger.Warningf("[Main] signal '%v' caught but no process set to handle it: skipping", sig)
		}
	}
}
|
/**
klassi-js
Copyright © 2016 - <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
const gotApi = require('got');
const { dataconfig } = global;
/**
 * Resolves BrowserStack credentials, preferring environment variables over
 * the project data config.
 * NOTE(review): `bssecrets` and `assert` are not defined or required in this
 * file — if the env vars and dataconfig entries are absent this throws a
 * ReferenceError. Presumably both are globals set up elsewhere; confirm.
 *
 * @returns {{user: string, key: string}} BrowserStack username and access key
 */
function getCredentials() {
  /**
   * adding the user and key for browserstack
   */
  const user = process.env.BROWSERSTACK_USERNAME || dataconfig.bslocal.userName || bssecrets.userName;
  const key = process.env.BROWSERSTACK_ACCESS_KEY || dataconfig.bslocal.accessKey || bssecrets.accessKey;
  assert.isNotEmpty(user, 'BrowserStack requires a username');
  assert.isNotEmpty(key, 'BrowserStack requires an access key');
  return { user, key };
}
/**
 * Reports a Cucumber scenario result back to the BrowserStack Automate
 * session that ran it, then logs the session's public URL.
 *
 * NOTE(review): modern `got` takes a `url` option (not `uri`) and resolves to
 * a Response object rather than a body string, which would break the
 * JSON.parse/split calls below. Presumably an old got version or a wrapper is
 * in play — verify against the project's got dependency.
 *
 * @param {Object} scenario cucumber scenario result object
 */
async function submitResults(scenario) {
  // Build name derives from the remote config, dashes restored to spaces.
  const configBuildName = global.settings.remoteConfig.replace(/-/g, ' ');
  const credentials = getCredentials();
  const browserstackUsername = credentials.user;
  const browserstackApiKey = credentials.key;
  const apiCredentials = `${browserstackUsername}:${browserstackApiKey}`;
  const scenarioName = scenario.getName();
  // Find the build matching this run's configuration...
  const buildsBody = await gotApi({
    uri: `https://${apiCredentials}@api.browserstack.com/automate/builds.json`,
  });
  const matchingBuilds = JSON.parse(buildsBody).filter((build) => build.automation_build.name === configBuildName);
  const build = matchingBuilds[0].automation_build;
  const buildId = build.hashed_id;
  // ...then its most recent session (assumed to be the one just finished).
  const sessionsBody = await gotApi({
    uri: `https://${apiCredentials}@api.browserstack.com/automate/builds/${buildId}/sessions.json`,
  });
  const latestSession = JSON.parse(sessionsBody)[0];
  const sessionId = latestSession.automation_session.hashed_id;
  // Collect human-readable explanations for the status being set.
  const explanations = [];
  const statusString = scenario.isSuccessful() ? 'passed' : 'failed';
  if (scenario.isSuccessful()) {
    explanations.push(`${scenarioName} succeeded`);
  }
  if (scenario.isPending()) {
    explanations.push(`${scenarioName} is pending`);
  }
  if (scenario.isUndefined()) {
    explanations.push(`${scenarioName} is undefined`);
  }
  if (scenario.isSkipped()) {
    explanations.push(`${scenarioName} was skipped`);
  }
  if (scenario.isFailed()) {
    explanations.push(`${scenarioName} failed:${scenario.getException()}`);
    explanations.push(`${scenario.getUri()} (${scenario.getLine()})`);
  }
  // Push the status and reason onto the BrowserStack session.
  await gotApi({
    uri: `https://${apiCredentials}@api.browserstack.com/automate/sessions/${sessionId}.json`,
    method: 'PUT',
    form: {
      status: statusString,
      reason: explanations.join('; '),
    },
  });
  // Fetch the session again to extract and log its public URL.
  const buildDetails = await gotApi({
    uri: `https://${apiCredentials}@api.browserstack.com/automate/sessions/${sessionId}.json`,
    method: 'GET',
  });
  // Crude extraction: split on quotes; the value sits two tokens after the key.
  const detailsToArray = buildDetails.split('"');
  const publicUrlPosition = detailsToArray.indexOf('public_url');
  console.log('build details ', buildDetails);
  console.log(`public_url: ${detailsToArray[publicUrlPosition + 2]}`);
}
module.exports = {
submitResults,
getCredentials,
};
|
package net.avcompris.tools.diagrammer.sample;
/**
 * Diagram scenario 6.1: extends scenario 6.0 and draws the worker-to-data and
 * worker-to-converter arrows for migration state 9315001.
 */
public class MigrationScenario6_1 extends MigrationScenario6_0 {

    public static void main(final String... args) throws Exception {
        // Instantiating the scenario triggers rendering (behavior inherited
        // from the base class — confirm against MigrationScenario6_0).
        new MigrationScenario6_1();
    }

    @Override
    protected void body() {
        // v0/arrow and the node handles come from the parent scenario.
        v0("state=9315001");
        arrow(workers0, b_data0);
        arrow(workers0, b_converter);
    }
}
|
#set -x
# Build and run the MKL-DNN dense-convolution sample against a local MKL-DNN
# installation (headers under include/, libraries under lib/).
MKLDNNROOT=/usr/local/
g++ -std=c++11 -I${MKLDNNROOT}/include -L${MKLDNNROOT}/lib mkldnn_dense_convolution.cpp -lmkldnn -o spconv_mkldnn_result
./spconv_mkldnn_result
|
<reponame>mosaic-cloud/mosaic-distribution-dependencies
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 2004-2010. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
*
* %CopyrightEnd%
*/
import com.ericsson.otp.erlang.*;
class NodePing {
    /*
      Implements test case jinterface_SUITE:node_ping/1

      Creates three OtpNode objects. One with default cookie, one with
      specified same cookie as the node running the test case and one
      with a faulty cookie. From each OtpNode object the test_server
      node is pinged.

      Also the default cookie node pings itself, and the node with the
      specified cookie pings the node with default cookie.
    */
    public static void main(String argv[]) {
        // argv[0]: cookie shared with the running Erlang node, argv[1]: its node name.
        String cookie = argv[0];
        String erlNode = argv[1];

        try {
            // Node using the default cookie.
            OtpNode node1 = new OtpNode("javanode1");
            ping(node1, erlNode, "Default cookie:", true, 1);
            ping(node1, node1.node(), "Self:", true, 2);
            ping(node1, "javanode1", "Self:", true, 3);

            // Node sharing the test node's cookie.
            OtpNode node2 = new OtpNode("javanode2", cookie);
            ping(node2, erlNode, "Specified cookie:", true, 4);
            ping(node2, "javanode1", "Javanode (no host):", true, 5);
            ping(node2, node1.node(), "Javanode:", true, 6);

            // Node with a wrong cookie: the ping is expected to fail.
            OtpNode node3 = new OtpNode("javanode3", "faultycookie");
            ping(node3, erlNode, "Faulty cookie:", false, 7);

            // Test OtpNode.cookie() and OtpNode.setCookie(cookie) as well
            if (!node3.cookie().equals("faultycookie"))
                fail("Testing OtpNode.cookie()", 8);
            String old = node3.setCookie(cookie);
            if (!old.equals("faultycookie"))
                fail("Checking return of OtpNode.setCookie(cookie)", 9);
            ping(node3, erlNode, "setCookie:", true, 10);
        }
        catch (Exception e) {
            fail("" + e, 11);
        }
    }

    // Pings `remote` from `node` (2 s timeout) and exits with `reason` when
    // the outcome does not match `expected`.
    private static void ping(OtpNode node, String remote, String descr,
                             boolean expected, int reason) {
        if (node.ping(remote, 2000) == expected) {
            System.out.println(descr + " ping(" + remote + ") -> " + expected);
        }
        else {
            fail("ERROR: " + descr + " ping(" + remote + ") -> " + !expected,
                 reason);
        }
    }

    // Prints the message and terminates the JVM using `reason` as exit status
    // (the Erlang test suite interprets the status code).
    private static void fail(String str, int reason) {
        System.out.println(str);
        System.exit(reason);
    }
}
|
#!/bin/sh
# Install the player's runtime dependencies (pygame, PIL and VLC bindings).
sudo apt-get update
sudo apt-get install python-pygame python-pil python-vlc
# on dietpi/raspbian lite install xinit
sudo apt-get install xinit
|
#!/bin/bash
# Launch a serial terminal emulator on /dev/ttyUSB0 at 115200 baud.
# (Fixed shebang: '# !/bin/bash' with a space is a plain comment, so the
# script was previously run by the caller's default shell.)
# hv 1807010.1555
# NOTE(review): chmod 666 opens the device to every local user; adding the
# user to the 'dialout' group would be the tighter fix.
sudo chmod 666 /dev/ttyUSB0
screen /dev/ttyUSB0 115200
|
<reponame>TauaneLima/Atividade_JavaScript
// NOTE(review): `acharPares` is not defined or imported in this spec file —
// presumably it is loaded globally by the test runner configuration; confirm.
describe('Devolve Vetor de Pares', () => {
  // An empty input vector must map to an empty output vector.
  it('Se o vetor estiver vazio, devolve um vetor vazio', () => expect(acharPares([])).toEqual([]))
})
|
<gh_stars>1-10
package builder
import (
"fmt"
"path"
"sort"
"time"
"github.com/nicksnyder/go-i18n/i18n"
"github.com/aymerick/kowa/models"
"github.com/aymerick/raymond"
)
// EventsBuilder builds events pages
type EventsBuilder struct {
	*NodeBuilderBase

	events     []*EventContent // events still upcoming (or ongoing)
	pastEvents []*EventContent // events whose end date is already past
}

// EventContent represents an event node content
type EventContent struct {
	Model *models.Event
	Cover *ImageVars

	Title string
	Place string
	Body  raymond.SafeString
	Url   string

	// Dates holds the human-readable start/end interval.
	Dates string

	// Pre-formatted, localized representations of the start date.
	StartDateRFC3339  string
	StartDateTime     string
	StartDate         string
	StartWeekday      string
	StartWeekdayShort string
	StartDay          string
	StartMonth        string
	StartMonthShort   string
	StartYear         string
	StartTime         string

	// Pre-formatted, localized representations of the end date.
	EndDateRFC3339  string
	EndDateTime     string
	EndDate         string
	EndWeekday      string
	EndWeekdayShort string
	EndDay          string
	EndMonth        string
	EndMonthShort   string
	EndYear         string
	EndTime         string
}

// EventContentsByStartDate represents sortable event node contents
type EventContentsByStartDate []*EventContent

// EventsContent represents events node content
type EventsContent struct {
	Events     []*EventContent
	PastEvents []*EventContent
	// PrevPage string
	// NextPage string
}
// init registers this builder for the "events" node kind.
func init() {
	RegisterNodeBuilder(kindEvents, NewEventsBuilder)
}
// NewEventsBuilder instantiates a new NodeBuilder
func NewEventsBuilder(siteBuilder *SiteBuilder) NodeBuilder {
	return &EventsBuilder{
		NodeBuilderBase: &NodeBuilderBase{
			// Individual nodes produced by this builder are of kind "event".
			nodeKind:    kindEvent,
			siteBuilder: siteBuilder,
		},
	}
}
// Load is part of NodeBuilder interface
func (builder *EventsBuilder) Load() {
	// Build one node per event, then the aggregate list pages.
	builder.loadEvents()
	builder.loadEventsLists()
}
// Build all events
func (builder *EventsBuilder) loadEvents() {
	// One node for every event defined on the site.
	for _, event := range *builder.site().FindAllEvents() {
		builder.loadEvent(event)
	}
}
// eventSlug computes the URL slug for an event: "YYYY/MM/DD/title", with the
// title capped at maxSlug characters.
func eventSlug(event *models.Event) string {
	year, month, day := event.StartDate.Date()

	// Truncate by runes, not bytes: slicing a string at a byte offset can cut
	// a multi-byte UTF-8 character in half and yield an invalid slug.
	title := event.Title
	if runes := []rune(title); len(runes) > maxSlug {
		title = string(runes[:maxSlug])
	}

	return fmt.Sprintf("%d/%02d/%02d/%s", year, month, day, title)
}
// Build event page
func (builder *EventsBuilder) loadEvent(event *models.Event) {
	// get page settings
	title, tagline, cover, disabled := builder.pageSettings(models.PageKindEvents)
	if disabled {
		return
	}

	// The localized "events" string serves both as the URL prefix and as the
	// fallback page title.
	T := i18n.MustTfunc(builder.siteLang())
	slug := T("events")
	if title == "" {
		title = slug
	}

	// build node
	node := builder.newNode()
	node.fillURL(path.Join(slug, eventSlug(event)))
	node.Title = title
	node.Tagline = tagline
	node.Meta = &NodeMeta{
		Title:       fmt.Sprintf("%s - %s", event.Title, builder.site().Name),
		Description: tagline,
		Type:        "article",
	}

	eventContent := builder.NewEventContent(event, node)
	node.Content = eventContent

	// Prefer the event's own cover; fall back to the page-level cover.
	if eventContent.Cover != nil {
		node.Cover = eventContent.Cover
	} else {
		node.Cover = cover
	}

	builder.addNode(node)

	// Bucket the event: already-finished events go to the "past" list.
	if time.Now().After(event.EndDate) {
		builder.pastEvents = append(builder.pastEvents, eventContent)
	} else {
		builder.events = append(builder.events, eventContent)
	}
}
// siteTime returns given time expressed in site timezone
func (builder *EventsBuilder) siteTime(t time.Time) time.Time {
return t.In(builder.siteTZLocation())
}
// NewEventContent instantiates a new EventContent: a fully-localized,
// display-ready view of an event (dates formatted in the site timezone
// and translated through the site language's i18n catalog).
func (builder *EventsBuilder) NewEventContent(event *models.Event, node *Node) *EventContent {
	T := i18n.MustTfunc(builder.siteLang())

	// Convert both dates to the site timezone before formatting.
	startDate := builder.siteTime(event.StartDate)
	endDate := builder.siteTime(event.EndDate)

	// Each date component is pre-formatted separately so that templates can
	// compose them freely; the i18n keys (weekday_*, month_*, format_time)
	// must match the translation catalogs exactly.
	result := &EventContent{
		Model: event,

		Title: event.Title,
		Place: event.Place,
		Url:   node.Url,

		StartDateRFC3339:  startDate.Format(time.RFC3339),
		StartWeekday:      T("weekday_" + startDate.Format("Monday")),
		StartWeekdayShort: T("weekday_short_" + startDate.Format("Mon")),
		StartDay:          startDate.Format("02"),
		StartMonth:        T("month_" + startDate.Format("January")),
		StartMonthShort:   T("month_short_" + startDate.Format("Jan")),
		StartYear:         startDate.Format("2006"),
		StartTime:         startDate.Format(T("format_time")),

		EndDateRFC3339:  endDate.Format(time.RFC3339),
		EndWeekday:      T("weekday_" + endDate.Format("Monday")),
		EndWeekdayShort: T("weekday_short_" + endDate.Format("Mon")),
		EndDay:          endDate.Format("02"),
		EndMonth:        T("month_" + endDate.Format("January")),
		EndMonthShort:   T("month_short_" + endDate.Format("Jan")),
		EndYear:         endDate.Format("2006"),
		EndTime:         endDate.Format(T("format_time")),
	}

	// Composite localized strings built from the components above.
	result.StartDateTime = T("event_format_datetime", map[string]interface{}{
		"Year":    result.StartYear,
		"Month":   result.StartMonth,
		"Day":     result.StartDay,
		"Time":    result.StartTime,
		"Weekday": result.StartWeekday,
	})

	result.StartDate = T("event_format_date", map[string]interface{}{
		"Year":    result.StartYear,
		"Month":   result.StartMonth,
		"Day":     result.StartDay,
		"Weekday": result.StartWeekday,
	})

	result.EndDateTime = T("event_format_datetime", map[string]interface{}{
		"Year":    result.EndYear,
		"Month":   result.EndMonth,
		"Day":     result.EndDay,
		"Time":    result.EndTime,
		"Weekday": result.EndWeekday,
	})

	result.EndDate = T("event_format_date", map[string]interface{}{
		"Year":    result.EndYear,
		"Month":   result.EndMonth,
		"Day":     result.EndDay,
		"Weekday": result.EndWeekday,
	})

	// Same-day events show one date with a time range; multi-day events
	// show a full start/end datetime interval.
	if result.StartDate == result.EndDate {
		result.Dates = T("date_times_interval", map[string]interface{}{
			"StartDate": result.StartDate,
			"StartTime": result.StartTime,
			"EndTime":   result.EndTime,
		})
	} else {
		result.Dates = T("dates_interval", map[string]interface{}{
			"StartDateTime": result.StartDateTime,
			"EndDateTime":   result.EndDateTime,
		})
	}

	cover := event.FindCover()
	if cover != nil {
		result.Cover = builder.addImage(cover)
	}

	result.Body = generateHTML(event.Format, event.Body)

	return result
}
// Build events list pages
// @todo pagination
//
// loadEventsLists creates the single "events" index node listing upcoming
// events (soonest first) and a capped list of past events (most recent
// first). It must run after loadEvents has filled both buckets.
func (builder *EventsBuilder) loadEventsLists() {
	if len(builder.events) == 0 && len(builder.pastEvents) == 0 {
		return
	}

	// get page settings
	title, tagline, cover, disabled := builder.pageSettings(models.PageKindEvents)
	if disabled {
		return
	}

	T := i18n.MustTfunc(builder.siteLang())

	slug := T("events")
	if title == "" {
		title = slug
	}

	// build node
	node := builder.newNodeForKind(kindEvents)
	node.fillURL(slug)

	node.Title = title
	node.Tagline = tagline
	node.Cover = cover

	node.Meta = &NodeMeta{Description: tagline}

	node.InNavBar = true
	node.NavBarOrder = 10

	// Note: the slice headers share backing arrays with builder.events /
	// builder.pastEvents, so these sorts reorder the builder's own slices.
	events := builder.events
	sort.Sort(EventContentsByStartDate(events))

	// Past events: newest first, truncated to maxPastEvents entries.
	pastEvents := builder.pastEvents
	sort.Sort(sort.Reverse(EventContentsByStartDate(pastEvents)))
	if len(pastEvents) > maxPastEvents {
		pastEvents = pastEvents[:maxPastEvents]
	}

	node.Content = &EventsContent{
		Events:     events,
		PastEvents: pastEvents,
	}

	builder.addNode(node)
}
//
// EventContentsByStartDate
//

// Len implements sort.Interface: number of events in the collection.
func (events EventContentsByStartDate) Len() int {
	return len(events)
}
// Swap implements sort.Interface: exchanges the events at positions i and j.
func (events EventContentsByStartDate) Swap(i, j int) {
	events[i], events[j] = events[j], events[i]
}
// Less implements sort.Interface: orders events by ascending start date.
func (events EventContentsByStartDate) Less(i, j int) bool {
	return events[i].Model.StartDate.Before(events[j].Model.StartDate)
}
|
<gh_stars>1-10
import path from "path";
import { defineConfig } from "vite";
import vue from "@vitejs/plugin-vue";

// Vite configuration for building the "Wheel" component library as a
// redistributable bundle.
export default defineConfig({
  build: {
    // Emit CSS per chunk rather than one combined stylesheet.
    cssCodeSplit: true,
    lib: {
      name: "Wheel",
      entry: path.resolve(__dirname, "src/library.ts"),
    },
    rollupOptions: {
      // Vue is a peer dependency: keep it out of the bundle and map it to
      // the global `Vue` for UMD/IIFE consumers.
      external: ["vue"],
      output: {
        globals: {
          vue: "Vue",
        },
      },
    },
  },
  resolve: {
    alias: {
      // BUG FIX: path.resolve(__dirname, "/src") ignored __dirname, because
      // path.resolve restarts from the last absolute segment - the alias
      // pointed at the filesystem root "/src". Use a relative segment so
      // "@" maps to this project's src directory.
      "@": path.resolve(__dirname, "src"),
    },
  },
  plugins: [vue()],
});
|
<reponame>yamanakahirofumi/RogueInJava
package org.hiro.things.ringtype;

import org.hiro.Const;
import org.hiro.Global;
import org.hiro.Obj_info;
import org.hiro.Util;
import org.hiro.things.Ring;
import org.hiro.things.RingEnum;

/**
 * Ring of add strength: grants a strength bonus to the wearer, or drains
 * one point of strength when the ring rolls cursed.
 */
public class AddStrengthRing extends Ring {

    // Strength bonus conferred by the ring; -1 when the ring is cursed.
    private int strength;

    public AddStrengthRing() {
        super();
        // BUG FIX: the constructor previously set _o_which to
        // RingEnum.Protection.getValue() - a copy/paste error from the
        // protection ring - so getWorth() looked up the wrong
        // Global.ring_info entry. Use this ring's own enum value.
        // NOTE(review): confirm the exact member name in RingEnum.
        this._o_which = RingEnum.AddStrength.getValue();
        // Roll 0..2; a roll of 0 becomes a cursed -1 ring.
        this.strength = Util.rnd(3);
        if (this.strength == 0) {
            this.strength = -1;
            this.add_o_flags(Const.ISCURSED);
        }
    }

    /**
     * Computes the sale value of this ring: base worth plus 100 gold per
     * point of bonus, or a flat 10 gold for a cursed ring. Unidentified
     * rings are worth half; evaluating the worth identifies the ring.
     */
    @Override
    public int getWorth() {
        Obj_info op = Global.ring_info[this._o_which];
        int worth = op.getWorth();
        if (this.strength > 0) {
            worth += this.strength * 100;
        } else {
            // Cursed rings have a token value.
            worth = 10;
        }
        if (!this.contains_o_flags(Const.ISKNOW)) {
            worth /= 2;
        }
        // Appraising the ring marks both the item and its kind as known.
        this.add_o_flags(Const.ISKNOW);
        op.know();
        return worth;
    }

    public int getStrength() {
        return strength;
    }
}
|
#!/bin/bash
# Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# Keeps a pod container alive by writing a marker log file and tailing it
# forever. Falls back to `sleep infinity` if the log cannot be created.

usage() {
  echo "Usage:"
  echo "  keepContainerAlive.sh logs_dir hostname server_name"
}

if [ $# -ne 3 ]; then
  usage
  exit 1
fi

# Read the argument values
# Logs directory where log file will be created
LOGS_DIR=$1
# Pod hostname / IP address
HOST_NAME=$2
# Managed server name
SERVER_NAME=$3

# Timestamp to make the file name unique across restarts
TIMESTAMP=$(date +%y%m%d%H%M)
CONTAINER_LOG="${LOGS_DIR}/WC_Container-success-${HOST_NAME}-${SERVER_NAME}-${TIMESTAMP}.log"

# Delete the file if it exists already
rm -f "$CONTAINER_LOG"

CURRENT_TIME=$(date "+%Y-%m-%d %H:%M:%S")

# BUG FIX: the original tested `[ -e $CONTAINER_LOG ]` immediately after
# deleting that very file, so the branch that writes and tails the log could
# never run and the script always fell through to `sleep infinity`. Check
# that the logs directory is usable instead, then create and tail the log.
if [ -d "$LOGS_DIR" ] && [ -w "$LOGS_DIR" ]
then
	cat > "$CONTAINER_LOG"<<-EOF
	===============================================
	POD Server : $SERVER_NAME
	POD IP Address: $HOST_NAME
	Set up time : $CURRENT_TIME
	WCP POD Container is in running state...
	===============================================
	EOF
	# tail -f never exits on its own; background it and wait so the shell
	# can still react to signals forwarded by the container runtime.
	tail -f "$CONTAINER_LOG" &
	childPID=$!
	wait ${childPID}
else
	# Last resort: block forever so the container is not restarted.
	sleep infinity
fi
|
<gh_stars>0
import Builder from './Http/Builder';
import Core from './Core';
import Request from './Http/Request';
import {
IAttributes,
ICachedResponses,
IModelRequestOptions,
IModelRequestQueryParams,
} from './Interfaces';
/**
* ActiveRecord
*
* @type {[type]}
*/
/**
 * ActiveRecord
 *
 * Base class for REST-backed records: attribute storage, URL building via
 * Builder, request lifecycle events, and a static per-URL response cache.
 *
 * @type {[type]}
 */
export default class ActiveRecord extends Core {
	/**
	 * Get builder reference
	 *
	 * @return Builder
	 */
	public get b(): Builder {
		return this.builder;
	}

	/**
	 * Model if we provide a specific identifier
	 *
	 * @return boolean
	 */
	protected get isModel(): boolean {
		return this.builder.identifier !== undefined;
	}

	/**
	 * Data set by the request
	 *
	 * @type object
	 */
	// public attributes: Map<string, any> = new Map();
	public attributes: any = {};

	/**
	 * Base Url for the API
	 *
	 * @type string
	 */
	public baseUrl: string = '/v1';

	/**
	 * Body for POST
	 *
	 * @type object
	 */
	public body: any = null;

	/**
	 * If this request is allowed to be cached
	 *
	 * @type {boolean}
	 */
	public cacheable: boolean = true;

	/**
	 * Local custom key
	 *
	 * @type {string}
	 */
	public cid: string = '';

	/**
	 * Endpoint key
	 *
	 * https://api.sotw.com/v1/{endpoint}
	 *
	 * @type string
	 */
	public endpoint: string = '';

	/**
	 * Optional override for DELETEs
	 *
	 * https://api.sotw.com/v1/{delete_endpoint}
	 *
	 * @type string | undefined
	 */
	public delete_endpoint: string | undefined;

	/**
	 * Optional override for POSTs
	 *
	 * https://api.sotw.com/v1/{post_endpoint}
	 *
	 * @type string | undefined
	 */
	public post_endpoint: string | undefined;

	/**
	 * Optional override for PUTs
	 *
	 * https://api.sotw.com/v1/{put_endpoint}
	 *
	 * @type string | undefined
	 */
	public put_endpoint: string | undefined;

	/**
	 * If this has ever fetched
	 */
	public hasFetched: boolean = false;

	/**
	 * If this has ever fetched and loaded
	 */
	public hasLoaded: boolean = false;

	/**
	 * List of headers
	 *
	 * @type {any}
	 */
	public headers: any = {};

	/**
	 * Unique key for directly fetching
	 *
	 * https://api.sotw.com/v1/{endpoint}/{id}
	 *
	 * @type string
	 */
	public id: string = '';

	/**
	 * Limit
	 *
	 * @type number
	 */
	public limit: number = 15;

	/**
	 * If the request is currently loading
	 *
	 * @type {boolean}
	 */
	public loading: boolean = false;

	/**
	 * Meta
	 *
	 * @type object
	 */
	public meta: any = {};

	/**
	 * Modified endpoint takes precedence
	 *
	 * @type {string}
	 */
	public modifiedEndpoint: string | null = null;

	/**
	 * Page
	 *
	 * @type number
	 */
	public page: number = 1;

	/**
	 * Parent object
	 * Usually when there's a relationship involved
	 *
	 * @type ActiveRecord
	 */
	public parent: any;

	/**
	 * Last request
	 *
	 * @type Request
	 */
	public request?: Request;

	/**
	 * Last Request Time
	 */
	public requestTime: number;

	/**
	 * API Query Builder
	 *
	 * @type Builder
	 */
	protected builder: Builder;

	/**
	 * Used for identifying local models
	 *
	 * @type {string}
	 */
	protected cidPrefix: string = 'c';

	/**
	 * The key that collection data exists on, e.g.
	 *
	 *     {
	 *         data: [ .. ]
	 *     }
	 *
	 * @type string
	 */
	protected dataKey: string | undefined = 'data';

	/**
	 * Save options of last _fetch
	 *
	 * @type {Object}
	 */
	protected lastRequest: any;

	/**
	 * Prevent overflow of runLastAttempts
	 * @type {number}
	 */
	protected runLastAttempts: number = 0;

	/**
	 * Max attempts to runLast
	 * @type {number}
	 */
	protected runLastAttemptsMax: number = 2;

	/**
	 * Reference to object we use in our modified active record
	 *
	 * @type ActiveRecord
	 */
	protected referenceForModifiedEndpoint: ActiveRecord | null | undefined;

	/**
	 * Constructor
	 */
	constructor(options: any = {}) {
		super(options);

		// Set options on class
		Object.assign(this, options);

		// Setup default last request
		this.lastRequest = {};

		// Setup URL builder
		this.builder = new Builder(this);

		// Options
		this.options(options);

		// Mark creation as the request time
		this.requestTime = Date.now();
	}

	/**
	 * Get attribute
	 *
	 * @param string key
	 *
	 * @return string
	 */
	public attr(key: string): string | number | null {
		return this.attributes[key];
	}

	/**
	 * Set attributes by hashmap
	 *
	 * Also invokes any matching class setters so computed properties stay
	 * in sync, and adopts `id` when present.
	 *
	 * @param object = {} hash
	 *
	 * @return ActiveRecord
	 */
	public set(hash: IAttributes = {}, trigger: boolean = true): any {
		// @ts-ignore
		var possibleSetters = Object.getOwnPropertyDescriptors(this.__proto__);

		for (let key in hash) {
			this.attributes[key] = hash[key];

			// Check for setters
			if (
				possibleSetters &&
				possibleSetters[key] &&
				possibleSetters[key].set
			) {
				this[key] = hash[key];
			}
		}

		// Check for ID
		if (hash && hash['id']) {
			this.id = hash.id;
		}

		// Trigger
		if (trigger) {
			this.dispatch('set');
		}

		return this;
	}

	/**
	 * Unset attribute
	 *
	 * Attribute will be `undefined` after unsetting
	 *
	 * @param string key
	 *
	 * @return ActiveRecord
	 */
	public unset(key: string): any {
		delete this.attributes[key];

		return this;
	}

	/**
	 * Apply an object to change options and set meta
	 *
	 * @param {any} options
	 * @return {ActiveRecord}
	 */
	public options(options: any = {}): any {
		// Override endpoint
		if (options.endpoint) {
			this.setEndpoint(options.endpoint);
		}

		// Check options for headers
		if (options.headers) {
			this.setHeaders(options.headers);
		}

		// Set metadata
		if (options.meta) {
			// When merging (e.g. paginated "load more"), accumulate the
			// pagination count. BUG FIX: guard each level with optional
			// chaining - a response without pagination metadata used to
			// throw here.
			if (options.merge) {
				if (
					options.meta.pagination?.count &&
					this.meta?.pagination?.count
				) {
					options.meta.pagination.count += this.meta.pagination.count;
				}
			}

			// Set
			this.meta = options.meta;
		}

		// Check options for params
		if (options.params || options.qp || options.queryParams) {
			this.setQueryParams(
				options.queryParams || options.qp || options.params
			);
		}

		return this;
	}

	/**
	 * Converts model to JSON object
	 *
	 * @return object
	 */
	public toJSON(): object {
		let json: any = this.attributes;

		// @todo is this code copasetic?
		// @ts-ignore
		var possibleGetters = Object.getOwnPropertyNames(this.__proto__);

		// Convert toJSON on subobjects so they stay in sync
		for (var key of possibleGetters) {
			// @ts-ignore
			if (json[key] && this[key] && this[key].toJSON) {
				// @ts-ignore
				json[key] = this[key].toJSON();
			}
		}

		return json;
	}

	//#region Actions

	/**
	 * Create Model
	 *
	 * @todo There's a ton to do here too
	 */
	public create(attributes: any) {
		return this.post(attributes);
	}

	/**
	 * Delete Model
	 *
	 * @todo There's a ton to do here too
	 */
	public delete(attributes: any = null): Promise<Request> {
		// Point the builder at the record to remove. The identifier() call
		// mutates builder state consumed later by getUrlByMethod(); the
		// returned URL itself was previously stored in an unused local.
		this.builder.identifier(
			this.id || (attributes ? attributes.id : '')
		);

		// Attributes
		const body: any = null;
		const headers: any = this.headers;
		const method: string = 'DELETE';

		return this._fetch(null, {}, method, body, headers);
	}

	/**
	 * POST Model
	 */
	public post(attributes: any = null): Promise<Request> {
		// Attributes (the URL is derived inside _fetch via getUrlByMethod)
		const body: any = attributes || this.attributes;
		const headers: any = this.headers;
		const method: string = 'POST';

		return this._fetch(null, {}, method, body, headers);
	}

	/**
	 * PUT model
	 *
	 * @param {any} attributes
	 * @return {Promise<Request>}
	 */
	public put(attributes: any): Promise<Request> {
		// Attributes (the URL is derived inside _fetch via getUrlByMethod)
		const body: any = attributes || this.attributes;
		const headers: any = this.headers;
		const method: string = 'PUT';

		return this._fetch(null, {}, method, body, headers);
	}

	/**
	 * Save model
	 *
	 * Uses PUT when the record already has an id, POST otherwise.
	 *
	 * @todo There so much to do to fix this
	 *
	 * @param {any = null} attributes
	 * @return {Promise<Request>}
	 */
	public save(attributes: any = null): Promise<Request> {
		// Attributes
		const body: any = attributes || this.attributes;
		const headers: any = this.headers;
		const method: string = this.id ? 'PUT' : 'POST';

		return this._fetch(null, {}, method, body, headers);
	}

	/**
	 * Interface for Collection (overridden by subclasses)
	 */
	public add(x: any) {}

	/**
	 * Interface for Collection (overridden by subclasses)
	 */
	public remove(x: any) {}

	/**
	 * Empty attributes
	 */
	public reset() {
		this.attributes = {};
	}

	/**
	 * Used to get an individual item in a model
	 *
	 * Can pass either an ID #XX or a slug
	 *
	 * @param {string | number} id
	 * @return {Promise}
	 */
	public async find(
		id: string | number,
		queryParams: IModelRequestQueryParams = {}
	): Promise<any> {
		return await this.fetch({ id }, queryParams).then((request) => {
			return this;
		});
	}

	/**
	 * Upload file
	 *
	 * @param {string} name
	 * @param {HTMLInputElement | FileList | File} file
	 * @return {any}
	 */
	public file(
		name: string,
		file: HTMLInputElement | FileList | File
	): Promise<void | Request | Response> {
		// Point the builder at this record (side effect used by _fetch);
		// the returned URL was previously stored in an unused local.
		this.builder.identifier(this.id);

		const formData = new FormData();

		// Normalize the input down to a single File
		if (file instanceof HTMLInputElement) {
			file = (<FileList>file.files)[0];
		} else if (file instanceof FileList) {
			file = file[0];
		} else if (file instanceof File) {
			// Good
		} else {
			console.warn('File provided unacceptable type.');
		}

		// Let the browser set the multipart boundary itself
		this.unsetHeader('Content-Type');

		// Add files
		formData.append(name, file);

		// Set fetch
		return this._fetch(null, {}, 'POST', formData).then((request: any) => {
			this.dispatch('file:complete', this);

			// @note This was duplicating our images
			// this.add(request.data);

			return request;
		});
	}

	/**
	 * Public generic fetch method
	 *
	 * NOTE: It is favored to use other methods
	 *
	 * @param {IModelRequestOptions | null = {}} options
	 * @param {IModelRequestQueryParams = {}} queryParams
	 * @return {Promise}
	 */
	public async fetch(
		options: IModelRequestOptions | null = {},
		queryParams: IModelRequestQueryParams = {}
	): Promise<void | Request | Response> {
		return await this._fetch(options, queryParams);
	}

	/**
	 * Alias for `file`
	 *
	 * @param {string} name
	 * @param {HTMLInputElement | FileList | File} file
	 * @return {Promise}
	 */
	public upload(
		name: string,
		file: HTMLInputElement | FileList | File
	): Promise<void | Request | Response> {
		return this.file(name, file);
	}

	/**
	 * Run last query again (e.g. after re-authentication), bounded by
	 * runLastAttemptsMax to avoid infinite retry loops.
	 *
	 * @return {any}
	 */
	public runLast(): any {
		// Check if we can do this
		if (++this.runLastAttempts >= this.runLastAttemptsMax) {
			console.warn('Run last attempts expired');

			setTimeout(() => {
				this.runLastAttempts = 0;
			}, 1000);

			return;
		}

		return this._fetch(
			this.lastRequest.options,
			this.lastRequest.queryParams,
			this.lastRequest.method,
			this.lastRequest.body,
			this.lastRequest.headers
		);
	}

	//#endregion Actions

	//#region Get Params

	/**
	 * Resolve the request URL for a given HTTP method, honoring the
	 * method-specific endpoint overrides and any modified endpoint, then
	 * restoring the configured endpoint.
	 */
	public getUrlByMethod(method: string): string {
		// Remember the configured endpoint so it can be restored below
		const originalEndpoint: string = this.endpoint;

		// Use a method-specific endpoint override, if one exists
		if (method === 'delete' && this.delete_endpoint) {
			this.endpoint = this.delete_endpoint;
		} else if (method === 'put' && this.put_endpoint) {
			this.endpoint = this.put_endpoint;
		} else if (method === 'post' && this.post_endpoint) {
			this.endpoint = this.post_endpoint;
		}

		// Check if we're using modified
		if (this.referenceForModifiedEndpoint && this.modifiedEndpoint) {
			this.useModifiedEndpoint(this.referenceForModifiedEndpoint);
		}

		// Mark url
		const url: string = this.builder.url;

		// Reset endpoint
		this.endpoint = originalEndpoint;

		return url;
	}

	//#endregion Get Params

	//#region Set Params

	/**
	 * We automatically assign modified endpoints through relationships
	 * like hasOne/hasMany, but sometimes we may not want to change that
	 * endpoint. This allows us to cancel the change.
	 *
	 * @return {any}
	 */
	public cancelModifiedEndpoint(): any {
		this.referenceForModifiedEndpoint = undefined;
		this.modifiedEndpoint = null;

		return this;
	}

	/**
	 * Derive a nested endpoint from a related record, e.g.
	 * {parent.endpoint}/{parent.id}/{this.endpoint}
	 *
	 * @param {ActiveRecord} activeRecord
	 * @return {any}
	 */
	public useModifiedEndpoint(activeRecord: ActiveRecord): any {
		// @todo, we shouldn't actually mutate this
		// we should turn the endpoint that we actually use into a getter
		// then have a way of modifying that so we maintain the original class endpoint

		// Object we reference for modified
		this.referenceForModifiedEndpoint = activeRecord;

		// Warnings
		if (activeRecord.id == null) {
			console.warn(
				'Modified endpoints usually have an ID signature. Are you sure this is right?'
			);
		}

		// Set modified endpoint
		this.modifiedEndpoint =
			activeRecord.endpoint +
			'/' +
			activeRecord.id +
			(activeRecord.id ? '/' : '') +
			this.endpoint;

		return this;
	}

	/**
	 * Set specific body
	 *
	 * @param {any} value
	 * @return {any}
	 */
	public setBody(value: any): any {
		this.body = value;

		return this;
	}

	/**
	 * Set specific endpoint override
	 *
	 * @param {string} endpoint
	 * @return {any}
	 */
	public setEndpoint(endpoint: string): any {
		this.referenceForModifiedEndpoint = undefined;
		this.modifiedEndpoint = null;
		this.endpoint = endpoint;

		return this;
	}

	/**
	 * Set specific header
	 *
	 * @param {string} header
	 * @param {string | null} value
	 * @return {any}
	 */
	public setHeader(header: string, value: string | null): any {
		this.headers[header] = value;

		return this;
	}

	/**
	 * Override and set headers
	 *
	 * @param {any} headers
	 * @return {any}
	 */
	public setHeaders(headers: any): any {
		for (var k in headers) {
			this.setHeader(k, headers[k]);
		}

		return this;
	}

	/**
	 * Override and set id
	 *
	 * @param {any} id
	 * @return {any}
	 */
	public setId(id: any): any {
		this.id = id;

		return this;
	}

	/**
	 * Unset id
	 *
	 * @return {any}
	 */
	public unsetId(): any {
		this.id = '';

		return this;
	}

	/**
	 * Remove a header entirely
	 *
	 * @param {string} header
	 * @return {any}
	 */
	public unsetHeader(header: string): any {
		this.setHeader(header, null);
		delete this.headers[header];

		return this;
	}

	/**
	 * Set specific query param
	 *
	 * @param {string} key
	 * @param {string} value
	 * @return {any}
	 */
	public setQueryParam(key: string, value: string): any {
		this.builder.qp(key, value);

		return this;
	}

	/**
	 * Override and set query params
	 *
	 * @param {any} params
	 * @return {any}
	 */
	public setQueryParams(params: any): any {
		for (var k in params) {
			this.setQueryParam(k, params[k]);
		}

		return this;
	}

	/**
	 * Remove a query param
	 *
	 * @param {string} param
	 * @return {any}
	 */
	public unsetQueryParam(param: string): any {
		delete this.builder.queryParams[param];

		return this;
	}

	/**
	 * Set an Authorization bearer token header
	 *
	 * @param {string} token
	 * @return {any}
	 */
	public setToken(token: string): any {
		this.setHeader('Authorization', 'Bearer ' + token);

		return this;
	}

	/**
	 * Function to call after setting a fetch
	 *
	 * This is useful if we're doing callbacks from cached promises
	 */
	public setAfterResponse(request: Request, options: any = {}) {
		var method: string = request.method || 'get';

		// Add model
		if (method.toLowerCase() === 'post') {
			this.add(request.data);
		} else if (method.toLowerCase() === 'delete') {
			// Intentionally empty
		} else {
			var data =
				this.dataKey !== undefined
					? request.data[this.dataKey]
					: request.data;

			this.set(data, options);
		}

		// Set options
		this.options(
			Object.assign({}, options, {
				meta: request.data.meta,
			})
		);

		// Events
		this.dispatch('parse:after', this);
	}

	//#endregion Set Params

	// @todo Update return
	protected _fetch(
		options: IModelRequestOptions | null = {},
		queryParams: IModelRequestQueryParams = {},
		method: any = null,
		body: any = null,
		headers: any = null
	): Promise<Request> {
		// Normalize method
		method = method ? method.toLowerCase() : 'get';

		// Save request params so runLast() can replay this call
		this.lastRequest = {
			options,
			queryParams,
			method,
			body,
			headers,
		};

		// Set last request time
		this.requestTime = Date.now();

		// Append a cache-buster query param when not cacheable
		if (!this.cacheable) {
			this.builder.qp('cb', Date.now());
		}

		// Check for query params
		for (let key in queryParams) {
			this.builder.qp(key, queryParams[key]);
		}

		// Check for ID
		if (options && options.id) {
			this.builder.identifier(options.id);
		}

		// Query params
		const url: string = this.getUrlByMethod(method);

		// Events
		this.dispatch('requesting', this);

		// Has fetched
		this.hasFetched = true;

		// Set loading
		this.loading = true;

		// Setup request
		var request = (this.request = new Request(url, {
			dataKey: this.dataKey,
		}));

		// note: this *should* be set by fetch as well, but
		// we have an issue right now we're working out
		this.request.method = method;

		// BUG FIX: the original registered duplicate 'parse:after',
		// 'progress' and 'complete' handlers (an inline copy plus the
		// overridable Fetch* methods below), so POSTs called add() twice
		// (duplicating collection items) and every lifecycle event was
		// dispatched twice. Each handler is now registered exactly once;
		// data application happens in FetchParseAfter, which also skips
		// error responses (status >= 400).
		request.on('parse:after', (e) =>
			this.FetchParseAfter(request, e, options)
		);
		request.on('progress', (e) => this.FetchProgress(request, e, options));
		request.on('complete', (e) => this.FetchComplete(request, e, options));
		request.on('complete:get', (e) => this.dispatch('complete:get'));
		request.on('complete:put', (e) => this.dispatch('complete:put'));
		request.on('complete:post', (e) => this.dispatch('complete:post'));
		request.on('complete:delete', (e) => this.dispatch('complete:delete'));

		// Request (method, body, headers)
		return request.fetch(
			method,
			body || this.body,
			headers || this.headers
		);
	}

	//#region Cache

	/**
	 * Cached responses by URL
	 *
	 * Example:
	 *
	 *     'sotw.com/v1/film..': { complete: false, time: ... }
	 */
	protected static cachedResponses: ICachedResponses = {};

	/**
	 * Create cached entry
	 *
	 * Usage:
	 *
	 *     this.cache('foo', 'bar');
	 *
	 * @param {string} key
	 * @param {any} value
	 * @param {boolean} isComplete
	 * @param {number} ttl
	 *
	 * @return void
	 */
	protected cache(
		key: string,
		value: any,
		isComplete: boolean = false,
		ttl: number = 5000
	): void {
		// If exists, save only value as to not overwrite subscribers
		if (ActiveRecord.cachedResponses[key]) {
			ActiveRecord.cachedResponses[key].complete = isComplete;
			ActiveRecord.cachedResponses[key].time = Date.now();
			ActiveRecord.cachedResponses[key].value = value;
		} else {
			ActiveRecord.cachedResponses[key] = {
				complete: false,
				subscribers: [],
				time: Date.now(),
				ttl: ttl,
				value: value,
			};
		}
	}

	/**
	 * Check if we have a cached item
	 *
	 * @param {string} key Cache key
	 *
	 * @return boolean
	 */
	protected isCached(key: string): boolean {
		return !!ActiveRecord.cachedResponses[key];

		/*
		 * return !!ActiveRecord.cachedResponses[key]
		 *     && (ActiveRecord.cachedResponses[key].time + ActiveRecord.cachedResponses[key].ttl) < Date.now();
		 */
	}

	/**
	 * Says we have a cached item that is currently incomplete
	 *
	 * @param {string} key
	 *
	 * @return boolean
	 */
	protected isCachePending(key: string): boolean {
		return (
			this.isCached(key) &&
			(!this.getCache(key).complete || this.getCache(key).failed)
		);
	}

	/**
	 * Get cached object
	 *
	 * @param {string} key
	 *
	 * @return any
	 */
	protected getCache(key: string): any {
		return ActiveRecord.cachedResponses[key];
	}

	/**
	 * Add subscriber
	 *
	 * @param {string} key
	 * @param {any} resolve
	 * @param {any} reject
	 * @param {any} collection
	 */
	protected addCacheSubscriber(
		key: string,
		resolve: any,
		reject: any,
		collection: any
	) {
		const cache: any = this.getCache(key);

		cache.subscribers.push({ collection, reject, resolve });
	}

	/**
	 * Clear subscribers
	 *
	 * @param {string} key
	 */
	protected clearCacheSubscribers(key: string) {
		const cache: any = this.getCache(key);

		cache.subscribers = [];
	}

	//#endregion Cache

	/**
	 * Complete from fetch request
	 *
	 * @param {Request} request
	 * @param {any} e
	 */
	protected FetchComplete(request: Request, e: any, options: any = {}) {
		// Has loaded ever
		this.hasLoaded = true;

		// Set loading
		this.loading = false;

		// Bubble
		this.dispatch('complete', request.data);
	}

	/**
	 * Progress from fetch request
	 *
	 * @param {Request} request
	 * @param {any} e
	 */
	protected FetchProgress(request: Request, e: any, options: any = {}) {
		this.dispatch('progress', e.data);
	}

	/**
	 * Overrideable fetch parse:after
	 *
	 * Applies response data to the record for successful (< 400) responses
	 * only, then announces the fetch.
	 *
	 * @param {Request} request
	 * @param {any} e
	 */
	protected FetchParseAfter(request: Request, e: any, options: any = {}) {
		const response: Response = <Response>request.response;
		const code: number = <number>response.status;

		// Only set for acceptable responses
		if (code < 400) {
			this.setAfterResponse(request, options);
		}

		// Fetched event
		this.dispatch('fetched', this);
	}
}
|
import React, { Component } from 'react';
import ReactDOM from 'react-dom';
import { DelphiTree } from './Tree';
import * as d3 from 'd3';
import { TXT_TOGGLE_TREE_OFF, TXT_TOGGLE_TREE_ON,
BLOCK_INSTRUCTIONS, BLOCKS_INSTRUCT } from './Constants';
const cssfont = require('../css/bpreplay-webfont.woff');
// Hosts the DelphiTree visualization inside an <iframe> so its styles and
// fonts are isolated from the surrounding app, and renders the toolbar
// (show/hide tree, jump to instructions) above it.
export default class DelphiTreeContainer extends Component {
  constructor(props) {
    super(props);
    this.node = null;
    this.state = { };
    // Bind handlers once so they keep `this` when passed as props
    this.goToInstructions = this.goToInstructions.bind(this);
    this.showOrHideTree = this.showOrHideTree.bind(this);
  }

  // Navigates to the instructions block, pushing the current block onto
  // the parent app's history stack first so the user can come back.
  goToInstructions(event) {
    event.preventDefault();
    const { AppObj } = { ...this.props};
    const { historyCBlockId, currentCBlockId } = { ...AppObj.state};
    // Copy before pushing so the parent's state array is not mutated
    const newHistoryCBlockId = [ ...historyCBlockId];
    newHistoryCBlockId.push(currentCBlockId);
    var newState = {
      currentCBlockId: BLOCKS_INSTRUCT,
      historyCBlockId: newHistoryCBlockId
    };
    AppObj.setState(newState);
  }

  // Toggles tree visibility both in parent state (drives iframe sizing in
  // render) and directly on the tree root inside the iframe via d3.
  showOrHideTree(event) {
    event.preventDefault();
    const { AppObj } = { ...this.props};
    var { treeVisible } = { ...AppObj.state};
    treeVisible = !treeVisible;
    AppObj.setState({ treeVisible: treeVisible });
    const iframeDoc = document.getElementById('treeFrame').contentWindow.document;
    const treeRoot = iframeDoc.getElementById('treeRoot');
    var treeVisibility = (treeVisible ? 'visible' : 'hidden');
    d3.select(treeRoot).style("visibility", treeVisibility);
  }

  // Writes a minimal HTML document (with the bundled web font and tree
  // styles) into the iframe, then mounts the DelphiTree component inside
  // it. The document.write/close sequence must complete before render.
  componentDidMount() {
    const iframeDoc = document.getElementById('treeFrame').contentWindow.document;
    iframeDoc.open("text/html", "replace");
    iframeDoc.write(`
      <html>
      <head></head>
      <style>
        @font-face {
            font-family: 'bpreplayregular';
            src: url('` + cssfont + `') format('woff');
            font-weight: normal;
            font-style: normal;
        }
        * {
          font-family: bpreplayregular, sans-serif;
        }
        .node circle {
          fill: #fff;
          stroke: steelblue;
          stroke-width: 2px;
        }
        .node text {
          stroke-width: 2px;
        }
        .link {
          fill: none;
          stroke: #ddd;
          stroke-width: 2px;
        }
      </style>
      <body><div id="treeRootDiv"></div></body>
      </html>`);
    iframeDoc.close();
    ReactDOM.render(<DelphiTree AppObj={this.props.AppObj} />,
      iframeDoc.getElementById('treeRootDiv'));
  }

  render() {
    const { AppObj } = { ...this.props};
    var { currentCBlockId, sessionOk} = { ...AppObj.state};
    return (
      <div>
        <div className="delphi-general-paragraph-small">
          <button onClick={this.showOrHideTree}>{
            AppObj.state.treeVisible ? TXT_TOGGLE_TREE_OFF : TXT_TOGGLE_TREE_ON
          }</button>
          { ((!!sessionOk) && (currentCBlockId !== BLOCKS_INSTRUCT)) ?
            <span>
              <font color="#FFFFFF">--------</font>
              <button onClick={this.goToInstructions}>{BLOCK_INSTRUCTIONS}</button>
            </span>
            : ""
          }
        </div>
        {/* Collapsed to zero height (not unmounted) when hidden so the
            iframe's document and mounted tree are preserved. */}
        <div><iframe frameBorder="0" width="100%" id="treeFrame" title="DM Tree"
          style={(!!AppObj.state.treeVisible ?
            {overflow: "scroll", height: AppObj.state.treeHeight, width: "98%", position: "absolute"} :
            {overflow: "hidden", height: "0px", width: "98%", position: "absolute"})}>
        </iframe></div>
      </div>
    )
  }
}
|
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, StackingClassifier
from tpot.builtins import StackingEstimator  # unused; kept for compatibility

import pandas as pd
import numpy as np

# Load the Iris dataset
iris = load_iris()

# DataFrame view of the dataset, handy for inspection/debugging.
df = pd.DataFrame(
    data=np.c_[iris['data'], iris['target']],
    columns=iris['feature_names'] + ['target'],
)

# Split the data into training and testing sets
training_features, testing_features, training_target, testing_target = train_test_split(
    iris.data, iris.target, random_state=None
)

# Base learners for the ensemble.
decision_tree = DecisionTreeClassifier()
random_forest = RandomForestClassifier()

# BUG FIX: tpot's StackingEstimator wraps a *single* estimator
# (StackingEstimator(estimator=...)) and does not accept an `estimators`
# list, so the original call raised a TypeError. scikit-learn's
# StackingClassifier is the correct API for stacking multiple base
# learners (the final estimator defaults to logistic regression).
stacked_classifier = StackingClassifier(
    estimators=[('dt', decision_tree), ('rf', random_forest)]
)

# Train the stacked classifier on the training data
stacked_classifier.fit(training_features, training_target)

# Evaluate the stacked classifier's accuracy on the testing data
accuracy = stacked_classifier.score(testing_features, testing_target)
print("Accuracy of the stacked classifier on the testing set:", accuracy)
|
package org.infinispan.persistence.cloud.configuration;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;

import java.util.Properties;

import org.infinispan.configuration.cache.Configuration;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.testng.annotations.Test;

/**
 * Verifies that CloudStoreConfiguration round-trips: all attributes set via
 * the builder are readable on the built configuration, and a second builder
 * initialized with {@code read(store)} reproduces the same values.
 */
@Test(groups = "unit", testName = "persistence.cloud.configuration.ConfigurationTest")
public class ConfigurationTest {

   public void testCacheStoreConfiguration() {
      Properties props = new Properties();
      props.put("key1", "val1");
      props.put("key2", "val2");

      ConfigurationBuilder b = new ConfigurationBuilder();
      b.persistence().addStore(CloudStoreConfigurationBuilder.class)
            .provider("transient")
            .location("test-location")
            .identity("me")
            .credential("s3cr3t")
            .container("test-container")
            .endpoint("http://test.endpoint")
            .compress(true)
            .properties(props)
            .normalizeCacheNames(true);
      Configuration configuration = b.build();
      CloudStoreConfiguration store = (CloudStoreConfiguration) configuration.persistence().stores().get(0);
      assertEquals(store.provider(), "transient");
      assertEquals(store.location(), "test-location");
      assertEquals(store.identity(), "me");
      assertEquals(store.credential(), "s3cr3t");
      assertEquals(store.container(), "test-container");
      assertEquals(store.endpoint(), "http://test.endpoint");
      assertTrue(store.compress());
      assertEquals(store.properties().get("key1"), "val1");
      assertEquals(store.properties().get("key2"), "val2");
      assertTrue(store.normalizeCacheNames());

      // Round-trip: read the built store into a fresh builder and verify
      // every attribute survives.
      b = new ConfigurationBuilder();
      b.persistence().addStore(CloudStoreConfigurationBuilder.class).read(store);
      Configuration configuration2 = b.build();
      CloudStoreConfiguration store2 = (CloudStoreConfiguration) configuration2.persistence().stores()
            .get(0);
      assertEquals(store2.provider(), "transient");
      assertEquals(store2.location(), "test-location");
      assertEquals(store2.identity(), "me");
      assertEquals(store2.credential(), "s3cr3t");
      assertEquals(store2.container(), "test-container");
      assertEquals(store2.endpoint(), "http://test.endpoint");
      assertTrue(store2.compress());
      assertEquals(store2.properties().get("key1"), "val1");
      assertEquals(store2.properties().get("key2"), "val2");
      // FIX: this assertion was accidentally duplicated; asserted once now.
      assertTrue(store2.normalizeCacheNames());
   }
}
|
<gh_stars>0
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
/// UIColor helpers: theme colors, dark-mode aware color pairs, hex-based
/// construction and gradients. (Declarations only; behavior notes below are
/// based on the method names/signatures — confirm against the implementation.)
@interface UIColor (HBCategory)

/// Default dynamic background color for tab-style views.
+ (UIColor *)tab_normalDynamicBackgroundColor;

/// Color pair for light / dark interface style; darkColor may be nil
/// (presumably the light variant is then reused — verify in the .m file).
+ (UIColor *)tab_getColorWithLightColor:(UIColor *)lightColor
darkColor:(UIColor * _Nullable)darkColor;

/// Theme (primary title) color.
+ (UIColor *)themeTitleColor;

/// Subtitle color.
+ (UIColor *)subheadTitleColor;

/// Background color for controls.
+ (UIColor *)secondBackgroundColor;

/// Separator line color.
+ (UIColor *)lineColor;

/// List (table view) background color.
+ (UIColor *)listBackgroundColor;

/// A random color.
+ (UIColor *)randomColor;

/// Creates a color from a hex value.
/// @param hex 0xFFFFFF
+ (UIColor *)colorWithHex:(uint32_t)hex;

/// Creates a color from a hex value.
/// @param hex 0xFFFFFF
/// @param alpha opacity in 0.0-1.0
+(UIColor *)colorWithHex:(uint32_t)hex alpha:(CGFloat)alpha;

/**
 * @brief Gradient color
 *
 * @param fromColor start color
 * @param toColor   end color
 * @param height    gradient height
 *
 * @return gradient color
 */
+ (UIColor*)gradientFromColor:(UIColor*)fromColor toColor:(UIColor*)toColor withHeight:(CGFloat)height;

/// RGB color (kept commented out for reference)
//+(UIColor *)colorWithRed:(uint8_t)red green:(uint8_t)green blue:(uint8_t)blue alpha:(CGFloat)alpha;

/// Whether the current interface style is dark.
+ (BOOL)isStyleDark;
@end
NS_ASSUME_NONNULL_END
|
#!/usr/bin/env bash
# Environment setup for the lab: derives host names and the SSH login from
# the user's name, the group number and the lab domain. Meant to be sourced.

export LAB_DOMAIN=REDACTED # Our lab domain
export IAAS_PREFIX=iaas- # Host-name prefix of the IaaS machine
export PAAS_PREFIX=paas- # Host-name prefix of the PaaS machine

# Ask interactively for the user's name; it becomes the email local-part.
echo "Who are you? [matteo.franzil/claudio.facchinetti]: "
read -r NAME_INPUT
export NAME=$NAME_INPUT

export EMAIL=${NAME}@REDACTED # Your unitn email as provided
export GROUP_NUM=19 # Your group number
export LOGIN="$(printf "%s" ${EMAIL} | sed -e 's/@.*$//')" # The login is the local-part of the email address

# Full host names: prefix + group number + lab domain.
export IAAS_HOST="${IAAS_PREFIX}${GROUP_NUM}${LAB_DOMAIN}"
export PAAS_HOST="${PAAS_PREFIX}${GROUP_NUM}${LAB_DOMAIN}"

# Site names for completeness (disabled: would require SSH access here).
#export IAAS_SITE=$(ssh ${LOGIN}@${IAAS_HOST} hostname) # iaas-19
#export PAAS_SITE=$(ssh ${LOGIN}@${PAAS_HOST} hostname) # paas-19

# Local port used for the SOCKS proxy.
export SOCKS_PORT=8888
|
def filter_strings(input_list):
    """Return the strings from input_list that start with 'A' and are
    longer than 5 characters, preserving their original order."""
    kept = []
    for text in input_list:
        if text.startswith('A') and len(text) > 5:
            kept.append(text)
    return kept
|
# Run the pre-generated job list (presumably for node 3, per "jobs_n3") of
# this experiment through GNU parallel, 6 jobs at a time.
parallel --jobs 6 < ./results/exp_iterations/run-0/sea_mem_4n_6t_6d_1000f_617m_1i/jobs/jobs_n3.txt
|
<gh_stars>0
package com.tuya.iot.suite.ability.notice.model;
import lombok.Data;
import java.util.List;
/**
 * Paged result wrapper for a notice-template list query.
 *
 * NOTE(review): the snake_case field names presumably mirror the keys of the
 * upstream JSON payload — confirm the mapping before renaming any field.
 *
 * @author 哲也
 * @since 2021/5/11
 */
@Data
public class NoticeTemplateListResult {
    // Total number of entries reported for the query.
    private long total;
    // The current page of templates.
    private List<NoticeTemplateList> list;
    // Whether further pages are available.
    private boolean has_more;

    /** One notice-template entry. */
    @Data
    static class NoticeTemplateList {
        private String template_id;
        private String name;
        private String content;
        private int status;
    }
}
|
// Modernized from Turbo-C style (<iostream.h>, <conio.h>, getch()) to
// standard C++ so the program compiles on any conforming compiler.
#include <iostream>
#include <cmath>

// Reads an integer n and prints sqrt(n) when n is positive and odd,
// otherwise prints n^5.
int main()
{
    int n;
    std::cout << "Enter a number : ";
    std::cin >> n;
    float x = (n > 0 && n % 2 == 1) ? static_cast<float>(std::sqrt(static_cast<double>(n)))
                                    : static_cast<float>(std::pow(n, 5.0));
    std::cout << "\n" << x;
    std::cin.get(); // replaces non-standard getch(): pause before exiting
    return 0;
}
|
<filename>Javascript Full Course for Beginners to Advanced/javascript/app/examples/4.functionsMethodsAndObjects/arrayDestructuring.js
/* styling */
require('styles/main.scss');

/* js */
import { log, logTitle } from 'logger';

/* your imports */
logTitle('Array Destructuring');

const names = ['Anna', 'Mariam', 'Joe', 'Mark', 'Matt'];

// Index-by-index extraction, superseded by the destructuring below:
// const anna = names[0];
// const mariam = names[1];
// const joe = names[2];

// Destructure: take the 1st element, skip the 2nd, take the 3rd, and
// collect the remaining elements into restOfNames via the rest operator.
const [anna, , joe, ...restOfNames] = names;

log(`${anna} ${joe}`);
log(restOfNames);
log(restOfNames.length);
|
package ntw
import (
"fmt"
"strings"
)
func init() {
	// Register French in the package-wide language registry so callers can
	// look it up by any of the listed aliases.
	Languages["fr-fr"] = Language{
		Name:           "French",
		Aliases:        []string{"fr", "fr-fr", "fr_FR", "french"},
		Flag:           "🇫🇷",
		IntegerToWords: IntegerToFrFr,
	}
}
// IntegerToFrFr converts an integer to its French (fr-FR) spelling.
// Redundant `break` statements were removed (Go switch cases do not fall
// through); the branch logic itself is unchanged.
func IntegerToFrFr(input int) string {
	var frenchMegas = []string{"", "mille", "million", "milliard", "billion", "billiard", "trillion", "trilliard", "quadrillion", "quadrilliard", "quintillion", "quintilliard"}
	var frenchUnits = []string{"", "un", "deux", "trois", "quatre", "cinq", "six", "sept", "huit", "neuf"}
	// Indexes 6/7 and 8/9 repeat because French builds 70-79 on "soixante"
	// and 90-99 on "quatre-vingt".
	var frenchTens = []string{"", "dix", "vingt", "trente", "quarante", "cinquante", "soixante", "soixante", "quatre-vingt", "quatre-vingt"}
	var frenchTeens = []string{"dix", "onze", "douze", "treize", "quatorze", "quinze", "seize", "dix-sept", "dix-huit", "dix-neuf"}

	words := []string{}
	if input < 0 {
		words = append(words, "moins")
		input *= -1
	}
	// Split the number into groups of three digits, least significant first.
	triplets := integerToTriplets(input)
	// Zero produces no triplets and is a special case.
	if len(triplets) == 0 {
		return "zéro"
	}
	// Walk the triplets from the most significant group down.
	for idx := len(triplets) - 1; idx >= 0; idx-- {
		triplet := triplets[idx]
		// An empty group contributes nothing.
		if triplet == 0 {
			continue
		}
		// "mille", never "un mille".
		if triplet == 1 && idx == 1 {
			words = append(words, "mille")
			continue
		}
		hundreds := triplet / 100 % 10
		tens := triplet / 10 % 10
		units := triplet % 10
		if hundreds > 0 {
			if hundreds == 1 {
				words = append(words, "cent")
			} else {
				if tens == 0 && units == 0 {
					// "cents" takes its plural s only when nothing follows.
					words = append(words, frenchUnits[hundreds], "cents")
					goto tripletEnd
				} else {
					words = append(words, frenchUnits[hundreds], "cent")
				}
			}
		}
		if tens == 0 && units == 0 {
			goto tripletEnd
		}
		switch tens {
		case 0:
			words = append(words, frenchUnits[units])
		case 1:
			words = append(words, frenchTeens[units])
		case 7:
			// 70-79 are built on "soixante" + teens ("soixante et onze", "soixante-douze", ...).
			switch units {
			case 1:
				words = append(words, frenchTens[tens], "et", frenchTeens[units])
			default:
				words = append(words, fmt.Sprintf("%s-%s", frenchTens[tens], frenchTeens[units]))
			}
		case 8:
			// 80 is "quatre-vingts" (plural); 81-89 drop the s.
			switch units {
			case 0:
				words = append(words, frenchTens[tens]+"s")
			default:
				words = append(words, frenchTens[tens], frenchUnits[units])
			}
		case 9:
			// 90-99 are built on "quatre-vingt" + teens.
			words = append(words, fmt.Sprintf("%s-%s", frenchTens[tens], frenchTeens[units]))
		default:
			switch units {
			case 0:
				words = append(words, frenchTens[tens])
			case 1:
				// 21, 31, ..., 61 insert "et": "vingt et un".
				words = append(words, frenchTens[tens], "et", frenchUnits[units])
			default:
				words = append(words, fmt.Sprintf("%s-%s", frenchTens[tens], frenchUnits[units]))
			}
		}
	tripletEnd:
		// Append the scale word; pluralise everything except "mille".
		mega := frenchMegas[idx]
		if mega != "" {
			if mega != "mille" && triplet > 1 {
				mega += "s"
			}
			words = append(words, mega)
		}
	}
	return strings.Join(words, " ")
}
|
package sshvault
import (
"fmt"
"syscall"
"golang.org/x/crypto/ssh/terminal"
)
// GetPasswordPrompt prompts the user and reads the key password from the
// terminal without echoing it.
func (v *vault) GetPasswordPrompt() ([]byte, error) {
	fmt.Printf("Enter the key password (%s)\n", v.key)
	keyPassword, err := terminal.ReadPassword(int(syscall.Stdin))
	if err != nil {
		return nil, err
	}
	return keyPassword, nil
}
|
<reponame>tabatsky/imctools
import logging
import os
import imctools.io.mcdxmlparser as mcdmeta
import zipfile
logger = logging.getLogger(__name__)
class ImcFolderWriter(object):
    """Writes an IMC folder (acquisition OME-TIFFs plus metadata and
    auxiliary images) and can compress the result into a zip archive."""

    def __init__(self, out_folder, mcddata=None, imcacquisitions=None, mcdmeta=None):
        """
        Initializes an ImcFolderWriter that can be used to write out an imcfolder
        and compress it to zip.

        :param out_folder: base directory the IMC folder is created in
        :param mcddata: parsed MCD data; supplies metadata and, unless
            imcacquisitions is given, the acquisitions as well
        :param imcacquisitions: explicit list of IMC acquisitions to write
        :param mcdmeta: metadata object used when mcddata is not given
            (note: this parameter shadows the module-level ``mcdmeta`` import
            inside ``__init__`` only)
        :raises ValueError: if neither mcddata nor mcdmeta provides metadata
        """
        # (A duplicate `self.acquisitions = list()` initialisation was removed.)
        self.mcd = None
        self.meta = None
        self.out_folder = out_folder
        self.acquisitions = list()
        if mcdmeta is not None:
            self.meta = mcdmeta
        if imcacquisitions is not None:
            self.add_imcacquisitions(imcacquisitions)
            add_ac = False
        else:
            add_ac = True
        if mcddata is not None:
            self.add_mcddata(mcddata, add_acquisitions=add_ac)
        if self.meta is None:
            raise ValueError('At least mcdata or mcdmeta need to be specified!')

    @property
    def foldername(self):
        # The output folder name is derived from the metadata name.
        return self.meta.metaname

    def add_imcacquisitions(self, imcacquisitions, append=False):
        """Set (or, with append=True, extend) the acquisitions to write."""
        if not append:
            self.acquisitions = list()
        self.acquisitions.extend(imcacquisitions)

    def add_mcddata(self, mcddata, add_acquisitions=True):
        """Attach MCD data and its metadata; optionally adopt its acquisitions."""
        self.mcd = mcddata
        self.meta = mcddata.meta
        if add_acquisitions:
            imcacs = self.mcd.get_all_imcacquistions()
            self.add_imcacquisitions(imcacs)

    def write_imc_folder(self, zipfolder=True, remove_folder=None):
        """Write acquisitions, metadata and auxiliary images to the folder.

        :param zipfolder: if True, compress the folder into <name>_imc.zip
        :param remove_folder: remove the uncompressed files afterwards;
            defaults to the value of zipfolder
        """
        if remove_folder is None:
            remove_folder = zipfolder
        base_folder = self.out_folder
        foldername = self.foldername
        out_folder = os.path.join(self.out_folder, self.foldername)
        if not os.path.exists(out_folder):
            os.makedirs(out_folder)
        for ac in self.acquisitions:
            self._write_acquisition(ac, out_folder)
        if self.meta:
            self.meta.save_meta_xml(out_folder)
            self.meta.save_meta_csv(out_folder)
        if self.mcd:
            # Slide/panorama/before-after images can only be extracted from
            # the raw MCD data, not from the metadata alone.
            slide_ids = self.meta.objects.get(mcdmeta.SLIDE, dict()).keys()
            for sid in slide_ids:
                self.mcd.save_slideimage(sid, out_folder)
            pano_ids = self.meta.objects.get(mcdmeta.PANORAMA, dict()).keys()
            for pid in pano_ids:
                self.mcd.save_panorama(pid, out_folder)
            ac_ids = self.meta.objects.get(mcdmeta.ACQUISITION, dict()).keys()
            for aid in ac_ids:
                self.mcd.save_acquisition_bfimage_after(aid, out_folder)
                self.mcd.save_acquisition_bfimage_before(aid, out_folder)
        if zipfolder:
            with zipfile.ZipFile(os.path.join(base_folder, foldername + '_imc.zip'),
                                 'w', compression=zipfile.ZIP_DEFLATED,
                                 allowZip64=True) as imczip:
                for root, d, files in os.walk(out_folder):
                    for fn in files:
                        imczip.write(os.path.join(root, fn), fn)
                        if remove_folder:
                            os.remove(os.path.join(root, fn))
        if remove_folder:
            # Only succeeds when the folder has been emptied above.
            os.removedirs(out_folder)

    def _write_acquisition(self, ac, out_folder):
        """Write one acquisition as an OME-TIFF; skips empty acquisitions."""
        if 0 in ac.shape:
            logger.error(f"Cannot write acquisition with the shape: {ac.shape}")
            return
        file_end = '_ac.ome.tiff'
        if ac.image_description is None:
            # Fall back to the metadata-derived name when no description exists.
            ac_id = ac.image_ID
            fn = self.meta.get_object(mcdmeta.ACQUISITION, ac_id).metaname
        else:
            fn = ac.image_description
        img_writer = ac.get_image_writer(os.path.join(out_folder,
                                                      fn + file_end))
        img_writer.save_image(mode='ome')

    def _find_ac_metaname_from_txt_fn(self, ac):
        raise NotImplementedError
if __name__ == '__main__':
    # Manual smoke test: parse an example MCD file and write it out as an
    # IMC folder. Paths are site-specific and will not exist elsewhere.
    import imctools.io.mcdparser as mcdp
    #fn_mcd = '/home/vitoz/temp/txtvsmcd/20170805_p60-63_slide6_ac1_vz.mcd'
    #fn_mcd = '/mnt/imls-bod/VitoZ/Spheres/20161130_p25_slide2_ac1/20161130_p25_slide2_ac1.mcd'
    #fn_mcd='/mnt/imls-bod/VitoZ/Spheres/20161005_IS2362_4_site1_ac1/20161005_IS2362_4_site1_ac1.mcd'
    # an example of not functional mcd but working txt
    # fn_mcd = /mnt/imls-bod/DanielS/ACD/IMC\ 2.06/Her2_grade3
    fn_mcd ='/mnt/imls-bod/VitoZ/Spheres/20161018_OCT1_slide4_ac1/20161018_OCT1_slide4_ac1.mcd'
    mcd = mcdp.McdParser(fn_mcd)
    mcd.save_meta_xml('/home/vitoz/temp/')
    ifw = ImcFolderWriter('/home/vitoz/temp/', mcddata=mcd)
    ifw.write_imc_folder()
|
class car():
    """A simple car with a color, a current speed, a passenger count and a
    top speed. Speed changes in fixed increments of 5."""

    def __init__(self, color, speed, passengers, max_speed):
        self.color = color
        self.speed = speed
        self.passengers = passengers
        self.max_speed = max_speed

    def accelerate(self):
        """Raise the current speed by 5."""
        self.speed = self.speed + 5

    def decrease(self):
        """Lower the current speed by 5."""
        self.speed = self.speed - 5

    def getSpeed(self):
        """Return the current speed."""
        return self.speed

    def print_stats(self):
        """Print every attribute, one per line."""
        print(f"Color: {self.color}")
        print(f"Speed: {self.speed}")
        print(f"Passengers: {self.passengers}")
        print(f"Max Speed: {self.max_speed}")
|
#!/bin/bash
# The script uses bash-only features (`function`, `==`), so the shebang was
# corrected from /bin/sh to /bin/bash.
set -e

# Abort the build (restoring any local git changes) when the given exit
# code is nonzero. NOTE(review): currently defined but never called.
function exitOnFailureCode() {
    if [ $1 -ne 0 ]
    then
        echo "Error occurred, abort"
        git checkout .
        exit $1
    fi
}

# "clean" mode: remove the build output and stop.
# "$1" is quoted so the -n test is well-formed when no argument is passed
# (the unquoted `[ -n $1 ]` was always true).
if [ -n "${1:-}" ] && [ "${1:-}" == "clean" ];
then
    rm -rf builtFramework
    echo "Cleaning Completed"
    exit 0
fi

rm -rf builtFramework
set -u

if [ -f "CircleciScripts/package_sdk.sh" ]; then
    chmod +x CircleciScripts/package_sdk.sh

    # Core AWS SDK frameworks, built from the default project
    # (same order as before).
    for sdk in \
        AWSAPIGateway AWSAutoScaling AWSCloudWatch AWSCognito AWSCognitoAuth \
        AWSCognitoIdentityProvider AWSCognitoIdentityProviderASF AWSComprehend \
        AWSConnect AWSCore AWSDynamoDB AWSEC2 AWSElasticLoadBalancing AWSIoT \
        AWSKMS AWSKinesis AWSKinesisVideo AWSKinesisVideoArchivedMedia AWSLambda \
        AWSLex AWSLogs AWSMachineLearning AWSMobileAnalytics AWSPinpoint AWSPolly \
        AWSRekognition AWSS3 AWSSES AWSSNS AWSSQS AWSSageMakerRuntime AWSSimpleDB \
        AWSTextract AWSTranscribe AWSTranscribeStreaming AWSTranslate
    do
        CircleciScripts/package_sdk.sh "$sdk"
    done

    # Auth-related frameworks live in a separate Xcode project.
    for sdk in AWSAuthCore AWSAuthUI AWSFacebookSignIn AWSGoogleSignIn \
        AWSMobileClient AWSUserPoolsSignIn
    do
        CircleciScripts/package_sdk.sh "$sdk" "$(pwd)/AWSAuthSDK/AWSAuthSDK.xcodeproj"
    done
else
    echo "$(pwd)"
    echo "Did not find CircleciScripts/package_sdk.sh"
    exit 1
fi
|
package kbasesearchengine.events;
import java.time.Instant;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.PriorityQueue;
import java.util.Set;
import com.google.common.base.Optional;
import kbasesearchengine.events.exceptions.NoSuchEventException;
import kbasesearchengine.tools.Utils;
/** An event queue on the level of an object.
*
* The queue never changes the state of the {@link StoredStatusEvent}s submitted to it.
*
* Note that the calling code is responsible for ensuring that IDs for events added to this queue
* are unique.
* If events with duplicate IDs are added to the queue unexpected behavior may result.
*
* This class is not thread safe.
* @author <EMAIL>
*
*/
public class ObjectEventQueue {

    /* may want to initialize with an access group & object ID and ensure that all incoming
     * events match
     */

    // Event types that operate at the level of a single object (as opposed to
    // version- or access-group-level events).
    private static final Set<StatusEventType> OBJ_LVL_EVENTS = new HashSet<>(Arrays.asList(
            StatusEventType.DELETE_ALL_VERSIONS,
            StatusEventType.NEW_ALL_VERSIONS,
            StatusEventType.PUBLISH_ALL_VERSIONS,
            StatusEventType.RENAME_ALL_VERSIONS,
            StatusEventType.UNDELETE_ALL_VERSIONS,
            StatusEventType.UNPUBLISH_ALL_VERSIONS,
            StatusEventType.NEW_VERSION));

    // Unprocessed events, ordered by event timestamp (oldest first).
    private final PriorityQueue<StoredStatusEvent> queue = new PriorityQueue<StoredStatusEvent>(
            new Comparator<StoredStatusEvent>() {

                @Override
                public int compare(final StoredStatusEvent e1, final StoredStatusEvent e2) {
                    return e1.getEvent().getTimestamp().compareTo(e2.getEvent().getTimestamp());
                }
            });

    // At most one event may be ready or processing at a time; while either is
    // set, the rest of the queue stays blocked.
    private StoredStatusEvent ready = null;
    private StoredStatusEvent processing = null;
    // When set, events with a later timestamp are held back (see drainAndBlockAt).
    private Instant blockTime = null;
    // IDs of all events currently in the queue, used to deduplicate loads.
    private Set<StatusEventID> containedEvents = new HashSet<>();

    // could require an access group id and object id and reject any events that don't match

    /** Create a new, empty, queue. */
    public ObjectEventQueue() {}

    /** Create a new queue with an initial state consisting of an object-level
     * event. The event must be in either the {@link StatusEventProcessingState#READY} or
     * {@link StatusEventProcessingState#PROC} state.
     * @param initialEvent the initial event.
     */
    public ObjectEventQueue(final StoredStatusEvent initialEvent) {
        Utils.nonNull(initialEvent, "initialEvent");
        if (!isObjectLevelEvent(initialEvent)) {
            throw new IllegalArgumentException("Illegal initial event type: " +
                    initialEvent.getEvent().getEventType());
        }
        final StatusEventProcessingState state = initialEvent.getState();
        if (state.equals(StatusEventProcessingState.PROC)) {
            this.processing = initialEvent;
        } else if (state.equals(StatusEventProcessingState.READY)){
            this.ready = initialEvent;
        } else {
            throw new IllegalArgumentException("Illegal initial event state: " + state);
        }
        containedEvents.add(initialEvent.getID());
    }

    private boolean isObjectLevelEvent(final StoredStatusEvent event) {
        return isObjectLevelEvent(event.getEvent());
    }

    /**
     *
     * @param event A status event.
     * @return True if the specified event has an object level event type. Else return false.
     */
    public static boolean isObjectLevelEvent(final StatusEvent event) {
        return OBJ_LVL_EVENTS.contains(event.getEventType());
    }

    /** Add a new {@link StatusEventProcessingState#UNPROC} event to the queue.
     * Events that already exist in the queue as determined by the event id are ignored.
     * Before any loaded events are added to the ready or processing states,
     * {@link #moveToReady()} must be called.
     * @param event the event to add.
     * @return true if the object was added to the queue, false if the event already existed in
     * the queue
     */
    public boolean load(final StoredStatusEvent event) {
        Utils.nonNull(event, "event");
        if (!event.getState().equals(StatusEventProcessingState.UNPROC)) {
            throw new IllegalArgumentException("Illegal state for loading event: " +
                    event.getState());
        }
        if (!isObjectLevelEvent(event)) {
            throw new IllegalArgumentException("Illegal type for loading event: " +
                    event.getEvent().getEventType());
        }
        if (!containedEvents.contains(event.getID())) {
            queue.add(event);
            containedEvents.add(event.getID());
            return true;
        }
        return false;
    }

    /** Get the event that the queue has determined are ready for processing, or absent if
     * no events are ready.
     * @return the event ready for processing.
     */
    public Optional<StoredStatusEvent> getReadyForProcessing() {
        return Optional.fromNullable(ready);
    }

    /** Get the event that the queue has determined is ready for processing, and set
     * that event as in process in the queue, or absent if no events are ready.
     * @return the event that was ready for processing and is now in the processing state.
     */
    public Optional<StoredStatusEvent> moveReadyToProcessing() {
        if (ready == null) {
            return Optional.absent();
        } else {
            processing = ready;
            ready = null;
            return Optional.of(processing);
        }
    }

    /** Get the event in the processing state, or absent if no events are in the processing
     * state.
     * @return the event that is in the processing state.
     */
    public Optional<StoredStatusEvent> getProcessing() {
        return Optional.fromNullable(processing);
    }

    /** Remove a processed event from the queue and update the queue state, potentially adding
     * an event to the ready state.
     * This function implicitly calls {@link #moveToReady()}.
     * @param event the event to remove.
     * @throws NoSuchEventException if there is no event with the given ID in the processing
     * state.
     */
    public void setProcessingComplete(final StoredStatusEvent event) {
        Utils.nonNull(event, "event");
        if (processing != null && event.getID().equals(processing.getID())) {
            containedEvents.remove(processing.getID());
            processing = null;
            moveToReady();
        } else {
            throw new NoSuchEventException(event);
        }
    }

    /** Returns true if an event is in the processing state.
     * @return true if the queue has an event in the processing state.
     */
    public boolean isProcessing() {
        return processing != null;
    }

    /** Returns true if an event is in the ready state.
     * @return true if the queue has an event in the ready state.
     */
    public boolean hasReady() {
        return ready != null;
    }

    /** Returns true if an event is in the ready or processing state.
     * @return true if the queue has an event in the ready or processing state.
     */
    public boolean isProcessingOrReady() {
        return isProcessing() || hasReady();
    }

    /** Returns the number of events in the queue.
     * @return the queue size.
     */
    public int size() {
        return queue.size() + (ready == null ? 0 : 1) + (processing == null ? 0 : 1);
    }

    /** Check if the queue is empty.
     * @return true if the queue is empty.
     */
    public boolean isEmpty() {
        return size() == 0;
    }

    /** Move an event into the ready state if possible, or absent if not.
     * Usually called after loading ({@link #load(StoredStatusEvent)}) one or more events.
     * @return the event that has been moved into the ready state.
     */
    public Optional<StoredStatusEvent> moveToReady() {
        // if a object level event is ready for processing or processing, do nothing.
        // the queue is blocked.
        if (ready != null || processing != null) {
            return Optional.absent();
        }
        final StoredStatusEvent next = queue.peek();
        if (next != null && !isBlockActive(next)) {
            ready = next;
            queue.remove();
        }
        return Optional.fromNullable(ready);
    }

    // An event is held back when a block is set and the event's timestamp is
    // after the block time.
    private boolean isBlockActive(final StoredStatusEvent next) {
        return blockTime != null && blockTime.isBefore(next.getEvent().getTimestamp());
    }

    /** Run all events until an event has an later date than blockTime, and then run no
     * more events until {@link #removeBlock()} is called. Any events in the ready or
     * processing state are not affected.
     * @param blockTime the time after which no more events should be set to the ready state.
     */
    public void drainAndBlockAt(final Instant blockTime) {
        Utils.nonNull(blockTime, "blockTime");
        this.blockTime = blockTime;
    }

    /** Get the block time set by {@link #drainAndBlockAt(Instant)}, if set.
     * @return the block time.
     */
    public Optional<Instant> getBlockTime() {
        return Optional.fromNullable(blockTime);
    }

    /** Removes the block set by {@link #drainAndBlockAt(Instant)}, but does not otherwise
     * alter the queue state.
     */
    public void removeBlock() {
        blockTime = null;
    }
}
|
<?php
// Reads whitespace-separated numbers from the "input" POST field and prints
// their total, average, maximum and minimum.
$numbers = explode(" ", $_POST['input']);
$total = 0.0;
$minimum = PHP_INT_MAX;
// Was -PHP_INT_MAX, which is one above the true minimum on two's-complement
// platforms; PHP_INT_MIN (PHP 7+) is the correct sentinel.
$maximum = PHP_INT_MIN;
for($i = 0; $i < count($numbers); $i++)
{
    $total += (int) $numbers[$i];
    if((int) $numbers[$i] < $minimum)
        $minimum = (int) $numbers[$i];
    if((int) $numbers[$i] > $maximum)
        $maximum = (int) $numbers[$i];
}
// explode() always yields at least one element, so no division by zero here.
$average = $total / count($numbers);
echo "Total: $total" . PHP_EOL;
echo "Average: $average" . PHP_EOL;
echo "Max: $maximum" . PHP_EOL;
echo "Min: $minimum" . PHP_EOL;
?>
|
#!/bin/bash
# Creates a dual-subnet VNET (master + agent) in RESOURCE_GROUP and patches
# the CLUSTER_DEFINITION JSON so the master and every agent pool point at
# the new subnets.

az network vnet create -g ${RESOURCE_GROUP} -n DualCustomVNET --address-prefixes 10.100.0.0/24 10.200.0.0/24 --subnet-name DualMasterSubnet --subnet-prefix 10.100.0.0/24
az network vnet subnet create --name DualAgentSubnet --address-prefix 10.200.0.0/24 -g ${RESOURCE_GROUP} --vnet-name DualCustomVNET

# Edit the cluster definition in place via a temp file (removed on exit).
tempfile="$(mktemp)"
trap 'rm -rf "${tempfile}"' EXIT

# Point the master profile at the master subnet.
jq ".properties.masterProfile.vnetSubnetId = \"/subscriptions/${SUBSCRIPTION_ID}/resourceGroups/${RESOURCE_GROUP}/providers/Microsoft.Network/virtualNetworks/DualCustomVNET/subnets/DualMasterSubnet\"" ${CLUSTER_DEFINITION} > $tempfile && mv $tempfile ${CLUSTER_DEFINITION}

# Point each agent pool profile (by index) at the agent subnet.
indx=0
for poolname in $(jq -r '.properties.agentPoolProfiles[].name' "${CLUSTER_DEFINITION}"); do
    jq ".properties.agentPoolProfiles[$indx].vnetSubnetId = \"/subscriptions/${SUBSCRIPTION_ID}/resourceGroups/${RESOURCE_GROUP}/providers/Microsoft.Network/virtualNetworks/DualCustomVNET/subnets/DualAgentSubnet\"" ${CLUSTER_DEFINITION} > $tempfile && mv $tempfile ${CLUSTER_DEFINITION}
    indx=$((indx+1))
done
|
package codecheck.github.exceptions
import org.json4s.JValue
/** Raised when the GitHub API reports a resource as not found (presumably an
  * HTTP 404 — confirm where it is thrown); carries the JSON error body. */
class NotFoundException(body: JValue) extends GitHubAPIException(body)
|
// Express + socket.io front end for driving an Instagram live broadcast:
// clients log in, stream via the returned RTMP key, and exchange comments.
var express = require("express")
var app = express()
const path = require("path")
var http = require("http").createServer(app)
var io = require("socket.io")(http)
app.use(express.static(path.join(__dirname, "public")))

//Local
var client = require("./instagram/Client");
var globals = require("./instagram/Globals");
var live = require('./instagram/src/request/Live')

// ID of the currently running broadcast (set on successful login).
let broadcastId

app.get("/", (req, res) => {
    res.render("index.ejs");
})

io.on("connection", function (socket) {
    console.log(
        "[" +
        new Date().toISOString().replace(/T/, " ").replace(/\..+/, "") +
        "] " +
        "New connection from " +
        socket.request.connection.remoteAddress
    )
    // Tell a reconnecting client that a session already exists.
    if(globals.isLoggedIn)
        socket.emit('isLoggedIn', {status: true})

    socket.on('login', (data)=>{
        client.login(data.username, data.password, (data)=>{
            if(globals.isLoggedIn)
            {
                // Create a 720x1184 broadcast, then start it; the stream key
                // is parsed out of the returned upload URL.
                live.create("720", "1184", "", (data)=>{
                    o = JSON.parse(data);
                    broadcastId = o.broadcast_id
                    live.start(broadcastId, (data)=>{
                        if(globals.statusCode == 200){
                            socket.emit('login_success', {key: o.upload_url.split("/")[4], status: true})
                            get_updates(broadcastId)
                        }
                        else{
                            socket.emit('login_success', {status: "fail", message: 'Oops. Something wrong. try again later'})
                        }
                    })
                })
            }
            else{
                o = JSON.parse(globals.LastResponse)
                socket.emit('login_success', {status: "fail", message: o.message})
            }
        })
    })

    socket.on('logout', (res)=>{
        if(res){
            // End the broadcast before tearing down the session.
            live.end(broadcastId, ()=>{
                client.logout();
                globals.isLoggedIn = false
                console.log('logout')
            })
        }
    })

    socket.on('comment', (data)=>{
        live.comment(broadcastId, data.comment);
    })
})

http.listen(8000, () => {
    console.log(
        "[" +
        new Date().toISOString().replace(/T/, " ").replace(/\..+/, "") +
        "] " +
        "Open http://127.0.0.1:8000"
    )
})
// Poll the live broadcast once per second while a session is active, pushing
// new comments and the current viewer count to every connected socket.
get_updates = async (broadcastId) => {
    let last_comment = 0
    while (globals.isLoggedIn) {
        // Fetch up to 3 comments newer than the last one seen.
        live.getComments(broadcastId, last_comment, 3, (res) => {
            const parsed = JSON.parse(res)
            // Guard against an empty batch (the previous code indexed the
            // last element unconditionally and crashed on no new comments).
            if (parsed.comments.length > 0) {
                last_comment = parsed.comments[parsed.comments.length - 1].created_at
            }
            for (const comment of parsed.comments) {
                io.sockets.emit("chat_get", { comment })
            }
        })
        // Refresh the viewer count.
        live.getViewerList(broadcastId, (res) => {
            const parsed = JSON.parse(res)
            io.sockets.emit('views_count', { count: parsed.users.length })
        })
        await new Promise(resolve => setTimeout(resolve, 1000))
    }
}
|
<gh_stars>0
# CocoaPods spec for JVNotifications: Facebook-style notification
# persistence backed by Realm.
Pod::Spec.new do |s|
  s.name             = "JVNotifications"
  s.version          = "0.2.0"
  s.summary          = "Facebook style notification persistence"
  s.description      = <<-DESC
This will allow you to manage notifications and persist them using Realm
                       DESC
  s.homepage         = "https://github.com/joninsky/JVNotifications.git"
  s.license          = 'MIT'
  s.author           = { "pebblebee" => "<EMAIL>" }
  s.source           = { :git => "https://github.com/joninsky/JVNotifications.git", :tag => s.version.to_s}
  # Validate locally with: pod spec lint
  s.platform     = :ios, '9.0'
  s.requires_arc = true
  s.dependency 'RealmSwift'
  s.source_files = "JVNotifications/**/*"
  # Resource bundling is currently disabled; kept for reference.
  #s.resources = "JVNotifications/**/*.{png, mp3, mp4, m4a}"
  #s.resource_bundles = {
  # 'JVNotificationBunble' => [
  # "JVNotifications/**/*.{png}", "JVNotifications/**/*.{m4a}", "JVNotifications/**/*.{mp3}", "JVNotifications/**/*.{mp4}"
  # ]
  #}
end
|
package com.threathunter.bordercollie.slot.compute.cache.wrapper.builtin;
import com.threathunter.bordercollie.slot.compute.cache.CacheType;
import com.threathunter.bordercollie.slot.compute.cache.StorageType;
import com.threathunter.bordercollie.slot.compute.cache.storage.BuiltinCacheStore;
import com.threathunter.bordercollie.slot.compute.cache.wrapper.CacheWrapperMeta;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.util.Map;
/**
*
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest(BuiltinCacheStore.class)
public class SecondaryLongBuiltinCacheWrapperTest {
private BuiltinCacheStore cacheStore = PowerMockito.mock(BuiltinCacheStore.class);
private String dataKey = "key";
private String nullKey = "key_null";
@Before
public void setUp() {
Object[] cacheData = new Object[5];
Object[] cacheNull = new Object[5];
PowerMockito.doReturn(cacheData).when(cacheStore).getCache(dataKey);
PowerMockito.doReturn(cacheNull).when(cacheStore).getCache(nullKey);
}
@Test
public void testSum() {
CacheWrapperMeta meta = new CacheWrapperMeta();
meta.setStorageType(StorageType.BUILDIN);
meta.setCacheType(CacheType.SECONDARY_SUM_LONG);
SecondaryLongBuiltinCacheWrapper.SecondarySumLongBuiltinCacheWrapper wrapper =
new SecondaryLongBuiltinCacheWrapper.SecondarySumLongBuiltinCacheWrapper(meta);
wrapper.updateStoreInfo(cacheStore, 0);
Assert.assertEquals(1L, wrapper.addData(1L, dataKey, "key1").longValue());
Assert.assertEquals(3L, wrapper.addData(2L, dataKey, "key1").longValue());
Assert.assertEquals(3L, wrapper.getData(dataKey, "key1").longValue());
wrapper.addData(2L, dataKey, "key2");
Assert.assertEquals(2, ((Map) wrapper.readAll(dataKey)).size());
Assert.assertNull(wrapper.readAll(nullKey));
}
@Test
public void testFirst() {
CacheWrapperMeta meta = new CacheWrapperMeta();
meta.setStorageType(StorageType.BUILDIN);
meta.setCacheType(CacheType.SECONDARY_FIRST_LONG);
SecondaryLongBuiltinCacheWrapper.SecondaryFirstLongBuiltinCacheWrapper wrapper =
new SecondaryLongBuiltinCacheWrapper.SecondaryFirstLongBuiltinCacheWrapper(meta);
wrapper.updateStoreInfo(cacheStore, 0);
Assert.assertEquals(1L, wrapper.addData(1L, dataKey, "key1"), 0.0);
Assert.assertNull(wrapper.addData(2L, dataKey, "key1"));
Assert.assertEquals(1L, wrapper.getData(dataKey, "key1"), 0.0);
wrapper.addData(2L, dataKey, "key2");
Assert.assertEquals(2, ((Map) wrapper.readAll(dataKey)).size());
Assert.assertNull(wrapper.readAll(nullKey));
}
@Test
public void testLast() {
CacheWrapperMeta meta = new CacheWrapperMeta();
meta.setStorageType(StorageType.BUILDIN);
meta.setCacheType(CacheType.SECONDARY_LAST_LONG);
SecondaryLongBuiltinCacheWrapper.SecondaryLastLongBuiltinCacheWrapper wrapper =
new SecondaryLongBuiltinCacheWrapper.SecondaryLastLongBuiltinCacheWrapper(meta);
wrapper.updateStoreInfo(cacheStore, 0);
Assert.assertEquals(1L, wrapper.addData(1L, dataKey, "key1"), 0.0);
Assert.assertEquals(2L, wrapper.addData(2L, dataKey, "key1"), 0.0);
Assert.assertEquals(2L, wrapper.getData(dataKey, "key1"), 0.0);
wrapper.addData(2L, dataKey, "key2");
Assert.assertEquals(2, ((Map) wrapper.readAll(dataKey)).size());
Assert.assertNull(wrapper.readAll(nullKey));
}
@Test
public void testMax() {
CacheWrapperMeta meta = new CacheWrapperMeta();
meta.setStorageType(StorageType.BUILDIN);
meta.setCacheType(CacheType.SECONDARY_MAX_LONG);
SecondaryLongBuiltinCacheWrapper.SecondaryMaxLongBuiltinCacheWrapper wrapper =
new SecondaryLongBuiltinCacheWrapper.SecondaryMaxLongBuiltinCacheWrapper(meta);
wrapper.updateStoreInfo(cacheStore, 0);
Assert.assertEquals(2L, wrapper.addData(2L, dataKey, "key1"), 0.0);
Assert.assertEquals(3L, wrapper.addData(3L, dataKey, "key1"), 0.0);
Assert.assertNull(wrapper.addData(1L, dataKey, "key1"));
Assert.assertEquals(3L, wrapper.getData(dataKey, "key1"), 0.0);
Assert.assertEquals(1, ((Map) wrapper.readAll(dataKey)).size());
Assert.assertNull(wrapper.readAll(nullKey));
}
@Test
public void testMin() {
CacheWrapperMeta meta = new CacheWrapperMeta();
meta.setStorageType(StorageType.BUILDIN);
meta.setCacheType(CacheType.SECONDARY_MIN_LONG);
SecondaryLongBuiltinCacheWrapper.SecondaryMinLongBuiltinCacheWrapper wrapper =
new SecondaryLongBuiltinCacheWrapper.SecondaryMinLongBuiltinCacheWrapper(meta);
wrapper.updateStoreInfo(cacheStore, 0);
Assert.assertEquals(3L, wrapper.addData(3L, dataKey, "key1"), 0.0);
Assert.assertEquals(1L, wrapper.addData(1L, dataKey, "key1"), 0.0);
Assert.assertNull(wrapper.addData(2L, dataKey, "key1"));
Assert.assertEquals(1L, wrapper.getData(dataKey, "key1"), 0.0);
Assert.assertEquals(1, ((Map) wrapper.readAll(dataKey)).size());
Assert.assertNull(wrapper.readAll(nullKey));
}
}
|
<reponame>df-service-e2e-test/x_khu2_9th_stress_test_5<gh_stars>1-10
/*
* Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.query.impl.predicates;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import nl.jqno.equalsverifier.EqualsVerifier;
import nl.jqno.equalsverifier.Warning;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import static com.hazelcast.query.impl.predicates.PredicateTestUtils.entry;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
// Pins down InPredicate's (deliberately non-Java) numeric equality semantics
// for the 0.0/-0.0 and NaN edge cases.
public class InPredicateTest {
    @Test
    @SuppressWarnings("unchecked")
    // should be fixed in 4.0; See: https://github.com/hazelcast/hazelcast/issues/6188
    public void equal_zeroMinusZero() {
        // in InPredicate, 0.0 and -0.0 are treated as DIFFERENT values
        assertFalse(new InPredicate("this", 0.0).apply(entry(-0.0)));
        assertFalse(new InPredicate("this", 0.0d).apply(entry(-0.0d)));
        assertFalse(new InPredicate("this", 0.0).apply(entry(-0.0d)));
        assertFalse(new InPredicate("this", 0.0d).apply(entry(-0.0)));
        // whereas in Java primitive comparison they are equal
        assertTrue(0.0 == -0.0);
        assertTrue(0.0d == -0.0d);
        assertTrue(0.0 == -0.0d);
        assertTrue(0.0d == -0.0);
    }
    @Test
    @SuppressWarnings("unchecked")
    // should be fixed in 4.0; See: https://github.com/hazelcast/hazelcast/issues/6188
    public void equal_NaN() {
        // in InPredicate, NaN compares EQUAL to NaN (any sign, any width)
        assertTrue(new InPredicate("this", Double.NaN).apply(entry(Double.NaN)));
        assertTrue(new InPredicate("this", Float.NaN).apply(entry(Float.NaN)));
        assertTrue(new InPredicate("this", Double.NaN).apply(entry(-Double.NaN)));
        assertTrue(new InPredicate("this", Float.NaN).apply(entry(-Float.NaN)));
        assertTrue(new InPredicate("this", Double.NaN).apply(entry(-Float.NaN)));
        assertTrue(new InPredicate("this", Float.NaN).apply(entry(-Double.NaN)));
        // whereas in Java primitive comparison NaN is never equal to anything
        assertFalse(Double.NaN == Double.NaN);
        assertFalse(Float.NaN == Float.NaN);
        assertFalse(Double.NaN == -Double.NaN);
        assertFalse(Float.NaN == -Float.NaN);
        assertFalse(Double.NaN == -Float.NaN);
        assertFalse(Float.NaN == -Double.NaN);
    }
    @Test
    public void testEqualsAndHashCode() {
        EqualsVerifier.forClass(InPredicate.class)
                .suppress(Warning.NONFINAL_FIELDS, Warning.STRICT_INHERITANCE)
                .withRedefinedSuperclass()
                .allFieldsShouldBeUsed()
                .verify();
    }
}
|
Scalr.regPage('Scalr.ui.dnszones.view', function (loadParams, moduleParams) {
var store = Ext.create('store.store', {
fields: [
{ name: 'id', type: 'int' },
{ name: 'client_id', type: 'int' },
'zone_name', 'status', 'role_name', 'farm_roleid', 'dtlastmodified', 'farm_id', 'farm_name'
],
proxy: {
type: 'scalr.paging',
url: '/dnszones/xListZones/'
},
remoteSort: true
});
return Ext.create('Ext.grid.Panel', {
scalrOptions: {
reload: false,
maximize: 'all',
menuTitle: 'DNS Zones',
menuHref: '#/dnszones',
menuFavorite: true
},
store: store,
stateId: 'grid-dnszones-view',
stateful: true,
plugins: [ 'gridstore', 'applyparams' ],
viewConfig: {
emptyText: 'No DNS zones found',
loadingText: 'Loading DNS zones ...'
},
columns: [
{ text: "Domain name", flex: 2, dataIndex: 'zone_name', sortable: true, xtype: 'templatecolumn',
tpl: '<a target="_blank" href="http://{zone_name}">{zone_name}</a>'
},
{ text: "Assigned to", flex: 1, dataIndex: 'role_name', sortable: false, xtype: 'templatecolumn', tpl:
'<tpl if="farm_id > 0"><a href="#/farms?farmId={farm_id}" title="Farm {farm_name}">{farm_name}</a>' +
'<tpl if="farm_roleid > 0"> → <a href="#/farms/{farm_id}/roles/{farm_roleid}/view" ' +
'title="Role {role_name}">{role_name}</a></tpl>' +
'</tpl>' +
'<tpl if="farm_id == 0">—</tpl>'
},
{ text: "Last modified", width: 200, dataIndex: 'dtlastmodified', sortable: true, xtype: 'templatecolumn',
tpl: '<tpl if="dtlastmodified">{dtlastmodified}</tpl><tpl if="! dtlastmodified">Never</tpl>'
},
{ text: "Status", minWidth: 130, width: 130, dataIndex: 'status', sortable: true, xtype: 'statuscolumn', statustype: 'dnszone'},
{
xtype: 'optionscolumn',
hidden: !Scalr.isAllowed('DNS_ZONES', 'manage'),
menu: [{
text: 'Edit DNS Zone',
iconCls: 'x-menu-icon-edit',
showAsQuickAction: true,
href: '#/dnszones/{id}/edit'
}, {
text: 'Settings',
iconCls: 'x-menu-icon-settings',
showAsQuickAction: true,
href: '#/dnszones/{id}/settings'
}],
getVisibility: function (record) {
return (record.get('status') != 'Pending delete' && record.get('status') != 'Pending create');
}
}
],
selModel:
Scalr.isAllowed('DNS_ZONES', 'manage') ? {
selType: 'selectedmodel',
getVisibility: function (record) {
return (record.get('status') != 'Pending delete');
}
} : null,
listeners: {
selectionchange: function(selModel, selections) {
this.down('scalrpagingtoolbar').down('#delete').setDisabled(!selections.length);
}
},
dockedItems: [{
xtype: 'scalrpagingtoolbar',
store: store,
dock: 'top',
beforeItems: [{
text: 'New DNS zone',
cls: 'x-btn-green',
hidden: !Scalr.isAllowed('DNS_ZONES', 'manage'),
handler: function() {
Scalr.event.fireEvent('redirect', '#/dnszones/create');
}
}],
afterItems: [{
itemId: 'delete',
iconCls: 'x-btn-icon-delete',
cls: 'x-btn-red',
tooltip: 'Select one or more zones to delete them',
disabled: true,
hidden: !Scalr.isAllowed('DNS_ZONES', 'manage'),
handler: function() {
var grid = this.up('grid'),
request = {
confirmBox: {
msg: 'Remove selected zone(s): %s ?',
type: 'delete'
},
processBox: {
msg: 'Removing dns zone(s) ...',
type: 'delete'
},
url: '/dnszones/xRemoveZones',
success: function() {
grid.getSelectionModel().deselectAll();
store.load();
}
}, records = grid.getSelectionModel().getSelection(), zones = [];
request.confirmBox.objects = [];
for (var i = 0, len = records.length; i < len; i++) {
zones.push(records[i].get('id'));
request.confirmBox.objects.push(records[i].get('zone_name'));
}
request.params = { zones: Ext.encode(zones) };
Scalr.Request(request);
}
}],
items: [{
xtype: 'filterfield',
store: store
}, ' ', {
text: 'Default Records',
tooltip: 'Manage Default DNS records',
hidden: !Scalr.isAllowed('DNS_ZONES', 'manage'),
handler: function() {
Scalr.event.fireEvent('redirect', '#/dnszones/defaultRecords');
}
}]
}]
});
});
|
package br.com.zup.mercadolivre.finalizarcompra;
import br.com.zup.mercadolivre.finalizarcompra.fechamentoCompra.GatewayPagamento;
import org.springframework.web.util.UriComponentsBuilder;
public class PagseguroGateway implements Gatway {
    /**
     * Builds the PagSeguro checkout URL for the given purchase.
     * <p>
     * The return URL points back to this application's
     * {@code /retorno-pagseguro/{id}} endpoint, expanded with the purchase id,
     * and is appended as the {@code redirectUrl} query parameter.
     */
    @Override
    public String redirecionamentoGatway(UriComponentsBuilder componentsBuilder, GatewayPagamento gateway, Compra novaCompra) {
        String returnUrl = componentsBuilder
                .path("/retorno-pagseguro/{id}")
                .buildAndExpand(novaCompra.getId())
                .toString();
        StringBuilder checkoutUrl = new StringBuilder("pagseguro.com/");
        checkoutUrl.append(novaCompra.getId());
        checkoutUrl.append("?redirectUrl=");
        checkoutUrl.append(returnUrl);
        return checkoutUrl.toString();
    }
}
|
#!/usr/bin/env bash
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Always clean up.
DELETE_AT_EXIT="$(mktemp -d)"

# EXIT trap handler: tear down the kitchen suite, then remove the scratch
# directory.  'kitchen destroy' is allowed to fail (|| echo ...) because under
# 'set -e' a failing command inside the trap would abort the handler before
# the temp directory (which holds the service-account key) is removed.
finish() {
  echo 'BEGIN: finish() trap handler' >&2
  kitchen destroy "$SUITE" || echo 'WARNING: kitchen destroy failed' >&2
  [[ -d "${DELETE_AT_EXIT}" ]] && rm -rf "${DELETE_AT_EXIT}"
  echo 'END: finish() trap handler' >&2
}
# Map the input parameters provided by Concourse CI, or whatever mechanism is
# running the tests to Terraform input variables. Also setup credentials for
# use with kitchen-terraform, inspec, and gcloud.
setup_environment() {
  local tmpfile
  tmpfile="$(mktemp)"
  # Materialise the service-account key from the environment into a file.
  # main() exports TMPDIR=DELETE_AT_EXIT first, so this file is removed by
  # the EXIT trap.
  echo "${SERVICE_ACCOUNT_JSON}" > "${tmpfile}"
  # gcloud variables
  export CLOUDSDK_AUTH_CREDENTIAL_FILE_OVERRIDE="${tmpfile}"
  # Application default credentials (Terraform google provider and inspec-gcp)
  export GOOGLE_APPLICATION_CREDENTIALS="${tmpfile}"
  # Terraform variables
  export TF_VAR_project_id="$PROJECT_ID"
  export TF_VAR_bucket_name="$BUCKET_NAME"
}
main() {
  # SUITE selects the kitchen suite to run; exported so finish() sees it.
  # NOTE(review): defaults to empty - confirm how kitchen treats an empty
  # suite pattern.
  export SUITE="${SUITE:-}"
  set -eu
  # Setup trap handler to auto-cleanup
  export TMPDIR="${DELETE_AT_EXIT}"
  trap finish EXIT
  # Setup environment variables
  setup_environment
  set -x
  # Execute the test lifecycle
  kitchen create "$SUITE"
  kitchen converge "$SUITE"
  kitchen verify "$SUITE"
}
# if script is being executed and not sourced.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  main "$@"
fi
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2021.2 (64-bit)
#
# Filename : data_mem.sh
# Simulator : Mentor Graphics Questa Advanced Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Sun Jan 02 15:13:38 +0800 2022
# SW Build 3367213 on Tue Oct 19 02:48:09 MDT 2021
#
# Copyright 1986-2021 Xilinx, Inc. All Rights Reserved.
#
# usage: data_mem.sh [-help]
# usage: data_mem.sh [-lib_map_path]
# usage: data_mem.sh [-noclean_files]
# usage: data_mem.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'data_mem.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info: banner printed on every invocation.
echo -e "data_mem.sh - Script generated by export_simulation (Vivado v2021.2 (64-bit)-id)\n"
# Main steps
# Drives the whole flow: validate arguments, prepare the run area, then the
# three simulation phases (compile -> elaborate -> simulate).
run()
{
  check_args $# $1
  setup $1 $2
  compile
  elaborate
  simulate
}
# RUN_STEP: <compile>
# Runs the generated compile.do, mirroring stdout+stderr to compile.log.
# NOTE(review): this step APPENDS (-a) to its log while elaborate truncates
# its own - presumably intentional for multi-pass compiles; confirm.
compile()
{
  source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <elaborate>
# Runs the generated elaborate.do, mirroring stdout+stderr to elaborate.log.
elaborate()
{
  source elaborate.do 2>&1 | tee elaborate.log
}
# RUN_STEP: <simulate>
# Batch-mode (-c) Questa run driven by simulate.do; transcript in simulate.log.
simulate()
{
  vsim -c -do "do {simulate.do}" -l simulate.log
}
# STEP: setup
# Dispatches on the first command-line option: copy the simulator setup file
# (optionally from a user-supplied library path), reset the run area, or keep
# previous data.  Always (re)creates the design-library directory at the end.
setup()
{
  case $1 in
    "-lib_map_path" )
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./data_mem.sh -help\" for more information)\n"
        exit 1
      fi
      copy_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      # default: no recognised option - copy setup file (from $2 if given)
      copy_setup_file $2
  esac
  create_lib_dir
  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Copy modelsim.ini file
# Copies the simulator setup file from the compiled-library directory into
# the run directory.
# $1 - optional library map path; falls back to the Vivado-generated default.
copy_setup_file()
{
  file="modelsim.ini"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  else
    lib_map_path="C:/Users/ASUS/Desktop/step_into_mips-lab_4/step_into_mips-lab_4/lab_4/lab_4/lab_4.cache/compile_simlib/questa"
  fi
  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    # Quote the source path so a library directory containing spaces (common
    # on Windows-style paths like the default above) does not break cp.
    cp "$src_file" .
  fi
}
# Create design library directory
# Recreates 'questa_lib' from scratch; any previous contents are discarded.
create_lib_dir()
{
  lib_dir="questa_lib"
  if [[ -e $lib_dir ]]; then
    rm -rf $lib_dir
  fi
  mkdir $lib_dir
}
# Delete generated data from the previous run
# Removes the known log/waveform files and the library directory, then
# recreates an empty library directory.
reset_run()
{
  files_to_remove=(compile.log elaborate.log simulate.log vsim.wlf questa_lib)
  for (( i=0; i<${#files_to_remove[*]}; i++ )); do
    file="${files_to_remove[i]}"
    if [[ -e $file ]]; then
      rm -rf $file
    fi
  done
  create_lib_dir
}
# Check command line arguments
# $1 - argument count as seen by run(); $2 - the first option string.
# Rejects an unknown single option; prints usage (and exits) on -help/-h.
check_args()
{
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./data_mem.sh -help\" for more information)\n"
    exit 1
  fi
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
# Prints the help text and exits (non-zero, as generated by Vivado).
usage()
{
  msg="Usage: data_mem.sh [-help]\n\
Usage: data_mem.sh [-lib_map_path]\n\
Usage: data_mem.sh [-reset_run]\n\
Usage: data_mem.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}
# Launch script
# NOTE(review): arguments are deliberately unquoted - with no arguments,
# check_args must see $# == 0, which quoting ("$1" "$2") would break.
run $1 $2
|
def convert_to_hex(numbers):
    """Concatenate the lowercase hex representation of each integer.

    Args:
        numbers: iterable of ints, e.g. ``[255, 16]``.

    Returns:
        A single string, e.g. ``'ff10'``.
    """
    # format(n, 'x') keeps a leading '-' for negative ints, unlike the old
    # hex(n)[2:], which sliced '-0x5' into the garbage 'x5'.
    return "".join(format(n, "x") for n in numbers)
|
<gh_stars>1-10
import numpy as np
from lifelines.datasets import load_gbsg2
from elastic_surv.dataset import PandasDataset
from elastic_surv.models import DeepHitModel
def load_data() -> tuple:
    """Load the GBSG2 survival dataset and describe its columns.

    Returns:
        (dataframe, time_column, event_column, feature_names)
    """
    frame = load_gbsg2()
    duration_col = "time"
    outcome_col = "cens"
    # Features are every column except the survival time and event indicator.
    feature_cols = np.setdiff1d(frame.columns, [duration_col, outcome_col]).tolist()
    return frame, duration_col, outcome_col, feature_cols
def test_sanity() -> None:
    """Constructing a DeepHitModel must store every hyperparameter unchanged."""
    df, tcol, ecol, feat = load_data()
    dataset = PandasDataset(
        df,
        time_column=tcol,
        event_column=ecol,
        features=feat,
    )
    model = DeepHitModel(
        in_features=dataset.features(),
        hidden_nodes=[32],
        batch_norm=False,
        dropout=0.2,
        lr=1e-3,
        epochs=3,
        patience=30,
        batch_size=100,
        verbose=True,
        alpha=0.3,
        sigma=0.4,
        num_durations=11,
    )
    # Each constructor argument must round-trip to the matching attribute.
    assert model.in_features == dataset.features()
    assert model.num_durations == 11
    assert model.batch_norm is False
    assert model.dropout == 0.2
    assert model.epochs == 3
    assert model.verbose is True
    assert model.batch_size == 100
    assert model.alpha == 0.3
    assert model.sigma == 0.4
def test_train() -> None:
    """A short training run must produce a score dict with both metrics."""
    df, tcol, ecol, feat = load_data()
    dataset = PandasDataset(
        df,
        time_column=tcol,
        event_column=ecol,
        features=feat,
    )
    # 5 epochs keeps the test fast; we only check the reporting contract,
    # not model quality.
    model = DeepHitModel(
        in_features=dataset.features(),
        epochs=5,
    )
    model.train(dataset)
    score = model.score(dataset)
    assert "c_index" in score
    assert "brier_score" in score
def test_hyperparams() -> None:
    """Sampling must yield exactly one value per entry of the search space."""
    assert len(DeepHitModel.hyperparameter_space()) > 0
    sampled = DeepHitModel.sample_hyperparameters()
    assert len(sampled) == len(DeepHitModel.hyperparameter_space())
|
class ImageDatasetManager:
    """Registry mapping dataset identifiers to human-readable names."""

    def __init__(self):
        # identifier -> display name
        self.datasets = {}

    def add_dataset(self, identifier, name):
        """Register a dataset; raises ValueError on a duplicate identifier."""
        if identifier in self.datasets:
            raise ValueError(f"Dataset with identifier '{identifier}' already exists")
        self.datasets[identifier] = name

    def list_datasets(self):
        """Return all registered identifiers, in insertion order."""
        return list(self.datasets)

    def remove_dataset(self, identifier):
        """Delete a dataset; raises ValueError for an unknown identifier."""
        if identifier not in self.datasets:
            raise ValueError(f"Dataset with identifier '{identifier}' does not exist")
        del self.datasets[identifier]

    def get_dataset_name(self, identifier):
        """Look up the display name; raises ValueError for an unknown identifier."""
        if identifier not in self.datasets:
            raise ValueError(f"Dataset with identifier '{identifier}' does not exist")
        return self.datasets[identifier]
# Usage example: exercises the full add / list / lookup / remove cycle.
manager = ImageDatasetManager()
manager.add_dataset("CIFAR100", "CIFAR-100")
manager.add_dataset("STL10", "STL-10")
print(manager.list_datasets())  # Output: ['CIFAR100', 'STL10']
print(manager.get_dataset_name("CIFAR100"))  # Output: 'CIFAR-100'
manager.remove_dataset("STL10")
print(manager.list_datasets())  # Output: ['CIFAR100']
|
/////////////////////////////////////////////////////////////////////////////
// Name: taskbarbutton.cpp
// Purpose: wxTaskBarButton sample
// Author: <NAME> <<EMAIL>>
// Created: 2014-04-30
// Copyright: (c) 2014 wxWidgets development team
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
#include "wx/wxprec.h"
#ifndef WX_PRECOMP
#include "wx/progdlg.h"
#include "wx/wx.h"
#endif
#include "wx/stdpaths.h"
#include "wx/taskbarbutton.h"
enum
{
ProgressValueSlider = wxID_HIGHEST,
VisibilityRadio,
ThumbnailTooltipSetBtn,
ProgressStateChoice,
SetOverlayIconBtn,
ClearOverlayIconBtn,
SetThumbnailClipBtn,
RestoreThumbnailClipBtn,
AddThumbBarButtonBtn,
RemoveThumbBarButtonBtn,
};
enum
{
ThumbnailToolbarBtn_0 = wxID_HIGHEST + 100,
ThumbnailToolbarBtn_1,
ThumbnailToolbarBtn_2,
ThumbnailToolbarBtn_3,
ThumbnailToolbarBtn_4,
ThumbnailToolbarBtn_5,
ThumbnailToolbarBtn_6
};
namespace {

// Draws a filled circle of the given colour on a w x h bitmap and makes the
// rest of the bitmap transparent via a magic-colour mask.
wxBitmap CreateBitmap(const wxColour& colour, int w, int h)
{
    wxMemoryDC dc;
    wxBitmap bmp(w, h);
    dc.SelectObject(bmp);
    // Draw transparent background
    wxColour magic(255, 0, 255);
    wxBrush magicBrush(magic);
    dc.SetBrush(magicBrush);
    dc.SetPen(*wxTRANSPARENT_PEN);
    dc.DrawRectangle(0, 0, w, h);
    // Draw image content
    dc.SetBrush(wxBrush(colour));
    dc.DrawCircle(h / 2, h / 2 + 1, h / 2);
    // Deselect before building the mask so the pixel data is final.
    dc.SelectObject(wxNullBitmap);
    // Finalize transparency with a mask
    wxMask *mask = new wxMask(bmp, magic);
    bmp.SetMask(mask);
    return bmp;
}

// Returns a 16x16 icon, cycling deterministically through a fixed palette on
// each call (so not actually random, despite the name).
wxIcon CreateRandomIcon()
{
    static int counter = 0;
    static const wxColour* colours[] =
    {
        wxBLACK,
        wxWHITE,
        wxRED,
        wxBLUE,
        wxGREEN,
        wxCYAN,
        wxLIGHT_GREY
    };
    wxIcon icon;
    icon.CopyFromBitmap(CreateBitmap(*(colours[counter]), 16, 16));
    counter += 1;
    counter = counter % WXSIZEOF(colours);
    return icon;
}

} // namespace
// Application object: builds the taskbar jump list and the main frame.
class MyApp : public wxApp
{
public:
    virtual bool OnInit() wxOVERRIDE;
};
// Main frame: one control group per wxTaskBarButton API being demonstrated.
class MyFrame : public wxFrame
{
public:
    MyFrame(const wxString& title);

private:
    wxDECLARE_EVENT_TABLE();

    // Event handlers, one per demo section.
    void OnSetProgressValue(wxScrollEvent& WXUNUSED(event));
    void OnVisibilityChange(wxCommandEvent& WXUNUSED(event));
    void OnSetThumbnailTooltipBtn(wxCommandEvent& WXUNUSED(event));
    void OnChoice(wxCommandEvent& event);
    void OnSetOverlayIcon(wxCommandEvent& WXUNUSED(event));
    void OnClearOverlayIcon(wxCommandEvent& WXUNUSED(event));
    void OnSetOrRestoreThumbnailClip(wxCommandEvent& event);
    void OnAddThubmBarButton(wxCommandEvent& WXUNUSED(event));
    void OnRemoveThubmBarButton(wxCommandEvent& WXUNUSED(event));
    void OnThumbnailToolbarBtnClicked(wxCommandEvent& event);

    // Controls referenced from the handlers.
    wxSlider *m_slider;
    wxRadioBox *m_visibilityRadioBox;
    wxTextCtrl *m_textCtrl;
    wxChoice *m_stateChoice;

    // Buttons currently attached to the thumbnail toolbar (owned; see
    // OnRemoveThubmBarButton for deletion).
    typedef wxVector<wxThumbBarButton*> wxThumbBarButtons;
    wxThumbBarButtons m_thumbBarButtons;
};
wxIMPLEMENT_APP(MyApp);

// Builds the Windows jump list (tasks, recent/frequent categories, one
// custom category), logs the recent/frequent items, then creates the frame.
bool MyApp::OnInit()
{
    if ( !wxApp::OnInit() )
        return false;

    // Two "task" entries that simply relaunch this executable.
    wxTaskBarJumpList jumpList;
    wxTaskBarJumpListItem *item1 = new wxTaskBarJumpListItem(
        NULL,
        wxTASKBAR_JUMP_LIST_TASK,
        "Task 1",
        wxStandardPaths::Get().GetExecutablePath(),
        wxEmptyString,
        "Test Task",
        wxStandardPaths::Get().GetExecutablePath(),
        0);
    wxTaskBarJumpListItem *item2 = new wxTaskBarJumpListItem(
        NULL,
        wxTASKBAR_JUMP_LIST_TASK,
        "Task 2",
        wxStandardPaths::Get().GetExecutablePath(),
        wxEmptyString,
        "Test Task",
        wxStandardPaths::Get().GetExecutablePath(),
        0);
    jumpList.GetTasks().Append(item1);
    jumpList.GetTasks().Append(
        new wxTaskBarJumpListItem(NULL, wxTASKBAR_JUMP_LIST_SEPARATOR));
    jumpList.GetTasks().Append(item2);
    jumpList.ShowRecentCategory();
    jumpList.ShowFrequentCategory();

    // A custom category with a single "Help" destination.
    wxTaskBarJumpListCategory* customCategory =
        new wxTaskBarJumpListCategory(&jumpList, "Custom");
    wxTaskBarJumpListItem* item3 = new wxTaskBarJumpListItem(
        customCategory,
        wxTASKBAR_JUMP_LIST_DESTINATION,
        "Help",
        wxStandardPaths::Get().GetExecutablePath(),
        "--help",
        "wxTaskBarButton help.",
        wxStandardPaths::Get().GetExecutablePath(),
        0);
    customCategory->Append(item3);
    jumpList.AddCustomCategory(customCategory);

    // Log what the shell currently reports as frequent/recent documents.
    const wxTaskBarJumpListCategory& frequentCategory =
        jumpList.GetFrequentCategory();
    const wxTaskBarJumpListItems& frequentItems = frequentCategory.GetItems();
    for ( size_t i = 0; i < frequentItems.size(); ++i )
    {
        wxLogMessage(frequentItems[i]->GetFilePath());
    }
    const wxTaskBarJumpListCategory& recentCategory =
        jumpList.GetRecentCategory();
    const wxTaskBarJumpListItems& recentItems = recentCategory.GetItems();
    for ( size_t i = 0; i < recentItems.size(); ++i )
    {
        wxLogMessage(recentItems[i]->GetFilePath());
    }

    // The whole sample is pointless without the taskbar button API.
    MyFrame *frame = new MyFrame("wxTaskBarButton App");
    if ( !frame->MSWGetTaskBarButton() )
    {
        wxLogError("Task bar button API is not available on this system, sorry.");
        return false;
    }
    frame->Show(true);
    return true;
}
// Lays out one static-box section per demonstrated API in a 4x2 grid.
MyFrame::MyFrame(const wxString& title)
    : wxFrame(NULL, wxID_ANY, title)
{
    wxPanel *panel = new wxPanel(this);
    wxBoxSizer *mainSizer = new wxBoxSizer(wxVERTICAL);
    wxFlexGridSizer *gs = new wxFlexGridSizer(4, 2, 10, 10);

    // SetProgressValue section.
    wxStaticBoxSizer *spvSizer =
        new wxStaticBoxSizer(wxVERTICAL, panel, "SetProgressValue");
    int flags = wxSL_MIN_MAX_LABELS | wxSL_VALUE_LABEL | wxSL_AUTOTICKS;
    m_slider = new wxSlider(spvSizer->GetStaticBox(), ProgressValueSlider,
                            0, 0, 100,
                            wxDefaultPosition, wxSize(250, -1),
                            flags);
    m_slider->SetTickFreq(10);
    spvSizer->Add(m_slider);

    // Show/Hide in Taskbar section.
    const wxString labels[] =
    {
        "&Show in Taskbar",
        "&Hide in Taskbar"
    };
    m_visibilityRadioBox = new wxRadioBox(panel, VisibilityRadio, "Visibility:",
                                          wxDefaultPosition, wxDefaultSize,
                                          WXSIZEOF(labels), labels,
                                          1, wxRA_SPECIFY_ROWS);

    // SetThumbnailTooltip section.
    wxStaticBoxSizer *sttSizer =
        new wxStaticBoxSizer(wxVERTICAL, panel, "SetThumbnailTooltip");
    m_textCtrl = new wxTextCtrl(panel, wxID_ANY);
    wxButton *btn = new wxButton(panel, ThumbnailTooltipSetBtn, "Set");
    sttSizer->Add(m_textCtrl, 1, wxEXPAND | wxALL, 2);
    sttSizer->Add(btn, 1, wxEXPAND | wxALL, 2);

    // SetProgressState section.  Choice order must match the switch/map in
    // MyFrame::OnChoice.
    wxStaticBoxSizer *spsSizer =
        new wxStaticBoxSizer(wxVERTICAL, panel, "SetProgressState");
    const wxString choices[] =
    {
        "wxNoProgress",
        "wxIndeterminate",
        "wxNormal",
        "wxError",
        "wxPaused"
    };
    m_stateChoice = new wxChoice(panel, ProgressStateChoice,
                                 wxDefaultPosition, wxDefaultSize,
                                 WXSIZEOF(choices), choices);
    spsSizer->Add(m_stateChoice, 0, wxALL | wxGROW, 5);

    // SetOverlayIcon section.
    wxStaticBoxSizer *soiSizer =
        new wxStaticBoxSizer(wxVERTICAL, panel, "SetOverlayIcon");
    wxButton *setOverlayIconBtn =
        new wxButton(panel, SetOverlayIconBtn, "Set Overlay Icon");
    wxButton *clearOverlayIconBtn =
        new wxButton(panel, ClearOverlayIconBtn, "Clear Overlay Icon");
    soiSizer->Add(setOverlayIconBtn, 1, wxEXPAND | wxALL, 2);
    soiSizer->Add(clearOverlayIconBtn, 1, wxEXPAND | wxALL, 2);

    // SetThumbnailClip section.
    wxStaticBoxSizer *stcSizer =
        new wxStaticBoxSizer(wxVERTICAL, panel, "SetThumbnailClip");
    wxButton *setThumbnailClipBtn =
        new wxButton(panel, SetThumbnailClipBtn, "Set Thumbnail Clip");
    wxButton *restoreThumbnailClipBtn =
        new wxButton(panel, RestoreThumbnailClipBtn,
                     "Restore Thumbnail Clip");
    stcSizer->Add(setThumbnailClipBtn, 1, wxEXPAND | wxALL, 2);
    stcSizer->Add(restoreThumbnailClipBtn, 1, wxEXPAND | wxALL, 2);

    // Thumbnail Toolbar Buttons section.
    wxStaticBoxSizer *ttbSizer =
        new wxStaticBoxSizer(wxVERTICAL, panel, "ThumbBar Buttons");
    wxButton *addThumbBarButtonBtn =
        new wxButton(panel, AddThumbBarButtonBtn, "Add ThumbBar Button");
    wxButton *showThumbnailToolbarBtn =
        new wxButton(panel, RemoveThumbBarButtonBtn,
                     "Remove Last ThumbBar Button");
    ttbSizer->Add(addThumbBarButtonBtn, 1, wxEXPAND | wxALL, 2);
    ttbSizer->Add(showThumbnailToolbarBtn, 1, wxEXPAND | wxALL, 2);

    gs->Add(spvSizer, 0, wxEXPAND);
    gs->Add(m_visibilityRadioBox, 0, wxEXPAND);
    gs->Add(sttSizer, 0, wxEXPAND);
    gs->Add(spsSizer, 0, wxEXPAND);
    gs->Add(soiSizer, 0, wxEXPAND);
    gs->Add(stcSizer, 0, wxEXPAND);
    gs->Add(ttbSizer, 0, wxEXPAND);
    wxStaticText *text = new wxStaticText(
        panel, wxID_ANY, "Welcome to wxTaskBarButton sample");
    mainSizer->Add(text, 0, wxALIGN_CENTRE_HORIZONTAL);
    mainSizer->Add(gs);
    panel->SetSizer(mainSizer);

    SetIcon(wxICON(sample));
    SetSize(537, 420);
    Centre();
}
// Event table: one entry per demo control, plus the seven fixed IDs reserved
// for dynamically added thumbnail-toolbar buttons.
wxBEGIN_EVENT_TABLE(MyFrame, wxFrame)
    EVT_COMMAND_SCROLL(ProgressValueSlider, MyFrame::OnSetProgressValue)
    EVT_RADIOBOX(VisibilityRadio, MyFrame::OnVisibilityChange)
    EVT_BUTTON(ThumbnailTooltipSetBtn, MyFrame::OnSetThumbnailTooltipBtn)
    EVT_CHOICE(ProgressStateChoice, MyFrame::OnChoice)
    EVT_BUTTON(SetOverlayIconBtn, MyFrame::OnSetOverlayIcon)
    EVT_BUTTON(ClearOverlayIconBtn, MyFrame::OnClearOverlayIcon)
    EVT_BUTTON(SetThumbnailClipBtn, MyFrame::OnSetOrRestoreThumbnailClip)
    EVT_BUTTON(RestoreThumbnailClipBtn, MyFrame::OnSetOrRestoreThumbnailClip)
    EVT_BUTTON(AddThumbBarButtonBtn, MyFrame::OnAddThubmBarButton)
    EVT_BUTTON(RemoveThumbBarButtonBtn, MyFrame::OnRemoveThubmBarButton)
    EVT_BUTTON(ThumbnailToolbarBtn_0, MyFrame::OnThumbnailToolbarBtnClicked)
    EVT_BUTTON(ThumbnailToolbarBtn_1, MyFrame::OnThumbnailToolbarBtnClicked)
    EVT_BUTTON(ThumbnailToolbarBtn_2, MyFrame::OnThumbnailToolbarBtnClicked)
    EVT_BUTTON(ThumbnailToolbarBtn_3, MyFrame::OnThumbnailToolbarBtnClicked)
    EVT_BUTTON(ThumbnailToolbarBtn_4, MyFrame::OnThumbnailToolbarBtnClicked)
    EVT_BUTTON(ThumbnailToolbarBtn_5, MyFrame::OnThumbnailToolbarBtnClicked)
    EVT_BUTTON(ThumbnailToolbarBtn_6, MyFrame::OnThumbnailToolbarBtnClicked)
wxEND_EVENT_TABLE()
void MyFrame::OnSetProgressValue(wxScrollEvent& WXUNUSED(event))
{
    // Lazily set the progress range (0..100, matching the slider) on first
    // use; values are only meaningful once a range has been set.
    static bool s_hasRangeSet = false;
    if ( !s_hasRangeSet )
    {
        MSWGetTaskBarButton()->SetProgressRange(100);
        s_hasRangeSet = true;
    }
    MSWGetTaskBarButton()->SetProgressValue(m_slider->GetValue());
}
void MyFrame::OnVisibilityChange(wxCommandEvent& WXUNUSED(event))
{
    // Radio index 0 = "Show in Taskbar", 1 = "Hide in Taskbar".
    wxTaskBarButton* const button = MSWGetTaskBarButton();
    const bool show = m_visibilityRadioBox->GetSelection() == 0;
    if ( show )
        button->Show();
    else
        button->Hide();
}
void MyFrame::OnSetThumbnailTooltipBtn(wxCommandEvent& WXUNUSED(event))
{
    // Only the first line of the text control becomes the tooltip.
    MSWGetTaskBarButton()->SetThumbnailTooltip(m_textCtrl->GetLineText(0));
}
void MyFrame::OnChoice(wxCommandEvent& event)
{
    // Choice index maps positionally onto the progress-state enum, in the
    // same order as the strings added to m_stateChoice in the constructor.
    // Anything out of range falls back to "no progress", exactly like the
    // default branch of the original switch.
    static const wxTaskBarButtonState stateMap[] =
    {
        wxTASKBAR_BUTTON_NO_PROGRESS,
        wxTASKBAR_BUTTON_INDETERMINATE,
        wxTASKBAR_BUTTON_NORMAL,
        wxTASKBAR_BUTTON_ERROR,
        wxTASKBAR_BUTTON_PAUSED
    };
    const int sel = event.GetSelection();
    wxTaskBarButtonState state = wxTASKBAR_BUTTON_NO_PROGRESS;
    if ( sel >= 0 && sel < (int)WXSIZEOF(stateMap) )
        state = stateMap[sel];

    MSWGetTaskBarButton()->SetProgressValue(m_slider->GetValue());
    MSWGetTaskBarButton()->SetProgressState(state);
}
void MyFrame::OnSetOverlayIcon(wxCommandEvent& WXUNUSED(event))
{
    // Each click cycles to the next colour in the helper's fixed palette.
    MSWGetTaskBarButton()->SetOverlayIcon(CreateRandomIcon());
}
void MyFrame::OnClearOverlayIcon(wxCommandEvent& WXUNUSED(event))
{
    // wxNullIcon removes the overlay.
    MSWGetTaskBarButton()->SetOverlayIcon(wxNullIcon);
}
// Shared handler for both clip buttons: the "set" button clips the thumbnail
// to a 100x100 square centered in the client area; the "restore" button
// passes the default (empty) rect, which restores the full thumbnail.
void MyFrame::OnSetOrRestoreThumbnailClip(wxCommandEvent& event)
{
    wxRect rect;
    if ( event.GetId() == SetThumbnailClipBtn )
    {
        static const int CLIP_LENGTH = 100;
        int height, width;
        GetClientSize(&width, &height);
        rect.SetX((width - CLIP_LENGTH) / 2);
        rect.SetY((height - CLIP_LENGTH) / 2);
        rect.SetHeight(CLIP_LENGTH);
        rect.SetWidth(CLIP_LENGTH);
    }
    MSWGetTaskBarButton()->SetThumbnailClip(rect);
}
void MyFrame::OnAddThubmBarButton(wxCommandEvent& WXUNUSED(event))
{
    // Cap at 7 buttons: one per reserved event-table ID
    // (ThumbnailToolbarBtn_0 .. ThumbnailToolbarBtn_6).
    if ( m_thumbBarButtons.size() >= 7 )
        return;
    // The button's ID is the next free ID in the reserved range.
    wxThumbBarButton *button =
        new wxThumbBarButton(m_thumbBarButtons.size() + ThumbnailToolbarBtn_0 ,
                             CreateRandomIcon());
    MSWGetTaskBarButton()->AppendThumbBarButton(button);
    m_thumbBarButtons.push_back(button);
}
void MyFrame::OnRemoveThubmBarButton(wxCommandEvent& WXUNUSED(event))
{
    if ( m_thumbBarButtons.empty() )
        return;
    wxThumbBarButton* button = m_thumbBarButtons.back();
    m_thumbBarButtons.pop_back();
    // RemoveThumbBarButton detaches the button and returns it to us; delete
    // it here to avoid leaking.
    delete MSWGetTaskBarButton()->RemoveThumbBarButton(button);
}
void MyFrame::OnThumbnailToolbarBtnClicked(wxCommandEvent& event)
{
    // All seven reserved IDs route here; log which one was pressed.
    wxLogMessage("Thumbnail Toolbar Button %d is clicked.", event.GetId());
}
|
// CBacnetBuildingRoomEditor.cpp: implementation file
//
#include "stdafx.h"
#include "T3000.h"
#include "CBacnetBuildingRoomEditor.h"
#include "afxdialogex.h"
// CBacnetBuildingRoomEditor dialog
IMPLEMENT_DYNAMIC(CBacnetBuildingRoomEditor, CDialogEx)

// Constructs the floor/room editor dialog from its resource template.
CBacnetBuildingRoomEditor::CBacnetBuildingRoomEditor(CWnd* pParent /*=nullptr*/)
    : CDialogEx(IDD_DIALOG_BACNET_BUILDING_EDIT_FLOOR, pParent)
{
}

CBacnetBuildingRoomEditor::~CBacnetBuildingRoomEditor()
{
}
// Standard MFC DDX: binds the dialog controls to their member variables.
void CBacnetBuildingRoomEditor::DoDataExchange(CDataExchange* pDX)
{
    CDialogEx::DoDataExchange(pDX);
    DDX_Control(pDX, IDC_STATIC_BM_FLOOR, m_static_floor);
    DDX_Control(pDX, IDC_STATIC_BM_ROOM, m_static_room);
    DDX_Control(pDX, IDC_LIST_BM_FLOOR, m_bm_floor_list);
    DDX_Control(pDX, IDC_LIST_BM_ROOM, m_bm_room_list);
}
// Message map: custom list-refresh/edit messages plus the button and list
// click notifications.
BEGIN_MESSAGE_MAP(CBacnetBuildingRoomEditor, CDialogEx)
    ON_MESSAGE(WM_REFRESH_BAC_FLOOR_LIST, Fresh_Floor_List)
    ON_MESSAGE(WM_REFRESH_BAC_ROOM_LIST, Fresh_Room_List)
    ON_MESSAGE(WM_LIST_ITEM_CHANGED, Fresh_EditCallBack_Item)
    ON_MESSAGE(WM_LIST_FLOOR_CHANGED, Fresh_Floor_Item)
    ON_MESSAGE(WM_LIST_ROOM_CHANGED, Fresh_Room_Item)
    ON_BN_CLICKED(IDC_BUTTON_BM_FLOOR_ADD, &CBacnetBuildingRoomEditor::OnBnClickedButtonBmFloorAdd)
    ON_NOTIFY(NM_CLICK, IDC_LIST_BM_FLOOR, &CBacnetBuildingRoomEditor::OnNMClickListBmFloor)
    ON_NOTIFY(NM_CLICK, IDC_LIST_BM_ROOM, &CBacnetBuildingRoomEditor::OnNMClickListBmRoom)
END_MESSAGE_MAP()
// Handle an in-place edit of a floor-list cell: update the backing bm_floor
// vector, appending a new entry when the edited row is the first row past
// the existing data.  wParam = row, lParam = column.
LRESULT CBacnetBuildingRoomEditor::Fresh_Floor_Item(WPARAM wParam, LPARAM lParam)
{
    int Changed_Item = (int)wParam;
    int Changed_SubItem = (int)lParam;
    CString New_CString = m_bm_floor_list.GetItemText(Changed_Item, Changed_SubItem);
    if (New_CString.IsEmpty())
        return 0;
    // Only rows 0..size() are valid (size() is the single "new entry" row).
    // The original test was "> bm_floor.size() + 1", which let row size()+1
    // fall through to bm_floor.at() and throw std::out_of_range; the
    // unsigned comparison also silently handled negative rows, so reject
    // them explicitly here.
    if (Changed_Item < 0 || Changed_Item > (int)bm_floor.size())
    {
        return 1;
    }
    else if (Changed_Item == (int)bm_floor.size())
    {
        // New row appended right after the existing floors.
        FloorInfo temp_floor;
        temp_floor.csName = New_CString;
        temp_floor.xindex = Changed_Item;
        bm_floor.push_back(temp_floor);
    }
    else
    {
        // Rename of an existing floor.
        bm_floor.at(Changed_Item).csName = New_CString;
    }
    // TODO: persist to the local cache;
    //PostMessage(WM_REFRESH_BAC_FLOOR_LIST,NULL,NULL);
    //PostMessage(WM_REFRESH_BAC_ROOM_LIST, NULL, NULL);
    return 1;
}
// Handle an in-place edit of a room-list cell.
// NOTE(review): currently a stub - the edited row/column are read but the
// bm_room data is never updated; presumably unfinished, mirror
// Fresh_Floor_Item when implementing.
LRESULT CBacnetBuildingRoomEditor::Fresh_Room_Item(WPARAM wParam, LPARAM lParam)
{
    int Changed_Item = (int)wParam;
    int Changed_SubItem = (int)lParam;
    return 1;
}
// CBacnetBuildingRoomEditor message handlers
// When a dialog hosts several list controls, the control that owns the input
// focus determines which list a generic "Changed" message belongs to.
// Route a generic list "item changed" notification to whichever of the two
// editable lists currently owns the focus, by re-posting the row/column as a
// list-specific message.
LRESULT CBacnetBuildingRoomEditor::Fresh_EditCallBack_Item(WPARAM wParam, LPARAM lParam)
{
    CWnd* window_focus = GetFocus();
    if (window_focus == NULL)
        return 0;
    // Use the pointer we already null-checked: the original re-called
    // GetFocus() here, which can return NULL if the focus changed in the
    // meantime and would then crash on ->GetDlgCtrlID().
    const int focusedId = window_focus->GetDlgCtrlID();
    if (focusedId == IDC_LIST_BM_FLOOR)
    {
        PostMessage(WM_LIST_FLOOR_CHANGED, wParam, lParam);
    }
    else if (focusedId == IDC_LIST_BM_ROOM)
    {
        PostMessage(WM_LIST_ROOM_CHANGED, wParam, lParam);
    }
    return 0;
}
// Dialog initialisation: style the static labels, then build and populate
// both list controls.
BOOL CBacnetBuildingRoomEditor::OnInitDialog()
{
    CDialogEx::OnInitDialog();
    // TODO: add extra initialization here
    InitialUI();
    Initial_List();
    return TRUE;  // return TRUE unless you set the focus to a control
                  // EXCEPTION: OCX property pages should return FALSE
}
// Styles the two section labels (blue 20pt Arial).
void CBacnetBuildingRoomEditor::InitialUI()
{
    m_static_floor.SetWindowTextW(_T("Floor"));
    m_static_floor.textColor(RGB(0, 0, 255));
    //m_static_floor.bkColor(RGB(0,255,255));
    m_static_floor.setFont(20, 16, NULL, _T("Arial"));
    m_static_room.SetWindowTextW(_T("Room"));
    m_static_room.textColor(RGB(0, 0, 255));
    //m_static_room.bkColor(RGB(0,255,255));
    m_static_room.setFont(20, 16, NULL, _T("Arial"));
}
// Rebuilds both list controls from scratch - a checkbox column plus an
// editable name column each - then populates them from the cached
// floor/room data.
void CBacnetBuildingRoomEditor::Initial_List()
{
    // Floor list.
    while (m_bm_floor_list.DeleteColumn(0));
    m_bm_floor_list.ModifyStyle(0, LVS_SINGLESEL | LVS_REPORT | LVS_SHOWSELALWAYS);
    m_bm_floor_list.SetExtendedStyle(m_bm_floor_list.GetExtendedStyle() | LVS_EX_GRIDLINES & (~LVS_EX_FULLROWSELECT));//Not allow full row select.
    m_bm_floor_list.InsertColumn(0, _T(" "), 30, ListCtrlEx::CheckBox, LVCFMT_LEFT, ListCtrlEx::SortByString);
    m_bm_floor_list.InsertColumn(1, _T("Floor Name"), 130, ListCtrlEx::EditBox, LVCFMT_LEFT, ListCtrlEx::SortByString);
    m_bm_floor_list.SetListHwnd(this->m_hWnd);
    m_bm_floor_list.SetWhetherShowBkCol(false);
    // Room list.
    while (m_bm_room_list.DeleteColumn(0));
    m_bm_room_list.ModifyStyle(0, LVS_SINGLESEL | LVS_REPORT | LVS_SHOWSELALWAYS);
    // BUGFIX: read the room list's own extended style here - the original
    // read m_bm_floor_list's style (copy-paste slip), so any style unique to
    // the room list was dropped.
    m_bm_room_list.SetExtendedStyle(m_bm_room_list.GetExtendedStyle() | LVS_EX_GRIDLINES & (~LVS_EX_FULLROWSELECT));//Not allow full row select.
    m_bm_room_list.InsertColumn(0, _T(" "), 30, ListCtrlEx::CheckBox, LVCFMT_LEFT, ListCtrlEx::SortByString);
    m_bm_room_list.InsertColumn(1, _T("Room Name"), 130, ListCtrlEx::EditBox, LVCFMT_LEFT, ListCtrlEx::SortByString);
    m_bm_room_list.SetListHwnd(this->m_hWnd);
    m_bm_room_list.SetWhetherShowBkCol(false);
    Fresh_Floor_List(0, 0);
    Fresh_Room_List(0, 0);
}
LRESULT CBacnetBuildingRoomEditor::Fresh_Floor_List(WPARAM wParam, LPARAM lParam)
{
	// Repopulate the floor list control from the bm_floor vector.
	// Column 0 is the (disabled) check-box cell, column 1 the floor name.
	m_bm_floor_list.DeleteAllItems();
	int row = 0;
	while (row < (int)bm_floor.size())
	{
		m_bm_floor_list.InsertItem(row, _T(" "));
		m_bm_floor_list.SetItemText(row, 1, bm_floor.at(row).csName);
		m_bm_floor_list.SetCellEnabled(row, 0, false);
		++row;
	}
	return 0;
}
LRESULT CBacnetBuildingRoomEditor::Fresh_Room_List(WPARAM wParam, LPARAM lParam)
{
	// Repopulate the room list control from the bm_room vector.
	// Column 0 is the (disabled) check-box cell, column 1 the room name.
	m_bm_room_list.DeleteAllItems();
	int row = 0;
	while (row < (int)bm_room.size())
	{
		m_bm_room_list.InsertItem(row, _T(" "));
		m_bm_room_list.SetItemText(row, 1, bm_room.at(row).csName);
		m_bm_room_list.SetCellEnabled(row, 0, false);
		++row;
	}
	return 0;
}
void CBacnetBuildingRoomEditor::OnBnClickedButtonBmFloorAdd()
{
	// "Add" button: append one blank, editable row to the floor list.
	int ncount = m_bm_floor_list.GetItemCount();
	// A row count greater than bm_floor.size() means a previously added
	// blank row has not been named (committed) yet — refuse to add another.
	if (ncount > bm_floor.size())
	{
		MessageBox(_T("Unnamed items have been added to the list"));
		return;
	}
	m_bm_floor_list.InsertItem(ncount , _T(""));
	m_bm_floor_list.SetCellEnabled(ncount, 0, false);
}
void CBacnetBuildingRoomEditor::OnNMClickListBmFloor(NMHDR* pNMHDR, LRESULT* pResult)
{
	// Click handler for the floor list: check the clicked row's check box
	// and clear every other row's (radio-button-like single selection).
	LPNMITEMACTIVATE pNMItemActivate = reinterpret_cast<LPNMITEMACTIVATE>(pNMHDR);
	*pResult = 0;
	// Hit-test the click position to find which row/cell was hit.
	DWORD dwPos = GetMessagePos();//Get which line is click by user.Set the check box, when user enter Insert it will jump to program dialog
	CPoint point(LOWORD(dwPos), HIWORD(dwPos));
	m_bm_floor_list.ScreenToClient(&point);
	LVHITTESTINFO lvinfo;
	lvinfo.pt = point;
	lvinfo.flags = LVHT_ABOVE;
	int nItem = m_bm_floor_list.SubItemHitTest(&lvinfo);
	if (lvinfo.iItem > m_bm_floor_list.GetItemCount()) // a click beyond the last row number is invalid
		return;
	if (lvinfo.iItem < 0)
		return;
	if (nItem != -1)
	{
		m_bm_floor_list.SetCellChecked(nItem, 0, 1);
		// Remember the most recently clicked row.
		program_list_line = nItem;
		// Uncheck every other row so only one row stays checked.
		for (int i = 0; i < m_bm_floor_list.GetItemCount(); ++i)
		{
			if (i == nItem)
				continue;
			m_bm_floor_list.SetCellChecked(i, 0, FALSE);
		}
	}
}
void CBacnetBuildingRoomEditor::OnNMClickListBmRoom(NMHDR* pNMHDR, LRESULT* pResult)
{
	// Click handler for the room list: same single-checked-row behaviour as
	// OnNMClickListBmFloor. NOTE(review): this version relies solely on
	// nItem != -1 to reject misses; the floor handler has extra iItem bounds
	// checks — confirm whether the asymmetry is intentional.
	LPNMITEMACTIVATE pNMItemActivate = reinterpret_cast<LPNMITEMACTIVATE>(pNMHDR);
	*pResult = 0;
	// Hit-test the click position to find which row/cell was hit.
	DWORD dwPos = GetMessagePos();//Get which line is click by user.Set the check box, when user enter Insert it will jump to program dialog
	CPoint point(LOWORD(dwPos), HIWORD(dwPos));
	m_bm_room_list.ScreenToClient(&point);
	LVHITTESTINFO lvinfo;
	lvinfo.pt = point;
	lvinfo.flags = LVHT_ABOVE;
	int nItem = m_bm_room_list.SubItemHitTest(&lvinfo);
	if (nItem != -1)
	{
		m_bm_room_list.SetCellChecked(nItem, 0, 1);
		// Remember the most recently clicked row.
		program_list_line = nItem;
		// Uncheck every other row so only one row stays checked.
		for (int i = 0; i < m_bm_room_list.GetItemCount(); ++i)
		{
			if (i == nItem)
				continue;
			m_bm_room_list.SetCellChecked(i, 0, FALSE);
		}
	}
}
|
import * as vscode from "vscode";
const textHeadPos = new vscode.Position(0, 0);

/**
 * Opens a note in the editor.
 * Thin wrapper kept for command registration; the uri is logged and then
 * forwarded verbatim to {@link openUrl}.
 *
 * Fix: the parameter was typed `any`, which disabled type checking entirely;
 * `vscode.Uri | string` matches what `openTextDocument` actually accepts.
 */
export const openNote = (uri: vscode.Uri | string) => {
  console.log("openNote", uri);
  openUrl(uri);
};

/**
 * Opens the document at `url` and moves the cursor to `pos`
 * (defaults to the top of the file), scrolling it into view.
 */
export const openUrl = (
  url: vscode.Uri | string,
  pos: vscode.Position = textHeadPos
) => {
  // openTextDocument has separate Uri/string overloads, so narrow first.
  const opening =
    typeof url === "string"
      ? vscode.workspace.openTextDocument(url)
      : vscode.workspace.openTextDocument(url);
  opening.then((doc) => {
    vscode.window.showTextDocument(doc).then((editor) => {
      editor.selections = [new vscode.Selection(pos, pos)];
      const range = new vscode.Range(pos, pos);
      editor.revealRange(range);
    });
  });
};
|
#!/bin/bash
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Only enforce signed commits when pushing to the official repository.
if ! [[ "$2" =~ ^(git@)?(www.)?github.com(:|/)adnetcoin/adnetcoin(.git)?$ ]]; then
    exit 0
fi

# Each stdin line from "git push" is: <local ref> <local sha> <remote ref> <remote sha>.
# Fix: use read -r so backslashes in ref names are not mangled.
while read -r LINE; do
    # Prepend a sentinel so an empty line cannot leave $1..$4 unset;
    # word-splitting of $LINE into the positional parameters is intentional.
    set -- A $LINE
    if [ "$4" != "refs/heads/master" ]; then
        continue
    fi
    # Fix: quote the sha argument.
    if ! ./contrib/verify-commits/verify-commits.sh "$3" > /dev/null 2>&1; then
        echo "ERROR: A commit is not signed, can't push"
        ./contrib/verify-commits/verify-commits.sh
        exit 1
    fi
done < /dev/stdin
|
#!/bin/bash
# Copyright (c) Lawrence Livermore National Security, LLC and other Ascent
# Project developers. See top-level LICENSE AND COPYRIGHT files for dates and
# other details. No copyright assignment is required to contribute to Ascent.
# Fail fast and echo each command as it runs.
set -ev
# Tag for the Ubuntu 21.04 devel third-party-libs CI container image.
export TAG_BASE=alpinedav/ascent-ci:ubuntu-21.04-devel-tpls
# The two date calls bracket the build so the log shows elapsed time.
date
python ../build_and_tag.py ${TAG_BASE}
date
|
<filename>server/src/main/java/com/portfolio/bugtracker/services/UserRolesService.java<gh_stars>0
package com.portfolio.bugtracker.services;
/**
 * Service contract for user-role operations.
 * Currently empty; methods will be added as role management
 * functionality is implemented.
 */
public interface UserRolesService
{
}
|
def fibonacci_series(n):
    """Print the first n Fibonacci numbers (1 1 2 3 5 ...) on one line.

    Each number is followed by a single space; nothing is printed for n <= 0.
    """
    prev, curr = 0, 1
    for _ in range(n):
        # Print the current number, then advance the pair one step.
        print(curr, end=" ")
        prev, curr = curr, prev + curr
|
# Convert the best FCN8s-MobileNet TensorFlow checkpoint to a frozen graph,
# freezing at the network/output/Softmax node and appending converter output
# to the model's log file.
python3 ckpt2pb.py --model_dir fcn8s_mobilenet/checkpoints/best --output_node_names network/output/Softmax >> fcn8s_mobilenet.log
|
def is_leap_year(year):
    """Return True if *year* is a Gregorian leap year, else False.

    Rules, most specific first: multiples of 400 are leap years,
    other multiples of 100 are not, remaining multiples of 4 are.
    """
    if year % 400 == 0:
        return True
    if year % 100 == 0:
        return False
    return year % 4 == 0
|
<filename>FactoriOres/src/main/java/seraphaestus/factoriores/worldgen/WorldGenEventHandler.java
package seraphaestus.factoriores.worldgen;
import net.minecraft.world.gen.GenerationStage;
import net.minecraft.world.gen.feature.ConfiguredFeature;
import net.minecraftforge.event.world.BiomeLoadingEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod;
import seraphaestus.factoriores.ConfigHandler;
import seraphaestus.factoriores.Registrar;
@Mod.EventBusSubscriber
public class WorldGenEventHandler {
	/**
	 * As each biome loads, appends every registered ore-deposit feature to
	 * its UNDERGROUND_ORES generation stage, unless world generation has
	 * been disabled in the mod config.
	 *
	 * @param event fired by Forge once per biome during registry setup
	 */
	@SubscribeEvent
	public static void onBiomeLoad(final BiomeLoadingEvent event) {
		if (!ConfigHandler.COMMON.worldgenEnabled.get()) return;
		for (ConfiguredFeature<?, ?> configuredFeatureOreDeposit : Registrar.configuredFeaturesDeposits) {
			// The supplier lambda defers feature resolution until generation time.
			event.getGeneration().getFeatures(GenerationStage.Decoration.UNDERGROUND_ORES).add(() -> configuredFeatureOreDeposit);
		}
	}
}
|
<gh_stars>0
package com.example.demo.model;
import org.hibernate.annotations.GenericGenerator;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
/**
* Created by Admin on 07.05.2017.
*/
/**
 * JPA entity: a named location linked to a parent {@link Location} and owned
 * by a {@link Customer}. Created by Admin on 07.05.2017.
 */
@Entity
// NOTE(review): @Transactional belongs on services/repositories, not on JPA
// entities (it has no effect here) — confirm and consider removing.
@Transactional
public class RelatedLocation {
    @Id
    @GeneratedValue(generator = "UUID")
    @GenericGenerator(
            name = "UUID",
            strategy = "org.hibernate.id.UUIDGenerator"
    )
    private String id;

    @Basic
    private String name;

    @ManyToOne(targetEntity = Location.class, cascade = {CascadeType.PERSIST, CascadeType.MERGE})
    private Location parentLocation;

    // NOTE(review): CascadeType.REMOVE on a @ManyToOne means deleting this
    // RelatedLocation also deletes the shared Customer — almost certainly
    // unintended; confirm before relying on delete behaviour.
    @ManyToOne(targetEntity = Customer.class, cascade = {CascadeType.PERSIST, CascadeType.MERGE, CascadeType.REMOVE})
    private Customer customer;

    /** @return the generated UUID primary key. */
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Fix: accessors for parentLocation and customer were missing entirely. */
    public Location getParentLocation() {
        return parentLocation;
    }

    public void setParentLocation(Location parentLocation) {
        this.parentLocation = parentLocation;
    }

    public Customer getCustomer() {
        return customer;
    }

    public void setCustomer(Customer customer) {
        this.customer = customer;
    }
}
|
#!/bin/sh
# Extend the classpath with the Xerces XSD 1.1 beta jars and run the JAXP
# SourceValidator in XSD 1.1 mode, forwarding all script arguments.
CLASSPATH=$CLASSPATH:/usr/local/java/xerces-2_11_0-xml-schema-1.1-beta/xercesImpl.jar:/usr/local/java/xerces-2_11_0-xml-schema-1.1-beta/xercesSamples.jar:/usr/local/java/xerces-2_11_0-xml-schema-1.1-beta/org.eclipse.wst.xml.xpath2.processor_1.1.0.jar:/usr/local/java/xerces-2_11_0-xml-schema-1.1-beta/resolver.jar:/usr/local/java/xerces-2_11_0-xml-schema-1.1-beta/serializer.jar:/usr/local/java/xerces-2_11_0-xml-schema-1.1-beta/xml-apis.jar
export CLASSPATH
echo "java jaxp.SourceValidator -xsd11 $*"
# Fix: use "$@" so arguments containing spaces survive word splitting
# (the unquoted $* re-split every argument).
java jaxp.SourceValidator -xsd11 "$@"
|
#!/bin/sh
# shellcheck disable=SC2086 # FIXME: fix these globing warnings
# SerenityOS Meta/run.sh: boots a built Serenity image under QEMU (or bochs).
# Behaviour is steered by SERENITY_* environment variables (arch, run mode,
# RAM, CPUs, disk image, display/audio backends) with sensible defaults.
set -e

die() {
    echo "die: $*"
    exit 1
}

SCRIPT_DIR="$(dirname "${0}")"

# https://www.shellcheck.net/wiki/SC1090 No need to shellcheck private config.
# shellcheck source=/dev/null
[ -x "$SCRIPT_DIR/../run-local.sh" ] && . "$SCRIPT_DIR/../run-local.sh"

#SERENITY_PACKET_LOGGING_ARG="-object filter-dump,id=hue,netdev=breh,file=e1000.pcap"

# Detect usable KVM: /dev/kvm must exist and be read/write accessible.
# FIXME: Enable for SERENITY_ARCH=aarch64 if on an aarch64 host?
KVM_SUPPORT="0"
[ -e /dev/kvm ] && [ -r /dev/kvm ] && [ -w /dev/kvm ] && [ "$SERENITY_ARCH" != "aarch64" ] && KVM_SUPPORT="1"

[ -z "$SERENITY_BOCHS_BIN" ] && SERENITY_BOCHS_BIN="bochs"

# To support virtualization acceleration on mac
# we need to use 64-bit qemu
if [ "$(uname)" = "Darwin" ] && [ "$(uname -m)" = "x86_64" ]; then
    [ -z "$SERENITY_QEMU_BIN" ] && SERENITY_QEMU_BIN="qemu-system-x86_64"
    if $SERENITY_QEMU_BIN --accel help | grep -q hvf; then
        SERENITY_VIRT_TECH_ARG="--accel hvf"
    fi
fi

# Prepend the toolchain qemu directory so we pick up QEMU from there
PATH="$SCRIPT_DIR/../Toolchain/Local/qemu/bin:$PATH"

# Also prepend the i686 toolchain directory because that's where most
# people will have their QEMU binaries if they built them before the
# directory was changed to Toolchain/Local/qemu.
PATH="$SCRIPT_DIR/../Toolchain/Local/i686/bin:$PATH"

# Run mode comes from the env or from the first positional argument.
SERENITY_RUN="${SERENITY_RUN:-$1}"

if [ -z "$SERENITY_QEMU_BIN" ]; then
    # Under WSL, locate a native Windows QEMU via the registry.
    if command -v wslpath >/dev/null; then
        # Some Windows systems don't have reg.exe's directory on the PATH by default.
        PATH=$PATH:/mnt/c/Windows/System32
        # NOTE(review): the whitespace inside this grep/sed/cut pipeline looks
        # mangled (collapsed spaces/tabs) — verify against the upstream script.
        QEMU_INSTALL_DIR=$(reg.exe query 'HKLM\Software\QEMU' /v Install_Dir /t REG_SZ | grep '^ Install_Dir' | sed 's/ / /g' | cut -f4- -d' ')
        if [ -z "$QEMU_INSTALL_DIR" ]; then
            if [ "$KVM_SUPPORT" -eq "0" ]; then
                die "Could not determine where QEMU for Windows is installed. Please make sure QEMU is installed or set SERENITY_QEMU_BIN if it is already installed."
            fi
        else
            KVM_SUPPORT="0"
            QEMU_BINARY_PREFIX="$(wslpath -- "${QEMU_INSTALL_DIR}" | tr -d '\r\n')/"
            QEMU_BINARY_SUFFIX=".exe"
        fi
    fi
    # Pick the QEMU system binary that matches the target architecture.
    if [ "$SERENITY_ARCH" = "aarch64" ]; then
        SERENITY_QEMU_BIN="${QEMU_BINARY_PREFIX}qemu-system-aarch64${QEMU_BINARY_SUFFIX}"
    elif [ "$SERENITY_ARCH" = "x86_64" ]; then
        SERENITY_QEMU_BIN="${QEMU_BINARY_PREFIX}qemu-system-x86_64${QEMU_BINARY_SUFFIX}"
    else
        SERENITY_QEMU_BIN="${QEMU_BINARY_PREFIX}qemu-system-i386${QEMU_BINARY_SUFFIX}"
    fi
fi

[ "$KVM_SUPPORT" -eq "1" ] && SERENITY_VIRT_TECH_ARG="-enable-kvm"

# For default values, see Kernel/CommandLine.cpp
[ -z "$SERENITY_KERNEL_CMDLINE" ] && SERENITY_KERNEL_CMDLINE="hello"

[ -z "$SERENITY_RAM_SIZE" ] && SERENITY_RAM_SIZE=1G

# Choose the disk image matching the run mode; translate the path for
# a native Windows QEMU running from WSL.
[ -z "$SERENITY_DISK_IMAGE" ] && {
    if [ "$SERENITY_RUN" = q35grub ] || [ "$SERENITY_RUN" = qgrub ]; then
        SERENITY_DISK_IMAGE="grub_disk_image"
    elif [ "$SERENITY_RUN" = qextlinux ]; then
        SERENITY_DISK_IMAGE="extlinux_disk_image"
    else
        SERENITY_DISK_IMAGE="_disk_image"
    fi
    if command -v wslpath >/dev/null; then
        case "$SERENITY_QEMU_BIN" in
            /mnt/?/*)
                SERENITY_DISK_IMAGE=$(wslpath -w "$SERENITY_DISK_IMAGE")
                ;;
        esac
    fi
}

if ! command -v "$SERENITY_QEMU_BIN" >/dev/null 2>&1 ; then
    die "Please install QEMU version 5.0 or newer or use the Toolchain/BuildQemu.sh script."
fi

# Parse "QEMU emulator version X.Y..." and enforce the minimum version.
SERENITY_QEMU_MIN_REQ_VERSION=5
installed_major_version=$("$SERENITY_QEMU_BIN" -version | head -n 1 | sed -E 's/QEMU emulator version ([1-9][0-9]*|0).*/\1/')
installed_minor_version=$("$SERENITY_QEMU_BIN" -version | head -n 1 | sed -E 's/QEMU emulator version [0-9]+\.([1-9][0-9]*|0).*/\1/')
if [ "$installed_major_version" -lt "$SERENITY_QEMU_MIN_REQ_VERSION" ]; then
    echo "Required QEMU >= 5.0! Found $($SERENITY_QEMU_BIN -version | head -n 1)"
    echo "Please install a newer version of QEMU or use the Toolchain/BuildQemu.sh script."
    die
fi

NATIVE_WINDOWS_QEMU="0"

# A /mnt/<drive>/ QEMU path means a native Windows binary: use WHPX
# acceleration, disable virtio, and remember that for later decisions.
if command -v wslpath >/dev/null; then
    case "$SERENITY_QEMU_BIN" in
        /mnt/?/*)
            if [ -z "$SERENITY_VIRT_TECH_ARG" ]; then
                if [ "$installed_major_version" -gt 5 ]; then
                    SERENITY_VIRT_TECH_ARG="-accel whpx,kernel-irqchip=off -accel tcg"
                else
                    SERENITY_VIRT_TECH_ARG="-accel whpx -accel tcg"
                fi
            fi
            [ -z "$SERENITY_QEMU_CPU" ] && SERENITY_QEMU_CPU="max,vmx=off"
            SERENITY_KERNEL_CMDLINE="$SERENITY_KERNEL_CMDLINE disable_virtio"
            NATIVE_WINDOWS_QEMU="1"
            ;;
    esac
fi

[ -z "$SERENITY_QEMU_CPU" ] && SERENITY_QEMU_CPU="max"

if [ "$SERENITY_ARCH" != "aarch64" ]; then
    [ -z "$SERENITY_CPUS" ] && SERENITY_CPUS="2"
    if [ "$SERENITY_CPUS" -le 8 ]; then
        # Explicitly disable x2APIC so we can test it more easily
        SERENITY_QEMU_CPU="$SERENITY_QEMU_CPU,-x2apic"
    fi
    # Prefer the qemu-vdagent clipboard chardev when available, else spicevmc.
    if [ -z "$SERENITY_SPICE" ] && "${SERENITY_QEMU_BIN}" -chardev help | grep -iq qemu-vdagent; then
        SERENITY_SPICE_SERVER_CHARDEV="-chardev qemu-vdagent,clipboard=on,mouse=off,id=vdagent,name=vdagent"
    elif "${SERENITY_QEMU_BIN}" -chardev help | grep -iq spicevmc; then
        SERENITY_SPICE_SERVER_CHARDEV="-chardev spicevmc,id=vdagent,name=vdagent"
    fi
fi

# Pick an audio backend per host platform/QEMU capabilities.
if [ "$(uname)" = "Darwin" ]; then
    SERENITY_AUDIO_BACKEND="-audiodev coreaudio,id=snd0"
elif [ "$NATIVE_WINDOWS_QEMU" -eq "1" ]; then
    SERENITY_AUDIO_BACKEND="-audiodev dsound,id=snd0"
elif "$SERENITY_QEMU_BIN" -audio-help 2>&1 | grep -- "-audiodev id=sdl" >/dev/null; then
    SERENITY_AUDIO_BACKEND="-audiodev sdl,id=snd0"
else
    SERENITY_AUDIO_BACKEND="-audiodev pa,timer-period=2000,id=snd0"
fi

# QEMU 5.0 exactly still used the legacy -soundhw option for the PC speaker.
if [ "$installed_major_version" -eq 5 ] && [ "$installed_minor_version" -eq 0 ]; then
    SERENITY_AUDIO_HW="-soundhw pcspk"
else
    SERENITY_AUDIO_HW="-machine pcspk-audiodev=snd0"
fi

SERENITY_SCREENS="${SERENITY_SCREENS:-1}"
# Pick a display backend: spice-app if requested, otherwise SDL/cocoa/GTK
# depending on platform, screen count and virgl availability.
if [ "$SERENITY_SPICE" ]; then
    SERENITY_QEMU_DISPLAY_BACKEND="${SERENITY_QEMU_DISPLAY_BACKEND:-spice-app}"
elif [ "$NATIVE_WINDOWS_QEMU" -eq "1" ]; then
    # QEMU for windows does not like gl=on, so detect if we are building in wsl, and if so, disable it
    # Also, when using the GTK backend we run into this problem: https://github.com/SerenityOS/serenity/issues/7657
    SERENITY_QEMU_DISPLAY_BACKEND="${SERENITY_QEMU_DISPLAY_BACKEND:-sdl,gl=off}"
elif [ $SERENITY_SCREENS -gt 1 ] && "${SERENITY_QEMU_BIN}" --display help | grep -iq sdl; then
    SERENITY_QEMU_DISPLAY_BACKEND="${SERENITY_QEMU_DISPLAY_BACKEND:-sdl,gl=off}"
elif ! command -v wslpath >/dev/null && ("${SERENITY_QEMU_BIN}" --display help | grep -iq sdl) && (ldconfig -p | grep -iq virglrenderer); then
    SERENITY_QEMU_DISPLAY_BACKEND="${SERENITY_QEMU_DISPLAY_BACKEND:-sdl,gl=on}"
elif "${SERENITY_QEMU_BIN}" --display help | grep -iq cocoa; then
    # QEMU for OSX seems to only support cocoa
    SERENITY_QEMU_DISPLAY_BACKEND="${SERENITY_QEMU_DISPLAY_BACKEND:-cocoa,gl=off}"
else
    SERENITY_QEMU_DISPLAY_BACKEND="${SERENITY_QEMU_DISPLAY_BACKEND:-gtk,gl=off}"
fi

if [ "$SERENITY_SCREENS" -gt 1 ]; then
    SERENITY_QEMU_DISPLAY_DEVICE="virtio-vga,max_outputs=$SERENITY_SCREENS "
    # QEMU appears to always relay absolute mouse coordinates relative to the screen that the mouse is
    # pointed to, without any way for us to know what screen it was. So, when dealing with multiple
    # displays force using relative coordinates only
    SERENITY_KERNEL_CMDLINE="$SERENITY_KERNEL_CMDLINE vmmouse=off"
else
    SERENITY_QEMU_DISPLAY_DEVICE="VGA,vgamem_mb=64 "
fi

# -s opens the default gdb stub socket unless explicitly disabled.
if [ -z "$SERENITY_DISABLE_GDB_SOCKET" ]; then
    SERENITY_EXTRA_QEMU_ARGS="$SERENITY_EXTRA_QEMU_ARGS -s"
fi

if [ -z "$SERENITY_ETHERNET_DEVICE_TYPE" ]; then
    SERENITY_ETHERNET_DEVICE_TYPE="e1000"
fi

# Default machine definition: raspi3 for aarch64, otherwise a PC with
# display, disk, virtio console, RNG, audio, and bridged NIC/SD devices.
if [ -z "$SERENITY_MACHINE" ]; then
    if [ "$SERENITY_ARCH" = "aarch64" ]; then
        SERENITY_MACHINE="-M raspi3 -serial stdio"
    else
        SERENITY_MACHINE="
        -m $SERENITY_RAM_SIZE
        -smp $SERENITY_CPUS
        -display $SERENITY_QEMU_DISPLAY_BACKEND
        -device $SERENITY_QEMU_DISPLAY_DEVICE
        -drive file=${SERENITY_DISK_IMAGE},format=raw,index=0,media=disk
        -device virtio-serial,max_ports=2
        -device virtconsole,chardev=stdout
        -device isa-debugcon,chardev=stdout
        -device virtio-rng-pci
        $SERENITY_AUDIO_BACKEND
        $SERENITY_AUDIO_HW
        -device ac97,audiodev=snd0
        -device pci-bridge,chassis_nr=1,id=bridge1 -device $SERENITY_ETHERNET_DEVICE_TYPE,bus=bridge1
        -device i82801b11-bridge,bus=bridge1,id=bridge2 -device sdhci-pci,bus=bridge2
        -device i82801b11-bridge,id=bridge3 -device sdhci-pci,bus=bridge3
        -device ich9-ahci,bus=bridge3
        -chardev stdio,id=stdout,mux=on
        "
    fi
fi

# The QMP unix socket is unsupported by native Windows QEMU builds.
if [ "$NATIVE_WINDOWS_QEMU" -ne "1" ]; then
    SERENITY_MACHINE="$SERENITY_MACHINE
    -qmp unix:qmp-sock,server,nowait"
fi

[ -z "$SERENITY_COMMON_QEMU_ARGS" ] && SERENITY_COMMON_QEMU_ARGS="
$SERENITY_EXTRA_QEMU_ARGS
$SERENITY_MACHINE
-cpu $SERENITY_QEMU_CPU
-d guest_errors
-usb
$SERENITY_SPICE_SERVER_CHARDEV
"

# Add a spice server and vdagent port when the QEMU build supports spice.
if [ "$SERENITY_ARCH" != "aarch64" ]; then
    if "${SERENITY_QEMU_BIN}" -chardev help | grep -iq spice; then
        SERENITY_COMMON_QEMU_ARGS="$SERENITY_COMMON_QEMU_ARGS
        -spice port=5930,agent-mouse=off,disable-ticketing=on
        -device virtserialport,chardev=vdagent,nr=1
        "
    fi
fi

# Alternative argument set for the Q35 chipset run modes.
[ -z "$SERENITY_COMMON_QEMU_Q35_ARGS" ] && SERENITY_COMMON_QEMU_Q35_ARGS="
$SERENITY_EXTRA_QEMU_ARGS
-m $SERENITY_RAM_SIZE
-cpu $SERENITY_QEMU_CPU
-machine q35
-d guest_errors
-smp $SERENITY_CPUS
-vga none
-device bochs-display
-device ich9-usb-ehci1,bus=pcie.0,multifunction=on,addr=0x5.0x0
-device ich9-usb-ehci2,bus=pcie.0,addr=0x5.0x2
-device ich9-usb-uhci1,bus=pcie.0,multifunction=on,addr=0x7.0x0
-device ich9-usb-uhci2,bus=pcie.0,addr=0x7.0x1
-device ich9-usb-uhci3,bus=pcie.0,addr=0x7.0x2
-device ich9-usb-uhci4,bus=pcie.0,addr=0x7.0x3
-device ich9-usb-uhci5,bus=pcie.0,addr=0x7.0x4
-device ich9-usb-uhci6,bus=pcie.0,addr=0x7.0x5
-device pcie-root-port,port=0x10,chassis=1,id=pcie.1,bus=pcie.0,multifunction=on,addr=0x6
-device pcie-root-port,port=0x11,chassis=2,id=pcie.2,bus=pcie.0,addr=0x6.0x1
-device pcie-root-port,port=0x12,chassis=3,id=pcie.3,bus=pcie.0,addr=0x6.0x2
-device pcie-root-port,port=0x13,chassis=4,id=pcie.4,bus=pcie.0,addr=0x6.0x3
-device pcie-root-port,port=0x14,chassis=5,id=pcie.5,bus=pcie.0,addr=0x6.0x4
-device pcie-root-port,port=0x15,chassis=6,id=pcie.6,bus=pcie.0,addr=0x6.0x5
-device pcie-root-port,port=0x16,chassis=7,id=pcie.7,bus=pcie.0,addr=0x6.0x6
-device pcie-root-port,port=0x17,chassis=8,id=pcie.8,bus=pcie.0,addr=0x6.0x7
-device bochs-display,bus=pcie.6,addr=0x10.0x0
-device ich9-intel-hda,bus=pcie.2,addr=0x03.0x0
-device nec-usb-xhci,bus=pcie.2,addr=0x11.0x0
-device pci-bridge,chassis_nr=1,id=bridge1,bus=pcie.4,addr=0x3.0x0
-device sdhci-pci,bus=bridge1,addr=0x1.0x0
-display $SERENITY_QEMU_DISPLAY_BACKEND
-drive file=${SERENITY_DISK_IMAGE},format=raw,id=disk,if=none
-device ahci,id=ahci
-device ide-hd,bus=ahci.0,drive=disk,unit=0
-device virtio-serial
-chardev stdio,id=stdout,mux=on
-device virtconsole,chardev=stdout
-device isa-debugcon,chardev=stdout
-device virtio-rng-pci
$SERENITY_AUDIO_BACKEND
$SERENITY_AUDIO_HW
"

export SDL_VIDEO_X11_DGAMOUSE=0

# Run from the build directory so relative kernel/disk paths resolve.
: "${SERENITY_BUILD:=.}"
cd -P -- "$SERENITY_BUILD" || die "Could not cd to \"$SERENITY_BUILD\""

# Dispatch on the run mode.
if [ "$SERENITY_RUN" = "b" ]; then
    # Meta/run.sh b: bochs
    [ -z "$SERENITY_BOCHSRC" ] && {
        # Make sure that SERENITY_SOURCE_DIR is set and not empty
        [ -z "$SERENITY_SOURCE_DIR" ] && die 'SERENITY_SOURCE_DIR not set or empty'
        SERENITY_BOCHSRC="$SERENITY_SOURCE_DIR/Meta/bochsrc"
    }
    "$SERENITY_BOCHS_BIN" -q -f "$SERENITY_BOCHSRC"
elif [ "$SERENITY_RUN" = "qn" ]; then
    # Meta/run.sh qn: qemu without network
    "$SERENITY_QEMU_BIN" \
        $SERENITY_COMMON_QEMU_ARGS \
        -device $SERENITY_ETHERNET_DEVICE_TYPE \
        -kernel Kernel/Prekernel/Prekernel \
        -initrd Kernel/Kernel \
        -append "${SERENITY_KERNEL_CMDLINE}"
elif [ "$SERENITY_RUN" = "qtap" ]; then
    # Meta/run.sh qtap: qemu with tap
    sudo ip tuntap del dev tap0 mode tap || true
    sudo ip tuntap add dev tap0 mode tap user "$(id -u)"
    "$SERENITY_QEMU_BIN" \
        $SERENITY_COMMON_QEMU_ARGS \
        $SERENITY_VIRT_TECH_ARG \
        $SERENITY_PACKET_LOGGING_ARG \
        -netdev tap,ifname=tap0,id=br0 \
        -device $SERENITY_ETHERNET_DEVICE_TYPE,netdev=br0 \
        -kernel Kernel/Prekernel/Prekernel \
        -initrd Kernel/Kernel \
        -append "${SERENITY_KERNEL_CMDLINE}"
    sudo ip tuntap del dev tap0 mode tap
elif [ "$SERENITY_RUN" = "qgrub" ] || [ "$SERENITY_RUN" = "qextlinux" ]; then
    # Meta/run.sh qgrub: qemu with grub/extlinux
    "$SERENITY_QEMU_BIN" \
        $SERENITY_COMMON_QEMU_ARGS \
        $SERENITY_VIRT_TECH_ARG \
        $SERENITY_PACKET_LOGGING_ARG \
        -netdev user,id=breh,hostfwd=tcp:127.0.0.1:8888-10.0.2.15:8888,hostfwd=tcp:127.0.0.1:8823-10.0.2.15:23 \
        -device $SERENITY_ETHERNET_DEVICE_TYPE,netdev=breh
elif [ "$SERENITY_RUN" = "q35" ]; then
    # Meta/run.sh q35: qemu (q35 chipset) with SerenityOS
    echo "Starting SerenityOS with QEMU Q35 machine, Commandline: ${SERENITY_KERNEL_CMDLINE}"
    "$SERENITY_QEMU_BIN" \
        $SERENITY_COMMON_QEMU_Q35_ARGS \
        $SERENITY_VIRT_TECH_ARG \
        -netdev user,id=breh,hostfwd=tcp:127.0.0.1:8888-10.0.2.15:8888,hostfwd=tcp:127.0.0.1:8823-10.0.2.15:23 \
        -device $SERENITY_ETHERNET_DEVICE_TYPE,netdev=breh \
        -kernel Kernel/Prekernel/Prekernel \
        -initrd Kernel/Kernel \
        -append "${SERENITY_KERNEL_CMDLINE}"
elif [ "$SERENITY_RUN" = "q35grub" ]; then
    # Meta/run.sh q35grub: qemu (q35 chipset) with SerenityOS, using a grub disk image
    "$SERENITY_QEMU_BIN" \
        $SERENITY_COMMON_QEMU_Q35_ARGS \
        $SERENITY_VIRT_TECH_ARG \
        -netdev user,id=breh,hostfwd=tcp:127.0.0.1:8888-10.0.2.15:8888,hostfwd=tcp:127.0.0.1:8823-10.0.2.15:23 \
        -device $SERENITY_ETHERNET_DEVICE_TYPE,netdev=breh
elif [ "$SERENITY_RUN" = "ci" ]; then
    # Meta/run.sh ci: qemu in text mode
    echo "Running QEMU in CI"
    "$SERENITY_QEMU_BIN" \
        $SERENITY_EXTRA_QEMU_ARGS \
        $SERENITY_VIRT_TECH_ARG \
        -m $SERENITY_RAM_SIZE \
        -cpu $SERENITY_QEMU_CPU \
        -d guest_errors \
        -no-reboot \
        -smp ${SERENITY_CPUS} \
        -drive file=${SERENITY_DISK_IMAGE},format=raw,index=0,media=disk \
        -device ich9-ahci \
        -nographic \
        -display none \
        -debugcon file:debug.log \
        -kernel Kernel/Prekernel/Prekernel \
        -initrd Kernel/Kernel \
        -append "${SERENITY_KERNEL_CMDLINE}"
else
    # Meta/run.sh: qemu with user networking
    if [ "$SERENITY_ARCH" = "aarch64" ]; then
        SERENITY_NETFLAGS=
    else
        SERENITY_NETFLAGS="
        -netdev user,id=breh,hostfwd=tcp:127.0.0.1:8888-10.0.2.15:8888,hostfwd=tcp:127.0.0.1:8823-10.0.2.15:23,hostfwd=tcp:127.0.0.1:8000-10.0.2.15:8000,hostfwd=tcp:127.0.0.1:2222-10.0.2.15:22 \
        -device $SERENITY_ETHERNET_DEVICE_TYPE,netdev=breh \
        "
    fi
    "$SERENITY_QEMU_BIN" \
        $SERENITY_COMMON_QEMU_ARGS \
        $SERENITY_VIRT_TECH_ARG \
        $SERENITY_PACKET_LOGGING_ARG \
        $SERENITY_NETFLAGS \
        -kernel Kernel/Prekernel/Prekernel \
        -initrd Kernel/Kernel \
        -append "${SERENITY_KERNEL_CMDLINE}"
fi
|
<filename>gameCenter/src/main/java/com/shiliu/game/domain/UserGame.java
package com.shiliu.game.domain;
import java.util.Date;
import com.shiliu.game.utils.LongIdWorker;
/**
 * Per-user, per-game progress record: identity, play counters, "flow"
 * balance and score bookkeeping. Plain bean; String setters normalize
 * their input by trimming (null stays null).
 */
public class UserGame {
    // Surrogate primary key, pre-generated by the project's id worker.
    private Long userGameId = LongIdWorker.getDataId();
    // User identity: phone number and openid (presumably an OAuth/WeChat id — confirm).
    private String phone;
    private String openid;
    // Id of the game this record belongs to.
    private String gameId;
    // "Flow" counter — domain semantics not visible here; confirm with callers.
    private Integer flowNum;
    // Timestamps: last play time and record creation time.
    private Date playTime;
    private Date createTime;
    // Play counters: recent plays vs. overall total.
    private Integer playTimes;
    private Integer totalTimes;
    private Integer holenFlow;
    // Score bookkeeping: best score, previous cycle's score, current score.
    private Integer maxScore;
    private Integer lastCycleScore;
    private Integer currentScore;
    // Dirty flag — presumably marks the record for write-back; verify usage.
    private Boolean isUpdate;
    public Long getUserGameId() {
        return userGameId;
    }
    public void setUserGameId(Long userGameId) {
        this.userGameId = userGameId;
    }
    public String getPhone() {
        return phone;
    }
    public void setPhone(String phone) {
        this.phone = phone == null ? null : phone.trim();
    }
    public String getOpenid() {
        return openid;
    }
    public void setOpenid(String openid) {
        this.openid = openid == null ? null : openid.trim();
    }
    public String getGameId() {
        return gameId;
    }
    public void setGameId(String gameId) {
        this.gameId = gameId == null ? null : gameId.trim();
    }
    public Integer getFlowNum() {
        return flowNum;
    }
    public void setFlowNum(Integer flowNum) {
        this.flowNum = flowNum;
    }
    public Date getPlayTime() {
        return playTime;
    }
    public void setPlayTime(Date playTime) {
        this.playTime = playTime;
    }
    public Date getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }
    public Integer getPlayTimes() {
        return playTimes;
    }
    public void setPlayTimes(Integer playTimes) {
        this.playTimes = playTimes;
    }
    public Integer getTotalTimes() {
        return totalTimes;
    }
    public void setTotalTimes(Integer totalTimes) {
        this.totalTimes = totalTimes;
    }
    public Integer getHolenFlow() {
        return holenFlow;
    }
    public void setHolenFlow(Integer holenFlow) {
        this.holenFlow = holenFlow;
    }
    public Integer getMaxScore() {
        return maxScore;
    }
    public void setMaxScore(Integer maxScore) {
        this.maxScore = maxScore;
    }
    public Integer getLastCycleScore() {
        return lastCycleScore;
    }
    public void setLastCycleScore(Integer lastCycleScore) {
        this.lastCycleScore = lastCycleScore;
    }
    public Integer getCurrentScore() {
        return currentScore;
    }
    public void setCurrentScore(Integer currentScore) {
        this.currentScore = currentScore;
    }
    public Boolean getIsUpdate() {
        return isUpdate;
    }
    public void setIsUpdate(Boolean isUpdate) {
        this.isUpdate = isUpdate;
    }
}
|
// main scans a fixed sample slice and prints its largest element.
func main() {
	values := []int{3, 5, 8, 7, 2}
	largest := values[0]
	for _, v := range values[1:] {
		if v > largest {
			largest = v
		}
	}
	fmt.Println("The highest value is:", largest)
}
|
#!/bin/bash
# Abort with a message when the previous command failed.
shellExit()
{
    # Fix: the original only caught exit status 1; "go run" can fail with
    # other non-zero codes (e.g. 2), which were silently ignored.
    if [ "$1" -ne 0 ]; then
        printf "\nfailed!!!\n\n"
        exit 1
    fi
}

printf "\nInit db\n\n"
# Usage: <script> <addr> <user> <pass> <name>
# Fix: quote the positional parameters so empty or space-containing
# values are passed through intact.
time go run -v ./cmd/init/db/main.go -addr "$1" -user "$2" -pass "$3" -name "$4"
shellExit $?
printf "\nDone.\n\n"
|
#!/bin/bash
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
export NGINX_VERSION=1.19.2
export NDK_VERSION=0.3.1
export SETMISC_VERSION=0.32
export MORE_HEADERS_VERSION=0.33
export NGINX_DIGEST_AUTH=cd8641886c873cf543255aeda20d23e4cd603d05
export NGINX_SUBSTITUTIONS=bc58cb11844bc42735bbaef7085ea86ace46d05b
export NGINX_OPENTRACING_VERSION=0.9.0
export OPENTRACING_CPP_VERSION=1.5.1
export ZIPKIN_CPP_VERSION=0.5.2
export JAEGER_VERSION=0.4.2
export MSGPACK_VERSION=3.2.1
export DATADOG_CPP_VERSION=1.1.5
export MODSECURITY_VERSION=b55a5778c539529ae1aa10ca49413771d52bb62e
export MODSECURITY_LIB_VERSION=v3.0.4
export OWASP_MODSECURITY_CRS_VERSION=v3.3.0
export LUA_NGX_VERSION=0.10.17
export LUA_STREAM_NGX_VERSION=0.0.8
export LUA_UPSTREAM_VERSION=0.07
export LUA_BRIDGE_TRACER_VERSION=0.1.1
export LUA_CJSON_VERSION=2.1.0.8
export NGINX_INFLUXDB_VERSION=5b09391cb7b9a889687c0aa67964c06a2d933e8b
export GEOIP2_VERSION=3.3
export NGINX_AJP_VERSION=bf6cd93f2098b59260de8d494f0f4b1f11a84627
export LUAJIT_VERSION=31116c4d25c4283a52b2d87fed50101cf20f5b77
export LUA_RESTY_BALANCER=0.03
export LUA_RESTY_CACHE=0.10
export LUA_RESTY_CORE=0.1.19
export LUA_RESTY_COOKIE_VERSION=766ad8c15e498850ac77f5e0265f1d3f30dc4027
export LUA_RESTY_DNS=0.21
export LUA_RESTY_HTTP=0.15
export LUA_RESTY_LOCK=0.08
export LUA_RESTY_UPLOAD_VERSION=0.10
export LUA_RESTY_STRING_VERSION=0.12
export BUILD_PATH=/tmp/build
ARCH=$(uname -m)
get_src()
{
hash="$1"
url="$2"
f=$(basename "$url")
echo "Downloading $url"
curl -sSL "$url" -o "$f"
echo "$hash $f" | sha256sum -c - || exit 10
tar xzf "$f"
rm -rf "$f"
}
apk update
apk upgrade
# install required packages to build
apk add \
bash \
gcc \
clang \
libc-dev \
make \
automake \
openssl-dev \
pcre-dev \
zlib-dev \
linux-headers \
libxslt-dev \
gd-dev \
geoip-dev \
perl-dev \
libedit-dev \
mercurial \
alpine-sdk \
findutils \
curl ca-certificates \
patch \
libaio-dev \
openssl \
cmake \
util-linux \
lmdb-tools \
wget \
curl-dev \
libprotobuf \
git g++ pkgconf flex bison doxygen yajl-dev lmdb-dev libtool autoconf libxml2 libxml2-dev \
python3 \
libmaxminddb-dev \
bc \
unzip \
dos2unix \
yaml-cpp
mkdir -p /etc/nginx
mkdir --verbose -p "$BUILD_PATH"
cd "$BUILD_PATH"
# download, verify and extract the source files
get_src 7c1f7bb13e79433ee930c597d272a64bc6e30c356a48524f38fd34fa88d62473 \
"https://nginx.org/download/nginx-$NGINX_VERSION.tar.gz"
get_src 0e971105e210d272a497567fa2e2c256f4e39b845a5ba80d373e26ba1abfbd85 \
"https://github.com/simpl/ngx_devel_kit/archive/v$NDK_VERSION.tar.gz"
get_src f1ad2459c4ee6a61771aa84f77871f4bfe42943a4aa4c30c62ba3f981f52c201 \
"https://github.com/openresty/set-misc-nginx-module/archive/v$SETMISC_VERSION.tar.gz"
get_src a3dcbab117a9c103bc1ea5200fc00a7b7d2af97ff7fd525f16f8ac2632e30fbf \
"https://github.com/openresty/headers-more-nginx-module/archive/v$MORE_HEADERS_VERSION.tar.gz"
get_src fe683831f832aae4737de1e1026a4454017c2d5f98cb88b08c5411dc380062f8 \
"https://github.com/atomx/nginx-http-auth-digest/archive/$NGINX_DIGEST_AUTH.tar.gz"
get_src 618551948ab14cac51d6e4ad00452312c7b09938f59ebff4f93875013be31f2d \
"https://github.com/yaoweibin/ngx_http_substitutions_filter_module/archive/$NGINX_SUBSTITUTIONS.tar.gz"
get_src 4fc410d7aef0c8a6371afa9f249d2c6cec50ea88785d05052f8f457c35b69c18 \
"https://github.com/opentracing-contrib/nginx-opentracing/archive/v$NGINX_OPENTRACING_VERSION.tar.gz"
get_src 015c4187f7a6426a2b5196f0ccd982aa87f010cf61f507ae3ce5c90523f92301 \
"https://github.com/opentracing/opentracing-cpp/archive/v$OPENTRACING_CPP_VERSION.tar.gz"
get_src 30affaf0f3a84193f7127cc0135da91773ce45d902414082273dae78914f73df \
"https://github.com/rnburn/zipkin-cpp-opentracing/archive/v$ZIPKIN_CPP_VERSION.tar.gz"
get_src 3f943d1ac7bbf64b010a57b8738107c1412cb31c55c73f0772b4148614493b7b \
"https://github.com/SpiderLabs/ModSecurity-nginx/archive/$MODSECURITY_VERSION.tar.gz"
get_src 21257af93a64fee42c04ca6262d292b2e4e0b7b0660c511db357b32fd42ef5d3 \
"https://github.com/jaegertracing/jaeger-client-cpp/archive/v$JAEGER_VERSION.tar.gz"
get_src 464f46744a6be778626d11452c4db3c2d09461080c6db42e358e21af19d542f6 \
"https://github.com/msgpack/msgpack-c/archive/cpp-$MSGPACK_VERSION.tar.gz"
get_src 1ebdcb041ca3bd238813ef6de352285e7418e6001c41a0a260b447260e37716e \
"https://github.com/openresty/lua-nginx-module/archive/v$LUA_NGX_VERSION.tar.gz"
get_src f2c4b7966dbb5c88edb5692616bf0eeca330ee2d43ae04c1cb96ef8fb072ba46 \
"https://github.com/openresty/stream-lua-nginx-module/archive/v$LUA_STREAM_NGX_VERSION.tar.gz"
get_src 2a69815e4ae01aa8b170941a8e1a10b6f6a9aab699dee485d58f021dd933829a \
"https://github.com/openresty/lua-upstream-nginx-module/archive/v$LUA_UPSTREAM_VERSION.tar.gz"
get_src 82bf1af1ee89887648b53c9df566f8b52ec10400f1641c051970a7540b7bf06a \
"https://github.com/openresty/luajit2/archive/$LUAJIT_VERSION.tar.gz"
get_src b84fd2fb0bb0578af4901db31d1c0ae909b532a1016fe6534cbe31a6c3ad6924 \
"https://github.com/DataDog/dd-opentracing-cpp/archive/v$DATADOG_CPP_VERSION.tar.gz"
get_src 6faab57557bd9cc9fc38208f6bc304c1c13cf048640779f98812cf1f9567e202 \
"https://github.com/opentracing/lua-bridge-tracer/archive/v$LUA_BRIDGE_TRACER_VERSION.tar.gz"
get_src 1af5a5632dc8b00ae103d51b7bf225de3a7f0df82f5c6a401996c080106e600e \
"https://github.com/influxdata/nginx-influxdb-module/archive/$NGINX_INFLUXDB_VERSION.tar.gz"
get_src 41378438c833e313a18869d0c4a72704b4835c30acaf7fd68013ab6732ff78a7 \
"https://github.com/leev/ngx_http_geoip2_module/archive/$GEOIP2_VERSION.tar.gz"
get_src 5f629a50ba22347c441421091da70fdc2ac14586619934534e5a0f8a1390a950 \
"https://github.com/yaoweibin/nginx_ajp_module/archive/$NGINX_AJP_VERSION.tar.gz"
get_src 5d16e623d17d4f42cc64ea9cfb69ca960d313e12f5d828f785dd227cc483fcbd \
"https://github.com/openresty/lua-resty-upload/archive/v$LUA_RESTY_UPLOAD_VERSION.tar.gz"
get_src bfd8c4b6c90aa9dcbe047ac798593a41a3f21edcb71904d50d8ac0e8c77d1132 \
"https://github.com/openresty/lua-resty-string/archive/v$LUA_RESTY_STRING_VERSION.tar.gz"
get_src 82209d5a5d9545c6dde3db7857f84345db22162fdea9743d5e2b2094d8d407f8 \
"https://github.com/openresty/lua-resty-balancer/archive/v$LUA_RESTY_BALANCER.tar.gz"
get_src 040878ed9a485ca7f0f8128e4e979280bcf501af875704c8830bec6a68f128f7 \
"https://github.com/openresty/lua-resty-core/archive/v$LUA_RESTY_CORE.tar.gz"
get_src bd6bee4ccc6cf3307ab6ca0eea693a921fab9b067ba40ae12a652636da588ff7 \
"https://github.com/openresty/lua-cjson/archive/$LUA_CJSON_VERSION.tar.gz"
get_src f818b5cef0881e5987606f2acda0e491531a0cb0c126d8dca02e2343edf641ef \
"https://github.com/cloudflare/lua-resty-cookie/archive/$LUA_RESTY_COOKIE_VERSION.tar.gz"
get_src dae9fb572f04e7df0dabc228f21cdd8bbfa1ff88e682e983ef558585bc899de0 \
"https://github.com/openresty/lua-resty-lrucache/archive/v$LUA_RESTY_CACHE.tar.gz"
get_src 2b4683f9abe73e18ca00345c65010c9056777970907a311d6e1699f753141de2 \
"https://github.com/openresty/lua-resty-lock/archive/v$LUA_RESTY_LOCK.tar.gz"
get_src 4aca34f324d543754968359672dcf5f856234574ee4da360ce02c778d244572a \
"https://github.com/openresty/lua-resty-dns/archive/v$LUA_RESTY_DNS.tar.gz"
get_src 987d5754a366d3ccbf745d2765f82595dcff5b94ba6c755eeb6d310447996f32 \
"https://github.com/ledgetech/lua-resty-http/archive/v$LUA_RESTY_HTTP.tar.gz"
# improve compilation times
# NOTE(review): the "- 0" subtracts nothing; it looks like a leftover knob for
# reserving cores from the build -- confirm intent before simplifying.
CORES=$(($(grep -c ^processor /proc/cpuinfo) - 0))
export MAKEFLAGS=-j${CORES}
export CTEST_BUILD_FLAGS=${MAKEFLAGS}
export HUNTER_JOBS_NUMBER=${CORES}
export HUNTER_USE_CACHE_SERVERS=true
# Install luajit from openresty fork
# These paths are consumed later by lua-nginx-module and the resty library
# installs below.
export LUAJIT_LIB=/usr/local/lib
export LUA_LIB_DIR="$LUAJIT_LIB/lua"
export LUAJIT_INC=/usr/local/include/luajit-2.1
cd "$BUILD_PATH/luajit2-$LUAJIT_VERSION"
# CCDEBUG=-g keeps debug symbols in the LuaJIT build.
make CCDEBUG=-g
make install
# Expose the LuaJIT binary under the conventional "lua" name.
ln -s /usr/local/bin/luajit /usr/local/bin/lua
cd "$BUILD_PATH"
# Git tuning: core.compression -1 lets zlib pick the default level, keeping
# the large clones below cheap on CPU.
git config --global --add core.compression -1
# build opentracing lib (static + PIC so it can be linked into nginx dynamic
# modules)
cd "$BUILD_PATH/opentracing-cpp-$OPENTRACING_CPP_VERSION"
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=OFF \
-DWITH_BOOST_STATIC=ON \
-DBUILD_SHARED_LIBS=OFF \
-DBUILD_MOCKTRACER=OFF \
-DBUILD_STATIC_LIBS=ON \
-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=true \
..
make
make install
# build jaeger lib
cd "$BUILD_PATH/jaeger-client-cpp-$JAEGER_VERSION"
# -Werror turns harmless ABI-change notes on newer GCC into build failures.
sed -i 's/-Werror/-Wno-psabi/' CMakeLists.txt
# Linker version script: export only the OpenTracing plugin factory symbol.
cat <<EOF > export.map
{
global:
OpenTracingMakeTracerFactory;
local: *;
};
EOF
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=OFF \
-DJAEGERTRACING_BUILD_EXAMPLES=OFF \
-DJAEGERTRACING_BUILD_CROSSDOCK=OFF \
-DJAEGERTRACING_COVERAGE=OFF \
-DJAEGERTRACING_PLUGIN=ON \
-DHUNTER_CONFIGURATION_TYPES=Release \
-DBUILD_SHARED_LIBS=OFF \
-DJAEGERTRACING_WITH_YAML_CPP=ON \
-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=true \
..
make
make install
# Hunter installed Jaeger's third-party dependencies under a build-specific
# prefix; CC_OPT/LD_OPT later reference $HUNTER_INSTALL_DIR for headers/libs.
# BUG FIX: the export originally ended with a trailing backslash, which glued
# the following "mv" onto the export command. export then tried to export
# "libjaegertracing_plugin.so" (not a valid identifier) and the plugin was
# never moved into /usr/local/lib. They must be two separate commands.
export HUNTER_INSTALL_DIR=$(cat _3rdParty/Hunter/install-root-dir)
mv libjaegertracing_plugin.so /usr/local/lib/libjaegertracing_plugin.so
# build zipkin lib
cd "$BUILD_PATH/zipkin-cpp-opentracing-$ZIPKIN_CPP_VERSION"
# Same version script as for jaeger: expose only the tracer factory symbol.
cat <<EOF > export.map
{
global:
OpenTracingMakeTracerFactory;
local: *;
};
EOF
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=OFF \
-DWITH_BOOST_STATIC=ON \
-DBUILD_PLUGIN=ON \
-DBUILD_TESTING=OFF \
-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=true \
..
make
make install
# build msgpack lib (static; a dependency of the datadog tracer below)
cd "$BUILD_PATH/msgpack-c-cpp-$MSGPACK_VERSION"
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=OFF \
-DMSGPACK_BUILD_EXAMPLES=OFF \
-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=true \
..
make
make install
# build datadog lib
cd "$BUILD_PATH/dd-opentracing-cpp-$DATADOG_CPP_VERSION"
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=OFF \
-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=true \
..
make
make install
# Get Brotli source and deps
# ngx_brotli pins its brotli dependency as a git submodule, hence the
# init/update pair instead of a tarball.
cd "$BUILD_PATH"
git clone --depth=1 https://github.com/google/ngx_brotli.git
cd ngx_brotli
git submodule init
git submodule update
# ssdeep (fuzzy hashing) is a runtime dependency of libmodsecurity.
cd "$BUILD_PATH"
git clone --depth=1 https://github.com/ssdeep-project/ssdeep
cd ssdeep/
./bootstrap
./configure
make
make install
# build modsecurity library
cd "$BUILD_PATH"
git clone --depth=1 -b $MODSECURITY_LIB_VERSION https://github.com/SpiderLabs/ModSecurity
cd ModSecurity/
git submodule init
git submodule update
# build.sh regenerates the autotools files before configure.
sh build.sh
./configure \
--disable-doxygen-doc \
--disable-doxygen-html \
--disable-examples
make
make install
# Ship the recommended baseline config and the unicode mapping table.
mkdir -p /etc/nginx/modsecurity
cp modsecurity.conf-recommended /etc/nginx/modsecurity/modsecurity.conf
cp unicode.mapping /etc/nginx/modsecurity/unicode.mapping
# Replace serial logging with concurrent
sed -i 's|SecAuditLogType Serial|SecAuditLogType Concurrent|g' /etc/nginx/modsecurity/modsecurity.conf
# Concurrent logging implies the log is stored in several files
echo "SecAuditLogStorageDir /var/log/audit/" >> /etc/nginx/modsecurity/modsecurity.conf
# Download owasp modsecurity crs
cd /etc/nginx/
git clone -b $OWASP_MODSECURITY_CRS_VERSION https://github.com/coreruleset/coreruleset
# Keep the historical directory name that the nginx config references.
mv coreruleset owasp-modsecurity-crs
cd owasp-modsecurity-crs
# Activate the shipped example setup and the before/after exclusion hooks.
mv crs-setup.conf.example crs-setup.conf
mv rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf.example rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
mv rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf.example rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf
cd ..
# OWASP CRS v3 rules
# Generate an explicit, ordered include list (setup first, then request rules,
# then response rules) consumed by the nginx ModSecurity configuration.
echo "
Include /etc/nginx/owasp-modsecurity-crs/crs-setup.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-901-INITIALIZATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-903.9001-DRUPAL-EXCLUSION-RULES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-903.9002-WORDPRESS-EXCLUSION-RULES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-905-COMMON-EXCEPTIONS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-910-IP-REPUTATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-911-METHOD-ENFORCEMENT.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-912-DOS-PROTECTION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-913-SCANNER-DETECTION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-920-PROTOCOL-ENFORCEMENT.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-921-PROTOCOL-ATTACK.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-930-APPLICATION-ATTACK-LFI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-931-APPLICATION-ATTACK-RFI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-932-APPLICATION-ATTACK-RCE.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-933-APPLICATION-ATTACK-PHP.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-941-APPLICATION-ATTACK-XSS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-942-APPLICATION-ATTACK-SQLI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-949-BLOCKING-EVALUATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-950-DATA-LEAKAGES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-951-DATA-LEAKAGES-SQL.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-952-DATA-LEAKAGES-JAVA.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-953-DATA-LEAKAGES-PHP.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-954-DATA-LEAKAGES-IIS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-959-BLOCKING-EVALUATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-980-CORRELATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf
" > /etc/nginx/owasp-modsecurity-crs/nginx-modsecurity.conf
# build nginx
cd "$BUILD_PATH/nginx-$NGINX_VERSION"
# apply nginx patches
# Every file dropped into /patches is applied in directory order with -p1.
for PATCH in `ls /patches`;do
echo "Patch: $PATCH"
patch -p1 < /patches/$PATCH
done
# Core nginx feature switches handed to ./configure below.
WITH_FLAGS="--with-debug \
--with-compat \
--with-pcre-jit \
--with-http_ssl_module \
--with-http_stub_status_module \
--with-http_realip_module \
--with-http_auth_request_module \
--with-http_addition_module \
--with-http_geoip_module \
--with-http_gzip_static_module \
--with-http_sub_module \
--with-http_v2_module \
--with-stream \
--with-stream_ssl_module \
--with-stream_realip_module \
--with-stream_ssl_preread_module \
--with-threads \
--with-http_secure_link_module \
--with-http_gunzip_module"
# "Combining -flto with -g is currently experimental and expected to produce unexpected results."
# https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
# Hardened compiler flags (stack protector, FORTIFY, PIE/PIC) plus the Hunter
# include path so the tracing plugins' headers resolve.
CC_OPT="-g -Og -fPIE -fstack-protector-strong \
-Wformat \
-Werror=format-security \
-Wno-deprecated-declarations \
-fno-strict-aliasing \
-D_FORTIFY_SOURCE=2 \
--param=ssp-buffer-size=4 \
-DTCP_FASTOPEN=23 \
-fPIC \
-I$HUNTER_INSTALL_DIR/include \
-Wno-cast-function-type"
# Matching hardened link flags (full RELRO + immediate binding).
LD_OPT="-fPIE -fPIC -pie -Wl,-z,relro -Wl,-z,now -L$HUNTER_INSTALL_DIR/lib"
# file-aio is not available on aarch64 builds here.
if [[ ${ARCH} != "aarch64" ]]; then
WITH_FLAGS+=" --with-file-aio"
fi
if [[ ${ARCH} == "x86_64" ]]; then
CC_OPT+=' -m64 -mtune=native'
fi
# Third-party modules: statically linked (--add-module) when always needed,
# dynamic (--add-dynamic-module) when loadable on demand.
WITH_MODULES=" \
--add-module=$BUILD_PATH/ngx_devel_kit-$NDK_VERSION \
--add-module=$BUILD_PATH/set-misc-nginx-module-$SETMISC_VERSION \
--add-module=$BUILD_PATH/headers-more-nginx-module-$MORE_HEADERS_VERSION \
--add-module=$BUILD_PATH/ngx_http_substitutions_filter_module-$NGINX_SUBSTITUTIONS \
--add-module=$BUILD_PATH/lua-nginx-module-$LUA_NGX_VERSION \
--add-module=$BUILD_PATH/stream-lua-nginx-module-$LUA_STREAM_NGX_VERSION \
--add-module=$BUILD_PATH/lua-upstream-nginx-module-$LUA_UPSTREAM_VERSION \
--add-module=$BUILD_PATH/nginx_ajp_module-${NGINX_AJP_VERSION} \
--add-dynamic-module=$BUILD_PATH/nginx-http-auth-digest-$NGINX_DIGEST_AUTH \
--add-dynamic-module=$BUILD_PATH/nginx-influxdb-module-$NGINX_INFLUXDB_VERSION \
--add-dynamic-module=$BUILD_PATH/nginx-opentracing-$NGINX_OPENTRACING_VERSION/opentracing \
--add-dynamic-module=$BUILD_PATH/ModSecurity-nginx-$MODSECURITY_VERSION \
--add-dynamic-module=$BUILD_PATH/ngx_http_geoip2_module-${GEOIP2_VERSION} \
--add-dynamic-module=$BUILD_PATH/ngx_brotli"
./configure \
--prefix=/usr/local/nginx \
--conf-path=/etc/nginx/nginx.conf \
--modules-path=/etc/nginx/modules \
--http-log-path=/var/log/nginx/access.log \
--error-log-path=/var/log/nginx/error.log \
--lock-path=/var/lock/nginx.lock \
--pid-path=/run/nginx.pid \
--http-client-body-temp-path=/var/lib/nginx/body \
--http-fastcgi-temp-path=/var/lib/nginx/fastcgi \
--http-proxy-temp-path=/var/lib/nginx/proxy \
--http-scgi-temp-path=/var/lib/nginx/scgi \
--http-uwsgi-temp-path=/var/lib/nginx/uwsgi \
${WITH_FLAGS} \
--without-mail_pop3_module \
--without-mail_smtp_module \
--without-mail_imap_module \
--without-http_uwsgi_module \
--without-http_scgi_module \
--with-cc-opt="${CC_OPT}" \
--with-ld-opt="${LD_OPT}" \
--user=www-data \
--group=www-data \
${WITH_MODULES}
make
# Build the dynamic modules declared above, then install everything.
make modules
make install
# Install the pure-Lua runtime libraries into $LUA_LIB_DIR.
cd "$BUILD_PATH/lua-resty-core-$LUA_RESTY_CORE"
make install
cd "$BUILD_PATH/lua-resty-balancer-$LUA_RESTY_BALANCER"
make all
make install
# lua-cjson's Makefile locates headers via LUA_INCLUDE_DIR / /usr/include/lua5.1.
export LUA_INCLUDE_DIR=/usr/local/include/luajit-2.1
ln -s $LUA_INCLUDE_DIR /usr/include/lua5.1
cd "$BUILD_PATH/lua-cjson-$LUA_CJSON_VERSION"
make all
make install
cd "$BUILD_PATH/lua-resty-cookie-$LUA_RESTY_COOKIE_VERSION"
make all
make install
cd "$BUILD_PATH/lua-resty-lrucache-$LUA_RESTY_CACHE"
make install
cd "$BUILD_PATH/lua-resty-dns-$LUA_RESTY_DNS"
make install
cd "$BUILD_PATH/lua-resty-lock-$LUA_RESTY_LOCK"
make install
# required for OCSP verification
cd "$BUILD_PATH/lua-resty-http-$LUA_RESTY_HTTP"
make install
cd "$BUILD_PATH/lua-resty-upload-$LUA_RESTY_UPLOAD_VERSION"
make install
cd "$BUILD_PATH/lua-resty-string-$LUA_RESTY_STRING_VERSION"
make install
# build Lua bridge tracer
# (left disabled; kept for reference)
#cd "$BUILD_PATH/lua-bridge-tracer-$LUA_BRIDGE_TRACER_VERSION"
#mkdir .build
#cd .build
#
#cmake -DCMAKE_BUILD_TYPE=Release \
# -DBUILD_SHARED=OFF \
# -WITH_BOOST_STATIC=ON \
# ..
#
#make
#make install
# mimalloc
# Alternative allocator, pinned to a specific release tag.
cd "$BUILD_PATH"
git clone --depth=1 -b v1.6.4 https://github.com/microsoft/mimalloc
cd mimalloc
mkdir -p out/release
cd out/release
cmake ../..
make
make install
# check libraries are ok
#echo "Checking libraries..."
#for LIB in $(find /usr/local/lib -name "*.so");do
# ldd $LIB | grep 'not found'
# if [ $? -eq 0 ]; then
# echo "Dependencies is missing for $LIB"
# exit 1
# fi
#done
# update image permissions
# Directories the unprivileged nginx user must be able to write at runtime.
writeDirs=( \
/etc/nginx \
/usr/local/nginx \
/opt/modsecurity/var/log \
/opt/modsecurity/var/upload \
/opt/modsecurity/var/audit \
/var/log/audit \
/var/log/nginx \
);
# Fixed UID/GID 101 so file ownership is stable across image rebuilds.
addgroup -Sg 101 www-data
adduser -S -D -H -u 101 -h /usr/local/nginx -s /sbin/nologin -G www-data -g www-data www-data
for dir in "${writeDirs[@]}"; do
mkdir -p "${dir}";
# FIX: use the POSIX "owner:group" separator; the legacy "owner.group" form
# is deprecated in GNU coreutils and not universally supported.
chown -R www-data:www-data "${dir}";
done
# Drop repo metadata and test fixtures that are useless at runtime.
rm -rf /etc/nginx/owasp-modsecurity-crs/.git
rm -rf /etc/nginx/owasp-modsecurity-crs/util/regression-tests
# remove .a files
# FIX: -print0/-0 keeps unusual file names intact and -r avoids invoking rm
# with no operands (an error) when nothing matches.
find /usr/local -name "*.a" -print0 | xargs -0 -r /bin/rm
|
<reponame>nimoqqq/roses
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.file.modular.controller;
import cn.stylefeng.roses.kernel.db.api.pojo.page.PageResult;
import cn.stylefeng.roses.kernel.file.api.constants.FileConstants;
import cn.stylefeng.roses.kernel.file.api.pojo.request.SysFileInfoRequest;
import cn.stylefeng.roses.kernel.file.api.pojo.response.SysFileInfoListResponse;
import cn.stylefeng.roses.kernel.file.api.pojo.response.SysFileInfoResponse;
import cn.stylefeng.roses.kernel.file.modular.entity.SysFileInfo;
import cn.stylefeng.roses.kernel.file.modular.service.SysFileInfoService;
import cn.stylefeng.roses.kernel.rule.annotation.BusinessLog;
import cn.stylefeng.roses.kernel.rule.enums.YesOrNotEnum;
import cn.stylefeng.roses.kernel.rule.pojo.response.ResponseData;
import cn.stylefeng.roses.kernel.rule.pojo.response.SuccessResponseData;
import cn.stylefeng.roses.kernel.rule.util.HttpServletUtil;
import cn.stylefeng.roses.kernel.scanner.api.annotation.ApiResource;
import cn.stylefeng.roses.kernel.scanner.api.annotation.GetResource;
import cn.stylefeng.roses.kernel.scanner.api.annotation.PostResource;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * File information management controller.
 * <p>
 * Module overview:
 * 1. Files are versioned: every operation on a file produces a new version record
 * 2. Rollback is supported: after an update a file can be reverted to an earlier version
 * <p>
 * The file-management API supports two usage patterns:
 * 1. Contract scenario: a file must stay exactly as uploaded; updating its content does not
 * affect already-signed contracts, so business data should reference the file ID<br>
 * Updating the file leaves previously stored data untouched
 * 2. UI scenario: after an update every business reference picks up the new version, so
 * business data should reference the file CODE<br>
 *
 * @author majianguo
 * @date 2020/12/27 13:39
 */
@RestController
@ApiResource(name = "文件信息相关接口")
public class SysFileInfoController {
// Service holding the storage/versioning logic; this controller is a thin HTTP facade.
@Resource
private SysFileInfoService sysFileInfoService;
/**
 * Upload a file.
 * <p>
 * Pass 5 for the fileLocation parameter to store the file in the database
 * <p>
 * Other fileLocation values (or none) do not choose between local disk, Aliyun or
 * other backends
 *
 * @author majianguo
 * @date 2020/12/27 13:17
 */
@PostResource(name = "上传文件", path = "/sysFileInfo/upload", requiredPermission = false)
public ResponseData<SysFileInfoResponse> upload(@RequestPart("file") MultipartFile file, @Validated(SysFileInfoRequest.add.class) SysFileInfoRequest sysFileInfoRequest) {
SysFileInfoResponse fileUploadInfoResult = this.sysFileInfoService.uploadFile(file, sysFileInfoRequest);
return new SuccessResponseData<>(fileUploadInfoResult);
}
/**
 * Upload endpoint for the TinyMCE rich-text editor.
 * The editor expects a JSON response of the form:
 * { "location": "folder/sub-folder/new-location.png" }
 * <p>
 * NOTE(review): the resource name duplicates the plain upload endpoint's name;
 * the annotation string is left unchanged because it is behavior -- confirm
 * whether a distinct name is wanted.
 *
 * @author liuhanqing
 * @date 2021/1/17 11:17
 */
@PostResource(name = "上传文件", path = "/sysFileInfo/tinymceUpload", requiredPermission = false)
public Map<String, String> tinymceUpload(@RequestPart("file") MultipartFile file, SysFileInfoRequest sysFileInfoRequest) {
Map<String, String> resultMap = new HashMap<>(1);
// Editor uploads are always stored as public (non-secret) files.
sysFileInfoRequest.setSecretFlag(YesOrNotEnum.N.getCode());
SysFileInfoResponse fileUploadInfoResult = this.sysFileInfoService.uploadFile(file, sysFileInfoRequest);
// Hand back the public preview URL so the editor can embed the file directly.
resultMap.put("location", FileConstants.FILE_PUBLIC_PREVIEW_URL + "?fileId=" + fileUploadInfoResult.getFileId());
return resultMap;
}
/**
 * Preview a private file (login required; the secret flag is forced to Y).
 *
 * @author fengshuonan
 * @date 2020/11/29 11:29
 */
@GetResource(name = "私有文件预览", path = FileConstants.FILE_PRIVATE_PREVIEW_URL, requiredPermission = false)
public void privatePreview(@Validated(SysFileInfoRequest.detail.class) SysFileInfoRequest sysFileInfoRequest) {
HttpServletResponse response = HttpServletUtil.getResponse();
sysFileInfoRequest.setSecretFlag(YesOrNotEnum.Y.getCode());
this.sysFileInfoService.preview(sysFileInfoRequest, response);
}
/**
 * Preview a public file (no login required; the secret flag is forced to N).
 *
 * @author majianguo
 * @date 2020/12/27 13:17
 */
@GetResource(name = "公有文件预览", path = FileConstants.FILE_PUBLIC_PREVIEW_URL, requiredPermission = false, requiredLogin = false)
public void publicPreview(@Validated(SysFileInfoRequest.detail.class) SysFileInfoRequest sysFileInfoRequest) {
HttpServletResponse response = HttpServletUtil.getResponse();
sysFileInfoRequest.setSecretFlag(YesOrNotEnum.N.getCode());
this.sysFileInfoService.preview(sysFileInfoRequest, response);
}
/**
 * Generic file preview addressed by bucket name and object name instead of ID.
 *
 * @author fengshuonan
 * @date 2020/11/29 11:29
 */
@GetResource(name = "文件预览,通过bucketName和objectName", path = FileConstants.FILE_PREVIEW_BY_OBJECT_NAME, requiredPermission = false, requiredLogin = false)
public void previewByBucketNameObjectName(@Validated(SysFileInfoRequest.previewByObjectName.class) SysFileInfoRequest sysFileInfoRequest) {
HttpServletResponse response = HttpServletUtil.getResponse();
sysFileInfoService.previewByBucketAndObjName(sysFileInfoRequest, response);
}
/**
 * Download a private file (login required; the secret flag is forced to Y).
 *
 * @author majianguo
 * @date 2020/12/27 13:17
 */
@GetResource(name = "私有文件下载", path = "/sysFileInfo/privateDownload", requiredPermission = false)
public void privateDownload(@Validated(SysFileInfoRequest.detail.class) SysFileInfoRequest sysFileInfoRequest) {
HttpServletResponse response = HttpServletUtil.getResponse();
sysFileInfoRequest.setSecretFlag(YesOrNotEnum.Y.getCode());
this.sysFileInfoService.download(sysFileInfoRequest, response);
}
/**
 * Download a public file (no login required; the secret flag is forced to N).
 *
 * @author majianguo
 * @date 2020/12/27 13:17
 */
@GetResource(name = "公有文件下载", path = "/sysFileInfo/publicDownload", requiredLogin = false, requiredPermission = false)
public void publicDownload(@Validated(SysFileInfoRequest.detail.class) SysFileInfoRequest sysFileInfoRequest) {
HttpServletResponse response = HttpServletUtil.getResponse();
sysFileInfoRequest.setSecretFlag(YesOrNotEnum.N.getCode());
this.sysFileInfoService.download(sysFileInfoRequest, response);
}
/**
 * Replace a file's content (creates a new version).
 * <p>
 * Note: a confirmation call must follow this one before the replacement takes effect
 *
 * @author majianguo
 * @date 2020/12/16 15:34
 */
@PostResource(name = "替换文件", path = "/sysFileInfo/update", requiredPermission = false)
public ResponseData<SysFileInfoResponse> update(@RequestPart("file") MultipartFile file, @Validated(SysFileInfoRequest.edit.class) SysFileInfoRequest sysFileInfoRequest) {
SysFileInfoResponse fileUploadInfoResult = this.sysFileInfoService.updateFile(file, sysFileInfoRequest);
return new SuccessResponseData<>(fileUploadInfoResult);
}
/**
 * Roll a file back to an earlier version.
 *
 * @author majianguo
 * @date 2020/12/16 15:34
 */
@PostResource(name = "版本回退", path = "/sysFileInfo/versionBack", requiredPermission = false)
public ResponseData<SysFileInfoResponse> versionBack(@Validated(SysFileInfoRequest.versionBack.class) SysFileInfoRequest sysFileInfoRequest) {
SysFileInfoResponse fileUploadInfoResult = this.sysFileInfoService.versionBack(sysFileInfoRequest);
return new SuccessResponseData<>(fileUploadInfoResult);
}
/**
 * Look up attachment details for a list of attachment IDs.
 *
 * @param fileIds attachment IDs
 * @return attachment response list
 * @author majianguo
 * @date 2020/12/27 13:17
 */
@GetResource(name = "根据附件IDS查询附件信息", path = "/sysFileInfo/getFileInfoListByFileIds", requiredPermission = false)
public ResponseData<List<SysFileInfoResponse>> getFileInfoListByFileIds(@RequestParam(value = "fileIds") String fileIds) {
List<SysFileInfoResponse> list = this.sysFileInfoService.getFileInfoListByFileIds(fileIds);
return new SuccessResponseData<>(list);
}
/**
 * Download several public files as one archive (no login required).
 *
 * @author majianguo
 * @date 2020/12/27 13:17
 */
@GetResource(name = "公有打包下载文件", path = "/sysFileInfo/publicPackagingDownload", requiredPermission = false, requiredLogin = false)
public void publicPackagingDownload(@RequestParam(value = "fileIds") String fileIds) {
HttpServletResponse response = HttpServletUtil.getResponse();
this.sysFileInfoService.packagingDownload(fileIds, YesOrNotEnum.N.getCode(), response);
}
/**
 * Download several private files as one archive (login required).
 *
 * @author majianguo
 * @date 2020/12/27 13:18
 */
@GetResource(name = "私有打包下载文件", path = "/sysFileInfo/privatePackagingDownload", requiredPermission = false)
public void privatePackagingDownload(@RequestParam(value = "fileIds") String fileIds) {
HttpServletResponse response = HttpServletUtil.getResponse();
this.sysFileInfoService.packagingDownload(fileIds, YesOrNotEnum.Y.getCode(), response);
}
/**
 * Permanently delete a file record (hard delete, not a soft flag).
 *
 * @author fengshuonan
 * @date 2020/11/29 11:19
 */
@PostResource(name = "删除文件信息(真删除文件信息)", path = "/sysFileInfo/deleteReally", requiredPermission = false)
@BusinessLog
public ResponseData<?> deleteReally(@RequestBody @Validated(SysFileInfoRequest.delete.class) SysFileInfoRequest sysFileInfoRequest) {
this.sysFileInfoService.deleteReally(sysFileInfoRequest);
return new SuccessResponseData<>();
}
/**
 * Paged query over the file information table.
 *
 * @author fengshuonan
 * @date 2020/11/29 11:29
 */
@GetResource(name = "分页查询文件信息表", path = "/sysFileInfo/fileInfoListPage", requiredPermission = false)
public ResponseData<PageResult<SysFileInfoListResponse>> fileInfoListPage(SysFileInfoRequest sysFileInfoRequest) {
return new SuccessResponseData<>(this.sysFileInfoService.fileInfoListPage(sysFileInfoRequest));
}
/**
 * Detail view of a single file information record.
 * NOTE(review): this returns the SysFileInfo entity directly rather than a
 * response DTO -- confirm that exposing the entity is intended.
 *
 * @author fengshuonan
 * @date 2020/11/29 11:29
 */
@GetResource(name = "查看详情文件信息表", path = "/sysFileInfo/detail", requiredPermission = false)
public ResponseData<SysFileInfo> detail(@Validated(SysFileInfoRequest.detail.class) SysFileInfoRequest sysFileInfoRequest) {
return new SuccessResponseData<>(sysFileInfoService.detail(sysFileInfoRequest));
}
}
|
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
set -o errexit
# Pull in grenade settings/helpers, cloud-admin credentials and ini helpers.
source $GRENADE_DIR/grenaderc
source $GRENADE_DIR/functions
source $TOP_DIR/openrc admin admin
source $TOP_DIR/inc/ini-config
set -o xtrace
# Throwaway identity used for the barbican upgrade round-trip test.
BARBICAN_USER=barbican_grenade
BARBICAN_PROJECT=barbican_grenade
BARBICAN_PASS=pass
# Point the OpenStack CLI credentials at the grenade barbican identity.
# Both the legacy tenant variable and the current project variable are set so
# the switch works regardless of which one the installed client honours.
function _barbican_set_user {
    OS_USERNAME=$BARBICAN_USER
    OS_PASSWORD=$BARBICAN_PASS
    OS_PROJECT_NAME=$BARBICAN_PROJECT
    OS_TENANT_NAME=$BARBICAN_PROJECT
}
# Create the grenade barbican project, user and a test secret, recording their
# IDs with resource_save so verify/destroy can find them after the upgrade.
function create {
# create a tenant for the server
# "eval $(openstack ... -f shell -c id)" sets $id in the current shell.
eval $(openstack project create -f shell -c id $BARBICAN_PROJECT)
if [[ -z "$id" ]]; then
die $LINENO "Didn't create $BARBICAN_PROJECT project"
fi
resource_save barbican project_id $id
local project_id=$id
# create the user, and set $id locally
eval $(openstack user create $BARBICAN_USER \
--project $id \
--password $BARBICAN_PASS \
-f shell -c id)
if [[ -z "$id" ]]; then
die $LINENO "Didn't create $BARBICAN_USER user"
fi
resource_save barbican user_id $id
# NOTE(review): grants the admin role to the test user -- presumably needed
# for secret storage in this deployment; confirm.
openstack role add admin --user $id --project $project_id
_barbican_set_user
local secret_name=test_secret
local secret_data=this_is_a_secret_data
# Store a secret, then capture its href from the listing for later phases.
openstack secret store -p $secret_data -n $secret_name
secret_link=$(openstack secret list | awk '/ test_secret / {print $2}')
resource_save barbican secret_link $secret_link
}
# Post-upgrade check: the secret stored before the upgrade must still be
# retrievable via its saved href (errexit makes a failed get fatal).
function verify {
_barbican_set_user
secret_link=$(resource_get barbican secret_link)
openstack secret get $secret_link
}
# Nothing to verify while the API is down; grenade still requires the hook.
function verify_noapi {
:
}
# Tear down everything create built. errexit is relaxed so cleanup continues
# even if an individual delete fails.
function destroy {
_barbican_set_user
set +o errexit
openstack secret delete $(resource_get barbican secret_link)
local user_id=$(resource_get barbican user_id)
local project_id=$(resource_get barbican project_id)
# Switch back to the cloud admin to remove the test user and project.
source $TOP_DIR/openrc admin admin
openstack user delete $user_id
openstack project delete $project_id
}
# Dispatcher
# Grenade invokes this script with one of the phase names below as $1;
# any other argument falls through as a no-op.
case $1 in
"create")
create
;;
"verify")
verify
;;
"verify_noapi")
verify_noapi
;;
"destroy")
destroy
;;
esac
|
#!/bin/sh
# Run the Galaxy dataset-cleanup script from the repository root and append its
# report to purge_histories.log. Extra command-line arguments are passed
# through via "$@". (Flag semantics of -d/-2/-r are defined in
# cleanup_datasets.py -- consult it before changing them.)
cd "$(dirname "$0")"/../..
python ./scripts/cleanup_datasets/cleanup_datasets.py -d 60 -2 -r "$@" >> ./scripts/cleanup_datasets/purge_histories.log
|
<filename>raw/local_Holiday_maintenance.sql<gh_stars>0
-- Ad-hoc maintenance statements for the local holiday tables.
-- NOTE(review): the string literal relies on the session NLS date format being
-- DD/MM/YYYY -- confirm, or use an explicit TO_DATE.
SELECT add_months('20/04/2021',96) from dual
-- Lock the 2029 holiday-master rows for manual editing; NOWAIT fails fast if
-- another session already holds them.
SELECT * FROM sttm_lcl_hol_master hm WHERE hm.year = 2029 FOR UPDATE NOWAIT;
-- NOTE(review): as written this re-inserts the 2029 rows into the same table,
-- duplicating them -- presumably a year transformation was intended; confirm.
insert into sttm_lcl_holiday
SELECT * FROM sttm_lcl_holiday lh WHERE lh.year =2029
|
<filename>data-mongodb/src/test/java/io/micronaut/data/document/mongodb/repositories/MongoZoneRepository.java<gh_stars>100-1000
package io.micronaut.data.document.mongodb.repositories;
import io.micronaut.data.mongodb.annotation.MongoRepository;
import io.micronaut.data.document.tck.repositories.ZoneRepository;
/**
 * MongoDB-backed variant of {@link ZoneRepository}. No methods are declared
 * here; implementations for the inherited operations are generated by
 * Micronaut Data for the Mongo backend.
 */
@MongoRepository
public interface MongoZoneRepository extends ZoneRepository {
}
|
import pandas as pd
from sklearn.model_selection import train_test_split
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import Sequential
from keras.layers import Embedding, Dense, Flatten, Conv1D, MaxPooling1D, Dropout

# Hyperparameters shared between preprocessing and the model. Keeping them in
# one place guarantees the Embedding layer's expectations match the input.
VOCAB_SIZE = 1000
MAX_LEN = 20

# load data: expects an 'emails.csv' with 'text' and 'is_spam' columns.
df = pd.read_csv('emails.csv')

# tokenize text (keep only the VOCAB_SIZE most frequent words)
tokenizer = Tokenizer(num_words=VOCAB_SIZE)
tokenizer.fit_on_texts(df['text'])
tokenized_sequences = tokenizer.texts_to_sequences(df['text'])

# pad sequences
# BUG FIX: the original called pad_sequences() without maxlen, so sequences
# were padded to the longest email in the corpus, while the Embedding layer
# below was built with input_length=20. Pad/truncate explicitly to MAX_LEN so
# preprocessing and model agree.
padded_sequences = pad_sequences(tokenized_sequences, maxlen=MAX_LEN)

# split into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(padded_sequences, df['is_spam'], test_size=0.2, random_state=42)

# create model: embedding -> flatten -> two dense+dropout stages -> softmax
# over the two classes (paired with sparse labels below).
model = Sequential()
model.add(Embedding(input_dim=VOCAB_SIZE, output_dim=50, input_length=MAX_LEN))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(2, activation='softmax'))

# compile model (sparse loss: y is integer class labels, not one-hot)
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])

# train model
model.fit(X_train, y_train, epochs=50, batch_size=32, verbose=1, validation_data=(X_test, y_test))
|
<reponame>arcadium-dev/core
// Copyright 2021 arcadium.dev <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package server
// mocklog "arcadium.dev/core/log/mock"
/*
func TestNewServerConfigSuccess(t *testing.T) {
t.Parallel()
ctrl := gomock.NewController(t)
mockLoggerConfig := mocklog.NewMockConfig(ctrl)
mockDBConfig := mocksql.NewMockConfig(ctrl)
mockGRPCServerConfig := mockgrpc.NewMockConfig(ctrl)
mockHTTPServerConfig := mockhttp.NewMockConfig(ctrl)
ctors := ConfigConstructors{
NewLoggerConfig: func(opts ...config.Option) (log.Config, error) {
return mockLoggerConfig, nil
},
NewDBConfig: func(opts ...config.Option) (sql.Config, error) {
return mockDBConfig, nil
},
NewGRPCServerConfig: func(opts ...config.Option) (grpc.Config, error) {
return mockGRPCServerConfig, nil
},
NewHTTPServerConfig: func(opts ...config.Option) (http.Config, error) {
return mockHTTPServerConfig, nil
},
}
cfg, err := NewConfig(ctors)
if cfg == nil || err != nil {
t.Errorf("unexpected failure: %+v %+v", cfg, err)
}
if cfg.Logger() != mockLoggerConfig {
t.Errorf("unexpected failure: %+v", cfg.Logger())
}
if cfg.DB() != mockDBConfig {
t.Errorf("unexpected failure: %+v", cfg.DB())
}
if cfg.GRPCServer() != mockGRPCServerConfig {
t.Errorf("unexpected failure: %+v", cfg.GRPCServer())
}
if cfg.HTTPServer() != mockHTTPServerConfig {
t.Errorf("unexpected failure: %+v", cfg.HTTPServer())
}
}
func createConfig(t *testing.T, cfgFuncs ...interface{}) (*Config, error) {
t.Helper()
ctor := ConfigConstructors{
NewLoggerConfig: func(opts ...config.Option) (log.Config, error) { return nil, nil },
NewDBConfig: func(opts ...config.Option) (sql.Config, error) { return nil, nil },
NewGRPCServerConfig: func(opts ...config.Option) (grpc.Config, error) { return nil, nil },
NewHTTPServerConfig: func(opts ...config.Option) (http.Config, error) { return nil, nil },
}
for _, cfgFunc := range cfgFuncs {
switch f := cfgFunc.(type) {
case func(opts ...config.Option) (log.Config, error):
ctor.NewLoggerConfig = f
case func(opts ...config.Option) (sql.Config, error):
ctor.NewDBConfig = f
case func(opts ...config.Option) (grpc.Config, error):
ctor.NewGRPCServerConfig = f
case func(opts ...config.Option) (http.Config, error):
ctor.NewHTTPServerConfig = f
default:
t.Errorf("Unexpected cfgCtor")
}
}
return NewConfig(ctor)
}
func TestNewServerConfigFailures(t *testing.T) {
t.Parallel()
t.Run("Logger failure", func(t *testing.T) {
t.Parallel()
expectedErr := "NewLogger failure"
cfg, err := createConfig(t, func(opts ...config.Option) (log.Config, error) { return nil, errors.New(expectedErr) })
if cfg != nil || err.Error() != expectedErr {
t.Errorf("Error expected: %+v %+v", cfg, err)
}
})
t.Run("DB failure", func(t *testing.T) {
t.Parallel()
expectedErr := "NewDB failure"
cfg, err := createConfig(t, func(opts ...config.Option) (sql.Config, error) { return nil, errors.New(expectedErr) })
if cfg != nil || err.Error() != expectedErr {
t.Errorf("Error expected: %+v %+v", cfg, err)
}
})
t.Run("DB failure", func(t *testing.T) {
t.Parallel()
expectedErr := "NewGRPCServer failure"
cfg, err := createConfig(t, func(opts ...config.Option) (grpc.Config, error) { return nil, errors.New(expectedErr) })
if cfg != nil || err.Error() != expectedErr {
t.Errorf("Error expected: %+v %+v", cfg, err)
}
})
t.Run("DB failure", func(t *testing.T) {
t.Parallel()
expectedErr := "NewHTTPServer failure"
cfg, err := createConfig(t, func(opts ...config.Option) (http.Config, error) { return nil, errors.New(expectedErr) })
if cfg != nil || err.Error() != expectedErr {
t.Errorf("Error expected: %+v %+v", cfg, err)
}
})
}
*/
|
package com.grasea.grandroid.ble.data;
import android.bluetooth.BluetoothGattCharacteristic;
import com.grasea.grandroid.ble.BluetoothLeService;
/**
 * Contract for handling a single BLE GATT characteristic channel: starting and
 * stopping data listening, classifying what the channel supports, and writing
 * data to it.
 * <p>
 * Created by <NAME> on 2016/5/13.
 */
public interface CharacteristicHandler {
/** Start receiving data events from this characteristic. @return true on success */
boolean startListenBleData();
/** Stop receiving data events from this characteristic. @return true on success */
boolean stopListenBleData();
/** @return true if this channel can be written to */
boolean isSendChannel();
/** @return true if this channel can be read from */
boolean isReadChannel();
/** @return true if this channel delivers notifications */
boolean isNotificationChannel();
/** @return the underlying Android GATT characteristic */
BluetoothGattCharacteristic getGattCharacteristic();
/** Write a string payload to the characteristic. @return true if the write was issued */
boolean send(String protocol);
/** Write a raw byte payload to the characteristic. @return true if the write was issued */
boolean send(byte[] protocol);
// NOTE(review): delay is presumably in milliseconds -- confirm against the
// implementation before relying on it.
boolean send(byte[] protocol, int delay);
/** Request a remote RSSI read. @return true if the request was issued */
boolean readRssi();
}
|
# One-time import of the Universal Messaging realm configuration: runs only if
# the persistent marker file is absent and a config.xml was mounted. Retries
# every 10s until the realm accepts the import, then writes the marker so
# container restarts skip it.
import_configuration(){
if [ ! -f /opt/softwareag/common/conf/.persistent/initialized ] && [ -f /tmp/config.xml ]
then
echo " importing configuration"
until /opt/softwareag/UniversalMessaging/tools/runner/runUMTool.sh ImportRealmXML -rname=nsp://localhost:9000 -filename=/tmp/config.xml -importall=true; do sleep 10; done
touch /opt/softwareag/common/conf/.persistent/initialized
fi
}
# Launch the UM server in the background. The background job is started in
# this shell, so the caller can read $! immediately after the call to wait on it.
start_UM(){
/bin/sh -c umstart.sh&
}
# Copy staged files to their destinations. Each line of /tmp/filesLocations is
# presumably "<src-relative-to-/tmp> <dest>" (word splitting inside the cp
# command supplies both arguments) -- confirm against the image build.
# "|| true" keeps the loop going when an individual copy fails.
copy_files(){
if [ -f "/tmp/filesLocations" ]
then
cat /tmp/filesLocations | while read line || [ -n "$line" ]; do echo $line; sh -c "cp /tmp/$line|| true" ; done
fi
}
# Same mechanism as copy_files but driven by /tmp/externalFilesLocations.
# NOTE(review): the body is identical to copy_files apart from the list file;
# a shared helper taking the list path would remove the duplication.
copy_external_files(){
if [ -f "/tmp/externalFilesLocations" ]
then
cat /tmp/externalFilesLocations | while read line || [ -n "$line" ]; do echo $line; sh -c "cp /tmp/$line|| true" ; done
fi
}
# Ensure the persistent configuration directory exists.
# mkdir -p is a no-op when the directory is already present, so the explicit
# existence test in the original was redundant.
create_persistent_folder(){
mkdir -p /opt/softwareag/common/conf/.persistent
}
# If a licence file was persisted, replace the server's bundled licence with a
# symlink to the persistent copy so it survives container recreation.
link_license(){
if [ -f /opt/softwareag/common/conf/.persistent/licence.xml ]
then
rm -f /opt/softwareag/UniversalMessaging/server/umserver/licence/licence.xml
ln -s /opt/softwareag/common/conf/.persistent/licence.xml /opt/softwareag/UniversalMessaging/server/umserver/licence/licence.xml
fi
}
# --- container entry sequence ---
create_persistent_folder
copy_files
copy_external_files
link_license
cd /
start_UM
# PID of the UM process backgrounded inside start_UM ($! survives the
# function return because the job runs in this shell).
PID_TO_WAIT=$!
# put custom code that should be executed after the UM has been started here
import_configuration
# Block on the server so the container stays alive until UM exits.
wait $PID_TO_WAIT
|
#!/bin/bash
# Enroll each orderer organization's CA admin and generate the NodeOU
# configuration for the org's MSP.
set +e
SDIR=$(dirname "$0")
source $SDIR/env.sh
sleep 2
mkdir -p organizations/ordererOrganizations/$DOMAIN
export FABRIC_CA_CLIENT_HOME=/organizations/ordererOrganizations/$DOMAIN
for ORG in $ORDERER_ORGS; do
initOrgVars $ORG
# Trace only the enroll call; "{ set +x; } 2>/dev/null" silences the trace
# toggle itself.
set -x
fabric-ca-client enroll -u https://$CA_ADMIN_USER_PASS@$CA_NAME:10054 --caname $CA_NAME --tls.certfiles /organizations/fabric-ca/${ORG}RCA/tls-cert.pem
{ set +x; } 2>/dev/null
# NodeOUs: map certificate OUs to the client/peer/admin/orderer roles using
# the org's root CA certificate.
echo 'NodeOUs:
Enable: true
ClientOUIdentifier:
Certificate: cacerts/rca-'$ORG'-10054-rca-'$ORG'.pem
OrganizationalUnitIdentifier: client
PeerOUIdentifier:
Certificate: cacerts/rca-'$ORG'-10054-rca-'$ORG'.pem
OrganizationalUnitIdentifier: peer
AdminOUIdentifier:
Certificate: cacerts/rca-'$ORG'-10054-rca-'$ORG'.pem
OrganizationalUnitIdentifier: admin
OrdererOUIdentifier:
Certificate: cacerts/rca-'$ORG'-10054-rca-'$ORG'.pem
OrganizationalUnitIdentifier: orderer' > /organizations/ordererOrganizations/$DOMAIN/msp/config.yaml
done
for ORG in $ORDERER_ORGS; do
initOrgVars $ORG
COUNT=1
COUNTER=-1
while [[ "$COUNT" -le $NUM_ORDERERS ]]; do
initOrdererVars $ORG $COUNT
ordererarr=($EXTERNAL_ORDERER_ADDRESSES)
if [ $COUNT -gt 1 ]; then
IFS=':' read -a arr <<< "${ordererarr[$COUNTER]}"
CAHOST=${arr[0]}
else
CAHOST=$ORDERER_NAME.$DOMAIN
fi
log "Registering $ORDERER_NAME with $CA_NAME"
set +x
fabric-ca-client register --caname $CA_NAME --id.name $ORDERER_NAME --id.secret $ORDERER_PASS --id.type orderer --tls.certfiles /organizations/fabric-ca/${ORG}RCA/tls-cert.pem
{ set +x; } 2>/dev/null
if [ $COUNT -eq 1 ]; then
echo "Register the $ORDERER_NAME admin"
fabric-ca-client register --caname $CA_NAME --id.name $ADMIN_NAME --id.secret $ADMIN_PASS --id.type admin --tls.certfiles /organizations/fabric-ca/${ORG}RCA/tls-cert.pem
mkdir -p organizations/ordererOrganizations/$DOMAIN/orderers
mkdir -p organizations/ordererOrganizations/$DOMAIN/users
mkdir -p organizations/ordererOrganizations/$DOMAIN/users/Admin@$DOMAIN
fi
mkdir -p organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN
echo $CAHOST
echo "Generate the $ORDERER_NAME msp"
fabric-ca-client enroll -u https://$ORDERER_NAME_PASS@$CA_NAME:10054 --caname $CA_NAME -M /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/msp --csr.hosts $CAHOST --csr.hosts localhost --csr.hosts $CA_NAME --csr.hosts $ORDERER_NAME --tls.certfiles /organizations/fabric-ca/${ORG}RCA/tls-cert.pem
cp /organizations/ordererOrganizations/$DOMAIN/msp/config.yaml /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/msp/config.yaml
echo "Generate the $ORDERER_NAME-tls certificates"
fabric-ca-client enroll -u https://$ORDERER_NAME_PASS@$CA_NAME:10054 --caname $CA_NAME -M /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls --enrollment.profile tls --csr.hosts $CAHOST --csr.hosts localhost --csr.hosts $CA_NAME --csr.hosts $ORDERER_NAME --tls.certfiles /organizations/fabric-ca/${ORG}RCA/tls-cert.pem
cp /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/tlscacerts/* /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/ca.crt
cp /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/signcerts/* /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/server.crt
cp /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/keystore/* /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/server.key
mkdir -p /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/msp/tlscacerts
cp /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/tlscacerts/* /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/msp/tlscacerts/tlsca.$DOMAIN-cert.pem
mkdir -p /organizations/ordererOrganizations/$DOMAIN/msp/tlscacerts
cp /organizations/ordererOrganizations/$DOMAIN/orderers/$ORDERER_NAME.$DOMAIN/tls/tlscacerts/* /organizations/ordererOrganizations/$DOMAIN/msp/tlscacerts/tlsca.$DOMAIN-cert.pem
COUNT=$((COUNT+1))
COUNTER=$((COUNTER+2))
done
echo "Generate the admin msp"
set -x
fabric-ca-client enroll -u https://$ADMIN_NAME:$ADMIN_PASS@$CA_NAME:10054 --caname $CA_NAME -M /organizations/ordererOrganizations/$DOMAIN/users/Admin@$DOMAIN/msp --tls.certfiles /organizations/fabric-ca/${ORG}RCA/tls-cert.pem
{ set +x; } 2>/dev/null
cp /organizations/ordererOrganizations/$DOMAIN/msp/config.yaml /organizations/ordererOrganizations/$DOMAIN/users/Admin@$DOMAIN/msp/config.yaml
done
|
<filename>pkg/labels/mocklabels/mock.go
package mocklabels
import "github.com/caos/orbos/pkg/labels"
// Shared label fixtures for tests: a complete operator → API → component →
// name label chain built via the labels package, plus the plain-map
// equivalents that would appear on Kubernetes objects.
var (
	// Raw key/value pairs the typed labels below are assembled from.
	productKey = "app.kubernetes.io/part-of"
	productVal = "MOCKING"

	operatorKey = "app.kubernetes.io/managed-by"
	operatorVal = "test.caos.ch"

	operatorVersionKey = "app.kubernetes.io/version"
	operatorVersionVal = "v987.654.3210"

	apiKindKey = "caos.ch/kind"
	apiKindVal = "MockedLabels"

	apiVersionKey = "caos.ch/apiversion"
	apiVersionVal = "v9876"

	componentKey = "app.kubernetes.io/component"
	componentVal = "mocked-component"

	nameKey = "app.kubernetes.io/name"
	NameVal = "mocked-name"

	selectableKey = "orbos.ch/selectable"
	selectableVal = "yes"

	// Typed label chain; each level wraps the previous one.
	Operator = labels.MustForOperator(productVal, operatorVal, operatorVersionVal)
	Api      = labels.MustForAPI(Operator, apiKindVal, apiVersionVal)

	Component = labels.MustForComponent(Api, componentVal)

	Name = labels.MustForName(Component, NameVal)

	ClosedNameSelector = labels.DeriveNameSelector(Name, false)

	Selectable = labels.AsSelectable(Name)

	// Map form of Name (no selectable marker).
	NameMap = map[string]string{
		nameKey:            NameVal,
		componentKey:       componentVal,
		apiKindKey:         apiKindVal,
		apiVersionKey:      apiVersionVal,
		operatorKey:        operatorVal,
		operatorVersionKey: operatorVersionVal,
		productKey:         productVal,
	}

	// Map form of ClosedNameSelector — note it intentionally omits the
	// api-kind/api-version/operator-version keys present in NameMap.
	ClosedNameSelectorMap = map[string]string{
		selectableKey: selectableVal,
		componentKey:  componentVal,
		nameKey:       NameVal,
		operatorKey:   operatorVal,
		productKey:    productVal,
	}

	// Map form of Selectable: NameMap plus the selectable marker.
	SelectableMap = map[string]string{
		nameKey:            NameVal,
		componentKey:       componentVal,
		apiKindKey:         apiKindVal,
		apiVersionKey:      apiVersionVal,
		operatorKey:        operatorVal,
		operatorVersionKey: operatorVersionVal,
		productKey:         productVal,
		selectableKey:      selectableVal,
	}
)
|
def multiply_by_index(numbers):
    """Scale every element of *numbers* in place by its position.

    Element 0 therefore always becomes 0.  The same list object is
    returned so calls can be chained.
    """
    for position, value in enumerate(numbers):
        numbers[position] = value * position
    return numbers
|
<gh_stars>1-10
package mongoid
import (
"math/cmplx"
"reflect"
. "github.com/onsi/ginkgo"
// . "github.com/onsi/ginkgo/extensions/table"
. "github.com/onsi/gomega"
"go.mongodb.org/mongo-driver/bson"
)
// Tests for structValuesFromBsonM: each example builds a one-field struct,
// feeds the function a bson.M whose snake_case key targets that field, and
// verifies the field took the new value.
var _ = Describe("structValuesFromBsonM()", func() {
	Context("updating types of struct field values by name", func() {
		// test drives one assignment round-trip.
		//   structPtr     - pointer to the struct under test
		//   fieldPtr      - pointer to the field expected to change
		//   exBson        - bson.M handed to structValuesFromBsonM
		//   bsonFieldName - key in exBson that maps to the field
		// The first two expectations guard against a self-passing test;
		// the WithOffset(1) variants report failures at the caller's line.
		test := func(structPtr interface{}, fieldPtr interface{}, exBson bson.M, bsonFieldName string) {
			fieldValue := reflect.Indirect(reflect.ValueOf(fieldPtr))
			_, ok := exBson[bsonFieldName]
			ExpectWithOffset(1, ok).To(Equal(true), "given bsonFieldName should be a valid key to the target value, so the test can validate successful assignment")
			ExpectWithOffset(1, fieldValue.Interface()).ToNot(Equal(exBson[bsonFieldName]), "initial struct field value should not already equal the target value of the test")
			structValuesFromBsonM(structPtr, exBson)
			ExpectWithOffset(1, fieldValue.Interface()).To(Equal(exBson[bsonFieldName]), "struct field value should equal the target value after assignment")
		}
		It("bool field", func() {
			boolFieldEx := struct{ BoolField bool }{true}
			test(&boolFieldEx, &boolFieldEx.BoolField, bson.M{"bool_field": false}, "bool_field")
		})
		It("int field", func() {
			intFieldEx := struct{ IntField int }{7}
			test(&intFieldEx, &intFieldEx.IntField, bson.M{"int_field": 42}, "int_field")
		})
		// Integer widths: each case uses the type's boundary value.
		It("int8 field", func() {
			tinyIntFieldEx := struct{ TinyIntField int8 }{7}
			test(&tinyIntFieldEx, &tinyIntFieldEx.TinyIntField, bson.M{"tiny_int_field": int8(127)}, "tiny_int_field")
		})
		It("int16 field", func() {
			smallIntFieldEx := struct{ SmallIntField int16 }{7}
			test(&smallIntFieldEx, &smallIntFieldEx.SmallIntField, bson.M{"small_int_field": int16(32767)}, "small_int_field")
		})
		It("int32 field", func() {
			explicitIntFieldEx := struct{ AnIntField int32 }{7}
			test(&explicitIntFieldEx, &explicitIntFieldEx.AnIntField, bson.M{"an_int_field": int32(2147483647)}, "an_int_field")
		})
		It("int64 field", func() {
			bigIntFieldEx := struct{ BigIntField int64 }{7}
			test(&bigIntFieldEx, &bigIntFieldEx.BigIntField, bson.M{"big_int_field": int64(4294967296)}, "big_int_field")
		})
		It("uint field", func() {
			uintFieldEx := struct{ UIntField uint }{7}
			test(&uintFieldEx, &uintFieldEx.UIntField, bson.M{"u_int_field": uint(42)}, "u_int_field")
		})
		It("uint8 field", func() {
			tinyUIntFieldEx := struct{ TinyUIntField uint8 }{7}
			test(&tinyUIntFieldEx, &tinyUIntFieldEx.TinyUIntField, bson.M{"tiny_u_int_field": uint8(255)}, "tiny_u_int_field")
		})
		It("uint16 field", func() {
			smallUIntFieldEx := struct{ SmallUIntField uint16 }{7}
			test(&smallUIntFieldEx, &smallUIntFieldEx.SmallUIntField, bson.M{"small_u_int_field": uint16(65535)}, "small_u_int_field")
		})
		It("uint32 field", func() {
			explicitUIntFieldEx := struct{ AnUIntField uint32 }{7}
			test(&explicitUIntFieldEx, &explicitUIntFieldEx.AnUIntField, bson.M{"an_u_int_field": uint32(4294967295)}, "an_u_int_field")
		})
		It("uint64 field", func() {
			bigUIntFieldEx := struct{ BigUIntField uint64 }{7}
			test(&bigUIntFieldEx, &bigUIntFieldEx.BigUIntField, bson.M{"big_u_int_field": uint64(18446744073709551615)}, "big_u_int_field")
		})
		It("float32 field", func() {
			float32FieldEx := struct{ FloatField float32 }{0.0}
			test(&float32FieldEx, &float32FieldEx.FloatField, bson.M{"float_field": float32(99.99)}, "float_field")
		})
		It("float64 field", func() {
			float64FieldEx := struct{ Float64Field float64 }{0.0}
			test(&float64FieldEx, &float64FieldEx.Float64Field, bson.M{"float_64_field": float64(-99.99)}, "float_64_field")
		})
		It("complex64 field", func() {
			complex64FieldEx := struct{ Complex64Field complex64 }{0.0}
			test(&complex64FieldEx, &complex64FieldEx.Complex64Field, bson.M{"complex_64_field": complex64(-99.99)}, "complex_64_field")
		})
		It("complex128 field", func() {
			complex128FieldEx := struct{ Complex128Field complex128 }{0.0}
			test(&complex128FieldEx, &complex128FieldEx.Complex128Field, bson.M{"complex_128_field": cmplx.Sqrt(-1.0)}, "complex_128_field")
		})
		It("string field", func() {
			strFieldEx := struct{ StrField string }{"example"}
			test(&strFieldEx, &strFieldEx.StrField, bson.M{"str_field": "forty two"}, "str_field")
		})
		// Arrays/slices: bson.A plus native slice types.
		It("array field", func() {
			By("single type bson.A")
			arrayFieldEx := struct{ ArrayField bson.A }{ArrayField: bson.A{"array", "example"}}
			test(&arrayFieldEx, &arrayFieldEx.ArrayField, bson.M{"array_field": bson.A{"bar", "world", "3.14159"}}, "array_field")
			By("mixed type bson.A")
			test(&arrayFieldEx, &arrayFieldEx.ArrayField, bson.M{"array_field": bson.A{"bar", "world", 3.14159}}, "array_field")
			By("native []string")
			stringArrayFieldEx := struct{ StringArrayField []string }{StringArrayField: []string{"array", "example"}}
			test(&stringArrayFieldEx, &stringArrayFieldEx.StringArrayField, bson.M{"string_array_field": []string{"play", "nice"}}, "string_array_field")
			By("native []int")
			intArrayFieldEx := struct{ IntArrayField []int }{IntArrayField: []int{1, 2, 3}}
			test(&intArrayFieldEx, &intArrayFieldEx.IntArrayField, bson.M{"int_array_field": []int{42}}, "int_array_field")
		})
		// Maps: bson.M plus native map types.
		It("map field", func() {
			By("bson.M with string values")
			mapFieldEx := struct{ MapField bson.M }{MapField: bson.M{"bson.M(ap)": "example", "equivalentTo": "map[string]interface{}"}}
			test(&mapFieldEx, &mapFieldEx.MapField, bson.M{"map_field": bson.M{"play": "nice"}}, "map_field")
			By("bson.M with mixed values")
			mapMixedFieldEx := struct{ MixedMapField bson.M }{MixedMapField: bson.M{"bson.M(ap)": "example", "equivalentTo": "map[string]interface{}", "soThisIsFine": 7, "thisToo": 42.0}}
			test(&mapMixedFieldEx, &mapMixedFieldEx.MixedMapField, bson.M{"mixed_map_field": bson.M{"integers": 99, "floats": 99.99, "strings": "oh my!"}}, "mixed_map_field")
			By("native map[string]string")
			mapStringFieldEx := struct{ StringMapField map[string]string }{StringMapField: map[string]string{"native": "map[string]string"}}
			test(&mapStringFieldEx, &mapStringFieldEx.StringMapField, bson.M{"string_map_field": map[string]string{"shouldBe": "wellSupported"}}, "string_map_field")
		})
		// Nested structs bypass the shared helper because the comparison must
		// reach into the inner field.
		// NOTE(review): the failure message on the final Expect in the two
		// cases below was copy-pasted from the precondition ("should not
		// already equal") — the text is misleading for a post-assignment
		// assertion, though the assertion itself is correct.
		It("struct with embedded int field", func() {
			structFieldEx := struct{ StructField struct{ IntField int } }{StructField: struct{ IntField int }{IntField: 7}}
			exBson := bson.M{"struct_field": bson.M{"int_field": 42}}
			Expect(structFieldEx.StructField.IntField).ToNot(Equal(exBson["struct_field"].(bson.M)["int_field"]), "initial struct field value should not already equal the target value of the test")
			structValuesFromBsonM(&structFieldEx, exBson)
			Expect(structFieldEx.StructField.IntField).To(Equal(exBson["struct_field"].(bson.M)["int_field"]), "initial struct field value should not already equal the target value of the test")
		})
		It("struct with embedded string field", func() {
			structFieldEx := struct{ StructField struct{ StringField string } }{StructField: struct{ StringField string }{StringField: "foo"}}
			exBson := bson.M{"struct_field": bson.M{"string_field": "bar"}}
			Expect(structFieldEx.StructField.StringField).ToNot(Equal(exBson["struct_field"].(bson.M)["string_field"]), "initial struct field value should not already equal the target value of the test")
			structValuesFromBsonM(&structFieldEx, exBson)
			Expect(structFieldEx.StructField.StringField).To(Equal(exBson["struct_field"].(bson.M)["string_field"]), "initial struct field value should not already equal the target value of the test")
		})
	}) // Context("updating a struct field value", func() {
})
|
import Node from '../src/node';
// Jest suite for the base AST Node type.
describe('Node', () => {
  describe('constructer', () => {
    // A bare Node() must be constructible.
    test('new Node', () => {
      const i = new Node();
      expect(i).not.toBeNull();
    });
  });
  describe('toString', () => {
    // Template-literal interpolation exercises Node's string conversion.
    test('string expression', () => {
      const i = new Node();
      const str = `${i}`;
      expect(str).toBe('Node {}');
    });
  });
  describe('TokenLiteral', () => {
    // Only asserts the method exists and is callable; its return value is
    // intentionally not checked here.
    test('only check the existence', () => {
      const i = new Node();
      expect(typeof i.TokenLiteral).toBe('function');
      i.TokenLiteral();
    });
  });
});
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_folder_outline = void 0;
var ic_folder_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M9.17 6l2 2H20v10H4V6h5.17M10 4H4c-1.1 0-1.99.9-1.99 2L2 18c0 1.1.9 2 2 2h16c1.1 0 2-.9 2-2V8c0-1.1-.9-2-2-2h-8l-2-2z"
},
"children": []
}]
};
exports.ic_folder_outline = ic_folder_outline;
|
#!/bin/bash
##########################################################################
# Backup-Switch-Huawei-S6720
# Filename: bck-switch-huawei.sh
# Revision: 1.0
# Date: 17/06/2019
# By: Bruno Cavalcante
# Mail: brunopentest@gmail.com
# EX Execute:
# ./bck-switch-huawei.sh 192.168.0.1 backup-sw-huawei
##########################################################################
######################################
# VARIABLES SUPPLIED ON THE COMMAND LINE
# $1 = switch IP, $2 = backup file name prefix
######################################
IP=$1
FILENAME=$2
#########################
# SWITCH FTP USER AND PASSWORD
#########################
USER='backup'
PASS='SUASENHA'
######################################################################################################
# ACCESS THE SWITCH AND DOWNLOAD THE CONFIGURATION ARCHIVE VIA FTP
######################################################################################################
wget --ftp-user=$USER --ftp-password=$PASS ftp://$IP/vrpcfg.zip
########################################################################
# EXTRACT, COPY WITH A DATE/TIME SUFFIX, AND REMOVE THE TEMPORARY FILES
# (runs from $HOME, where wget left vrpcfg.zip)
########################################################################
cd
unzip vrpcfg.zip
cp vrpcfg.cfg /root/bck/huawei/$FILENAME-$(date "+%d.%m.%Y-%H.%M.%S")
rm vrpcfg.cfg
rm vrpcfg.zip
########################################################################
# DELETE BACKUPS OLDER THAN 7 DAYS
########################################################################
cd /root/bck/huawei/
find /root/bck/huawei -type f -mtime +6 -exec rm -rf {} \;
exit 0
########################################################################
# END OF SCRIPT
########################################################################
|
package fracCalc;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
// JUnit 4 suite for FracCalc's extra-credit feature: expressions with more
// than one operator. Expected values imply strict left-to-right evaluation
// with no operator precedence (e.g. multipleOps1 only yields -20/21 when
// the trailing division applies to the whole accumulated result).
public class FracCalcTestExtraCreditMultipleOps {

    // Extra credit only
    @Test public void multipleOps1() {assertEquals("-20/21", FracCalc.produceAnswer("1_2/3 + 5/4 + 5_5/4 - 2_2/4 / -5_6/3"));}
    @Test public void multipleOps2() {assertEquals("4", FracCalc.produceAnswer("-3 - -3 + 1 + 3"));}
    @Test public void multipleOps3() {assertEquals("0", FracCalc.produceAnswer("12 * 18 * 18 * 0"));}
    @Test public void multipleOps4() {assertEquals("3_47/60", FracCalc.produceAnswer("20/8 + 3_1/3 - 4/5 - 5/4"));}
    @Test public void multipleOps5() {assertEquals("1", FracCalc.produceAnswer("12345 - 12345 + 12345 - 12345 + 1"));}
    @Test public void multipleOps6() {assertEquals("0", FracCalc.produceAnswer("1 * 4 * 0 * 0 / 1 / 4/6 / 2_3/4"));}
    @Test public void multipleOps7() {assertEquals("-1", FracCalc.produceAnswer("1/5 + 1/5 + 1/5 + 1/5 + 1/5 - 2"));}
    @Test public void multipleOps8() {assertEquals("0", FracCalc.produceAnswer("-4 + 1 + 1 + 1 + 1"));}
    @Test public void multipleOps9() {assertEquals("4_1/2", FracCalc.produceAnswer("16/4 / 3/2 * 3/2 + 1/2"));}
    @Test public void multipleOps10() {assertEquals("0", FracCalc.produceAnswer("12457 / -1 + 12457"));}
    @Test public void multipleOps11() {assertEquals("7", FracCalc.produceAnswer("5_3/4 - -6_8/8 - 5_3/4"));}
    @Test public void multipleOps12() {assertEquals("4", FracCalc.produceAnswer("2 * 3 - 6 + 1_1/2 + 1/2 - 1/2 - 1/2 + 3"));}
    @Test public void multipleOps13() {assertEquals("-4", FracCalc.produceAnswer("2 * 3 - 6 + -1_1/2 + -1/2 - -1/2 - -1/2 - 3"));}
    @Test public void multipleOps14() {assertEquals("3", FracCalc.produceAnswer("20 / 4 * -1 + 8"));}
}
|
<gh_stars>0
import ArtworksByFollowedArtists from 'desktop/apps/auction/components/artwork_browser/ArtworksByFollowedArtists'
import PromotedSaleArtworks from 'desktop/apps/auction/components/artwork_browser/PromotedSaleArtworks'
import ArtworkBrowser from 'desktop/apps/auction/components/artwork_browser/ArtworkBrowser'
import AuctionBlock from 'desktop/components/react/auction_block/auction_block'
import AuctionInfoContainer from 'desktop/apps/auction/components/layout/auction_info'
import _Banner from 'desktop/apps/auction/components/layout/Banner'
import Footer from 'desktop/apps/auction/components/layout/Footer'
import MyActiveBids from 'desktop/apps/auction/components/layout/active_bids/MyActiveBids'
import PropTypes from 'prop-types'
import React from 'react'
import block from 'bem-cn-lite'
import { connect } from 'react-redux'
// FIXME: Rewire
let Banner = _Banner

/**
 * Auction page layout. Renders the banner and auction info, then — driven
 * by the boolean flags injected via connect() below — the related-auction
 * block, the user's active bids, promoted artworks, the artwork browser,
 * and the footer.
 */
function Layout(props) {
  const {
    associatedSale,
    showAssociatedAuctions,
    showFilter,
    showInfoWindow,
    showMyActiveBids,
    showFooter,
  } = props
  // BEM-style class-name builder scoped to "auction-Layout".
  const b = block('auction-Layout')

  return (
    <div className={b()}>
      <Banner />
      <div className={b('container', 'responsive-layout-container')}>
        <AuctionInfoContainer />
        {showAssociatedAuctions && (
          <AuctionBlock sale={associatedSale} relatedAuction />
        )}
        {showMyActiveBids && <MyActiveBids />}
        <PromotedSaleArtworks />
        <ArtworksByFollowedArtists />
        {/* The browser is suppressed while the info window overlay is up. */}
        {showFilter &&
          !showInfoWindow && (
            <div className="auction-main-page">
              <ArtworkBrowser />
            </div>
          )}
        {showFooter && <Footer />}
      </div>
    </div>
  )
}
// Runtime prop contract. associatedSale is the only optional prop — it is
// present only when the auction has a related sale.
Layout.propTypes = {
  associatedSale: PropTypes.object,
  showAssociatedAuctions: PropTypes.bool.isRequired,
  showFilter: PropTypes.bool.isRequired,
  showInfoWindow: PropTypes.bool.isRequired,
  showMyActiveBids: PropTypes.bool.isRequired,
  showFooter: PropTypes.bool.isRequired,
}
// Derives Layout's display flags from the redux state.
// NOTE(review): isMobile and showFollowedArtistsRail are returned here but
// not destructured by Layout above — possibly consumed by child components
// via context/ownProps or left over; verify before removing.
const mapStateToProps = (state) => {
  const {
    app: { articles, auction, me, isEcommerceSale, isMobile, showInfoWindow },
  } = state
  // auction is a Backbone-style model; toJSON() yields its plain attributes.
  const {
    associated_sale,
    eligible_sale_artworks_count,
    is_open,
    is_live_open,
  } = auction.toJSON()

  // Related auctions are desktop-only and require an associated sale.
  const showAssociatedAuctions = Boolean(!isMobile && associated_sale)
  const showFilter = Boolean(eligible_sale_artworks_count > 0)
  const showFollowedArtistsRail = Boolean(
    state.artworkBrowser.showFollowedArtistsRail
  )
  // Active bids only make sense for a bidder in an open, not-yet-live,
  // non-ecommerce sale.
  const showMyActiveBids = Boolean(
    !isEcommerceSale && me && me.bidders.length && is_open && !is_live_open
  )
  const showFooter = Boolean((!isMobile && articles.length) || !showFilter)

  return {
    associatedSale: associated_sale,
    isMobile,
    showAssociatedAuctions,
    showFilter,
    showFollowedArtistsRail,
    showInfoWindow,
    showMyActiveBids,
    showFooter,
  }
}

export default connect(mapStateToProps)(Layout)
|
import {IsMulti} from "./types"
/**
 * Tells whether a control belongs to a multi group.
 *
 * A multi control is identified by having a (non-null) index.
 *
 * @param controlIndex - Index of the control, or null for a singleton.
 *
 * @returns true for a multi control.
 */
export const isMultiControl: IsMulti = (controlIndex) => controlIndex !== null
/**
 * Tells whether a control is a singleton (not part of a multi group).
 *
 * A singleton control is identified by having no index.
 *
 * @param controlIndex - Index of the control, or null for a singleton.
 *
 * @returns true for a singleton control.
 */
export const isSingletonControl: IsMulti = (controlIndex) => controlIndex === null
/**
 * Tells whether a form belongs to a multi group.
 *
 * A multi form is identified by having a (non-null) index.
 *
 * @param formIndex - Index of the form, or null.
 *
 * @returns true for a multi form.
 */
export const isMultiForm: IsMulti = (formIndex) => formIndex !== null
|
<gh_stars>10-100
package com.telenav.osv.network.model.image;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.telenav.osv.network.model.generic.ResponseModelStatus;
import com.telenav.osv.network.model.generic.ResponseNetworkBase;
import com.telenav.osv.network.model.video.ResponseModelVideoOsv;
/**
 * Response payload returned by the backend after an image upload.
 * Extends the generic network response (status) with the OSV-specific body.
 */
public class ResponseModelUploadImage extends ResponseNetworkBase {

    /** OSV payload of the response; serialized under the "osv" JSON key. */
    @SerializedName("osv")
    @Expose
    public ResponseModelVideoOsv osv;

    /**
     * Default constructor for the current class.
     *
     * @param status generic status portion of the response
     * @param osv    OSV-specific payload
     */
    public ResponseModelUploadImage(ResponseModelStatus status, ResponseModelVideoOsv osv) {
        super(status);
        this.osv = osv;
    }
}
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.webdemo;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Created by Semyon.Atamas on 8/11/2014.
 *
 * Immutable-after-construction description of a single file inside a user
 * project: its name, content, public identifier, flavour and whether the
 * user may edit it.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ProjectFile {
    protected Boolean modifiable;
    protected String publicId;
    protected String text;
    protected String name;
    protected Type type;

    /**
     * Deserialization entry point. Jackson invokes this constructor via
     * reflection when deserializing a project file from a user request;
     * such files are always modifiable Kotlin sources.
     *
     * @param name     file name
     * @param text     file content
     * @param publicId public identifier of the file
     */
    @JsonCreator
    public ProjectFile(@JsonProperty("name") String name,
                       @JsonProperty("text") String text,
                       @JsonProperty("publicId") String publicId) {
        this(name, text, true, publicId, Type.KOTLIN_FILE);
    }

    /** Fully-specified constructor used by server-side code. */
    public ProjectFile(String name, String text, boolean modifiable, String publicId, Type type) {
        this.name = name;
        this.text = text;
        this.modifiable = modifiable;
        this.publicId = publicId;
        this.type = type;
    }

    /** Copy constructor: duplicates every field of {@code other}. */
    public ProjectFile(ProjectFile other) {
        this.name = other.name;
        this.text = other.text;
        this.modifiable = other.modifiable;
        this.publicId = other.publicId;
        this.type = other.type;
    }

    /** @return a field-for-field copy of this file. */
    public ProjectFile copy() {
        return new ProjectFile(this);
    }

    public Boolean isModifiable() {
        return modifiable;
    }

    public String getName() {
        return name;
    }

    public String getText() {
        return text;
    }

    public String getPublicId() {
        return publicId;
    }

    public Type getType() {
        return type;
    }

    /** File flavours the platform distinguishes between. */
    public enum Type {
        KOTLIN_FILE,
        KOTLIN_TEST_FILE,
        SOLUTION_FILE,
        JAVA_FILE
    }
}
|
import Bio from 'components/Bio'
import PortfolioCard from 'components/PortfolioCard'
import SEO from 'components/seo'
import { graphql } from 'gatsby'
import React from 'react'
import styled from 'styled-components'
// Root wrapper for the portfolio page; inherits the theme text colour.
const PortfolioStyles = styled.div`
  color: var(--color);
`
// Per-card wrapper with responsive heading and image sizing; the two media
// queries target tablets and phones respectively.
const AllPortfolioStyles = styled.div`
  /* Tablet */
  @media only screen and (min-device-width: 768px) and (max-device-width: 1024px) {
    h1 {
      font-size: 1.5em;
    }
    .card-image img {
      width: 330px;
      height: auto;
    }
  }
  /* phones */
  @media screen and (min-device-width: 320px) and (max-device-width: 768px) {
    h1 {
      font-size: 1.2em;
    }
    .card-image img {
      min-width: 330px;
      height: auto;
    }
  }
`
function AllPortfolio({posts}) {
return (
<div>
{posts.map((post, index) => {
return (
<AllPortfolioStyles key={`${post.slug}-${index}`}>
<PortfolioCard post={post} />
</AllPortfolioStyles>
)
})}
</div>
)
}
/**
 * Portfolio index page: SEO tags, intro copy, one card per published
 * project (via AllPortfolio) and the footer bio.
 */
function PortfolioIndex({data: {allMdx}}) {
  // Edges from the pageQuery below; each edge wraps a node's frontmatter.
  const posts = allMdx.edges
  return (
    <PortfolioStyles>
      <SEO title="Portfolio" />
      <h1>Portfolio</h1>
      <p className="">
        This page details some of the project, side project, contributions to
        Open Source I have worked on and my role on the project. It shows a
        little overhead details of the technology stack used. You can click on
        individual project to read more details.
      </p>
      <AllPortfolio posts={posts} />
      <Bio footer />
    </PortfolioStyles>
  )
}

export default PortfolioIndex
// Gatsby page query: all published MDX files under content/portfolio,
// newest first. The selected frontmatter fields feed PortfolioCard.
export const pageQuery = graphql`
  query {
    allMdx(
      sort: {fields: [frontmatter___publishedDate], order: DESC}
      filter: {
        frontmatter: {isPublished: {eq: true}}
        fileAbsolutePath: {regex: "//content/portfolio//"}
      }
    ) {
      edges {
        node {
          frontmatter {
            publishedDate(formatString: "dddd DD MMMM YYYY")
            title
            url
            slug
            imageUrl
            description
            technology
          }
        }
      }
    }
  }
`
|
-- Schema migration: introduce categorised authentication images.
-- Runs inside a single transaction with the strict SET options required
-- for indexed/computed-column compatible DDL.
BEGIN TRANSACTION
SET QUOTED_IDENTIFIER ON
SET ARITHABORT ON
SET NUMERIC_ROUNDABORT OFF
SET CONCAT_NULL_YIELDS_NULL ON
SET ANSI_NULLS ON
SET ANSI_PADDING ON
SET ANSI_WARNINGS ON
GO
-- Lookup table of image categories, seeded with the fixed set below.
CREATE TABLE [dbo].[AUTH_IMAGE_CATEGORIES](
	[ID] [int] IDENTITY(1,1) NOT NULL,
	[CATEGORY] [nvarchar](50) NOT NULL,
 CONSTRAINT [PK_AUTH_IMAGE_CATEGORIES] PRIMARY KEY CLUSTERED
(
	[ID] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]
GO

INSERT INTO [AUTH_IMAGE_CATEGORIES] (CATEGORY)
VALUES ('people'), ('nature'), ('flowers'), ('animals'), ('business'), ('technology'), ('cars'), ('city'), ('christmas')
GO
-- Rebuild of the image tables: drop the previous versions first.
-- NOTE(review): AUTH_IMAGES is dropped before AUTH_IMAGE_THUMBNAILS; if the
-- old thumbnail table carried an FK to AUTH_IMAGES this order would fail —
-- presumably the previous schema had no such constraint. Verify.
DROP TABLE [dbo].[AUTH_IMAGES]
GO
DROP TABLE [dbo].[AUTH_IMAGE_THUMBNAILS]
GO
-- Image registry; every image belongs to exactly one category.
CREATE TABLE [dbo].[AUTH_IMAGES](
	[ID] [bigint] IDENTITY(1,1) NOT NULL,
	[URL] [nvarchar](1000) NOT NULL,
	[CATEGORY_ID] [int] NOT NULL,
 CONSTRAINT [PK_AUTH_IMAGES] PRIMARY KEY CLUSTERED
(
	[ID] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]
GO

-- BUG FIX: the original constraint referenced AUTH_IMAGES([ID]) from its own
-- primary key (a self-referencing no-op that enforced nothing). The evident
-- intent — given the CATEGORY_ID column and the AUTH_IMAGE_CATEGORIES table
-- created above — is a category foreign key. The original constraint name is
-- kept so later scripts that reference it by name still work.
ALTER TABLE [dbo].[AUTH_IMAGES]  WITH CHECK ADD  CONSTRAINT [FK_AUTH_IMAGES_AUTH_IMAGES] FOREIGN KEY([CATEGORY_ID])
REFERENCES [dbo].[AUTH_IMAGE_CATEGORIES] ([ID])
GO

ALTER TABLE [dbo].[AUTH_IMAGES] CHECK CONSTRAINT [FK_AUTH_IMAGES_AUTH_IMAGES]
GO
GO
-- One binary thumbnail per image; rows disappear automatically when the
-- parent image is deleted (ON DELETE CASCADE).
CREATE TABLE [dbo].[AUTH_IMAGE_THUMBNAILS](
	[IMAGE_ID] [bigint] NOT NULL,
	[THUMBNAIL] [varbinary](max) NOT NULL
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO

SET ANSI_PADDING OFF
GO

ALTER TABLE [dbo].[AUTH_IMAGE_THUMBNAILS]  WITH CHECK ADD  CONSTRAINT [FK_AUTH_IMAGE_THUMBNAILS_AUTH_IMAGES] FOREIGN KEY([IMAGE_ID])
REFERENCES [dbo].[AUTH_IMAGES] ([ID])
ON DELETE CASCADE
GO

ALTER TABLE [dbo].[AUTH_IMAGE_THUMBNAILS] CHECK CONSTRAINT [FK_AUTH_IMAGE_THUMBNAILS_AUTH_IMAGES]
GO
-- Re-point SMS_CONTACT's avatar columns at the new AUTH_IMAGES table.
-- NOTE(review): the DROP COLUMN statements discard whatever the previous
-- LEFT_IMAGE/RIGHT_IMAGE columns held — confirm losing that data is intended.
ALTER TABLE [dbo].[SMS_CONTACT]
DROP COLUMN [LEFT_IMAGE]
GO

ALTER TABLE [dbo].[SMS_CONTACT]
DROP COLUMN [RIGHT_IMAGE]
GO

-- Recreate the columns as nullable FK references to AUTH_IMAGES.
ALTER TABLE [dbo].[SMS_CONTACT] ADD [LEFT_IMAGE] [bigint] NULL
ALTER TABLE [dbo].[SMS_CONTACT] ADD [RIGHT_IMAGE] [bigint] NULL

ALTER TABLE [dbo].[SMS_CONTACT]  WITH CHECK ADD  CONSTRAINT [FK_SMS_CONTACT_AUTH_IMAGES] FOREIGN KEY([LEFT_IMAGE])
REFERENCES [dbo].[AUTH_IMAGES] ([ID])
GO

ALTER TABLE [dbo].[SMS_CONTACT] CHECK CONSTRAINT [FK_SMS_CONTACT_AUTH_IMAGES]
GO

ALTER TABLE [dbo].[SMS_CONTACT]  WITH CHECK ADD  CONSTRAINT [FK_SMS_CONTACT_AUTH_IMAGES1] FOREIGN KEY([RIGHT_IMAGE])
REFERENCES [dbo].[AUTH_IMAGES] ([ID])
GO

ALTER TABLE [dbo].[SMS_CONTACT] CHECK CONSTRAINT [FK_SMS_CONTACT_AUTH_IMAGES1]
GO

-- Record the schema version this migration produces.
UPDATE [SETTINGS] SET [VALUE] = '2.3.9' WHERE [SETTING] = 'VERSION'
GO

COMMIT
GO
|
#include <stdio.h>
#include <stdint.h>
#include <string.h>
#include <strings.h>
/* Maximum number of command-line arguments the table can hold. */
#define MAX 8192

/* One command-line argument paired with the sum of its character values.
   Packed, so the struct occupies exactly pointer + 4 bytes. */
typedef struct __attribute__((__packed__)) {
    char* str;
    uint32_t sum;
} Entry;

/* Global zero-initialised table; the first slot with str == NULL marks the
   end of the used region. */
Entry list[MAX] = { { 0 } };

// Messy swap macros to be used in sorting
// NOTE(review): these XOR-swap pointers through int64_t casts — intptr_t
// would be the portable spelling — and an XOR swap zeroes the data if both
// operands alias the same element. The sole caller's strict `>` comparison
// guarantees distinct indices, so that hazard is not hit here.
// Swap string
#define SSTR(a, b, c) (list[a].str = ((char*)((int64_t)list[b].str ^ (int64_t)list[c].str)))
// Swap int
#define SINT(a, b, c) (list[a].sum = (list[b].sum ^ list[c].sum))
/**
 * Sums the byte values of a NUL-terminated string.
 *
 * @param str NUL-terminated input string.
 * @return    Sum of (uint32_t)str[i] for every character before the NUL.
 *
 * The original looped with `i <= strlen(str) + 1 && str[i] != '\0'`, which
 * re-evaluated strlen() on every iteration (O(n^2)) even though the NUL
 * test alone terminates the loop; this single-pass form is equivalent.
 */
uint32_t strsum(char* str) {
    uint32_t t = 0;
    for (uint32_t i = 0; str[i] != '\0'; i++) {
        t += (uint32_t)str[i];
    }
    return t;
}
int main(int argc, char** argv) {
if (MAX < argc) {
fprintf(stderr, "Input arguments overflows max of %d\n", MAX);
return 1;
}
// Iterate through args and call strsum() on each arg
// Add sum of string to list along with string pointer
for (uint32_t s = 1; argv[s] != NULL; s++) {
list[s - 1].sum = strsum(argv[s]);
list[s - 1].str = argv[s];
}
// Sucky sort
for (uint32_t i = 0; i < MAX && list[i].str != NULL; i++) {
for (uint32_t j = 0; j < MAX && list[j].str != NULL; j++) {
if (list[j].sum > list[i].sum) {
SINT(i, i, j);
SINT(j, i, j);
SINT(i, i, j);
SSTR(i, i, j);
SSTR(j, i, j);
SSTR(i, i, j);
}
}
}
for (uint32_t c = 0; c < MAX && list[c].str != NULL; printf("%s\n", list[c].str), c++);
return 0;
}
|
<filename>actors/sunbird-lms-mw/actors/user/src/main/java/org/sunbird/user/actors/UserRoleActor.java
package org.sunbird.user.actors;
import java.util.*;
import org.sunbird.actor.router.ActorConfig;
import org.sunbird.cassandra.CassandraOperation;
import org.sunbird.common.exception.ProjectCommonException;
import org.sunbird.common.factory.EsClientFactory;
import org.sunbird.common.inf.ElasticSearchService;
import org.sunbird.common.models.response.Response;
import org.sunbird.common.models.util.*;
import org.sunbird.common.request.Request;
import org.sunbird.common.request.RequestContext;
import org.sunbird.common.responsecode.ResponseCode;
import org.sunbird.helper.ServiceFactory;
import org.sunbird.learner.actors.role.service.RoleService;
import org.sunbird.learner.organisation.service.OrgService;
import org.sunbird.learner.organisation.service.impl.OrgServiceImpl;
import org.sunbird.learner.util.DataCacheHandler;
import org.sunbird.learner.util.Util;
import org.sunbird.user.service.UserRoleService;
import org.sunbird.user.service.impl.UserRoleServiceImpl;
@ActorConfig(
tasks = {"getRoles", "assignRoles"},
asyncTasks = {},
dispatcher = "most-used-two-dispatcher"
)
public class UserRoleActor extends UserBaseActor {
  // Cassandra DAO handle. NOTE(review): not referenced by the role
  // operations shown in this class — verify it is used elsewhere or remove.
  private CassandraOperation cassandraOperation = ServiceFactory.getInstance();
  // Elasticsearch client over the REST transport. NOTE(review): ES syncing
  // here goes through tellToAnother(), not this client — verify usage.
  private ElasticSearchService esService = EsClientFactory.getInstance(JsonKey.REST);
  // Organisation service singleton.
  private OrgService orgService = OrgServiceImpl.getInstance();
  @Override
  public void onReceive(Request request) throws Throwable {
    // Attach USER telemetry context to every incoming request.
    Util.initializeContext(request, TelemetryEnvKey.USER);
    String operation = request.getOperation();
    // Dispatch the two supported operations (matching the @ActorConfig
    // task list); anything else is reported back as unsupported.
    switch (operation) {
      case "getRoles":
        getRoles();
        break;
      case "assignRoles":
        assignRoles(request);
        break;
      default:
        onReceiveUnsupportedOperation("UserRoleActor");
    }
  }
private void getRoles() {
logger.info("UserRoleActor: getRoles called");
Response response = DataCacheHandler.getRoleResponse();
if (response == null) {
response = RoleService.getUserRoles();
DataCacheHandler.setRoleResponse(response);
}
sender().tell(response, self());
}
@SuppressWarnings("unchecked")
private void assignRoles(Request actorMessage) {
logger.info(actorMessage.getRequestContext(), "UserRoleActor: assignRoles called");
Response response = new Response();
Map<String, Object> requestMap = actorMessage.getRequest();
requestMap.put(JsonKey.REQUESTED_BY, actorMessage.getContext().get(JsonKey.USER_ID));
requestMap.put(JsonKey.ROLE_OPERATION, "assignRole");
List<String> roles = (List<String>) requestMap.get(JsonKey.ROLES);
RoleService.validateRoles(roles);
UserRoleService userRoleService = UserRoleServiceImpl.getInstance();
List<Map<String, Object>> userRolesList =
userRoleService.updateUserRole(requestMap, actorMessage.getRequestContext());
if (!userRolesList.isEmpty()) {
response.put(JsonKey.RESPONSE, JsonKey.SUCCESS);
}
sender().tell(response, self());
if (((String) response.get(JsonKey.RESPONSE)).equalsIgnoreCase(JsonKey.SUCCESS)) {
syncUserRoles(
JsonKey.USER,
(String) requestMap.get(JsonKey.USER_ID),
userRolesList,
actorMessage.getRequestContext());
} else {
logger.info(actorMessage.getRequestContext(), "UserRoleActor: No ES call to save user roles");
throw new ProjectCommonException(
ResponseCode.roleSaveError.getErrorCode(),
ResponseCode.roleSaveError.getErrorMessage(),
ResponseCode.SERVER_ERROR.getResponseCode());
}
generateTelemetryEvent(
requestMap,
(String) requestMap.get(JsonKey.USER_ID),
"userLevel",
actorMessage.getContext());
}
private void syncUserRoles(
String type, String userId, List<Map<String, Object>> userRolesList, RequestContext context) {
Request request = new Request();
request.setRequestContext(context);
request.setOperation(ActorOperations.UPDATE_USER_ROLES_ES.getValue());
request.getRequest().put(JsonKey.TYPE, type);
request.getRequest().put(JsonKey.USER_ID, userId);
request.getRequest().put(JsonKey.ROLES, userRolesList);
logger.info(context, "UserRoleActor:syncUserRoles: Syncing to ES");
try {
tellToAnother(request);
} catch (Exception ex) {
logger.error(
context,
"UserRoleActor:syncUserRoles: Exception occurred with error message = " + ex.getMessage(),
ex);
}
}
}
|
#!/bin/bash
# Stage Kerberos config/keytab/setup files into a fresh share directory and
# launch the Oracle server container with that directory bind-mounted as its
# init directory.
#
# set -e: abort before `docker run` if any copy fails.
# Quoting every expansion guards against word splitting when the working
# directory path contains spaces (critical for the `rm -rf` below).
set -euo pipefail

current_dir=$(pwd)
share_dir="$current_dir/oracle_share"

# Rebuild the share directory from scratch so no stale files leak in.
rm -rf "$share_dir"
mkdir -p "$share_dir"

cp "$current_dir/share/krb5.conf" "$share_dir"
cp "$current_dir/share/oracle.keytab" "$share_dir"
cp "$current_dir/share/oracle_krb_setup.sh" "$share_dir"

docker run -it -h example.com \
  --mount "type=bind,source=$share_dir,target=/docker-entrypoint-initdb.d" \
  docker-kerberos_oracle-server:latest
|
#!/usr/bin/env bash
# Rebuild the generated documentation on the gh-pages branch, commit it, and
# return to the branch the user started on. `set -e` aborts on any failed step.
set -e

function update_gh_branch {
  local branch_name
  # Ask git directly for the current branch instead of scraping
  # `git branch | grep '\*' | sed`, which breaks on detached HEAD
  # (prints "(HEAD detached at …)") and on branch names containing '*'.
  branch_name=$(git rev-parse --abbrev-ref HEAD)
  git checkout 'gh-pages'
  make
  git add --all '_posts'
  git commit -m 'documentation update'
  git checkout "$branch_name"
}

update_gh_branch
<reponame>schigh/core
package models
import (
"time"
"gopkg.in/mgo.v2/bson"
)
// User is the MongoDB-backed account document. The bson tags give the stored
// field names; json tags control the API representation ("-" keeps billing
// identifiers out of responses).
type User struct {
	ID                   bson.ObjectId   `json:"id" bson:"_id,omitempty"` // Mongo _id; omitted on insert so the driver assigns one
	FullName             string          `json:"fullname" bson:"fullname"`
	Email                string          `json:"email" bson:"email"`
	Password             string          `json:"password" bson:"password"` // NOTE(review): serialized in JSON as "password" — confirm callers never echo it back
	Country              string          `json:"country" bson:"country"`
	City                 string          `json:"city" bson:"city"`
	Address              string          `json:"address" bson:"address"`
	ZIP                  string          `json:"zip" bson:"zip"`
	State                string          `json:"state" bson:"state"`
	EmailVerified        bool            `json:"email_verified" bson:"email_verified"`
	StripeID             string          `json:"-" bson:"stripe_id"`               // hidden from API output
	ActiveSubscriptionID string          `json:"-" bson:"active_subscription_id"` // hidden from API output
	Professional         bool            `json:"professional" bson:"professional"`
	CreatedAt            time.Time       `json:"created_at" bson:"created_at"`
	UpdatedAt            time.Time       `json:"updated_at" bson:"updated_at"`
	FavoriteSites        []bson.ObjectId `json:"favorite_sites" bson:"favorite_sites"` // references to site documents
}
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2019-06-07 11:22
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds two free-form text fields to Event: CPD points and price.
    # Kept byte-stable: editing an already-applied migration can desync
    # migration state from the model definition.

    dependencies = [
        ('js_events', '0015_speaker_vcard_enabled'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='cpd_points',
            field=models.CharField(blank=True, default='', max_length=255, verbose_name='CPD Points'),
        ),
        migrations.AddField(
            model_name='event',
            name='price',
            # NOTE(review): 'Evene Price' looks like a typo for 'Event Price' —
            # fix it in the model's verbose_name and a new migration, not here.
            field=models.CharField(blank=True, default='', max_length=255, verbose_name='Evene Price'),
        ),
    ]
|
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
/**
 * Created by lawrencemao on 1/10/2018.
 *
 * ClawGrabberThing: driver-controlled relic mechanism. Gamepad 1 mapping —
 * A opens the claw (released → closed), bumpers rotate the arm, and the
 * d-pad extends/retracts it via the paired release/retract motors.
 */
@TeleOp(name = "CGT", group = "Tests")
public class CGT extends LinearOpMode {
    private Servo theClaw;      // grabs the RELIC
    private DcMotor arm;        // rotates the grabber
    private DcMotor release;    // extends the arm
    private DcMotor retract;    // retracts the arm

    private static final double FLAPPER_OPEN = 0.1;
    private static final double FLAPPER_CLOSED = 0.9;

    @Override
    public void runOpMode() throws InterruptedException {
        theClaw = hardwareMap.servo.get("claw");
        arm = hardwareMap.dcMotor.get("arm");
        release = hardwareMap.dcMotor.get("release");
        retract = hardwareMap.dcMotor.get("retract");
        // set zero power behavior on motors? hold in place?

        // Start closed so the claw holds until the driver opens it.
        theClaw.setPosition(FLAPPER_CLOSED);
        waitForStart();

        while (opModeIsActive()) {
            driveClaw();
            driveArmRotation();
            driveArmExtension();
            idle();
        }
    }

    /** Claw is open only while A is held; otherwise it snaps closed. */
    private void driveClaw() {
        double target = gamepad1.a ? FLAPPER_OPEN : FLAPPER_CLOSED;
        theClaw.setPosition(target);
    }

    /** Bumpers rotate the grabber arm at half power; neither held → stop. */
    private void driveArmRotation() {
        double power = 0;
        if (gamepad1.left_bumper) {
            power = 0.5;
        } else if (gamepad1.right_bumper) {
            power = -0.5;
        }
        arm.setPower(power);
    }

    /**
     * D-pad extends/retracts the arm. Each direction drives its main motor
     * while the opposing motor carries a small counter-power so the spooled
     * line stays tensioned.
     */
    private void driveArmExtension() {
        if (gamepad1.dpad_up) {
            release.setPower(-0.6);
            retract.setPower(-0.02);
        } else if (gamepad1.dpad_down) {
            retract.setPower(0.4);
            release.setPower(0.3);
        } else {
            retract.setPower(0);
            release.setPower(0);
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.