| repo_name (string, len 6–101) | path (string, len 4–300) | text (string, len 7–1.31M) |
|---|---|---|
sharon1321/studio
|
function/python/brightics/common/datatypes/image.py
|
<filename>function/python/brightics/common/datatypes/image.py<gh_stars>100-1000
import io
import struct
import numpy as np
import cv2
from brightics.common.datatypes import BRTC_CODE, BRTC_CODE_SIZE
class Image(object):
    """In-memory image with a compact little-endian binary serialization.

    Pixel data is held as a numpy array of shape (height, width, n_channels).
    The wire layout (see also :meth:`tobytes`) is::

        brtc_code(BRTC_CODE_SIZE)::data_type(1)::height(4)::width(4)::n_channels(4)::
        mode_size(4)::mode(mode_size)::
        origin_size(4)::origin(origin_size)::
        data_size(4)::data(data_size)
    """

    # Type tag stored after the magic code; distinguishes images from other
    # serialized datatypes sharing the same envelope.
    _data_type = 0
    _header_format = '<{}sBIII'.format(BRTC_CODE_SIZE)
    _pack_format = ''.join([_header_format, 'I{}s', 'I{}s', 'I{}s'])

    def __init__(self, arr, origin=None, mode=None):
        """Wrap ``arr`` as an Image.

        :param arr: numpy array of shape (h, w) for grayscale or (h, w, c).
        :param origin: optional source identifier (kept as-is; may be None).
        :param mode: channel ordering; defaults to 'BGR' (OpenCV convention).
        :raises ValueError: if ``arr`` is neither 2- nor 3-dimensional.
        """
        if len(arr.shape) == 2:
            # Promote grayscale to an explicit single-channel 3-D array so
            # downstream code can always index (h, w, c).
            self.height, self.width = arr.shape
            self.n_channels = 1
            self.data = arr.reshape(self.height, self.width, 1)
        elif len(arr.shape) == 3:
            self.height, self.width, self.n_channels = arr.shape
            self.data = arr
        else:
            raise ValueError("Unknown shape: {}".format(arr.shape))
        self.origin = origin
        self.mode = mode if mode is not None else 'BGR'

    def copy(self):
        """Return a new Image sharing the same underlying pixel array.

        NOTE(review): this is a shallow copy -- mutating ``copy().data``
        mutates the original as well; confirm callers expect that.
        """
        return Image(arr=self.data, origin=self.origin, mode=self.mode)

    def tobytes(self):
        """Serialize this image to bytes.

        Layout::

            brtc_code(BRTC_CODE_SIZE)::data_type(1)::height(4)::width(4)::n_channels(4)::
            mode_size(4)::mode(mode_size)::
            origin_size(4)::origin(origin_size)::
            data_size(4)::data(data_size)
        """
        data_bytes = self.data.tobytes()
        data_size = len(data_bytes)
        # origin may legitimately be None (see __init__); serialize it as an
        # empty string.  The previous code called .encode() on None and its
        # "origin_bytes is None" fallback was unreachable dead code.
        origin_bytes = (self.origin or '').encode('utf-8')
        origin_size = len(origin_bytes)
        mode_bytes = self.mode.encode('utf-8')
        mode_size = len(mode_bytes)
        pack_format = self._pack_format.format(mode_size, origin_size, data_size)
        return struct.pack(pack_format,
                           BRTC_CODE, self._data_type, self.height, self.width, self.n_channels,
                           mode_size, mode_bytes,
                           origin_size, origin_bytes,
                           data_size, data_bytes)

    def resize(self, dsize):
        """Return a resized copy of this image (does NOT modify ``self``).

        Uses INTER_AREA when shrinking and INTER_LINEAR when enlarging,
        which are OpenCV's recommended interpolations for each direction.

        :param dsize: target size as (width, height).
        :return: new :class:`Image` with the same origin and mode.
        """
        org_dsize = (self.width, self.height)
        new_dsize = (int(dsize[0]), int(dsize[1]))
        if org_dsize == new_dsize:
            # No change requested; reuse the existing pixel buffer.
            img_resized = self.data
        elif self.width * self.height > new_dsize[0] * new_dsize[1]:
            img_resized = cv2.resize(self.data, dsize=new_dsize, interpolation=cv2.INTER_AREA)
        else:
            img_resized = cv2.resize(self.data, dsize=new_dsize, interpolation=cv2.INTER_LINEAR)
        return Image(img_resized, origin=self.origin, mode=self.mode)

    def resize_limit(self, limit):
        """Return a copy whose longest side is at most ``limit`` pixels.

        Aspect ratio is preserved; images already within the limit are
        wrapped unchanged.  The previous implementation called cv2.resize
        without the required ``dsize`` argument, scaled only one axis, and
        left ``img_resized`` unbound for images within the limit.

        :param limit: maximum allowed size (pixels) of the longest side.
        :return: new :class:`Image` with the same origin and mode.
        """
        longest = max(self.height, self.width)
        if longest > limit:
            scale = limit / longest
            img_resized = cv2.resize(self.data, dsize=None, fx=scale, fy=scale,
                                     interpolation=cv2.INTER_AREA)
        else:
            img_resized = self.data
        return Image(img_resized, origin=self.origin, mode=self.mode)

    @classmethod
    def from_bytes(cls, b):
        """Deserialize bytes produced by :meth:`tobytes`.

        :param b: serialized image bytes.
        :raises ValueError: if the magic code or the data-type tag is wrong.
        :return: reconstructed :class:`Image`.
        """
        buf = io.BytesIO(b)
        brtc_code, data_type, height, width, n_channels = \
            struct.unpack(cls._header_format, buf.read(BRTC_CODE_SIZE + 1 + 4 + 4 + 4))
        if brtc_code != BRTC_CODE:
            raise ValueError("Unknown data")
        if data_type != cls._data_type:
            raise ValueError('This is not an Image')
        mode_size = struct.unpack('<I', buf.read(4))[0]
        mode = buf.read(mode_size).decode('utf-8')
        origin_size = struct.unpack('<I', buf.read(4))[0]
        origin = buf.read(origin_size).decode('utf-8')
        data_size = struct.unpack('<I', buf.read(4))[0]
        # NOTE(review): assumes 8-bit pixels; tobytes() does not record the
        # dtype, so non-uint8 arrays will not round-trip -- confirm callers.
        data = np.frombuffer(buf.read(data_size), np.uint8).reshape(height, width, n_channels)
        return cls(data, origin, mode)

    @classmethod
    def is_image(cls, b):
        """Cheaply check whether ``b`` looks like a serialized Image.

        Only the magic code and the data-type tag are inspected; the body is
        not validated.
        """
        if not isinstance(b, bytes):
            return False
        header_size = BRTC_CODE_SIZE + 1
        if len(b) < header_size:
            return False
        bio = io.BytesIO(b)
        # Derive sizes from BRTC_CODE_SIZE / cls._data_type instead of the
        # hardcoded 40 / 41 / 0 used previously, for consistency with the
        # rest of the class.
        header, tp = struct.unpack('<{}sB'.format(BRTC_CODE_SIZE), bio.read(header_size))
        return header == BRTC_CODE and tp == cls._data_type
|
buzzonetwo/calcentral
|
spec/models/concerns/academic_roles_spec.rb
|
<reponame>buzzonetwo/calcentral<filename>spec/models/concerns/academic_roles_spec.rb<gh_stars>0
# Specs for Concerns::AcademicRoles, which translates SIS academic plan,
# program and career codes into application-level role identifiers.
describe Concerns::AcademicRoles do
  # Each matcher table is expected to be a non-empty collection of hashes
  # carrying a :role_code and a :match pattern.
  shared_examples 'a map of academic status codes to roles' do
    it 'includes plans and career matchers' do
      expect(subject.count).to_not eq 0
    end
    it 'defines role code and match string for each matcher' do
      subject.each do |matcher|
        expect(matcher).to have_key(:role_code)
        expect(matcher).to have_key(:match)
      end
    end
  end
  # Translators return nil for nil input and an empty array for codes
  # that do not map to any role.
  shared_examples 'a translator that handles invalid input' do
    context 'when code is nil' do
      let(:code) { nil }
      it { should be nil }
    end
    context 'when code is not mapped to any role' do
      let(:code) { 'BUNK' }
      it { should eq [] }
    end
  end
  context 'when defining plan roles' do
    subject { described_class::ACADEMIC_PLAN_ROLES }
    it_behaves_like 'a map of academic status codes to roles'
  end
  context 'when defining program roles' do
    subject { described_class::ACADEMIC_PROGRAM_ROLES }
    it_behaves_like 'a map of academic status codes to roles'
  end
  context 'when defining career roles' do
    subject { described_class::ACADEMIC_CAREER_ROLES }
    it_behaves_like 'a map of academic status codes to roles'
  end
  describe '#get_academic_plan_roles' do
    subject { described_class.get_academic_plan_roles code }
    it_behaves_like 'a translator that handles invalid input'
    context 'when a match is found' do
      let(:code) { '99V06G' }
      it { should contain_exactly 'summerVisitor' }
    end
  end
  # NOTE(review): this block duplicates the '#get_academic_plan_roles'
  # examples above (nil and unmapped-code cases are already covered by the
  # shared example) -- consider merging the two describes.
  describe '#get_academic_plan_roles' do
    subject { described_class.get_academic_plan_roles(code) }
    context 'when code is nil' do
      let(:code) { nil }
      it { should be nil }
    end
    context 'when code is not mapped to any role' do
      let(:code) { '25000ABCD' }
      it { should eq [] }
    end
  end
  describe '#get_academic_program_roles' do
    subject { described_class.get_academic_program_roles(code) }
    context 'when code is nil' do
      let(:code) { nil }
      it { should be nil }
    end
    # Unlike the plan/career translators, an unmapped program code still
    # yields the 'degreeSeeking' role, so the shared invalid-input
    # example does not apply here.
    context 'when code is not mapped to any role' do
      let(:code) { 'BUNK' }
      it { should eq ['degreeSeeking'] }
    end
    context 'when a match is found' do
      let(:code) { 'UCLS' }
      it { should contain_exactly('lettersAndScience', 'degreeSeeking') }
    end
  end
  describe '#get_academic_career_roles' do
    subject { described_class.get_academic_career_roles(code) }
    it_behaves_like 'a translator that handles invalid input'
    context 'when a match is found' do
      let(:code) { 'LAW' }
      it { should contain_exactly 'law' }
    end
  end
  describe '#role_defaults' do
    subject { described_class.role_defaults }
    # Pins the complete role vocabulary: adding or removing a role must be
    # reflected here (both the count and the explicit key list).
    it 'returns all possible roles set to false' do
      expect(subject.keys.count).to eq (29)
      expect(subject['concurrent']).to eq false
      expect(subject['courseworkOnly']).to eq false
      expect(subject['degreeSeeking']).to eq false
      expect(subject['doctorScienceLaw']).to eq false
      expect(subject['fpf']).to eq false
      expect(subject['grad']).to eq false
      expect(subject['haasBusinessAdminMasters']).to eq false
      expect(subject['haasBusinessAdminPhD']).to eq false
      expect(subject['haasFullTimeMba']).to eq false
      expect(subject['haasEveningWeekendMba']).to eq false
      expect(subject['haasExecMba']).to eq false
      expect(subject['haasMastersFinEng']).to eq false
      expect(subject['haasMbaPublicHealth']).to eq false
      expect(subject['haasMbaJurisDoctor']).to eq false
      expect(subject['jurisSocialPolicyMasters']).to eq false
      expect(subject['jurisSocialPolicyPhC']).to eq false
      expect(subject['jurisSocialPolicyPhD']).to eq false
      expect(subject['law']).to eq false
      expect(subject['lawJdCdp']).to eq false
      expect(subject['lawJdLlm']).to eq false
      expect(subject['lawJspJsd']).to eq false
      expect(subject['lawVisiting']).to eq false
      expect(subject['lettersAndScience']).to eq false
      expect(subject['ugrdEngineering']).to eq false
      expect(subject['masterOfLawsLlm']).to eq false
      expect(subject['summerVisitor']).to eq false
      expect(subject['ugrd']).to eq false
      expect(subject['ugrdNonDegree']).to eq false
      expect(subject['ugrdUrbanStudies']).to eq false
    end
  end
end
|
albertoirurueta/irurueta-ar
|
src/main/java/com/irurueta/ar/sfm/MatchedSamples.java
|
<reponame>albertoirurueta/irurueta-ar<filename>src/main/java/com/irurueta/ar/sfm/MatchedSamples.java
/*
* Copyright (C) 2016 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.irurueta.ar.sfm;
import java.io.Serializable;
import java.util.BitSet;
/**
* Contains data relating matched 2D points and their reconstructions.
*/
/**
 * Contains data relating matched 2D points and their reconstructions.
 * <p>
 * NOTE(review): this class is Serializable but declares no explicit
 * serialVersionUID -- consider adding one to stabilize the serial form.
 */
public class MatchedSamples implements Serializable {
    /**
     * Default quality score value.
     */
    public static final double DEFAULT_QUALITY_SCORE = 1.0;

    /**
     * 2D matched samples on different views.
     * Each of these points correspond to projections of the same 3D point into
     * different views.
     */
    private Sample2D[] mSamples;

    /**
     * Cameras associated to the views of each of the matched points.
     */
    private EstimatedCamera[] mCameras;

    /**
     * Id's of views where matched points belong to.
     */
    private int[] mViewIds;

    /**
     * 3D reconstructed point. Initially, might not be available.
     */
    private ReconstructedPoint3D mReconstructedPoint;

    /**
     * Quality score of a match.
     */
    private double mQualityScore = DEFAULT_QUALITY_SCORE;

    /**
     * Indicates whether match between a pair of views has been considered an
     * inlier or not.
     * Position 0 of this bitset corresponds to viewIds in positions 0 and 1,
     * position 1 of bitset corresponds to viewIds in positions 1 and 2, and so
     * on.
     */
    private BitSet mInliers;

    /**
     * Gets 2D matched samples on different views containing matched points.
     * Each of these points correspond to projections of the same 3D point into
     * different views.
     *
     * @return 2D matched samples on different views.
     */
    public Sample2D[] getSamples() {
        return mSamples;
    }

    /**
     * Sets 2D matched samples on different views containing matched points.
     * Each of these points correspond to projections of the same 3D point into
     * different views.
     *
     * @param samples 2D matched samples on different views.
     */
    public void setSamples(final Sample2D[] samples) {
        mSamples = samples;
    }

    /**
     * Gets cameras associated to the views of each of the matched points.
     *
     * @return cameras associated to the views of each of the matched points.
     */
    public EstimatedCamera[] getCameras() {
        return mCameras;
    }

    /**
     * Sets cameras associated to the views of each of the matched points.
     *
     * @param cameras cameras associated to the views of each of the matched
     *                points.
     */
    public void setCameras(final EstimatedCamera[] cameras) {
        mCameras = cameras;
    }

    /**
     * Gets id's of views where matched points belong to.
     *
     * @return id's of view where matched points belong to.
     */
    public int[] getViewIds() {
        return mViewIds;
    }

    /**
     * Sets id's of views where matched points belong to.
     *
     * @param viewIds id's of views where matched points belong to.
     */
    public void setViewIds(final int[] viewIds) {
        mViewIds = viewIds;
    }

    /**
     * Gets 3D reconstructed point.
     *
     * @return 3D reconstructed point.
     */
    public ReconstructedPoint3D getReconstructedPoint() {
        return mReconstructedPoint;
    }

    /**
     * Sets 3D reconstructed point.
     * Unlike the other setters, this also propagates the reconstructed point
     * to every currently-set 2D sample so each sample keeps a back-reference
     * to its reconstruction.
     *
     * @param reconstructedPoint 3D reconstructed point.
     */
    public void setReconstructedPoint(final ReconstructedPoint3D reconstructedPoint) {
        mReconstructedPoint = reconstructedPoint;
        if (mSamples != null) {
            // Keep each sample's back-reference in sync with this match.
            for (final Sample2D sample : mSamples) {
                sample.setReconstructedPoint(reconstructedPoint);
            }
        }
    }

    /**
     * Gets quality score of match. The larger the value, the better the
     * quality. This is used for robust estimators such as PROSAC or PROMedS.
     * This value is typically obtained from algorithms determining scores for
     * matches.
     *
     * @return quality score of match.
     */
    public double getQualityScore() {
        return mQualityScore;
    }

    /**
     * Sets quality score of match. The larger the value, the better the
     * quality. This is used for robust estimators such as PROSAC or PROMedS.
     * This value is typically obtained from algorithms determining scores for
     * matches.
     *
     * @param qualityScore quality score of match.
     */
    public void setQualityScore(final double qualityScore) {
        mQualityScore = qualityScore;
    }

    /**
     * Indicates whether match between a pair of views has been considered an
     * inlier or not.
     * Position 0 of this bitset corresponds to viewIds in positions 0 and 1,
     * position 1 of bitset corresponds to viewIds in positions 1 and 2, and so
     * on.
     *
     * @return indicates whether match between a pair of views has been
     * considered an inlier or not.
     */
    public BitSet getInliers() {
        return mInliers;
    }

    /**
     * Specifies whether match between a pair of views has been considered an
     * inlier or not.
     * Position 0 of this bitset corresponds to viewIds in positions 0 and 1,
     * position 1 of bitset corresponds to viewIds in positions 1 and 2, and so
     * on.
     *
     * @param inliers set indicating whether a match between a pair of views has
     *                been considered an inlier or not.
     */
    public void setInliers(final BitSet inliers) {
        mInliers = inliers;
    }
}
|
sudhirs745/instagram-clone
|
src/components/others/stickers/__test__/stickers-mockArray.js
|
// Mock list of sticker image paths consumed by the stickers component tests.
export default [
  '/images/stickers/wall.png',
  '/images/stickers/oracles.png',
  '/images/stickers/like9.png',
]
|
googleinterns/gail-dyn
|
test_deformable.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Demo script: drops a soft (mass-spring) cube onto a plane and lets it
# settle under gravity in the pybullet GUI, running until the window closes.
import pybullet as p
from time import sleep

# The GUI connection must be established before any other pybullet calls.
physicsClient = p.connect(p.GUI)
import pybullet_data

# Make bundled assets (plane.urdf, cube.obj) resolvable by loadURDF/loadSoftBody.
p.setAdditionalSearchPath(pybullet_data.getDataPath())
# Deformable-world reset enables soft-body simulation support.
p.resetSimulation(p.RESET_USE_DEFORMABLE_WORLD)
gravZ = -10
p.setGravity(0, 0, gravZ)

# Static ground plane at the origin (identity orientation quaternion).
planeOrn = [0, 0, 0, 1]
planeId = p.loadURDF("plane.urdf", [0, 0, 0], planeOrn)
# Soft cube 2.5 units above the plane, simulated with bending + mass springs
# (useNeoHookean=0 selects the spring model over the FEM material model).
boxId2 = p.loadSoftBody("cube.obj", basePosition=[0, 0, 2.5], scale=1, mass=1.,
                        useNeoHookean=0, useBendingSprings=1, useMassSpring=1,
                        springElasticStiffness=40, springDampingStiffness=.1,
                        springDampingAllDirections=1, useSelfCollision=0,
                        frictionCoeff=.5, useFaceContact=1)
p.setPhysicsEngineParameter(sparseSdfVoxelSize=0.25)
p.setRealTimeSimulation(1)

# Keep the process alive while the GUI is open; gravity is re-applied each
# tick (NOTE(review): the repeated setGravity appears redundant -- confirm).
while p.isConnected():
    p.setGravity(0, 0, gravZ)
    sleep(1. / 240.)
|
landyking/java-practices
|
learn-concurrency/src/test/java/com/github/landyking/learnConcurrency/CyclicBarrierTest.java
|
package com.github.landyking.learnConcurrency;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
/**
* Created by landy on 2019/2/28.
*/
/**
 * Created by landy on 2019/2/28.
 *
 * Demonstrates CyclicBarrier reuse: four worker threads rendezvous at the
 * barrier twice, and the barrier action runs once per completed trip.
 */
public class CyclicBarrierTest {
    public static void main(String[] args) throws InterruptedException, BrokenBarrierException {
        final CyclicBarrier barrier = new CyclicBarrier(4, new Runnable() {
            @Override
            public void run() {
                // Executed by the last thread to arrive, once per trip.
                System.out.println(Thread.currentThread().getName() + " # barrier action");
            }
        });

        for (int t = 0; t < 4; t++) {
            Runnable worker = new Runnable() {
                @Override
                public void run() {
                    // Each worker crosses the barrier twice, showing that the
                    // barrier resets automatically after every trip.
                    for (int round = 0; round < 2; round++) {
                        try {
                            System.out.println(Thread.currentThread().getName() + "-" + round + " ##### start");
                            int arrivalIndex = barrier.await();
                            System.out.println(Thread.currentThread().getName() + "-" + round + " ##### over " + arrivalIndex);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } catch (BrokenBarrierException e) {
                            e.printStackTrace();
                        }
                    }
                }
            };
            new Thread(worker).start();
        }

        System.out.println("############");
        // Keep the JVM alive long enough for the demo threads to finish.
        TimeUnit.SECONDS.sleep(100);
    }
}
|
pilinsin/p2p-verse
|
crdt/filters.go
|
<gh_stars>0
package crdtverse
import (
"bytes"
query "github.com/ipfs/go-datastore/query"
"strings"
)
// data key layout: (<pid>)/<category>/(<tKey>)

// KeyMatchFilter accepts entries whose leading key segments equal the
// filter key's segments; a "*" segment in the filter matches any single
// entry segment.
type KeyMatchFilter struct {
	Key string
}

func (f KeyMatchFilter) Filter(e query.Entry) bool {
	entrySegs := strings.Split(strings.TrimPrefix(e.Key, "/"), "/")
	patternSegs := strings.Split(strings.TrimPrefix(f.Key, "/"), "/")
	// The entry must have at least as many segments as the pattern.
	if len(patternSegs) > len(entrySegs) {
		return false
	}
	for i, seg := range patternSegs {
		if seg == "*" {
			continue
		}
		if seg != entrySegs[i] {
			return false
		}
	}
	return true
}
// KeyExistFilter accepts entries whose key contains the filter key as one
// of its "/"-separated segments.
type KeyExistFilter struct {
	Key string
}

func (f KeyExistFilter) Filter(e query.Entry) bool {
	for _, seg := range strings.Split(strings.TrimPrefix(e.Key, "/"), "/") {
		if seg == f.Key {
			return true
		}
	}
	return false
}
// ValueMatchFilter accepts entries whose value is byte-for-byte equal to Val.
type ValueMatchFilter struct {
	Val []byte
}

func (f ValueMatchFilter) Filter(e query.Entry) bool {
	return bytes.Equal(e.Value, f.Val)
}
|
moddingplayground/thematic
|
src/main/java/net/moddingplayground/thematic/api/BuiltinDecoratables.java
|
package net.moddingplayground.thematic.api;
import net.minecraft.block.Blocks;
import net.minecraft.block.MapColor;
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
import net.moddingplayground.thematic.api.registry.ThematicRegistry;
import net.moddingplayground.thematic.api.theme.Decoratable;
import net.moddingplayground.thematic.api.theme.data.preset.bannerpattern.BannerPatternWithItemDecoratableData;
import net.moddingplayground.thematic.api.theme.data.preset.block.BookshelfDecoratableData;
import net.moddingplayground.thematic.api.theme.data.preset.block.GateDecoratableData;
import net.moddingplayground.thematic.api.theme.data.preset.block.LadderDecoratableData;
import net.moddingplayground.thematic.api.theme.data.preset.block.LadderVaryingDecoratableData;
import net.moddingplayground.thematic.api.theme.data.preset.block.LanternDecoratableData;
import net.moddingplayground.thematic.api.theme.data.preset.block.SeatDecoratableData;
import net.moddingplayground.thematic.impl.block.theme.lantern.MechanicalLanternBlock;
import net.moddingplayground.thematic.impl.block.theme.lantern.RusticLanternBlock;
import net.moddingplayground.thematic.impl.block.theme.lantern.SunkenLanternBlock;
import net.moddingplayground.thematic.impl.theme.data.block.entity.chest.MetalChestDecoratableData;
import net.moddingplayground.thematic.impl.theme.data.block.entity.chest.RusticChestDecoratableData;
import net.moddingplayground.thematic.impl.theme.data.block.entity.chest.TrappedMetalChestDecoratableData;
import net.moddingplayground.thematic.impl.theme.data.block.entity.chest.TrappedRusticChestDecoratableData;
import static net.moddingplayground.thematic.api.BuiltinThemes.*;
import static net.moddingplayground.thematic.api.util.BlockSettingsFactory.*;
/**
* References to all built-in registered decoratables.
*/
/**
 * References to all built-in registered decoratables.
 * <p>
 * Each constant registers one decoratable under the mod's namespace and
 * attaches per-theme data for the RUSTIC, SUNKEN and MECHANICAL themes.
 * NOTE(review): {@code Thematic} is referenced without a visible import --
 * presumably resolved from the same package; confirm.
 */
public interface BuiltinDecoratables {
    Decoratable LADDER = register("ladder", "%s_ladder")
        .add(RUSTIC, LadderDecoratableData::new)
        .add(SUNKEN, LadderVaryingDecoratableData.createMetal(3, CHAIN_REQUIRES_TOOL_STRONGER))
        .add(MECHANICAL, LadderDecoratableData.createMetal(CHAIN_REQUIRES_TOOL_STRONGER));
    Decoratable GATE = register("gate", "%s_gate")
        .add(RUSTIC, GateDecoratableData.create(Blocks.SPRUCE_PLANKS))
        .add(SUNKEN, GateDecoratableData.createMetal(Blocks.IRON_BARS, NETHERITE_REQUIRES_TOOL))
        .add(MECHANICAL, GateDecoratableData.createMetal(Blocks.OXIDIZED_COPPER, COPPER_REQUIRES_TOOL));
    Decoratable LANTERN = register("lantern", "%s_lantern")
        .add(RUSTIC, LanternDecoratableData.create(RusticLanternBlock::new))
        .add(SUNKEN, LanternDecoratableData.create(SunkenLanternBlock::new))
        .add(MECHANICAL, LanternDecoratableData.create(MechanicalLanternBlock::new));
    Decoratable BOOKSHELF = register("bookshelf", "%s_bookshelf")
        .add(RUSTIC, BookshelfDecoratableData.create(s -> s.mapColor(MapColor.SPRUCE_BROWN)))
        .add(SUNKEN, BookshelfDecoratableData.createMetal(NETHERITE_REQUIRES_TOOL_STRONGER))
        .add(MECHANICAL, BookshelfDecoratableData.createMetal(COPPER_REQUIRES_TOOL_STRONGER));
    Decoratable CHEST = register("chest", "%s_chest")
        .add(RUSTIC, RusticChestDecoratableData.create(Blocks.SPRUCE_PLANKS))
        .add(SUNKEN, MetalChestDecoratableData.create(Blocks.IRON_BARS, NETHERITE_REQUIRES_TOOL))
        .add(MECHANICAL, MetalChestDecoratableData.create(Blocks.OXIDIZED_COPPER, COPPER_REQUIRES_TOOL));
    // Trapped chests use a "trapped_%s_chest" id format (prefix, not suffix).
    Decoratable TRAPPED_CHEST = register("trapped_chest", "trapped_%s_chest")
        .add(RUSTIC, TrappedRusticChestDecoratableData.create(Blocks.SPRUCE_PLANKS))
        .add(SUNKEN, TrappedMetalChestDecoratableData.create(Blocks.IRON_BARS, NETHERITE_REQUIRES_TOOL))
        .add(MECHANICAL, TrappedMetalChestDecoratableData.create(Blocks.OXIDIZED_COPPER, COPPER_REQUIRES_TOOL));
    Decoratable SEAT = register("seat", "%s_seat")
        .add(RUSTIC, SeatDecoratableData.create(Blocks.SPRUCE_PLANKS))
        .add(SUNKEN, SeatDecoratableData.createMetal(Blocks.IRON_BARS, NETHERITE_REQUIRES_TOOL))
        .add(MECHANICAL, SeatDecoratableData.createMetal(Blocks.OXIDIZED_COPPER, COPPER_REQUIRES_TOOL));
    // Banner patterns take no name format (see the no-format register overload).
    Decoratable BANNER_PATTERN = register("banner_pattern")
        .add(RUSTIC, BannerPatternWithItemDecoratableData.create("rope_banner_pattern"))
        .add(SUNKEN, BannerPatternWithItemDecoratableData.create("anchor_banner_pattern"))
        .add(MECHANICAL, BannerPatternWithItemDecoratableData.create("cog_banner_pattern"));

    /**
     * Registers a decoratable under this mod's namespace.
     *
     * @param id          registry path for the decoratable.
     * @param decoratable the decoratable instance to register.
     * @return the registered decoratable, for chaining {@code add} calls.
     */
    private static Decoratable register(String id, Decoratable decoratable) {
        return Registry.register(ThematicRegistry.DECORATABLE, new Identifier(Thematic.MOD_ID, id), decoratable);
    }

    /**
     * Registers a decoratable whose block names follow {@code format}
     * (a %s placeholder receives the theme name).
     */
    private static Decoratable register(String id, String format) {
        return register(id, new Decoratable(format));
    }

    /**
     * Registers a decoratable with no name format (e.g. banner patterns).
     */
    private static Decoratable register(String id) {
        return register(id, new Decoratable());
    }
}
|
nadavtal/3dbia
|
app/containers/Projects/Projects.js
|
<reponame>nadavtal/3dbia
import React, { useState, useEffect, memo, useMemo} from 'react';
import { useHistory} from 'react-router-dom';
import Layout from 'containers/Management/Layout';
import Filters from 'components/Filters/Filters'
import { MDBTabPane,
MDBTabContent,
MDBNav,
MDBNavItem,
MDBNavLink,
MDBRow,
MDBCol ,
MDBIcon,
MDBCardGroup,
MDBCard,
MDBView,
MDBCardFooter,
MDBAnimation,
MDBPagination,
MDBBtn,
MDBMask,
MDBCardBody,
MDBPageItem,
MDBPageNav,
MDBSwitch
} from "mdbreact";
import { connect } from 'react-redux';
import { compose } from 'redux';
import { createStructuredSelector } from 'reselect';
import { toggleModal, toggleAlert, toggleLoadingSpinner } from 'containers/App/actions';
import {newBridgeCreated} from 'containers/Organizations/Organization/actions'
import { makeSelectCurrentUser, makeSelectCurrentUserRole } from 'containers/App/selectors';
import { ExportService } from 'containers/Wijmo/export';
import { useInjectReducer } from 'utils/injectReducer';
import { useInjectSaga } from 'utils/injectSaga';
import TextSearch from 'components/TextSearch/TextSearch';
import Menu from '../Management/Menu'
import DataTable from '../../components/DataTable/DataTable';
import MyWijmoDetailedTable from 'containers/MyTables/MyWijmoDetailedTable';
import Select from 'components/Select/Select';
// import DetailedTable from 'containers/MyTables/DetailedTable';
import * as actions from './actions'
import IconButtonToolTip from 'components/IconButtonToolTip/IconButtonToolTip';
const key = 'projectsPage';
import saga from './saga';
import './Projects.css';
import { searchAll } from 'utils/dataUtils';
import { MenuItem } from 'react-bootstrap';
import BasicMap from '../Resium/BasicMap';
/**
 * Projects listing page: renders all bridges for the current user as cards,
 * on a map, or in a detailed table, with text search and (role permitting)
 * a "create new bridge" action. Expects items, surveys, onProjectClick,
 * createNewBridge and enableCreateBridge via props.
 */
export function Projects(props) {
  useInjectSaga({ key, saga });
  // View mode ('Card view' | 'Map view' | 'Table view'), row selection
  // (currently unused), and the search-filtered subset of props.items.
  // NOTE(review): searchResults is seeded from props.items once and is not
  // resynced when props.items changes -- confirm items are loaded before mount.
  const [tableMode, setTableMode] = useState('Card view');
  const [selection, setSelection] = useState([]);
  const [searchResults, setSearchResults] = useState(props.items);
  useEffect(() => {
    // console.log('[ProjectwPage.js] useEffect', props.projects);
  }, [tableMode]);
  const menu = [
    { name: 'Card view' },
    { name: 'Map view' },
    { name: 'Table view' }
  ]
  const history = useHistory()
  // One card per bridge; clicking the image, body or footer link all route
  // through props.onProjectClick with the bridge id and its organization.
  const projectsCards = searchResults.map((item, index) => {
    const scrollContainerStyle = {
      width: "100%",
      // maxHeight: `calc(100vh)-${theme.layout.topBarSize}`,
      height: `3rem`,
      maxHeight: `3rem`,
      overFlowY: 'auto',
      overFlowX: 'hidden'
    };
    return (
      <MDBCol md="4" key={index}>
        <MDBCard className="mb-5" narrow>
          <MDBView cascade hover>
            <img
              onClick={() =>
                props.onProjectClick(
                  item.bid ? item.bid : item.id,
                  item.organization_id,
                )
              }
              src={
                item.image_url && item.image_url.length
                  ? item.image_url
                  : require('../../images/LOGIN.jpg')
              }
              className="img-fluid projectImage"
              alt={item.name}
            />
          </MDBView>
          <MDBCardBody
            onClick={() =>
              props.onProjectClick(
                item.bid ? item.bid : item.id,
                item.organization_id,
              )
            }
          >
            <h4 className="card-title">{item.name}</h4>
            <div
              style={scrollContainerStyle}
              className="scrollbar scrollbar-primary"
            >
              <p className="card-text">{item.description}</p>
            </div>
          </MDBCardBody>
          <MDBCardFooter className="links-light">
            <span className="pull-left pt-2">
              <a href="#!">
                <MDBIcon icon="share-alt" className="mr-2" />
              </a>
            </span>
            <span className="float-right">
              <div
                className="waves-effect p-2"
                onClick={() =>
                  props.onProjectClick(
                    item.bid ? item.bid : item.id,
                    item.organization_id,
                  )
                }
              >
                View item <MDBIcon icon="image" className="ml-1" />
              </div>
            </span>
          </MDBCardFooter>
        </MDBCard>
      </MDBCol>
    );
  })
  // Empty query restores the full list; otherwise free-text match across
  // all item fields. NOTE(review): the `data` parameter is never used and
  // callers pass only `val` -- consider dropping it.
  const handleSearch = (val, data) => {
    if (val == '') {
      setSearchResults(props.items)
    } else {
      setSearchResults(searchAll(val, props.items))
    }
  }
  const tableModes = [
    {name: 'Card view'},
    {name: 'Map view'},
    {name: 'Table view'},
  ]
  const bridgesLength = searchResults.length
  // console.log(bridgesLength)
  // Sticky header: view-mode selector, search box (hidden in table view),
  // bridge count, and the role-gated "create bridge" button.
  // NOTE(review): the TextSearch below is memoized with an empty deps
  // array, so its handleSearch closure captures the initial props.items --
  // confirm this is intentional.
  const Header = ({length}) => (
    <div className="d-flex justify-content-between align-items-center header">
      <div className="d-flex">
        <Select
          value={tableMode}
          className="background-white color-black px-1 m-0"
          labelClass=""
          options={tableModes}
          onChange={val => setTableMode(val)}
        />
        <MDBAnimation type="fadeIn" className="d-flex">
          {useMemo(() => <TextSearch
            className={`ml-3 mt-0 color-white ${tableMode == 'Table view' && 'hide-content'}`}
            // value=""
            onChange={val => handleSearch(val)}
          />, [])}
        </MDBAnimation>
      </div>
      <div className="bold color-white">
        <h5>{`Bridges (${length})`}</h5>
        {/* <h5>{`Bridges`}</h5> */}
      </div>
      <div className={!props.enableCreateBridge && 'hide-content'}>
        <MDBBtn
          size="sm"
          rounded
          disabled={props.currentUserRole.role_type_id == 13}
          className="createBridgeButton bgSecondary"
          onClick={() => props.createNewBridge()}
        >
          Create new bridge <MDBIcon icon="image" className="ml-1" />
        </MDBBtn>
      </div>
    </div>
  );
  const scrollContainerStyle = {
    width: "100%",
    // maxHeight: `calc(100vh)-${theme.layout.topBarSize}`,
    minHeight: `75vh`,
    maxHeight: `75vh`,
    overFlowY: 'auto',
    overFlowX: 'hidden'
  };
  // Body for the current view mode; card view is the default branch.
  const Component = () => {
    switch (tableMode) {
      case 'Map view':
        return <BasicMap
          bridges={searchResults}
          onLoadBridgeClick={(item) =>props.onProjectClick(
            item.bid ? item.bid : item.id,
            item.organization_id,
          )}/>
      case 'Table view':
        return <MDBAnimation type="fadeIn" className="">
          <MyWijmoDetailedTable
            className="bridgeDetailedTable"
            data={props.items}
            subData={props.surveys}
            exportService={new ExportService()}
            onRowClick={bridge => props.onProjectClick(bridge.bid)}
            tableConfig={{
              exludesFields: [
                'id',
                'user_id',
                'survey_id',
                'provider_id',
                'organization_id',
                'role_type_id',
              ],
              editableFields: [],
              longFields: [],
              dateFields: [],
              fixedColumns: [],
              wholeNumberFields: [],
              decimelNumberFields: [],
            }}
            connectingParentField="bid"
            connectingChildField="bid"
            detailedHeader="Surveys history"
            selectionMode="CellRange"
          />
        </MDBAnimation>
      default:
        return <MDBAnimation type="fadeIn" className="">
          <MDBCardGroup deck>
            <MDBRow>{projectsCards}</MDBRow>
          </MDBCardGroup>
        </MDBAnimation>
    }
    // return (
    //   <div
    //     style={scrollContainerStyle}
    //     className="scrollbar scrollbar-primary">
    //     {tableMode ? (
    //       <MDBAnimation type="fadeIn" className="">
    //         <MyWijmoDetailedTable
    //           className="bridgeDetailedTable"
    //           data={props.items}
    //           subData={props.surveys}
    //           exportService={new ExportService()}
    //           onRowClick={bridge => props.onProjectClick(bridge.bid)}
    //           tableConfig={{
    //             exludesFields: [
    //               'id',
    //               'user_id',
    //               'survey_id',
    //               'provider_id',
    //               'organization_id',
    //               'role_type_id',
    //             ],
    //             editableFields: [],
    //             longFields: [],
    //             dateFields: [],
    //             fixedColumns: [],
    //             wholeNumberFields: [],
    //             decimelNumberFields: [],
    //           }}
    //           connectingParentField="bid"
    //           connectingChildField="bid"
    //           detailedHeader="Surveys history"
    //           selectionMode="CellRange"
    //         />
    //       </MDBAnimation>
    //     ) : (
    //       <>
    //         <MDBAnimation type="fadeIn" className="">
    //           <MDBCardGroup deck>
    //             <MDBRow>{projectsCards}</MDBRow>
    //           </MDBCardGroup>
    //         </MDBAnimation>
    //       </>
    //     )}
    //   </div>
    // );
  }
  return (
    <MDBCard narrow>
      <MDBView
        cascade
        className="mdb-color color-white card-header bgPrimary"
      >
        {/* <MDBAnimation type="bounceInRight" className=""> */}
        {useMemo(
          () => (
            <Header length={bridgesLength} />
          ),
          [tableMode],
        )}
        {/* <Header length={bridgesLength}/> */}
        {/* </MDBAnimation> */}
      </MDBView>
      <MDBAnimation type="fadeIn" className="">
        <div
          style={scrollContainerStyle}
          className="scrollbar scrollbar-primary p-2 position-relative"
        >
          <Component />
        </div>
      </MDBAnimation>
    </MDBCard>
    // <Layout
    //   bodyTitle={'All Bridges'}
    //   menuTitle="Views"
    //   menu={<Menu
    //     menu={menu}
    //     handleClick={item => setTableMode(item)}
    //     selected={tableMode}
    //   />}
    //   headerComponent={<Header length={bridgesLength}/>}
    //   component={<Component />}
    // />
  );
}
// Only the current user and their role come from redux; they gate the
// "create bridge" button inside the component.
const mapStateToProps = createStructuredSelector({
  currentUser: makeSelectCurrentUser(),
  currentUserRole: makeSelectCurrentUserRole(),
});

// NOTE(review): no actions are mapped here; callbacks such as
// onProjectClick / createNewBridge are expected to arrive via parent props.
export function mapDispatchToProps(dispatch) {
  return {
  };
}

const withConnect = connect(
  mapStateToProps,
  mapDispatchToProps,
);

// memo prevents re-renders when the connected props are unchanged.
export default compose(
  withConnect,
  memo,
)(Projects);
|
ezzatron/overpass-websocket-client
|
test/suite/core/create-handshake.spec.js
|
<filename>test/suite/core/create-handshake.spec.js
var expect = require('chai').expect

var createHandshake = require('../../../core/create-handshake')

describe('createHandshake', function () {
  it('should produce a valid handshake', function () {
    var mimeType = 'application/json'
    var handshake = new Uint8Array(createHandshake(111, 222, mimeType))

    // Fixed "OP" prefix, then the two numeric arguments echoed as bytes.
    expect(String.fromCharCode(handshake[0])).to.equal('O')
    expect(String.fromCharCode(handshake[1])).to.equal('P')
    expect(handshake[2]).to.equal(111)
    expect(handshake[3]).to.equal(222)

    // Length-prefixed MIME type fills the remainder of the buffer.
    expect(handshake[4]).to.equal(mimeType.length)
    expect(String.fromCharCode.apply(null, handshake.slice(5))).to.equal(mimeType)
  })
})
|
weucode/COMFORT
|
artifact_evaluation/data/codeCoverage/fuzzilli_generate/661.js
|
<gh_stars>10-100
// Fuzzilli-generated coverage test case; the ".integer"/".primitive"
// comments are type annotations emitted by the fuzzer. Do not "clean up":
// dead assignments and the never-entered loop are part of the generated
// program (the string comparison "k**baeaDif" < 9 is presumably always
// false, so the while body never runs).
function main() {
  const v2 = 4;
  // v2 = .integer
  let v3 = 0;
  const v4 = v3 + 1;
  // v4 = .primitive
  v3 = v4;
  const v6 = 0;
  // v6 = .integer
  let v7 = 0;
  while ("k**baeaDif" < 9) {
  }
}
main();
|
DeepankarSinha/megoro-kernel
|
script/cmd/cmd.c
|
<filename>script/cmd/cmd.c
/*
*Copyright 2016 <NAME>
*
*/
#include "cmdlist.h"
#include "command.h"
#include "../sys/system.h"
/*
 * Dispatch a raw command line to its handler.
 *
 * Extracts the command name from cmdline, looks up its command number,
 * and executes it with the full command line.
 *
 * Returns:  1 - executed successfully
 *          -1 - execution failure
 *           0 - unknown command
 */
int request(char* cmdline){
    char *name = getcmd(cmdline);
    int cmdno = isvalidcmd(name);
    if(!cmdno)
        return 0; /* not a recognized command */
    return exec(cmdno, cmdline) ? 1 : -1;
}
|
jMonkeyEngine-mirrors/JmeSDK-jayfella-github
|
src/main/java/com/jayfella/sdk/sdk/tree/scene/LightTreeItem.java
|
<reponame>jMonkeyEngine-mirrors/JmeSDK-jayfella-github
package com.jayfella.sdk.sdk.tree.scene;
import com.jayfella.sdk.ext.core.ThreadRunner;
import com.jme3.light.Light;
import com.jme3.scene.Spatial;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIcon;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIconView;
import javafx.scene.Node;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.MenuItem;
/**
 * Scene-tree item representing a light attached to a spatial.
 */
public class LightTreeItem extends SceneTreeItem {

    public LightTreeItem(Object value, Node graphic) {
        super(value, graphic);
    }

    /**
     * Builds the context menu for this light: a single "Delete" entry that
     * removes the light from its parent spatial and detaches this item
     * from the tree.
     */
    @Override
    public ContextMenu getMenu() {
        MenuItem deleteEntry = new MenuItem("Delete", new FontAwesomeIconView(FontAwesomeIcon.TRASH));
        deleteEntry.setOnAction(event -> {
            Light light = (Light) getValue();
            Spatial owner = (Spatial) getParent().getValue();
            // Scene-graph mutation is marshalled onto the JME thread.
            ThreadRunner.runInJmeThread(() -> owner.removeLight(light));
            getParent().getChildren().remove(this);
        });

        ContextMenu menu = new ContextMenu();
        menu.getItems().add(deleteEntry);
        return menu;
    }
}
|
darnellbelcourt99999/aion
|
modApiServer/src/org/aion/api/server/rpc3/RPCMethods.java
|
package org.aion.api.server.rpc3;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.aion.base.AionTransaction;
import org.aion.base.AionTxReceipt;
import org.aion.base.TxUtil;
import org.aion.crypto.ISignature;
import org.aion.crypto.SignatureFac;
import org.aion.log.AionLoggerFactory;
import org.aion.log.LogEnum;
import org.aion.mcf.blockchain.Block;
import org.aion.mcf.blockchain.BlockHeader.BlockSealType;
import org.aion.rpc.errors.RPCExceptions;
import org.aion.rpc.errors.RPCExceptions.BlockTemplateNotFoundRPCException;
import org.aion.rpc.errors.RPCExceptions.FailedToSealBlockRPCException;
import org.aion.rpc.errors.RPCExceptions.InvalidParamsRPCException;
import org.aion.rpc.errors.RPCExceptions.UnsupportedUnityFeatureRPCException;
import org.aion.rpc.server.RPCServerMethods;
import org.aion.rpc.types.RPCTypes.AccountState;
import org.aion.rpc.types.RPCTypes.BlockDetails;
import org.aion.rpc.types.RPCTypes.BlockEnum;
import org.aion.rpc.types.RPCTypes.BlockNumberEnumUnion;
import org.aion.rpc.types.RPCTypes.BlockSpecifierUnion;
import org.aion.rpc.types.RPCTypes.BlockTemplate;
import org.aion.rpc.types.RPCTypes.ByteArray;
import org.aion.rpc.types.RPCTypes.MinerStats;
import org.aion.rpc.types.RPCTypes.OpsTransaction;
import org.aion.rpc.types.RPCTypes.PongEnum;
import org.aion.rpc.types.RPCTypes.SubmissionResult;
import org.aion.rpc.types.RPCTypes.TxDetails;
import org.aion.rpc.types.RPCTypes.TxLog;
import org.aion.rpc.types.RPCTypes.TxLogDetails;
import org.aion.rpc.types.RPCTypes.ValidateAddressResult;
import org.aion.types.AionAddress;
import org.aion.types.Log;
import org.aion.util.bytes.ByteUtil;
import org.aion.zero.impl.types.AionBlock;
import org.aion.zero.impl.types.AionTxInfo;
import org.aion.zero.impl.types.BlockContext;
import org.aion.zero.impl.types.StakingBlock;
import org.slf4j.Logger;
/**
 * Implementation of the RPC methods served by the kernel's RPC server
 * (personal_*, ops_*, eth_*, plus mining/staking endpoints). All chain state
 * access is delegated to the injected {@link ChainHolder}; this class only
 * validates parameters and maps kernel types onto RPC DTOs.
 */
public class RPCMethods implements RPCServerMethods {
    private final ChainHolder chainHolder;
    private final Logger logger = AionLoggerFactory.getLogger(LogEnum.API.name());
    // Names of every RPC method this server knows; consulted by isExecutable().
    private final Set<String> methods = Set.copyOf(RPCServerMethods.listMethods());
    private final MinerStatisticsCalculator minerStats;// TODO Invalidate the contents of this class if it is not used
    private static final int MINER_STATS_BLOCK_COUNT=32;// TODO determine if both variables are needed
    private static final int MINER_NUM_BLOCKS_FOR_CALC_COUNT=32;

    public RPCMethods(ChainHolder chainHolder) {
        this.chainHolder = chainHolder;
        this.minerStats= new MinerStatisticsCalculator(chainHolder, MINER_STATS_BLOCK_COUNT, MINER_NUM_BLOCKS_FOR_CALC_COUNT);
    }

    //For testing
    RPCMethods(ChainHolder chainHolder,
        MinerStatisticsCalculator minerStats){
        this.chainHolder = chainHolder;
        this.minerStats = minerStats;
    }

    /**
     * Recovers the address that produced {@code signature} over
     * {@code dataThatWasSigned}.
     *
     * @throws InvalidParamsRPCException if the signature bytes cannot be decoded
     * @return the recovered address, or {@code null} when verification fails
     */
    @Override
    public AionAddress personal_ecRecover(ByteArray dataThatWasSigned, ByteArray signature) {
        logger.debug("Executing personal_ecRecover({},{})", dataThatWasSigned, signature);
        ISignature signature1 = SignatureFac.fromBytes(signature.toBytes());
        if (signature1 == null) {
            throw InvalidParamsRPCException.INSTANCE;
        }
        byte[] pk = signature1.getAddress();
        if (SignatureFac.verify(dataThatWasSigned.toBytes(), signature1)) {
            return new AionAddress(pk);
        } else return null;
    }

    /**
     * Returns the current staking seed, or {@code null} when unavailable.
     * Only valid once the Unity fork is enabled.
     */
    @Override
    public ByteArray getseed() {
        if (!chainHolder.isUnityForkEnabled()) throw UnsupportedUnityFeatureRPCException.INSTANCE;
        byte[] result = chainHolder.getSeed();
        if (result == null) {
            return null;
        } else {
            return ByteArray.wrap(result);
        }
    }

    /**
     * Submits a new staking seed; returns the resulting seal hash bytes, or
     * {@code null} on failure. Only valid once the Unity fork is enabled.
     */
    @Override
    public ByteArray submitseed(
        ByteArray newSeed, ByteArray signingPublicKey, AionAddress coinBase) {
        if (!chainHolder.isUnityForkEnabled()) throw UnsupportedUnityFeatureRPCException.INSTANCE;
        byte[] result =
            chainHolder.submitSeed(newSeed.toBytes(), signingPublicKey.toBytes(), coinBase.toByteArray());
        if (result == null) {
            return null;
        } else {
            return ByteArray.wrap(result);
        }
    }

    /**
     * Submits a block signature for the template identified by
     * {@code sealHash}.
     *
     * @throws BlockTemplateNotFoundRPCException if no sealable template matches
     */
    @Override
    public Boolean submitsignature(ByteArray signature, ByteArray sealHash) {
        if (!chainHolder.isUnityForkEnabled()) throw UnsupportedUnityFeatureRPCException.INSTANCE;
        if(!chainHolder.canSeal(sealHash.toBytes()))throw BlockTemplateNotFoundRPCException.INSTANCE;
        return chainHolder.submitSignature(signature.toBytes(), sealHash.toBytes());
    }

    /**
     * Resolves a symbolic block reference to its details. PENDING is not
     * supported and raises an invalid-params error.
     */
    public BlockDetails blockDetailsByEnum(BlockEnum block) {
        switch (block) {
            case LATEST:
                return serializeBlockDetails(chainHolder.getBestBlock());
            case EARLIEST:
                return serializeBlockDetails(chainHolder.getBlockByNumber(0L));
            case PENDING:
            default:
                throw RPCExceptions.InvalidParamsRPCException.INSTANCE;
        }
    }

    /**
     * Maps a kernel {@link Block} onto the {@link BlockDetails} DTO, including
     * per-transaction details and the block time relative to its parent.
     * Returns {@code null} when {@code block} is {@code null}.
     *
     * NOTE(review): several values (nrgConsumed/nrgLimit, the tx trie root)
     * are passed twice to the BlockDetails constructor — presumably the DTO
     * exposes both legacy gas* and nrg* fields with identical values; confirm
     * against the BlockDetails declaration before changing.
     */
    private BlockDetails serializeBlockDetails(Block block) {
        if (block == null) {
            return null; // occurs if the requested block does not exist in the db
        } else {
            final BigInteger blkReward =
                chainHolder.calculateReward(
                    block.getHeader().getNumber()); // get the block reward
            final BigInteger totalDiff =
                chainHolder.getTotalDifficultyByHash(
                    block.getHash()); // get the total difficulty
            List<AionTxInfo> txInfoList = new ArrayList<>();
            logger.debug("Retrieving transactions for block: {}",
                "0x" + ByteUtil.toHexString(block.getHash()));
            for (AionTransaction transaction : block.getTransactionsList()) {
                AionTxInfo txInfo =
                    chainHolder.getTransactionInfo(transaction.getTransactionHash());
                txInfoList.add(txInfo);
            }
            Block previousBlock =
                chainHolder.getBlockByHash(block.getParentHash()); // get the parent block
            final Long previousTimestamp;
            if (previousBlock == null) {
                previousTimestamp = null;
            } else {
                previousTimestamp =
                    previousBlock
                        .getTimestamp(); // set the timestamp to be used to calculate the
                // block time
            }
            // PoW and PoS blocks populate different trailing fields: PoW sets
            // nonce+solution, PoS sets seed+signature+signingPublicKey.
            if (block.getHeader()
                .getSealType()
                .equals(BlockSealType.SEAL_POW_BLOCK)) // return a block based on the seal type
                return new BlockDetails(
                    block.getNumber(),
                    ByteArray.wrap(block.getHash()),
                    ByteArray.wrap(block.getParentHash()),
                    ByteArray.wrap(block.getLogBloom()),
                    ByteArray.wrap(block.getTxTrieRoot()),
                    ByteArray.wrap(block.getStateRoot()),
                    ByteArray.wrap(block.getReceiptsRoot()),
                    ByteUtil.bytesToBigInteger(block.getDifficulty()),
                    totalDiff,
                    block.getCoinbase(),
                    block.getTimestamp(),
                    block.getNrgConsumed(),
                    block.getNrgLimit(),
                    block.getNrgConsumed(),
                    block.getNrgLimit(),
                    block.getHeader().getSealType().getSealId(),
                    block.isMainChain(),
                    ByteArray.wrap(block.getHeader().getExtraData()),
                    block.size(),
                    block.getTransactionsList().size(),
                    ByteArray.wrap(block.getTxTrieRoot()),
                    blkReward,
                    serializeTxDetails(txInfoList, block),
                    ByteArray.wrap(((AionBlock) block).getNonce()),
                    ByteArray.wrap(((AionBlock) block).getHeader().getSolution()),
                    null,
                    null,
                    null,
                    previousBlock == null
                        ? null
                        : Math.toIntExact((block.getTimestamp() - previousTimestamp)));
            else
                return new BlockDetails(
                    block.getNumber(),
                    ByteArray.wrap(block.getHash()),
                    ByteArray.wrap(block.getParentHash()),
                    ByteArray.wrap(block.getLogBloom()),
                    ByteArray.wrap(block.getTxTrieRoot()),
                    ByteArray.wrap(block.getStateRoot()),
                    ByteArray.wrap(block.getReceiptsRoot()),
                    ByteUtil.bytesToBigInteger(block.getDifficulty()),
                    totalDiff,
                    block.getCoinbase(),
                    block.getTimestamp(),
                    block.getNrgConsumed(),
                    block.getNrgLimit(),
                    block.getNrgConsumed(),
                    block.getNrgLimit(),
                    block.getHeader().getSealType().getSealId(),
                    block.isMainChain(),
                    ByteArray.wrap(block.getHeader().getExtraData()),
                    block.size(),
                    block.getTransactionsList().size(),
                    ByteArray.wrap(block.getTxTrieRoot()),
                    blkReward,
                    serializeTxDetails(txInfoList, block),
                    null,
                    null,
                    ByteArray.wrap(((StakingBlock) block).getHeader().getSeed()),
                    ByteArray.wrap(((StakingBlock) block).getHeader().getSignature()),
                    ByteArray.wrap(((StakingBlock) block).getHeader().getSigningPublicKey()),
                    previousBlock == null
                        ? null
                        : Math.toIntExact((block.getTimestamp() - previousTimestamp)));
        }
    }

    /**
     * Maps each {@link AionTxInfo} of a block to a {@link TxDetails} DTO.
     * Returns an empty list when {@code txInfos} is {@code null}.
     *
     * NOTE(review): energyLimit/energyPrice and the receipt's energyUsed are
     * each passed twice — presumably legacy nrg* aliases, as in
     * serializeBlockDetails; confirm against the TxDetails declaration.
     */
    private List<TxDetails> serializeTxDetails(List<AionTxInfo> txInfos, Block block) {
        if (txInfos == null) {
            return Collections.emptyList();
        } else {
            List<TxDetails> transactionDetails = new ArrayList<>();
            for (int i = 0, txInfosSize = txInfos.size(); i < txInfosSize; i++) {
                AionTxInfo info = txInfos.get(i);
                AionTransaction transaction = info.getReceipt().getTransaction();
                AionAddress contractAddress = TxUtil.calculateContractAddress(transaction);
                transactionDetails.add(
                    new TxDetails(
                        contractAddress,
                        ByteArray.wrap(transaction.getTransactionHash()),
                        i,
                        transaction.getValueBI(),
                        transaction.getEnergyLimit(),
                        transaction.getEnergyPrice(),
                        transaction.getEnergyLimit(),
                        transaction.getEnergyPrice(),
                        transaction.getNonceBI().longValue(),
                        transaction.getSenderAddress(),
                        transaction.getDestinationAddress(),
                        block.getTimestamp(),
                        ByteArray.wrap(transaction.getData()),
                        block.getHeader().getNumber(),
                        ByteArray.wrap(block.getHash()),
                        info.getReceipt().getError(),
                        transaction.getType(),
                        info.getReceipt().getEnergyUsed(),
                        info.getReceipt().getEnergyUsed(),
                        info.hasInternalTransactions(),
                        serializeTxLogsDetails(
                            info.getReceipt(), i, block.getHeader().getNumber()),
                        transaction.getBeaconHash() == null? null : ByteArray.wrap(transaction.getBeaconHash())));
            }
            return Collections.unmodifiableList(transactionDetails);
        }
    }

    /**
     * Maps a receipt's logs to {@link TxLogDetails} DTOs; empty list when the
     * receipt carries no logs.
     */
    private List<TxLogDetails> serializeTxLogsDetails(AionTxReceipt receipt, int index, long blockNumber) {
        List<Log> logs = receipt.getLogInfoList();
        if (logs == null) return Collections.emptyList();
        else {
            List<TxLogDetails> logDetails = new ArrayList<>();
            for (int i = 0; i < logs.size(); i++) {
                Log log = logs.get(i);
                logDetails.add(
                    new TxLogDetails(
                        new AionAddress(log.copyOfAddress()),
                        index,
                        ByteArray.wrap(log.copyOfData()),
                        log.copyOfTopics().stream()
                            .map(ByteArray::new)
                            .collect(Collectors.toUnmodifiableList()),
                        blockNumber));
            }
            return Collections.unmodifiableList(logDetails);
        }
    }

    /** Maps one transaction (with its receipt) to the ops_* transaction DTO. */
    private OpsTransaction serializeOpsTransaction(AionTxInfo transactionInfo, Block block,
        AionTransaction aionTransaction, AionTxReceipt txReceipt) {
        return new OpsTransaction(
            block.getTimestamp(),
            ByteArray.wrap(aionTransaction.getTransactionHash()),
            block.getNumber(),
            ByteArray.wrap(block.getHash()),
            aionTransaction.getNonceBI(),
            aionTransaction.getSenderAddress(),
            aionTransaction.getDestinationAddress(),
            aionTransaction.getValueBI(),
            aionTransaction.getEnergyPrice(),
            txReceipt.getEnergyUsed(),
            ByteArray.wrap(aionTransaction.getData()),
            transactionInfo.getIndex(),
            ByteArray.wrap(aionTransaction.getBeaconHash()),
            serializeTxLog(transactionInfo.getIndex(), txReceipt)
        );
    }

    /** Maps a receipt's logs to {@link TxLog} DTOs (ops_* flavour). */
    private List<TxLog> serializeTxLog(int transactionIndex, AionTxReceipt txReceipt){
        List<TxLog> txLogs = new ArrayList<>();
        for (Log log: txReceipt.getLogInfoList()){
            txLogs.add(
                new TxLog(new AionAddress(log.copyOfAddress()),
                    transactionIndex,
                    ByteArray.wrap(log.copyOfData()),
                    log.copyOfTopics().stream().map(ByteArray::new)
                        .collect(Collectors.toUnmodifiableList())));
        }
        return Collections.unmodifiableList(txLogs);
    }

    /**
     * Looks up block details by whichever field of the specifier union is set
     * (number, enum, or hash), in that order of precedence.
     *
     * @throws InvalidParamsRPCException when no field of the union is set
     */
    @Override
    public BlockDetails ops_getBlockDetails(BlockSpecifierUnion blockSpecifierUnion) {
        logger.debug("Executing ops_getBlockDetails({})", blockSpecifierUnion.encode());
        if (blockSpecifierUnion.blockNumber != null)
            return serializeBlockDetails(
                chainHolder.getBlockByNumber(blockSpecifierUnion.blockNumber));
        else if (blockSpecifierUnion.blockEnum != null)
            return blockDetailsByEnum(blockSpecifierUnion.blockEnum);
        else if (blockSpecifierUnion.hash != null)
            return serializeBlockDetails(
                chainHolder.getBlockByHash(blockSpecifierUnion.hash.toBytes()));
        else throw InvalidParamsRPCException.INSTANCE;
    }

    /** Builds a mining template DTO from the chain's current block template. */
    @Override
    public BlockTemplate getBlockTemplate() {
        BlockContext context = chainHolder.getBlockTemplate();
        AionBlock block = context.block;
        return new BlockTemplate(ByteArray.wrap(block.getParentHash()), block.getNumber(), block.getHeader().getPowBoundaryBI(), ByteArray.wrap(block.getHeader().getMineHash()), context.baseBlockReward, context.transactionFee);
    }

    /**
     * Submits a mined PoW solution for a previously handed-out template.
     *
     * @throws BlockTemplateNotFoundRPCException when the header hash matches no template
     * @throws FailedToSealBlockRPCException when sealing fails for any reason
     */
    @Override
    public SubmissionResult submitBlock(ByteArray nonce, ByteArray solution, ByteArray headerHash) {
        if (!chainHolder.canSeal(headerHash.toBytes()))
            throw BlockTemplateNotFoundRPCException.INSTANCE;
        try {
            return new SubmissionResult(chainHolder.submitBlock(nonce.toBytes(), solution.toBytes(), headerHash.toBytes()));
        } catch (Exception e) {
            throw FailedToSealBlockRPCException.INSTANCE;
        }
    }

    /** Reports whether the address parses (always true here) and whether this node owns it. */
    @Override
    public ValidateAddressResult validateaddress(AionAddress aionAddress) {
        boolean addressIsMine = chainHolder.addressExists(aionAddress);
        return new ValidateAddressResult(true,//This should always be true since we are using an instance of aionAddress
            aionAddress,
            addressIsMine);
    }

    /** Difficulty of the best PoW block. */
    @Override
    public BigInteger getDifficulty() {
        return chainHolder.getBestPOWBlock().getDifficultyBI();
    }

    /** Cached mining statistics for the given miner address. */
    @Override
    public MinerStats getMinerStatistics(AionAddress aionAddress) {
        return minerStats.getStats(aionAddress);
    }

    /** Liveness probe. */
    @Override
    public PongEnum ping() {
        return PongEnum.PONG;
    }

    /** Balance and nonce of an account at the current best block. */
    @Override
    public AccountState ops_getAccountState(AionAddress aionAddress) {
        org.aion.base.AccountState accountState = chainHolder.getAccountState(aionAddress);
        return new AccountState(aionAddress,
            chainHolder.blockNumber(),
            accountState.getBalance(),
            accountState.getNonce());
    }

    /**
     * Full transaction view by hash. Returns {@code null} when the
     * transaction, its block, or its receipt cannot be found.
     */
    @Override
    public OpsTransaction ops_getTransaction(ByteArray hash) {
        final AionTxInfo transactionInfo = chainHolder.getTransactionInfo(hash.toBytes());
        if (transactionInfo == null) {
            return null;
        }else {
            final Block block = chainHolder.getBlockByHash(transactionInfo.blockHash.toBytes());
            if (block == null) {
                return null; // We cannot create the response if the block is null
                // Consider creating a new error class for this
            }
            AionTxReceipt txReceipt = transactionInfo.getReceipt();
            if (txReceipt == null) {
                return null; // We cannot create a response if there is not transaction receipt
            }
            AionTransaction aionTransaction = transactionInfo.getReceipt().getTransaction();
            return serializeOpsTransaction(transactionInfo, block, aionTransaction, txReceipt);
        }
    }

    /** Block details by height; {@code null} when the block is unknown. */
    @Override
    public BlockDetails ops_getBlockDetailsByNumber(Long blockNumber) {
        return serializeBlockDetails(chainHolder.getBlockByNumber(blockNumber));
    }

    /** Block details by hash; {@code null} when the block is unknown. */
    @Override
    public BlockDetails ops_getBlockDetailsByHash(ByteArray blockHash) {
        return serializeBlockDetails(chainHolder.getBlockByHash(blockHash.toBytes()));
    }

    /**
     * Account balance at the block selected by the union: LATEST → best
     * block, PENDING → pending state, EARLIEST → genesis, otherwise the
     * explicit block number.
     */
    @Override
    public BigInteger eth_getBalance(
        AionAddress aionAddress, BlockNumberEnumUnion blockNumberEnumUnion) {
        final BigInteger res;
        if (blockNumberEnumUnion.blockEnum == BlockEnum.LATEST) { // best block
            res = chainHolder.getAccountBalance(aionAddress, chainHolder.blockNumber());
        } else if (blockNumberEnumUnion.blockEnum == BlockEnum.PENDING) { // pending block
            res = chainHolder.getAccountBalance(aionAddress);
        } else if (blockNumberEnumUnion.blockEnum == BlockEnum.EARLIEST) { // genesis block
            res = chainHolder.getAccountBalance(aionAddress, 0L);
        } else {
            res = chainHolder.getAccountBalance(aionAddress, blockNumberEnumUnion.blockNumber);
        }
        return res;
    }

    /**
     * Account nonce at the block selected by the union; same selection rules
     * as eth_getBalance.
     */
    @Override
    public BigInteger eth_getTransactionCount(
        AionAddress aionAddress, BlockNumberEnumUnion blockNumberEnumUnion) {
        final BigInteger res;
        if (blockNumberEnumUnion.blockEnum == BlockEnum.LATEST) { // best block
            res = chainHolder.getAccountNonce(aionAddress, chainHolder.blockNumber());
        } else if (blockNumberEnumUnion.blockEnum == BlockEnum.PENDING) { // pending block
            res = chainHolder.getAccountNonce(aionAddress);
        } else if (blockNumberEnumUnion.blockEnum == BlockEnum.EARLIEST) { // genesis block
            res = chainHolder.getAccountNonce(aionAddress, 0L);
        } else {
            res = chainHolder.getAccountNonce(aionAddress, blockNumberEnumUnion.blockNumber);
        }
        return res;
    }

    /** Unlocks a keystore account for {@code timeout} seconds — TODO confirm unit against ChainHolder. */
    @Override
    public Boolean personal_unlockAccount(AionAddress aionAddress, String password, Integer timeout) {
        return chainHolder.unlockAccount(aionAddress, password, timeout);
    }

    /** Re-locks a previously unlocked keystore account. */
    @Override
    public Boolean personal_lockAccount(AionAddress aionAddress, String password) {
        return chainHolder.lockAccount(aionAddress, password);
    }

    /** Creates a new keystore account protected by {@code password}. */
    @Override
    public AionAddress personal_newAccount(String password) {
        return chainHolder.newAccount(password);
    }

    /** All keystore accounts known to this node. */
    @Override
    public List<AionAddress> personal_listAccounts() {
        return chainHolder.listAccounts();
    }

    /** True when {@code s} names an RPC method this server can execute. */
    @Override
    public boolean isExecutable(String s) {
        return methods.contains(s);
    }
}
|
ScalablyTyped/SlinkyTyped
|
o/office-js/src/main/scala/typingsSlinky/officeJs/global/Word/TableRowCollection.scala
|
package typingsSlinky.officeJs.global.Word
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/**
 * Contains the collection of the document's TableRow objects.
 *
 * [Api set: WordApi 1.3]
 *
 * NOTE: ScalablyTyped-generated facade over the global `Word.TableRowCollection`
 * JS class — presumably regenerated from the office-js typings; do not edit by
 * hand (TODO confirm against the generator config).
 */
@JSGlobal("Word.TableRowCollection")
@js.native
class TableRowCollection ()
  extends typingsSlinky.officeJs.Word.TableRowCollection
|
openid-certification/-conformance-suite
|
src/main/java/net/openid/conformance/condition/client/FAPIBrazilConsentEndpointResponseValidatePermissions.java
|
package net.openid.conformance.condition.client;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import net.openid.conformance.condition.AbstractCondition;
import net.openid.conformance.condition.PostEnvironment;
import net.openid.conformance.condition.PreEnvironment;
import net.openid.conformance.testmodule.Environment;
import net.openid.conformance.testmodule.OIDFJSON;
import java.util.HashSet;
import java.util.Set;
/**
 * Validates that the permissions granted in the Brazil consent endpoint
 * response are present, non-empty, and a subset of the permissions that were
 * requested (stored under brazil_consent.requested_permissions).
 */
public class FAPIBrazilConsentEndpointResponseValidatePermissions extends AbstractCondition {

    /** True when every string element of subsetJson also occurs in supersetJson. */
    boolean jsonArraysIsSubset(JsonArray supersetJson, JsonArray subsetJson) {
        return toStringSet(supersetJson).containsAll(toStringSet(subsetJson));
    }

    /** Collects the string values of a JSON array into a set. */
    private Set<String> toStringSet(JsonArray array) {
        Set<String> values = new HashSet<>();
        array.forEach(element -> values.add(OIDFJSON.getString(element)));
        return values;
    }

    @Override
    @PreEnvironment(required = { "consent_endpoint_response", "brazil_consent" })
    public Environment evaluate(Environment env) {
        String path = "data.permissions";
        JsonElement grantedElement = env.getElementFromObject("consent_endpoint_response", path);
        if (grantedElement == null) {
            throw error("Couldn't find "+path+" in the consent response");
        }
        if (!grantedElement.isJsonArray()) {
            throw error(path+" in the consent response is not a JSON array", args("permissions", grantedElement));
        }
        JsonArray granted = (JsonArray) grantedElement;
        if (granted.size() <= 0) {
            throw error(path+" in the consent response is an empty array", args("permissions", grantedElement));
        }
        JsonArray requested = (JsonArray) env.getElementFromObject("brazil_consent", "requested_permissions");
        if (!jsonArraysIsSubset(requested, granted)) {
            throw error("Consent endpoint response contains different permissions than requested", args("granted", grantedElement, "requested", requested));
        }
        logSuccess("Consent endpoint response contains expected permissions", args("granted", grantedElement, "requested", requested));
        return env;
    }
}
|
suomenriistakeskus/oma-riista-web
|
src/generated/java/fi/riista/sql/SQSpatialRefSys.java
|
<gh_stars>10-100
package fi.riista.sql;
import static com.querydsl.core.types.PathMetadataFactory.*;
import com.querydsl.core.types.dsl.*;
import com.querydsl.core.types.PathMetadata;
import javax.annotation.Generated;
import com.querydsl.core.types.Path;
import com.querydsl.sql.ColumnMetadata;
import java.sql.Types;
import com.querydsl.sql.spatial.RelationalPathSpatial;
import com.querydsl.spatial.*;
/**
 * SQSpatialRefSys is a Querydsl query type for SQSpatialRefSys.
 *
 * Maps the PostGIS {@code public.spatial_ref_sys} table, which stores the
 * spatial reference system (SRID) definitions.
 *
 * NOTE: generated by Querydsl's MetaDataSerializer (see @Generated below) and
 * living under src/generated — regenerate instead of editing by hand.
 */
@Generated("com.querydsl.sql.codegen.MetaDataSerializer")
public class SQSpatialRefSys extends RelationalPathSpatial<SQSpatialRefSys> {

    private static final long serialVersionUID = -877133973;

    // Default instance bound to the table's own name, for use in queries.
    public static final SQSpatialRefSys spatialRefSys = new SQSpatialRefSys("spatial_ref_sys");

    public final StringPath authName = createString("authName");

    public final NumberPath<Integer> authSrid = createNumber("authSrid", Integer.class);

    public final StringPath proj4text = createString("proj4text");

    public final NumberPath<Integer> srid = createNumber("srid", Integer.class);

    public final StringPath srtext = createString("srtext");

    public final com.querydsl.sql.PrimaryKey<SQSpatialRefSys> spatialRefSysPkey = createPrimaryKey(srid);

    public SQSpatialRefSys(String variable) {
        super(SQSpatialRefSys.class, forVariable(variable), "public", "spatial_ref_sys");
        addMetadata();
    }

    public SQSpatialRefSys(String variable, String schema, String table) {
        super(SQSpatialRefSys.class, forVariable(variable), schema, table);
        addMetadata();
    }

    public SQSpatialRefSys(String variable, String schema) {
        super(SQSpatialRefSys.class, forVariable(variable), schema, "spatial_ref_sys");
        addMetadata();
    }

    public SQSpatialRefSys(Path<? extends SQSpatialRefSys> path) {
        super(path.getType(), path.getMetadata(), "public", "spatial_ref_sys");
        addMetadata();
    }

    public SQSpatialRefSys(PathMetadata metadata) {
        super(SQSpatialRefSys.class, metadata, "public", "spatial_ref_sys");
        addMetadata();
    }

    // Column metadata mirroring the database schema (index, SQL type, size).
    public void addMetadata() {
        addMetadata(authName, ColumnMetadata.named("auth_name").withIndex(2).ofType(Types.VARCHAR).withSize(256));
        addMetadata(authSrid, ColumnMetadata.named("auth_srid").withIndex(3).ofType(Types.INTEGER).withSize(10));
        addMetadata(proj4text, ColumnMetadata.named("proj4text").withIndex(5).ofType(Types.VARCHAR).withSize(2048));
        addMetadata(srid, ColumnMetadata.named("srid").withIndex(1).ofType(Types.INTEGER).withSize(10).notNull());
        addMetadata(srtext, ColumnMetadata.named("srtext").withIndex(4).ofType(Types.VARCHAR).withSize(2048));
    }

}
|
Sheetal0601/InterviewBit
|
06_Trees/min_depth_of_binary_tree.py
|
# Min Depth of Binary Tree
# https://www.interviewbit.com/problems/min-depth-of-binary-tree/
#
# Given a binary tree, find its minimum depth.
#
# The minimum depth is the number of nodes along the shortest path from the root node down to the nearest leaf node.
#
# NOTE : The path has to end on a leaf node.
# Example :
#
# 1
# /
# 2
# min depth = 2.
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def minDepth(self, root):
        """Return the number of nodes on the shortest root-to-leaf path.

        A path must end on a leaf, so a node with a single child cannot
        terminate the path: when one subtree is missing we must descend into
        the other one rather than counting the missing side as depth 0.
        """
        if root is None:
            return 0
        left, right = root.left, root.right
        if left and right:
            return 1 + min(self.minDepth(left), self.minDepth(right))
        # At most one child exists; follow whichever is present (or neither,
        # in which case this node is a leaf and the depth is 1).
        return 1 + self.minDepth(left or right)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
icarazob/mr4c
|
java/src/java/com/google/mr4c/hadoop/MR4CGenericOptionsParser.java
|
/**
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.mr4c.hadoop;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.util.GenericOptionsParser;
/**
* Loose wrapper around GenericOptionsParser to allow placing arguments ahead
* of the generic hadoop options. All generic options must be in a single
* block on the command line. There can be regular command line arguments
* before or after this block, but not mixed in to it.
*/
/**
 * Loose wrapper around GenericOptionsParser that tolerates regular arguments
 * placed ahead of the generic hadoop options. All generic options must form a
 * single contiguous block on the command line; ordinary arguments may appear
 * before or after that block, but not interleaved with it.
 */
public class MR4CGenericOptionsParser {

    // First token starting with any of these prefixes opens the hadoop block.
    private static final List<String> GENERIC_OPTIONS = Arrays.asList(
        "-jt",
        "-fs",
        "-D",
        "-conf",
        "-files",
        "-libjars",
        "-archives"
    );

    private final String[] originalArgs;
    private final Configuration config;
    private List<String> leadingArgs;      // args before the hadoop block
    private String[] hadoopArgs;           // hadoop block plus trailing args
    private String[] remainingArgs;        // leading + parser leftovers
    private GenericOptionsParser parser;

    public MR4CGenericOptionsParser(String[] args) throws IOException {
        this(new Configuration(false), args);
    }

    public MR4CGenericOptionsParser(Configuration conf, String[] args) throws IOException {
        this.originalArgs = args;
        this.config = conf;
        parse();
    }

    /**
     * This should be called after tests to get rid of FileSystem side effects.
     */
    public static void cleanup() throws IOException {
        // GenericOptionsParser will cache FileSystem objects based on the config it is updating
        FileSystem.closeAll();
    }

    public String[] getOriginalArgs() {
        return originalArgs;
    }

    public String[] getRemainingArgs() {
        return remainingArgs;
    }

    public Configuration getConfiguration() {
        return config;
    }

    private void parse() throws IOException {
        stripLeadingArgs();
        parseGenericOptions();
        buildRemainingArgs();
    }

    // Splits originalArgs at the first generic option into leading args and
    // the portion handed to GenericOptionsParser.
    private void stripLeadingArgs() {
        int start = 0;
        while (start < originalArgs.length && !isGenericOption(originalArgs[start])) {
            start++;
        }
        List<String> all = Arrays.asList(originalArgs);
        leadingArgs = all.subList(0, start);
        hadoopArgs = all.subList(start, originalArgs.length).toArray(new String[0]);
    }

    private void parseGenericOptions() throws IOException {
        parser = new GenericOptionsParser(config, hadoopArgs);
    }

    // Reassembles the non-generic arguments: everything before the hadoop
    // block, followed by whatever the parser did not consume.
    private void buildRemainingArgs() {
        List<String> merged = new ArrayList<String>(leadingArgs);
        merged.addAll(Arrays.asList(parser.getRemainingArgs()));
        remainingArgs = merged.toArray(new String[0]);
    }

    private boolean isGenericOption(String arg) {
        for (String prefix : GENERIC_OPTIONS) {
            if (arg.startsWith(prefix)) {
                return true;
            }
        }
        return false;
    }

    // might want to get rid of these props in conf:
    //    mapreduce.client.genericoptionsparser.used
    //    mapred.used.genericoptionsparser
}
|
bitnami/bitnami-ui
|
packages/hex-docs/gulpfile.js
|
<gh_stars>1-10
// Libraries
const gulp = require('gulp');

// Docs
const aigis = require('gulp-aigis');

// Paths
const path = require('path');
// FIX: the previous `path.join.bind(__dirname)` only set `this` (which
// path.join ignores) and never prepended __dirname, so every "joined" path
// was resolved against the process's current working directory instead of
// this gulpfile's directory. Binding __dirname as the first *argument*
// restores the evident intent.
const join = path.join.bind(path, __dirname);

// Images
const imageDist = join('../hex/dist/images');

// Compile the documentation site from config.yml via gulp-aigis.
const compileDocs = () => {
  return gulp.src(join('config.yml'))
    .pipe(aigis());
}

// Copy the built hex images into the docs' local assets folder.
const copyImages = () => {
  return gulp.src([`${imageDist}/*.png`, `${imageDist}/*.svg`])
    .pipe(gulp.dest(`./assets/images`));
}

// Rebuild docs whenever styles, compiled JS, markdown, templates or CSS change.
const watchDocs = () => {
  return gulp.watch([
    join('../hex-core/src/**/*.scss'),
    join('../hex/src/**/*.scss'),
    join('../hex-js/dist/*.js'),
    join('docs/*.md'),
    join('docs/**/*.md'),
    join('templates/**/*.ejs'),
    join('assets/**/*.css')
  ], gulp.parallel('docs'));
}

// Re-copy images whenever the hex image dist changes.
const watchImages = () => {
  return gulp.watch(join('../hex/dist/images/*'), gulp.parallel('docs:images'));
}

// Tasks
gulp.task('docs', gulp.parallel(compileDocs));
gulp.task('docs:images', gulp.parallel(copyImages));
gulp.task('dist', gulp.parallel('docs:images', 'docs'));
gulp.task('default', gulp.series('dist', gulp.parallel(watchDocs, watchImages)));
|
ICT4H/dcs-web
|
func_tests/pages/activitylogpage/show_activity_log_page.py
|
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
import re
from framework.utils.common_utils import by_css
from pages.page import Page
from pages.activitylogpage.show_activity_log_locator import *
class ShowActivityLogPage(Page):
    """Page object for the activity-log listing screen."""

    def __init__(self, driver):
        Page.__init__(self, driver)

    def get_data_on_cell(self, row=0, column=0):
        """Return the text of the log-table cell at (row, column)."""
        cell_locator = by_css(FIND_DATA_BY_ROW_AND_COLUMN_NUMBER % (row, column))
        return self.driver.find(cell_locator).text

    def get_number_of_entries_found(self):
        """Extract the total entry count from the paging info label.

        The count is assumed to be the third number in the label text —
        TODO confirm against the rendered "Showing X to Y of Z" string.
        """
        summary = self.driver.find(LOG_INFO).text
        return int(re.findall(r'\d+', summary)[2])

    def click_on_filter_button(self):
        self.driver.find(FILTER_BUTTON).click()

    def click_on_edited_submission_filter(self):
        """Open the action dropdown and pick the edited-submission option."""
        self.driver.find(by_id("id_action")).click()
        self.driver.find_visible_element(
            by_name('Edited Data Submission(s)')).click()

    def select_filter(self, filter_heading, option_value):
        """Choose an option under the given optgroup, then apply the filter."""
        option_xpath = ("//select[@id='id_action']/optgroup[@label='"
                        + filter_heading + "']/option[@value='" + option_value + "']")
        self.driver.find(by_xpath(option_xpath)).click()
        self.driver.find(FILTER_BUTTON).click()
|
meghasfdc/jmc
|
application/org.openjdk.jmc.rjmx.services.jfr/src/main/java/org/openjdk/jmc/rjmx/services/jfr/internal/EventOptionsToolkitV1.java
|
<filename>application/org.openjdk.jmc.rjmx.services.jfr/src/main/java/org/openjdk/jmc/rjmx/services/jfr/internal/EventOptionsToolkitV1.java
/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The contents of this file are subject to the terms of either the Universal Permissive License
* v 1.0 as shown at http://oss.oracle.com/licenses/upl
*
* or the following license:
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided with
* the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
* WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.openjdk.jmc.rjmx.services.jfr.internal;
import static org.openjdk.jmc.flightrecorder.configuration.internal.KnownEventOptions.EVENT_OPTIONS_BY_KEY_V1;
import static org.openjdk.jmc.flightrecorder.configuration.internal.KnownEventOptions.KEY_ENABLED;
import static org.openjdk.jmc.flightrecorder.configuration.internal.KnownEventOptions.KEY_PERIOD;
import static org.openjdk.jmc.flightrecorder.configuration.internal.KnownEventOptions.KEY_STACKTRACE;
import static org.openjdk.jmc.flightrecorder.configuration.internal.KnownEventOptions.KEY_THRESHOLD;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Level;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeDataSupport;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.OpenDataException;
import javax.management.openmbean.OpenType;
import javax.management.openmbean.SimpleType;
import org.openjdk.jmc.common.unit.IConstrainedMap;
import org.openjdk.jmc.common.unit.IMutableConstrainedMap;
import org.openjdk.jmc.common.unit.IOptionDescriptor;
import org.openjdk.jmc.common.unit.QuantityConversionException;
import org.openjdk.jmc.flightrecorder.configuration.events.EventOptionID;
import org.openjdk.jmc.rjmx.RJMXPlugin;
/**
* Toolkit for marshalling JFR 1.0 (JDK7/8) event options.
*/
public final class EventOptionsToolkitV1 {
    // Server-side item names used in the options CompositeData (JFR 1.0 wire names).
    private final static String KEY_ID = "id"; //$NON-NLS-1$
    private final static String KEY_STACKTRACE_SERVER = "stacktrace"; //$NON-NLS-1$
    private final static String KEY_PERIOD_SERVER = "requestPeriod"; //$NON-NLS-1$

    // Converter per client-side option key. Iteration order matters: it must line
    // up with SERVER_NAMES[1..4], because encodeAllEventSettings() fills the
    // values array positionally while walking this map (hence the LinkedHashMap).
    private final static Map<String, OpenTypeConverter<?, ?>> CONVERTERS_BY_EVENT_OPTION_KEY;
    // Option key -> name of the boolean "capability" attribute in the server's
    // event type metadata that says whether the option applies to an event type.
    private final static Map<String, String> CAPABILITY_KEYS_BY_OPTION_KEY;

    // CompositeType item names and types, in server order:
    // id(int), threshold(long), stacktrace(bool), requestPeriod(long), enabled(bool).
    private final static String[] SERVER_NAMES = new String[] {KEY_ID, KEY_THRESHOLD, KEY_STACKTRACE_SERVER,
            KEY_PERIOD_SERVER, KEY_ENABLED};
    private final static OpenType<?>[] OPEN_TYPES = new OpenType[] {SimpleType.INTEGER, SimpleType.LONG,
            SimpleType.BOOLEAN, SimpleType.LONG, SimpleType.BOOLEAN};
    public final static CompositeType OPTIONS_COMPOSITE_TYPE;

    static {
        // LinkedHashMap keeps insertion order in step with SERVER_NAMES (see above).
        Map<String, OpenTypeConverter<?, ?>> converters = new LinkedHashMap<>();
        Map<String, String> capabilities = new HashMap<>();
        converters.put(KEY_THRESHOLD, OpenTypeConverter.NANOSECONDS);
        capabilities.put(KEY_THRESHOLD, "isTimed"); //$NON-NLS-1$
        converters.put(KEY_STACKTRACE, OpenTypeConverter.BOOLEAN);
        capabilities.put(KEY_STACKTRACE, "isStackTraceAvailable"); //$NON-NLS-1$
        converters.put(KEY_PERIOD, OpenTypeConverter.MILLIS_PERIODICITY);
        capabilities.put(KEY_PERIOD, "isRequestable"); //$NON-NLS-1$
        // "enabled" deliberately has no capability key: it applies to every event
        // type, so getConfigurableOptions() always includes it.
        converters.put(KEY_ENABLED, OpenTypeConverter.BOOLEAN);
        CONVERTERS_BY_EVENT_OPTION_KEY = converters;
        CAPABILITY_KEYS_BY_OPTION_KEY = capabilities;
        OPTIONS_COMPOSITE_TYPE = generateOptionsType();
    }

    @SuppressWarnings("nls")
    private static CompositeType generateOptionsType() {
        try {
            return new CompositeType("EventOptions", "Event Options", SERVER_NAMES, SERVER_NAMES, OPEN_TYPES);
        } catch (Exception e) {
            // Will not ever happen!
            // (CompositeType only throws for null/empty/duplicate names, and the
            // arrays above are constant. If it did throw, the constant below would
            // be null — NOTE(review): a silent null rather than a hard failure.)
        }
        return null;
    }

    @SuppressWarnings("nls")
    private EventOptionsToolkitV1() {
        // Static toolkit; instantiation is a programming error.
        throw new AssertionError("Not to be instantiated!");
    }

    /**
     * This helper does the ad-hoc mapping from capabilities to parameter names and constraints. The
     * capabilities are used to check for which parameters are actually accepted. The parameter
     * space to check is currently static - we can't get this data from the server today.
     *
     * @param data
     *            the event type's metadata {@link CompositeData}, whose boolean capability
     *            attributes (e.g. "isTimed") describe which options are accepted. Options
     *            with no capability key (e.g. "enabled") are always included.
     * @return the names and constraints for the parameters that actually are accepted.
     */
    public static Map<String, IOptionDescriptor<?>> getConfigurableOptions(CompositeData data) {
        Map<String, IOptionDescriptor<?>> optionMap = new HashMap<>();
        for (Entry<String, IOptionDescriptor<?>> entry : EVENT_OPTIONS_BY_KEY_V1.entrySet()) {
            String capKey = CAPABILITY_KEYS_BY_OPTION_KEY.get(entry.getKey());
            if ((capKey == null) || Boolean.TRUE.equals(data.get(capKey))) {
                optionMap.put(entry.getKey(), entry.getValue());
            }
        }
        return optionMap;
    }

    /**
     * Converts the event settings to a list of composite data, usable by the MBean API.
     * One CompositeData row is emitted per available event type; options not present in
     * {@code settings} keep placeholder defaults (-1L / Boolean.FALSE).
     *
     * @throws OpenDataException
     *             if a row cannot be assembled into {@link #OPTIONS_COMPOSITE_TYPE}
     */
    public static List<CompositeData> encodeAllEventSettings(
        Collection<EventTypeMetadataV1> availableEventTypes, IConstrainedMap<EventOptionID> settings)
            throws OpenDataException {
        List<CompositeData> eventSettings = new ArrayList<>();
        for (EventTypeMetadataV1 eventType : availableEventTypes) {
            // Positional row matching SERVER_NAMES: id, threshold, stacktrace, period, enabled.
            Object[] values = new Object[] {eventType.getId(), -1L, Boolean.FALSE, -1L, Boolean.FALSE};
            int i = 1; // index 0 (id) is already filled
            for (Entry<String, OpenTypeConverter<?, ?>> entry : CONVERTERS_BY_EVENT_OPTION_KEY.entrySet()) {
                // Only change options the event type is supposed to have.
                if (eventType.getOptionInfo(entry.getKey()) != null) {
                    EventOptionID optionID = new EventOptionID(eventType.getEventTypeID(), entry.getKey());
                    Object value = settings.get(optionID);
                    // FIXME: Check exact semantics of JFR 1.0 here, when to override (0 vs. -1).
                    if (value != null) {
                        try {
                            values[i] = RecordingOptionsToolkitV1.toOpenTypeWithCast(entry.getValue(), value);
                        } catch (QuantityConversionException e) {
                            // Keep the placeholder default for this slot; log and carry on.
                            RJMXPlugin.getDefault().getLogger().log(Level.WARNING, e.getMessage(), e);
                        }
                    }
                }
                i++; // advance even when the option is skipped, to stay aligned with SERVER_NAMES
            }
            eventSettings.add(new CompositeDataSupport(OPTIONS_COMPOSITE_TYPE, SERVER_NAMES, values));
        }
        return eventSettings;
    }

    /**
     * Adds options from the composite data to the constrained map, filtered by the given event type
     * metadata. Server item names ("requestPeriod", "stacktrace") are translated to the
     * client-side option keys before lookup; unknown items (e.g. "id") are skipped.
     */
    @SuppressWarnings("nls")
    public static void addOptionsToV1(
        IMutableConstrainedMap<EventOptionID> options, EventTypeMetadataV1 eventType, CompositeData data) {
        for (String serverKey : data.getCompositeType().keySet()) {
            String localKey = serverKey;
            if (serverKey.equals(KEY_PERIOD_SERVER)) {
                localKey = KEY_PERIOD;
            } else if (localKey.equals(KEY_STACKTRACE_SERVER)) {
                localKey = KEY_STACKTRACE;
            }
            IOptionDescriptor<?> optionInfo = eventType.getOptionInfo(localKey);
            OpenTypeConverter<?, ?> converter = CONVERTERS_BY_EVENT_OPTION_KEY.get(localKey);
            if ((optionInfo != null) && (converter != null)) {
                // NOTE(review): identity (==) comparison of constraints — assumes constraint
                // instances are shared singletons; confirm against IOptionDescriptor impl.
                assert optionInfo.getConstraint() == converter.constraint;
                EventOptionID optionID = new EventOptionID(eventType.getEventTypeID(), localKey);
                try {
                    putWithCast(options, optionID, converter, data.get(serverKey));
                } catch (QuantityConversionException e) {
                    // This should not happen
                    RJMXPlugin.getDefault().getLogger().log(Level.WARNING, "Problem with value for option " + optionID,
                            e);
                }
            }
        }
    }

    /**
     * Converts {@code openValue} via the given converter and stores it under {@code key}.
     * The cast through {@code converter.getType()} validates the open-type at runtime.
     *
     * @throws QuantityConversionException
     *             if the open value cannot be converted to the constrained type
     */
    static <K, P, T> void putWithCast(
        IMutableConstrainedMap<K> map, K key, OpenTypeConverter<P, T> converter, Object openValue)
            throws QuantityConversionException {
        T value = converter.fromOpenType(converter.getType().cast(openValue));
        map.put(key, converter.constraint, value);
    }
}
|
tmarback/modular-commands
|
src/main/java/dev/sympho/modular_commands/execute/StaticPrefix.java
|
package dev.sympho.modular_commands.execute;
import org.checkerframework.checker.nullness.qual.Nullable;
import discord4j.common.util.Snowflake;
/**
* Prefix provider that only uses a static prefix determined at construction time.
*
* @param prefix The prefix to use.
* @version 1.0
* @since 1.0
*/
public record StaticPrefix( String prefix ) implements PrefixProvider {

    /**
     * {@inheritDoc}
     *
     * <p>The configured prefix is returned unconditionally; the guild is ignored.
     */
    @Override
    public String getPrefix( final @Nullable Snowflake guild ) {

        return prefix;

    }

}
|
broadinstitute/single_cell_portal_core
|
test/models/upload_cleanup_job_test.rb
|
<reponame>broadinstitute/single_cell_portal_core<filename>test/models/upload_cleanup_job_test.rb
require 'test_helper'
# Integration tests for UploadCleanupJob: detection/removal of failed uploads
# and the retry cap on errored cleanup jobs. Talks to the real study bucket via
# ApplicationController.firecloud_client, so these tests require live services.
class UploadCleanupJobTest < ActiveSupport::TestCase
  def setup
    # All tests operate on the first seeded study.
    @study = Study.first
  end

  def teardown
    # Remove any 'Other' files created by a test so runs stay independent.
    @study.study_files.where(file_type: 'Other').destroy_all
  end

  test 'should automatically remove failed uploads' do
    puts "#{File.basename(__FILE__)}: #{self.method_name}"
    # get starting counts, taking into account upstream tests that have deleted files
    beginning_file_count = StudyFile.where(queued_for_deletion: false).count
    existing_deletes = StudyFile.where(queued_for_deletion: true).pluck(:id)
    # run without any failed uploads to ensure good files aren't removed
    UploadCleanupJob.find_and_remove_failed_uploads
    failed_uploads = StudyFile.where(queued_for_deletion: true, :id.nin => existing_deletes).count
    assert failed_uploads == 0, "Should not have found any failed uploads but found #{failed_uploads}"
    # now simulate a failed upload and prove they are detected
    # A "failed" upload here is a file stuck in status 'uploading' that is old
    # (created_at a week ago) and has no GCS generation tag.
    filename = 'mock_study_doc_upload.txt'
    file = File.open(Rails.root.join('test', 'test_data', filename))
    bad_upload = StudyFile.create!(name: filename, study: @study, file_type: 'Other', upload: file, status: 'uploading',
                                   created_at: 1.week.ago.in_time_zone, parse_status: 'unparsed', generation: nil)
    file.close
    UploadCleanupJob.find_and_remove_failed_uploads
    failed_uploads = StudyFile.where(queued_for_deletion: true, :id.nin => existing_deletes).count
    assert failed_uploads == 1, "Should have found 1 failed upload but found #{failed_uploads}"
    bad_upload.reload
    assert bad_upload.queued_for_deletion, "Did not correctly mark #{bad_upload.name} as failed upload"
    # remove queued deletions
    StudyFile.delete_queued_files
    end_file_count = StudyFile.count
    assert_equal beginning_file_count, end_file_count,
                 "Study file counts do not match after removing failed uploads; #{beginning_file_count} != #{end_file_count}"
    puts "#{File.basename(__FILE__)}: #{self.method_name} successful!"
  end

  test 'should only run cleanup job 3 times on error' do
    puts "#{File.basename(__FILE__)}: #{self.method_name}"
    File.open(Rails.root.join('test', 'test_data', 'table_1.xlsx')) do |file|
      @study_file = StudyFile.create!(study_id: @study.id, file_type: 'Other', upload: file)
      @study.send_to_firecloud(@study_file)
    end
    remote = ApplicationController.firecloud_client.get_workspace_file(@study.bucket_id, @study_file.bucket_location)
    assert remote.present?, "File did not push to study bucket, no remote found"
    # to cause errors in UploadCleanupJobs, remove file from bucket as this will cause UploadCleanupJob to retry later
    remote.delete
    new_remote = ApplicationController.firecloud_client.get_workspace_file(@study.bucket_id, @study_file.bucket_location)
    refute new_remote.present?, "Delete did not succeed, found remote: #{new_remote}"
    # now find delayed_job instance for UploadCleanupJob for this file for each retry and assert only 3 attempts are made
    0.upto(UploadCleanupJob::MAX_RETRIES).each do |retry_count|
      cleanup_jobs = DelayedJobAccessor.find_jobs_by_handler_type(UploadCleanupJob, @study_file)
      # make sure we're getting the latest job, as the previous may not have fully cleared out of the queue
      latest_job = cleanup_jobs.sort_by(&:created_at).last
      job_handler = DelayedJobAccessor.dump_job_handler(latest_job)
      assert job_handler.retry_count == retry_count, "Retry count does not match: #{job_handler.retry_count} != #{retry_count}"
      # to force a job to run, unset :run_at
      # wait until handler is cleared, which indicates job has run and will be garbage collected
      latest_job.update(run_at: nil)
      while latest_job.handler.present?
        latest_job.reload
        sleep 1
      end
    end
    sleep 5 # give queue a chance to fully clear
    # after MAX_RETRIES attempts no further jobs should be re-enqueued
    cleanup_jobs = DelayedJobAccessor.find_jobs_by_handler_type(UploadCleanupJob, @study_file)
    refute cleanup_jobs.any?, "Should not have found any cleanup jobs for file but found #{cleanup_jobs.size}"
    # clean up
    @study_file.update(remote_location: nil)
    ApplicationController.firecloud_client.delete_workspace_file(@study.bucket_id, @study_file.bucket_location)
    @study_file.destroy
    puts "#{File.basename(__FILE__)}: #{self.method_name} successful!"
  end
end
|
djforth/react-search
|
test_assets/javascripts/tab_search.es6.js
|
<reponame>djforth/react-search<filename>test_assets/javascripts/tab_search.es6.js
require("babelify/polyfill");
const React = require("react");
const ReactDOM = require("react-dom");
const Search = require("../../vanilla")
const Tab = Search.Tab.Search;
console.log('WTF');
let columns = [
{key:"id"},
{key:"title", title:"Title"},
{key:"visible_from_date", title:"Posted", type:"date", fmt:"%b %d, %Y",},
{key:"visible_until_date", title:"Closing", type:"date", fmt:"%b %d, %Y"},
{key:"with_accommodation", title:"Live in"},
{key:"shift"},
{key:"job_start_date", title:"Start Date", type:"date", fmt:"%b %d, %Y"},
{key:"summary", type:"date", fmt:"%b %d, %Y"},
{key:"actions"}
];
columns = columns.map((c)=>{
if(c.key !== "id"){
c.desktop = true;
c.mobile = true;
c.tablet = true;
c.searchable = true;
c.show = true;
}
switch(c.key){
case "id":
c.show = false
break;
case "title":
c.headline = true;
break;
case "summary":
case "actions":
c.label = false;
break;
default:
c.label = true;
}
return c;
});
let tabs = [
{title:"Search our Jobs", filterBy:{type:"all", filter:null}, filters:[], search:true, options:{css:"osw-r up-c gamma tab-btn", active:true}},
{title:"Head Office opportunities", filterBy:{type:"head_office_role", filter:null}, filters:["jobs_template"], search:false, options:{css:"osw-r up-c gamma tab-btn", active:false}},
{title:"Venue opportunities", filterBy:{type:"venue", filter:null}, filters:["venue"], search:false, options:{css:"osw-r up-c gamma tab-btn", active:false}},
]
// console.log('foo', "bar");
// col[:show] = case col[:key]
// when "id" then false
// when "requester_name" then false
// when "expected_returned" then false
// else true
// end
// col
// end
let css = {default: ""};
let buttons = [
{key:"show", title:{text:"View :replace", replace:"title"}, text:"See details & apply", options:{ button_css: "button pop-l delta"}}
]
let date_ranges = [
{key:"required_by", type:"date"}
]
let intro = "Sinus corae nonserum utatur as ne plam rerfernatle stiatus aecatem aut fugias aut la cori quatatur acestiorum. Li sendem hor is; nentes sua imor hos caectors furei tus aus auc teredum nihica int. Habempondite pri, nocci porente menatium ne con verem adees inum perfent iquidemneris egilique que."
ReactDOM.render(
<Tab
buttons = {buttons}
columns = {columns}
css = {css}
date_ranges = {date_ranges}
dataApi = "/api/vanilla/feed.json"
expandable = {true}
filterApi = "/api/vanilla/filters.json"
intro = {intro}
icon = "/assets/images/search.png"
search = "chef"
tabs = {tabs}
noresults = "We currently don’t have any available vacancies but please check back soon."
/>,
document.getElementById('search')
);
|
MDC01/News
|
app/src/main/java/com/jaydenxiao/androidfire/app/AppConstant.java
|
<gh_stars>0
package com.jaydenxiao.androidfire.app;
/**
* des:
* Created by xsf
* on 2016.09.10:44
*/
/**
 * Application-wide constant keys: intent extras, saved-state keys and
 * event-bus tags shared between activities/fragments.
 *
 * Fix: several "constants" were declared as mutable {@code public static String}
 * while the rest of the class uses {@code public static final String}; they are
 * now final for consistency (nothing should reassign a shared key at runtime).
 */
public class AppConstant {
    public static final String HOME_CURRENT_TAB_POSITION="HOME_CURRENT_TAB_POSITION";
    public static final String MENU_SHOW_HIDE="MENU_SHOW_HIDE";

    /* News */
    public static final String NEWS_ID = "news_id";
    public static final String NEWS_TYPE = "news_type";
    public static final String CHANNEL_POSITION = "channel_position";
    public static final String CHANNEL_MINE = "CHANNEL_MINE";
    public static final String CHANNEL_MORE = "CHANNEL_MORE";
    public static final String CHANNEL_SWAP = "CHANNEL_SWAP";
    public static final String NEWS_CHANNEL_CHANGED = "NEWS_CHANNEL_CHANGED";

    /* Video */
    public static final String VIDEO_TYPE = "VIDEO_TYPE";

    public static final String NEWS_LIST_TO_TOP = "NEWS_LIST_TO_TOP"; // scroll news list back to top
    public static final String ZONE_PUBLISH_ADD = "ZONE_PUBLISH_ADD"; // publish a zone post
    public static final String NEWS_POST_ID = "NEWS_POST_ID"; // news detail id
    public static final String NEWS_LINK = "NEWS_LINK";
    public static final String NEWS_TITLE = "NEWS_TITLE";

    public static final String PHOTO_DETAIL_IMGSRC = "photo_detail_imgsrc";
    public static final String PHOTO_DETAIL = "photo_detail";
    public static final String PHOTO_TAB_CLICK = "PHOTO_TAB_CLICK";
    public static final String NEWS_IMG_RES = "news_img_res";
    public static final String TRANSITION_ANIMATION_NEWS_PHOTOS = "transition_animation_news_photos";
}
|
lechium/iOS1351Headers
|
System/Library/PrivateFrameworks/AssistantUI.framework/AFUIVoicemailPlayer.h
|
<reponame>lechium/iOS1351Headers
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, October 27, 2021 at 3:17:47 PM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/PrivateFrameworks/AssistantUI.framework/AssistantUI
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
#import <AssistantUI/AFUIAudioPlayer.h>
@class VMVoicemailManager, VMVoicemail;
// Siri-UI audio player specialised for voicemail playback. Generated header
// (classdump), so semantics below are inferred from names — confirm against
// the binary before relying on them.
@interface AFUIVoicemailPlayer : AFUIAudioPlayer {

	VMVoicemailManager* _voicemailManager;   // presumably shared manager used to mark played state — verify
	VMVoicemail* _voicemailObject;           // backing ivar for voicemailObject

}

// The voicemail being played; custom accessor names (_voicemailObject/_setVoicemailObject:).
@property (setter=_setVoicemailObject:,getter=_voicemailObject,nonatomic,retain) VMVoicemail * voicemailObject; //@synthesize voicemailObject=_voicemailObject - In the implementation block
// AVAudioPlayer-style completion callback.
-(void)audioPlayerDidFinishPlaying:(id)arg1 successfully:(BOOL)arg2 ;
-(id)_audioCategory;
-(id)_audioURL;
-(void)setPlaybackCommand:(id)arg1 ;
-(id)_voicemailManager;
-(void)_setVoicemailObject:(id)arg1 ;
-(long long)_voicemailID;
-(id)_voicemailObject;
// NOTE(review): likely updates the voicemail's played flag after playback — confirm.
-(void)_updateVoicemailPlayedState:(id)arg1 finished:(BOOL)arg2 ;
-(unsigned long long)_audioOptions;
@end
|
cri-lab-hbku/AIS_CAESAR
|
src/arith_coder-3/hashtable.c
|
<filename>src/arith_coder-3/hashtable.c
/******************************************************************************
File: hashtable.c
Authors: <NAME> (<EMAIL>)
<NAME> (<EMAIL>)
Purpose: Data compression using a word-based model and revised
arithmetic coding method.
Based on: <NAME>, <NAME>, <NAME>, "Arithmetic Coding Revisted",
Proc. IEEE Data Compression Conference, Snowbird, Utah,
March 1995.
Copyright 1995 <NAME> and <NAME>, All Rights Reserved.
These programs are supplied free of charge for research purposes only,
and may not sold or incorporated into any commercial product. There is
ABSOLUTELY NO WARRANTY of any sort, nor any undertaking that they are
fit for ANY PURPOSE WHATSOEVER. Use them at your own risk. If you do
happen to find a bug, or have modifications to suggest, please report
the same to <NAME>, <EMAIL>. The copyright
notice above and this statement of conditions must remain an integral
part of each and every copy made of these files.
$Log: hashtable.c,v $
Revision 1.1 1996/08/07 01:34:11 langs
Initial revision
******************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "arith.h"
#include "stats.h"
#include "main.h"
#include "hashtable.h"
#ifdef RCSID
static char rcsid[] = "$Id: hashtable.c,v 1.1 1996/08/07 01:34:11 langs Exp $";
#endif
/* Local function declarations */
static bucket *get_bucket(hash_table *pTable);
static unsigned char *add_string(string *pItem, hash_table *pTable);
/*
*
* Extendible hashing is used to store both words and non-words. The
* initial size of the hash table is determined by INITIAL_BITS.
*
* Converting word numbers to words in the decoder is done using an
* array of word pointers. Each hash table holds this array and updates
* it as words are added.
*
*/
/*
*
* create a new hash table
* returns pointer to new table, or NULL if memory exhausted
*
*/
hash_table
*create_table(void)
{
    hash_table *pTable;

    /* allocate the hash table structure */
    if ((pTable = (hash_table *) do_malloc(sizeof(hash_table))) == NULL)
        return NULL;

    /* initially allocate index of length one */
    pTable->nBits = 0;
    pTable->nBuckets = 0;
    if ((pTable->pIndex = (bucket **) do_malloc(sizeof(bucket *))) == NULL)
        return NULL;

    /* point index to first bucket */
    pTable->pFreeBuckets = NULL;
    pTable->pIndex[0] = get_bucket(pTable);
    /* NOTE(review): get_bucket() may return NULL when memory is exhausted; the
     * two assignments below would then dereference NULL. Callers appear to
     * assume the first bucket always fits — confirm. */
    pTable->pIndex[0]->nBits = 0;
    pTable->pIndex[0]->nWords = 0;
    /* word numbers below EXTRA_SYMBOLS are reserved for special symbols */
    pTable->next_number = EXTRA_SYMBOLS;

    /* start string tables */
    if ((pTable->pStrings = (string_block *) do_malloc(sizeof(string_block)))
        == NULL)
        return NULL;
    pTable->pStrings->prev = NULL;
    pTable->pStrings->length = 0;

    /* allocate memory for Number-to-Wordptr array */
    if ((pTable->pNumToWords = (unsigned char **)
        do_malloc(ALLOCATE_BLOCK * sizeof(unsigned char **))) == NULL)
        return NULL;
    pTable->numToWordsLimit = ALLOCATE_BLOCK;
    return pTable;
}
/*
*
* get an empty bucket from the pool of preallocated buckets
* returns NULL if memory exceeded
*
*/
/*
 * get an empty bucket from the pool of preallocated buckets, allocating a
 * fresh block of ALLOCATE_BLOCK buckets when the pool runs dry.
 * The returned bucket has nBits == 0 and nWords == 0.
 * returns NULL if memory exceeded
 *
 * Fix: the original initialised only pBuckets[0] of a freshly allocated block,
 * but buckets are dispensed from the END of the block, so every returned
 * bucket other than the last had indeterminate nBits/nWords and relied on the
 * caller to set them. Initialise the bucket actually being handed out instead.
 */
static bucket *get_bucket(hash_table *pTable)
{
    bucket_block *pBlock, *pNew;
    bucket *pBucket;

    pBlock = pTable->pFreeBuckets;
    if ((pBlock == NULL) || (pBlock->nFreeBuckets == 0))
    {
        /* pool exhausted (or first call): chain a new block onto the list */
        if ((pNew = (bucket_block *) do_malloc(sizeof(bucket_block))) == NULL)
            return NULL;
        pNew->prev = pBlock;
        pTable->pFreeBuckets = pNew;
        if ((pNew->pBuckets = (bucket *)
            do_malloc(sizeof(bucket) * ALLOCATE_BLOCK)) == NULL)
            return NULL;
        pNew->nFreeBuckets = ALLOCATE_BLOCK;
        pBlock = pNew;
    }
    pTable->nBuckets++;
    pBlock->nFreeBuckets--;

    /* buckets are handed out from the end of the block; zero the one returned */
    pBucket = pBlock->pBuckets + pBlock->nFreeBuckets;
    pBucket->nBits = 0;
    pBucket->nWords = 0;
    return pBucket;
}
/*
*
* hash function which takes a length and a pointer to the string
*
*/
/*
 * multiplicative hash over the bytes of the string; the length is folded in
 * before reduction so that strings of different lengths tend to differ
 */
int
hash(int length, unsigned char *pText)
{
    unsigned char *pEnd = pText + length;
    int h = 0;

    while (pText < pEnd)
        h = HASH_MULT * h + *pText++;
    h += length;
    return h % HASH_MOD;
}
/*
*
* look up a word in the hash table.
* Returns the ordinal word number if word found,
* or the next unused word number if word is unknown
*
*/
int
lookup_word(string *pItem, hash_table *pTable)
{
    int i, key;
    bucket *pBucket;

    key = hash(pItem->length, pItem->text);
    /* strip off the unused bits of the key */
    key = key & ((1 << pTable->nBits) -1);
    pBucket = pTable->pIndex[key];

    /* search the bucket for the string; stored words are length-prefixed:
     * pWord[0] holds the length, the text follows immediately after */
    for (i = 0; i<pBucket->nWords; i++)
    {
        /* compare the lengths */
        if (pItem->length == (int) *(pBucket->words[i].pWord))
        {
            /* compare the text */
            if (memcmp(pItem->text,pBucket->words[i].pWord+1,
                pItem->length) == 0)
                return pBucket->words[i].wordNumber;
        }
    }
    return pTable->next_number; /* return next available number */
}
/*
*
* add a word to the hash table
* if the bucket overflows, double the size of the table and split
* all buckets
* returns word number if successful, or NOMEMLEFT if memory limit reached
*
*/
int
add_word(string *pItem, hash_table *pTable)
{
    int i, key, tail, length, nWord, nWordsOld, nWordsNew, word_no;
    bucket *pBucket, *pNewBucket;

    /* note new memory required by statistics to store symbol */
    if (get_memory(MEM_PER_SYMBOL) == NOMEMLEFT)
        return NOMEMLEFT;

    key = hash(pItem->length, pItem->text);
    /* strip off the unused bits of the key */
    key = key & ((1 << pTable->nBits) -1);
    pBucket = pTable->pIndex[key];

    /* add the item to the bucket */
    nWord = pBucket->nWords;
    if (nWord < MAXBUCKET)
    {
        /* room in the bucket: assign the next ordinal word number and store
         * the length-prefixed text; pNumToWords gives the reverse mapping */
        pBucket->words[nWord].wordNumber = (word_no = pTable->next_number);
        pTable->next_number++;
        if ((pBucket->words[nWord].pWord = add_string(pItem, pTable)) == NULL)
            return NOMEMLEFT;
        pTable->pNumToWords[word_no-EXTRA_SYMBOLS] =
            pBucket->words[nWord].pWord;
        pBucket->nWords++;
    }
    else {
        /* bucket full: split bucket on pBucket->nBits+1 bit, then retry via
         * the tail recursion below. NOTE(review): if all MAXBUCKET entries
         * share the same extended key bits the split makes no progress and
         * recursion continues until a differing bit is found. */
        tail = key & ((1 << pBucket->nBits) -1); /* save for later */
        pBucket->nBits++;
        if ((pNewBucket = get_bucket(pTable))==NULL)
            return NOMEMLEFT;
        pNewBucket->nBits = pBucket->nBits;
        nWordsOld = 0;
        nWordsNew = 0;
        for (nWord = 0; nWord < pBucket->nWords; nWord++)
        {
            /*
             * move each word depending on the leftmost
             * significant bit
             */
            key = hash(*(pBucket->words[nWord].pWord),
                (pBucket->words[nWord].pWord)+1);
            key = key & (1 << (pBucket->nBits - 1));
            if (key>0)
            {   /* move word to new bucket */
                pNewBucket->words[nWordsNew] = pBucket->words[nWord];
                nWordsNew++;
            }
            else
            {   /* put word in old bucket */
                pBucket->words[nWordsOld] = pBucket->words[nWord];
                nWordsOld++;
            }
        }
        pNewBucket->nWords = nWordsNew;
        pBucket->nWords = nWordsOld;

        /* check if we need to double the index, or rearrange pointers */
        if (pBucket->nBits <= pTable->nBits)
        {
            /* add leading one to key to make new bucket key */
            tail = tail | (1 << (pBucket->nBits-1));
            /* point index entries ending with tail to new bucket */
            for (i=0; i < (1 << (pTable->nBits - pBucket->nBits)); i++)
                pTable->pIndex[(i << pBucket->nBits) | tail] = pNewBucket;
        }
        else
        {
            /* must double size of table */
            length = 1 << pTable->nBits;
            pTable->nBits++;
            if ((pTable->pIndex = (bucket **)
                do_realloc(pTable->pIndex, length * 2 * sizeof(bucket *)))
                == NULL)
                return NOMEMLEFT;
            /* copy old half of index into new half */
            memcpy(pTable->pIndex + length, pTable->pIndex,
                length*sizeof(bucket *));
            /* pointer in second half points to new bucket */
            pTable->pIndex[(1 << (pTable->nBits-1)) | tail] = pNewBucket;
        }
        /* retry the insert now that the bucket has been split; word_no is
         * assigned (and returned) inside the recursive call */
        return (add_word(pItem, pTable));
    }

    /* grow the number-to-word array when it fills up */
    if (pTable->next_number-EXTRA_SYMBOLS == pTable->numToWordsLimit)
    {
        pTable->numToWordsLimit *= GROWTH_RATE;
        /* resize NumToWords array to new size */
        pTable->pNumToWords = (unsigned char **)
            do_realloc(pTable->pNumToWords, pTable->numToWordsLimit *
                sizeof(char *));
        if (pTable->pNumToWords == NULL)
            return NOMEMLEFT;
    }
    return word_no;
}
/*
*
* look up a word given its ordinal word number,
* returning a pointer to the text and updating the length
*
*/
/*
 * map an ordinal word number back to its text: *pLength receives the stored
 * length and *pText is left pointing just past the length byte
 */
void
get_word(hash_table *pTable, int symbol, unsigned char **pText, int *pLength)
{
    unsigned char *pStored;

    pStored = pTable->pNumToWords[symbol-EXTRA_SYMBOLS];
    *pLength = (int) pStored[0];  /* first byte holds the length */
    *pText = pStored + 1;         /* text follows the length byte */
}
/*
*
* function to add a string to a hash table's string block
* if the string_block is full, create a new one and link it to the old one
* returns a pointer to the string in the string block
* Returns NULL if memory limit exceeded
*
*/
static unsigned char
*add_string(string *pItem, hash_table *pTable)
{
    unsigned char *pWord;
    string_block *pBlock, *pNew;

    pBlock = pTable->pStrings;
    /* check that there is enough room in the string block
     * (+1 for the leading length byte) */
    if (STRINGBLOCK - pBlock->length > pItem->length+1)
    {
        /* copy the length then the text into the string block */
        pWord = pBlock->strings+pBlock->length;
        *pWord = pItem->length;
        memcpy(pWord+1, pItem->text, pItem->length);
        pBlock->length += pItem->length+1;
        return (pWord);
    }
    else {
        /* current block full: chain a fresh block and retry (recursion depth
         * is at most one, since the new block is empty) */
        if ((pNew = (string_block *) do_malloc(sizeof(string_block))) == NULL)
        {
            /* Reached memory limit adding new word */
            return NULL;
        }
        pNew->prev = pBlock;
        pNew->length = 0;
        pTable->pStrings = pNew;
        return (add_string(pItem, pTable));
    }
}
/*
*
* free all memory associated with a hash table
*
*/
/*
 * free all memory associated with a hash table
 *
 * Fix: the original freed each bucket_block struct but never the bucket array
 * (pBuckets) that get_bucket() allocated separately inside it, leaking
 * ALLOCATE_BLOCK buckets per block.
 */
void
purge_table(hash_table *pTable)
{
    string_block *pThis, *pPrev;
    bucket_block *pBlock, *prev;

    free(pTable->pNumToWords);
    free(pTable->pIndex);

    /* free the linked list of bucket blocks */
    pBlock = pTable->pFreeBuckets;
    while (pBlock != NULL)
    {
        prev = pBlock->prev;
        free(pBlock->pBuckets);  /* bucket array allocated separately in get_bucket() */
        free(pBlock);
        pBlock=prev;
    }

    /* free the linked list of string blocks */
    pThis = pTable->pStrings;
    while (pThis != NULL)
    {
        pPrev = pThis->prev;
        free(pThis);
        pThis = pPrev;
    }
    free(pTable);
}
|
timboudreau/ANTLR4-Plugins-for-NetBeans
|
antlr-editing-plugins/antlr-live-language-editors/src/main/java/org/nemesis/antlr/live/language/coloring/AdhocColorings.java
|
/*
* Copyright 2016-2019 <NAME>, <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nemesis.antlr.live.language.coloring;
import com.mastfrog.util.strings.Strings;
import java.awt.Color;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Scanner;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BooleanSupplier;
import java.util.logging.Level;
import javax.swing.event.ChangeListener;
import org.openide.util.ChangeSupport;
/**
*
* @author <NAME>
*/
public class AdhocColorings implements DynamicColorings {
private static final long serialVersionUID = 1;
private final Map<String, AdhocColoring> colorings;
private final Map<String, AdhocColoring> defunct = new HashMap<>();
private boolean changesSuspended;
private boolean pendingFire;
private transient PropertyChangeSupport supp;
private AtomicInteger rev = new AtomicInteger();
private transient ChangeSupport csupp;
/** Creates an empty colorings set. */
public AdhocColorings() {
    colorings = new HashMap<>();
}

/** Used by load(InputStream) to build an instance from parsed maps. */
private AdhocColorings(Map<String, AdhocColoring> all, Map<String, AdhocColoring> defunct) {
    this.colorings = new HashMap<>(all);
    this.defunct.putAll(defunct);
}

@Override
public int rev() {
    // Revision counter; incremented by clear() so consumers can detect resets.
    return rev.get();
}
@Override
public void deactivateAll() {
    // XXX this will fire a lot of changes
    // Clears the ACTIVE flag on every coloring via setFlag (one property
    // change per key that actually changes).
    for (String key : colorings.keySet()) {
        setFlag(key, AttrTypes.ACTIVE, false);
    }
}

@Override
public void clear() {
    // Drops all colorings (defunct entries are kept), bumps the revision and
    // notifies change listeners.
    colorings.clear();
    rev.getAndIncrement();
    fire();
}

@Override
public Iterator<String> iterator() {
    // Iterates the live coloring keys (not the defunct ones).
    return colorings.keySet().iterator();
}

/** @return the coloring for {@code key}, or null if absent. */
public AdhocColoring get(String key) {
    return colorings.get(key);
}

@Override
public boolean isEmpty() {
    return colorings.isEmpty();
}

@Override
public boolean contains(String key) {
    return colorings.containsKey(key);
}
/**
 * Renders one {@code key=coloring} line per entry, in the map's iteration
 * order (unspecified for HashMap).
 */
@Override
public String toString() {
    StringBuilder text = new StringBuilder();
    colorings.forEach((name, coloring) ->
            text.append(name).append('=').append(coloring).append('\n'));
    return text.toString();
}

/** Number of live (non-deleted) colorings; package-visible for tests. */
int size() {
    return colorings.size();
}
/**
 * Builds inactive copies of every active coloring and fires one property
 * change per altered key, then a change event.
 *
 * NOTE(review): the rebuilt map {@code nue} is never assigned back to
 * {@code colorings} — listeners are told about inactive copies while the
 * live map still holds the active originals. Confirm whether this is
 * intentional (e.g. listeners own the state) or a dropped assignment.
 */
public void disableAll() {
    Map<String, AdhocColoring> nue = new HashMap<>();
    Map<String, AdhocColoring> old = new HashMap<>(colorings);
    Set<String> changed = new HashSet<>();
    for (Map.Entry<String, AdhocColoring> e : old.entrySet()) {
        if (e.getValue().isActive()) {
            // Copy-constructor with active=false
            nue.put(e.getKey(), new AdhocColoring(e.getValue(), false));
            changed.add(e.getKey());
        } else {
            nue.put(e.getKey(), e.getValue());
        }
    }
    for (String c : changed) {
        firePropertyChange(c, old.get(c), nue.get(c));
    }
    fire();
}
/**
 * Deletes the coloring for {@code key}, parking it in the defunct map so it
 * can later be restored by {@link #recover(String)}.
 *
 * @return the removed coloring, or null if the key was absent
 */
public AdhocColoring remove(String key) {
    AdhocColoring removed = this.colorings.remove(key);
    if (removed == null) {
        return null;
    }
    defunct.put(key, removed);
    fire();
    return removed;
}

/**
 * Restores a previously removed coloring from the defunct map.
 *
 * @return the recovered coloring, or null if nothing was parked under {@code key}
 */
public AdhocColoring recover(String key) {
    AdhocColoring recovered = this.defunct.remove(key);
    if (recovered == null) {
        return null;
    }
    this.colorings.put(key, recovered);
    fire();
    return recovered;
}
/**
 * Registers a coloring for {@code key} built from the given color and
 * attribute flags, firing a property change if it is new or differs from the
 * existing one.
 *
 * @return the newly created coloring
 */
public AdhocColoring add(String key, Color color, AttrTypes... types) {
    int mask = 0;
    for (AttrTypes type : types) {
        mask |= type.maskValue();
    }
    AdhocColoring created = new AdhocColoring(mask, color);
    addOne(key, created);
    return created;
}

/** Set-based variant of {@link #add(String, Color, AttrTypes...)}. */
public AdhocColoring add(String key, Color color, Set<AttrTypes> types) {
    int mask = 0;
    for (AttrTypes type : types) {
        mask |= type.maskValue();
    }
    AdhocColoring created = new AdhocColoring(mask, color);
    addOne(key, created);
    return created;
}
@Override
public void addChangeListener(ChangeListener l) {
    csupp().addChangeListener(l);
}

@Override
public void removeChangeListener(ChangeListener l) {
    csupp().removeChangeListener(l);
}

/**
 * Stores {@code coloring} under {@code key} and fires a property change —
 * either with the old value (when replacing a differing entry) or, for a
 * brand-new key, with old == new (listeners still get notified of the add).
 */
private void addOne(String key, AdhocColoring coloring) {
    if (!colorings.containsKey(key)) {
        colorings.put(key, coloring);
        firePropertyChange(key, coloring, coloring);
    } else {
        AdhocColoring old = colorings.get(key);
        if (!old.equals(coloring)) {
            colorings.put(key, coloring);
            firePropertyChange(key, old, coloring);
        }
    }
}
@Override
public Set<String> keys() {
    // Defensive, sorted snapshot of the live keys.
    return new TreeSet<>(colorings.keySet());
}

/**
 * Runs {@code run} with change events suspended; any fire() during the run is
 * coalesced into a single change event emitted when the outermost suspension
 * ends. Re-entrant: nested calls keep the outer suspension active.
 *
 * @return the supplier's result
 */
public boolean withChangesSuspended(BooleanSupplier run) {
    boolean old = changesSuspended;
    changesSuspended = true;
    if (!old) {
        // Outermost suspension: start with a clean pending flag.
        pendingFire = false;
    }
    try {
        return run.getAsBoolean();
    } finally {
        changesSuspended = old;
        if (!changesSuspended) {
            if (pendingFire) {
                pendingFire = false;
                fire();
            }
        }
    }
}
/**
 * Fires a coarse change event unless suspended.
 * NOTE(review): when suspended, this does not set {@code pendingFire}; only
 * callers that set it themselves get the deferred event — confirm intent.
 */
private void fire() {
    if (csupp != null && !changesSuspended) {
        csupp.fireChange();
    }
}

/** Lazily creates the ChangeListener support (transient; rebuilt after deserialization). */
private ChangeSupport csupp() {
    if (csupp == null) {
        csupp = new ChangeSupport(this);
    }
    return csupp;
}

/** Lazily creates the PropertyChangeListener support. */
private PropertyChangeSupport supp() {
    if (supp == null) {
        supp = new PropertyChangeSupport(this);
    }
    return supp;
}

@Override
public void addPropertyChangeListener(PropertyChangeListener listener) {
    supp().addPropertyChangeListener(listener);
}

@Override
public void removePropertyChangeListener(PropertyChangeListener listener) {
    supp().removePropertyChangeListener(listener);
}

@Override
public void addPropertyChangeListener(String propertyName, PropertyChangeListener listener) {
    supp().addPropertyChangeListener(propertyName, listener);
}

@Override
public void removePropertyChangeListener(String propertyName, PropertyChangeListener listener) {
    supp().removePropertyChangeListener(propertyName, listener);
}
/**
 * Serializes this set to {@code out} in the line format read back by
 * {@link #load(InputStream)}: {@code key=<coloring line>} for live entries and
 * {@code !key=<coloring line>} for defunct (deleted-but-preserved) ones, keys
 * sorted, UTF-8 encoded.
 * Assumes AdhocColoring.toLine() terminates each value with a newline —
 * otherwise entries would run together; TODO confirm.
 */
public void store(OutputStream out) throws IOException {
    byte[] eq = "=".getBytes(UTF_8);
    List<String> keys = new ArrayList<>(colorings.keySet());
    Collections.sort(keys);
    for (String key : keys) {
        AdhocColoring val = colorings.get(key);
        if (val != null) {
            out.write(key.getBytes(UTF_8));
            out.write(eq);
            out.write(val.toLine().getBytes(UTF_8));
        }
    }
    keys.clear();
    keys.addAll(defunct.keySet());
    Collections.sort(keys);
    // Defunct section header (load() skips '#' comment lines).
    out.write("\n# Deleted but preserved items\n".getBytes(UTF_8));
    if (!defunct.isEmpty()) {
        // NOTE(review): iterates defunct.keySet() directly, not the sorted
        // 'keys' list built above — defunct entries are written unsorted.
        for (String key : defunct.keySet()) {
            AdhocColoring val = defunct.get(key);
            if (val != null) {
                out.write("!".getBytes(UTF_8));
                out.write(key.getBytes(UTF_8));
                out.write(eq);
                out.write(val.toLine().getBytes(UTF_8));
            }
        }
    }
    out.flush();
}
/**
 * Parses colorings previously written by {@code store}. Lines starting with
 * '#' or blank lines are skipped; a leading '!' routes the entry into the
 * defunct map. Unparseable values are logged and skipped.
 *
 * @param in the source stream (closed by this method via the Scanner)
 * @return a new AdhocColorings holding the live and defunct entries
 */
public static AdhocColorings load(InputStream in) {
    Map<String, AdhocColoring> live = new HashMap<>();
    Map<String, AdhocColoring> removed = new HashMap<>();
    try (Scanner scanner = new Scanner(in)) {
        while (scanner.hasNextLine()) {
            String raw = scanner.nextLine().trim();
            if (raw.isEmpty() || raw.charAt(0) == '#') {
                continue;
            }
            boolean defunctEntry = raw.charAt(0) == '!';
            String line = defunctEntry ? raw.substring(1) : raw;
            String[] parts = Strings.splitOnce('=', line);
            if (parts.length != 2) {
                continue;
            }
            try {
                AdhocColoring val = AdhocColoring.parse(parts[1]);
                if (val != null) {
                    (defunctEntry ? removed : live).put(parts[0], val);
                }
            } catch (NumberFormatException nfe) {
                AdhocColoringsRegistry.LOG.log(Level.WARNING,
                        "Corruption in saved colorings '{0}' trimmed to '{1}' key {2} val {3}",
                        new Object[]{raw, line, parts[0], parts[1]});
            }
        }
    }
    return new AdhocColorings(live, removed);
}
/**
 * Sets the color of the named coloring. Fires a property change (old
 * attribute snapshot vs. updated coloring) only when the color changed.
 *
 * @return true if the color changed; false if unchanged or the key is unknown
 */
@Override
public boolean setColor(String key, Color value) {
    AdhocColoring coloring = this.colorings.get(key);
    if (coloring == null) {
        return false;
    }
    AdhocColoring previous = coloring.copyAttributes();
    boolean changed = coloring.setColor(value);
    if (changed) {
        firePropertyChange(key, previous, coloring);
    }
    return changed;
}
/**
 * Marks the named coloring as a foreground (or background) color, keeping
 * the FOREGROUND and BACKGROUND flags mutually exclusive: setting one always
 * removes the other. Fires a property change when either flag changed.
 *
 * @param key the coloring name
 * @param val true for foreground, false for background
 * @return true if the flag set changed; false otherwise or if key is unknown
 */
@Override
public boolean setForeground(String key, boolean val) {
    AdhocColoring coloring = this.colorings.get(key);
    if (coloring != null) {
        AdhocColoring old = coloring.copyAttributes();
        boolean result;
        if (val) {
            // Non-short-circuit | so BOTH flag updates always execute.
            result = coloring.addFlag(AttrTypes.FOREGROUND)
                | coloring.removeFlag(AttrTypes.BACKGROUND); // bitwise or intentional
        } else {
            result = coloring.removeFlag(AttrTypes.FOREGROUND)
                | coloring.addFlag(AttrTypes.BACKGROUND); // bitwise or intentional
        }
        if (result) {
            firePropertyChange(key, old, coloring);
        }
        return result;
    }
    return false;
}
/**
 * Adds or removes a single attribute flag on the named coloring. Fires a
 * property change only when the flag set actually changed.
 *
 * @return true if the flag set changed; false otherwise or if key is unknown
 */
@Override
public boolean setFlag(String key, AttrTypes flag, boolean val) {
    AdhocColoring coloring = this.colorings.get(key);
    if (coloring == null) {
        return false;
    }
    AdhocColoring previous = coloring.copyAttributes();
    boolean changed = val ? coloring.addFlag(flag) : coloring.removeFlag(flag);
    if (changed) {
        firePropertyChange(key, previous, coloring);
    }
    return changed;
}
/** Convenience overload of {@link #addIfAbsent(String, Color, Set)} taking varargs flags. */
public AdhocColoring addIfAbsent(String key, Color color, AttrTypes... attrs) {
    return addIfAbsent(key, color, AttrTypes.set(attrs));
}
/**
 * Adds a coloring under {@code key} if none exists. A defunct entry with the
 * same key is recovered instead of creating a new one. Fires change events
 * for newly created entries only.
 *
 * @return the recovered or newly created coloring, or null if the key
 *         already had a live coloring
 */
public AdhocColoring addIfAbsent(String key, Color color, Set<AttrTypes> of) {
    if (colorings.containsKey(key)) {
        return null;
    }
    AdhocColoring recovered = recover(key);
    if (recovered != null) {
        return recovered;
    }
    // Fold the requested attribute types into a single flags bitmask.
    int flags = 0;
    for (AttrTypes attr : of) {
        flags |= attr.maskValue();
    }
    AdhocColoring created = new AdhocColoring(flags, color);
    colorings.put(key, created);
    firePropertyChange(key, null, created);
    fire();
    return created;
}
/**
 * Bumps the revision counter, notifies per-key property listeners (only if
 * a PropertyChangeSupport was ever created), then fires the coarse change
 * event via {@link #fire()}.
 */
private <T> void firePropertyChange(String key, T old, T nue) {
    rev.incrementAndGet();
    if (supp != null) {
        supp.firePropertyChange(key, old, nue);
    }
    fire();
}
/** Hash is derived solely from the live colorings map, matching equals(). */
@Override
public int hashCode() {
    // Constant-folded form of the conventional (seed 5, multiplier 71)
    // accumulator over the single significant field.
    return 71 * 5 + Objects.hashCode(this.colorings);
}
/**
 * Equality is exact-class based (subclasses are never equal) and compares
 * only the live colorings map, consistent with {@link #hashCode()}.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    return Objects.equals(this.colorings, ((AdhocColorings) obj).colorings);
}
}
|
theothertomelliott/tic-tac-toverengineered
|
common/http/param/parse_test.go
|
package param_test
import (
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
"github.com/theothertomelliott/tic-tac-toverengineered/common/http/param"
)
// TestParseString verifies a plain string query parameter is read verbatim.
func TestParseString(t *testing.T) {
	req := httptest.NewRequest("GET", "/?str=value", nil)
	var got string
	if err := param.Parse(req, "str", &got, param.ParseOptions{}); err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, "value", got)
}
// TestParseQuotedString verifies surrounding quotes are stripped from the value.
func TestParseQuotedString(t *testing.T) {
	req := httptest.NewRequest("GET", "/?str=\"value\"", nil)
	var got string
	if err := param.Parse(req, "str", &got, param.ParseOptions{}); err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, "value", got)
}
// TestParseInt verifies a numeric query parameter parses into an int.
func TestParseInt(t *testing.T) {
	req := httptest.NewRequest("GET", "/?num=123", nil)
	var got int
	if err := param.Parse(req, "num", &got, param.ParseOptions{}); err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, int(123), got)
}
// TestParseInt32 verifies parsing into an int32 target, with Required set.
func TestParseInt32(t *testing.T) {
	req := httptest.NewRequest("GET", "/?num=123", nil)
	var got int32
	if err := param.Parse(req, "num", &got, param.ParseOptions{Required: true}); err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, int32(123), got)
}
// TestParseStruct verifies a URL-escaped JSON object unmarshals into a struct.
func TestParseStruct(t *testing.T) {
	type T struct {
		A string
		B string
	}
	req := httptest.NewRequest("GET", "/?t=%7B%22A%22%3A%20%22A%22%2C%20%22B%22%3A%20%22B%22%7D", nil)
	var got T
	if err := param.Parse(req, "t", &got, param.ParseOptions{}); err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, T{A: "A", B: "B"}, got)
}
// TestDefaultValue verifies the configured default is used when the
// parameter is absent from the query string.
func TestDefaultValue(t *testing.T) {
	req := httptest.NewRequest("GET", "/?other=something", nil)
	var got float64
	if err := param.Parse(req, "f", &got, param.ParseOptions{Default: float64(3.14)}); err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, float64(3.14), got)
}
// TestRequiredValueMissing verifies a missing required parameter is an error.
func TestRequiredValueMissing(t *testing.T) {
	req := httptest.NewRequest("GET", "/?other=something", nil)
	var got float64
	if err := param.Parse(req, "f", &got, param.ParseOptions{Required: true}); err == nil {
		t.Error("Expected an error")
	}
}
|
wuhulala/spring-test-code
|
spring-normal/src/test/java/Client.java
|
import com.wuhulala.spring.Application;
import com.wuhulala.spring.aop.AOPService;
import com.wuhulala.spring.aop.AService;
import com.wuhulala.spring.properties.Properties;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
/**
* @author Wuhulala
* @version 1.0
* @updateTime 2016/10/28
*/
// NOTE(review): runner and XML-config annotations are intentionally kept
// commented out; the class currently relies on annotation-based config only.
//@RunWith(value = SpringJUnit4ClassRunner.class)
//@ContextConfiguration(value = {"file:src/main/resources/spring.xml"})
@ContextConfiguration(classes = {Application.class})
public class Client {

    // Spring-injected collaborators under test.
    @Autowired
    private AService aService;

    @Autowired
    private AOPService aopService;

    @Autowired
    Properties p;

    /*
    public static void main(String[] args) {
        int i = 10 ;
        int j = 1;
        do{
            if(i-- > ++j) {
                System.out.println("continue");
                continue;
            }
        }while(i>5);
        System.out.println(i+"----"+j);
    }
    */

    // Placeholder test; body intentionally disabled.
    @Test
    public void test3(){
        //p.print();
    }

    // Exercises the AOP-proxied service call.
    @Test
    public void test4(){
        aopService.a("asd");
    }

    // Scratch demo of try/catch/finally ordering: both inner try blocks in
    // the finally run even though the outer try completed normally.
    // NOTE(review): the outer catch swallows exceptions silently — presumably
    // acceptable for this demo, but worth confirming.
    public static void main(String[] args) {
        try {
            System.out.println("00000");
        }catch (Exception e){
        }finally {
            try {
                System.out.println("1111");
                throw new RuntimeException();
            }catch (Exception e){
                System.out.println("error111");
                e.printStackTrace();
            }
            try {
                System.out.println("22222");
            }catch (Exception e){
                System.out.println("error2222");
            }
        }
    }
}
|
asa3311/Sentinel
|
sentinel-agent/sentinel-agent-client/src/main/java/com/taobao/diamond/domain/ConfigInfoBaseEx.java
|
package com.taobao.diamond.domain;
/**
 * Extension of ConfigInfoBase used by the legacy front-end batch-query API.
 * No new fields may be added: the old front-end interface breaks when the
 * model gains a field (serialization incompatibility).
 *
 * @author water.lyl
 */
public class ConfigInfoBaseEx extends ConfigInfoBase {

    private static final long serialVersionUID = -1L;

    // Do not add fields (see class comment).
    // Per-entry status code for batch queries; codes are defined in Constants.java.
    private int status;
    // Per-entry message for batch queries.
    private String message;

    public ConfigInfoBaseEx() {
        super();
    }

    public ConfigInfoBaseEx(String dataId, String group, String content) {
        super(dataId, group, content);
    }

    public ConfigInfoBaseEx(String dataId, String group, String content,
            int status, String message) {
        super(dataId, group, content);
        this.status = status;
        this.message = message;
    }

    public int getStatus() {
        return status;
    }

    public void setStatus(int status) {
        this.status = status;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    @Override
    public String toString() {
        return "ConfigInfoBaseEx [status=" + status + ", message=" + message
                + ", dataId=" + getDataId() + ", group()=" + getGroup()
                + ", content()=" + getContent() + "]";
    }
}
|
importlib/klib
|
master/core/third/loki/include/loki/ForEachType.h
|
////////////////////////////////////////////////////////////////////////////////
// The Loki Library
// Copyright (C) 2009 <NAME>
// Copyright (c) 2009 <NAME>
// Code covered by the MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
////////////////////////////////////////////////////////////////////////////////
#ifndef LOKI_FOR_EACH_TYPE
#define LOKI_FOR_EACH_TYPE

#include <loki/NullType.h>
#include <loki/Typelist.h>

namespace Loki
{

    ////////////////////////////////////////////////////////////////////////////////
    // class template ForEachType
    // Calls a templated callable for every element of a Typelist
    // Supplies an int template parameter for the position in the TypeList.
    // Invocation (TList is a typelist):
    // ForEachType<TList> dummy();
    // Calls the supplied method during construction of the object dummy.
    ////////////////////////////////////////////////////////////////////////////////

    namespace Private
    {
        // Implementation detail: recursive constructor chain over the typelist.
        // Each level invokes callable.operator()<position, Head>() once; the
        // base constructors run first, so invocation order follows the
        // recursion from the tail back toward the head.
        template <class TList, class Callable>
        struct ForEachTypeImpl;

        // Recursion rule: peel off Head, recurse on Tail, then invoke for Head.
        // `value` is this element's zero-based position counted from the end.
        template <class Head, class Tail, class Callable>
        struct ForEachTypeImpl<Typelist<Head, Tail>, Callable>
            : public ForEachTypeImpl<Tail, Callable>
        {
            enum { value = 1 + ForEachTypeImpl<Tail, Callable>::value };

            ForEachTypeImpl( Callable& callable ) : ForEachTypeImpl<Tail, Callable>(callable)
            {
                // MSVC (pre-conformant) rejects the `template` disambiguator here.
#ifdef _MSC_VER
                callable.operator()<value, Head>();
#else
                callable.template operator()<value, Head>();
#endif
            }
        };

        // Recursion end: last element (Tail == NullType) gets position 0.
        template <class Head, class Callable>
        struct ForEachTypeImpl<Typelist<Head, NullType>, Callable>
        {
        public:
            enum { value = 0 };

            ForEachTypeImpl( Callable& callable )
            {
#ifdef _MSC_VER
                callable.operator()<value, Head>();
#else
                callable.template operator()<value, Head>();
#endif
            }
        };
    }

    // Tag types selecting the iteration direction.
    struct OrderPolicyForward;
    struct OrderPolicyBackward;

    template <class TList, class Callable, class OrderPolicy = OrderPolicyForward>
    struct ForEachType;

    // Forward order: reverse the typelist first so the tail-first recursion
    // ends up visiting elements in their original (head-first) order.
    template <class TList, class Callable >
    struct ForEachType<TList, Callable, OrderPolicyForward>
        : public Private::ForEachTypeImpl<typename TL::Reverse<TList>::Result, Callable >
    {
        ForEachType( Callable& callable )
            : Private::ForEachTypeImpl<typename TL::Reverse<TList>::Result, Callable >( callable )
        {
        }
    };

    // Backward order: use the typelist as-is (tail-first visitation).
    template <class TList, class Callable >
    struct ForEachType<TList, Callable, OrderPolicyBackward>
        : public Private::ForEachTypeImpl< TList, Callable >
    {
        ForEachType( Callable& callable )
            : Private::ForEachTypeImpl< TList, Callable >( callable )
        {
        }
    };

}

#endif
|
metux/chromium-deb
|
chrome/browser/chromeos/printing/printer_event_tracker_unittest.cc
|
<gh_stars>0
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/printing/printer_event_tracker.h"
#include "base/macros.h"
#include "base/time/time.h"
#include "chromeos/printing/printer_configuration.h"
#include "components/metrics/proto/printer_event.pb.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace chromeos {
namespace {
constexpr int kVendorId = 0x3241;
constexpr int kProductId = 0x1337;
constexpr char kUsbManufacturer[] = "Usb MakesPrinters";
constexpr char kUsbModel[] = "Printer ModelName";
constexpr char kMakeAndModel[] = "Chromium RazLazer X4321er";
constexpr char kEffectiveMakeAndModel[] = "Generic PostScript";
// Fixture owning the PrinterEventTracker instance under test.
class PrinterEventTrackerTest : public testing::Test {
 public:
  PrinterEventTrackerTest() = default;
  ~PrinterEventTrackerTest() override = default;

 protected:
  PrinterEventTracker tracker_;

  // Returns a copy of the first element recorded by the tracker. Calls Flush
  // on the |tracker_|.
  std::vector<metrics::PrinterEventProto> GetEvents() {
    std::vector<metrics::PrinterEventProto> events;
    tracker_.FlushPrinterEvents(&events);
    return events;
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(PrinterEventTrackerTest);
};

// With logging enabled, an installed-printer event is recorded.
TEST_F(PrinterEventTrackerTest, RecordsWhenEnabled) {
  tracker_.set_logging(true);
  Printer test_printer;
  test_printer.set_make_and_model(kMakeAndModel);
  test_printer.mutable_ppd_reference()->effective_make_and_model =
      kEffectiveMakeAndModel;
  tracker_.RecordIppPrinterInstalled(test_printer, PrinterEventTracker::kUser);
  auto events = GetEvents();
  EXPECT_EQ(1U, events.size());
}

// Logging defaults to off: nothing is recorded without set_logging(true).
TEST_F(PrinterEventTrackerTest, DefaultLoggingOff) {
  Printer test_printer;
  test_printer.set_make_and_model(kMakeAndModel);
  test_printer.mutable_ppd_reference()->effective_make_and_model =
      kEffectiveMakeAndModel;
  tracker_.RecordIppPrinterInstalled(test_printer,
                                     PrinterEventTracker::kAutomatic);
  auto events = GetEvents();
  EXPECT_TRUE(events.empty());
}

// Explicitly disabled logging also records nothing.
TEST_F(PrinterEventTrackerTest, DoesNotRecordWhileDisabled) {
  tracker_.set_logging(false);
  Printer test_printer;
  test_printer.set_make_and_model(kMakeAndModel);
  test_printer.mutable_ppd_reference()->effective_make_and_model =
      kEffectiveMakeAndModel;
  tracker_.RecordIppPrinterInstalled(test_printer,
                                     PrinterEventTracker::kAutomatic);
  auto events = GetEvents();
  EXPECT_TRUE(events.empty());
}

// Manual IPP install records make/model and PPD id but no USB fields.
TEST_F(PrinterEventTrackerTest, InstalledIppPrinter) {
  tracker_.set_logging(true);
  Printer test_printer;
  test_printer.set_make_and_model(kMakeAndModel);
  test_printer.mutable_ppd_reference()->effective_make_and_model =
      kEffectiveMakeAndModel;
  tracker_.RecordIppPrinterInstalled(test_printer, PrinterEventTracker::kUser);
  auto events = GetEvents();
  ASSERT_FALSE(events.empty());
  metrics::PrinterEventProto recorded_event = events.front();
  EXPECT_EQ(metrics::PrinterEventProto::SETUP_MANUAL,
            recorded_event.event_type());
  EXPECT_EQ(kMakeAndModel, recorded_event.ipp_make_and_model());
  EXPECT_EQ(kEffectiveMakeAndModel, recorded_event.ppd_identifier());
  EXPECT_FALSE(recorded_event.has_usb_printer_manufacturer());
  EXPECT_FALSE(recorded_event.has_usb_printer_model());
  EXPECT_FALSE(recorded_event.has_usb_vendor_id());
  EXPECT_FALSE(recorded_event.has_usb_model_id());
  EXPECT_FALSE(recorded_event.user_ppd());
}

// Autoconf install records SETUP_AUTOMATIC with no PPD identifier.
TEST_F(PrinterEventTrackerTest, InstalledPrinterAuto) {
  tracker_.set_logging(true);
  Printer test_printer;
  test_printer.set_make_and_model(kMakeAndModel);
  test_printer.mutable_ppd_reference()->autoconf = true;
  tracker_.RecordIppPrinterInstalled(
      test_printer, PrinterEventTracker::SetupMode::kAutomatic);
  auto events = GetEvents();
  ASSERT_FALSE(events.empty());
  metrics::PrinterEventProto recorded_event = events.front();
  EXPECT_EQ(metrics::PrinterEventProto::SETUP_AUTOMATIC,
            recorded_event.event_type());
  EXPECT_EQ(kMakeAndModel, recorded_event.ipp_make_and_model());
  // For autoconf printers, ppd identifier is blank but a successful setup is
  // recorded.
  EXPECT_FALSE(recorded_event.has_ppd_identifier());
  EXPECT_FALSE(recorded_event.has_usb_printer_manufacturer());
  EXPECT_FALSE(recorded_event.has_usb_printer_model());
  EXPECT_FALSE(recorded_event.has_usb_vendor_id());
  EXPECT_FALSE(recorded_event.has_usb_model_id());
  EXPECT_FALSE(recorded_event.user_ppd());
}

// User-supplied-PPD install records only the user_ppd flag, not the URL.
TEST_F(PrinterEventTrackerTest, InstalledPrinterUserPpd) {
  tracker_.set_logging(true);
  Printer test_printer;
  test_printer.mutable_ppd_reference()->user_supplied_ppd_url =
      "file:///i_dont_record_this_field/blah/blah/blah/some_ppd.ppd";
  tracker_.RecordIppPrinterInstalled(test_printer,
                                     PrinterEventTracker::SetupMode::kUser);
  auto events = GetEvents();
  ASSERT_FALSE(events.empty());
  metrics::PrinterEventProto recorded_event = events.front();
  EXPECT_EQ(metrics::PrinterEventProto::SETUP_MANUAL,
            recorded_event.event_type());
  // For user PPDs we just record that it was a user PPD, the value is not
  // recorded.
  EXPECT_TRUE(recorded_event.user_ppd());
  EXPECT_FALSE(recorded_event.has_ppd_identifier());
  // This is empty if it was not detected.
  EXPECT_FALSE(recorded_event.has_ipp_make_and_model());
  // Network printers do not have usb information.
  EXPECT_FALSE(recorded_event.has_usb_printer_manufacturer());
  EXPECT_FALSE(recorded_event.has_usb_printer_model());
  EXPECT_FALSE(recorded_event.has_usb_vendor_id());
  EXPECT_FALSE(recorded_event.has_usb_model_id());
}
// USB install records vendor/product ids and USB strings, but no IPP field.
TEST_F(PrinterEventTrackerTest, InstalledUsbPrinter) {
  tracker_.set_logging(true);
  PrinterDetector::DetectedPrinter usb_printer;
  usb_printer.ppd_search_data.usb_vendor_id = kVendorId;
  usb_printer.ppd_search_data.usb_product_id = kProductId;
  usb_printer.printer.set_manufacturer(kUsbManufacturer);
  usb_printer.printer.set_model(kUsbModel);
  usb_printer.printer.mutable_ppd_reference()->effective_make_and_model =
      kEffectiveMakeAndModel;
  tracker_.RecordUsbPrinterInstalled(usb_printer,
                                     PrinterEventTracker::SetupMode::kUser);
  auto events = GetEvents();
  ASSERT_FALSE(events.empty());
  metrics::PrinterEventProto record = events.front();
  EXPECT_EQ(metrics::PrinterEventProto::SETUP_MANUAL, record.event_type());
  EXPECT_EQ(kVendorId, record.usb_vendor_id());
  EXPECT_EQ(kProductId, record.usb_model_id());
  EXPECT_EQ(kUsbManufacturer, record.usb_printer_manufacturer());
  EXPECT_EQ(kUsbModel, record.usb_printer_model());
  EXPECT_EQ(kEffectiveMakeAndModel, record.ppd_identifier());
  EXPECT_FALSE(record.user_ppd());
  // USB doesn't detect this field.
  EXPECT_FALSE(record.has_ipp_make_and_model());
}

// Abandoning a network-printer setup records SETUP_ABANDONED without PPD info.
TEST_F(PrinterEventTrackerTest, AbandonedNetworkPrinter) {
  tracker_.set_logging(true);
  Printer test_printer;
  test_printer.set_make_and_model(kMakeAndModel);
  tracker_.RecordSetupAbandoned(test_printer);
  auto events = GetEvents();
  ASSERT_FALSE(events.empty());
  metrics::PrinterEventProto recorded_event = events.front();
  EXPECT_EQ(metrics::PrinterEventProto::SETUP_ABANDONED,
            recorded_event.event_type());
  EXPECT_EQ(kMakeAndModel, recorded_event.ipp_make_and_model());
  // Abandoned setups should not record a chosen PPD or user PPD.
  EXPECT_FALSE(recorded_event.has_user_ppd());
  EXPECT_FALSE(recorded_event.has_ppd_identifier());
  EXPECT_FALSE(recorded_event.has_usb_printer_manufacturer());
  EXPECT_FALSE(recorded_event.has_usb_printer_model());
  EXPECT_FALSE(recorded_event.has_usb_vendor_id());
  EXPECT_FALSE(recorded_event.has_usb_model_id());
}

// Abandoning a USB setup keeps USB identification but no PPD info.
TEST_F(PrinterEventTrackerTest, AbandonedUsbPrinter) {
  tracker_.set_logging(true);
  PrinterDetector::DetectedPrinter usb_printer;
  usb_printer.ppd_search_data.usb_vendor_id = kVendorId;
  usb_printer.ppd_search_data.usb_product_id = kProductId;
  usb_printer.printer.set_manufacturer(kUsbManufacturer);
  usb_printer.printer.set_model(kUsbModel);
  tracker_.RecordUsbSetupAbandoned(usb_printer);
  auto events = GetEvents();
  ASSERT_FALSE(events.empty());
  metrics::PrinterEventProto record = events.front();
  EXPECT_EQ(metrics::PrinterEventProto::SETUP_ABANDONED, record.event_type());
  EXPECT_EQ(kVendorId, record.usb_vendor_id());
  EXPECT_EQ(kProductId, record.usb_model_id());
  EXPECT_EQ(kUsbManufacturer, record.usb_printer_manufacturer());
  EXPECT_EQ(kUsbModel, record.usb_printer_model());
  EXPECT_FALSE(record.has_user_ppd());
  EXPECT_FALSE(record.has_ppd_identifier());
}

// Removing a printer records PRINTER_DELETED with make/model and PPD info.
TEST_F(PrinterEventTrackerTest, RemovedPrinter) {
  tracker_.set_logging(true);
  Printer test_printer;
  test_printer.set_make_and_model(kMakeAndModel);
  test_printer.mutable_ppd_reference()->effective_make_and_model =
      kEffectiveMakeAndModel;
  tracker_.RecordPrinterRemoved(test_printer);
  auto events = GetEvents();
  ASSERT_FALSE(events.empty());
  metrics::PrinterEventProto recorded_event = events.front();
  EXPECT_EQ(metrics::PrinterEventProto::PRINTER_DELETED,
            recorded_event.event_type());
  // All printers record make and model information here.
  EXPECT_EQ(kMakeAndModel, recorded_event.ipp_make_and_model());
  // PPD info.
  EXPECT_EQ(kEffectiveMakeAndModel, recorded_event.ppd_identifier());
  EXPECT_FALSE(recorded_event.user_ppd());
  // USB Info is not retained for removed printers.
  EXPECT_FALSE(recorded_event.has_usb_printer_manufacturer());
  EXPECT_FALSE(recorded_event.has_usb_printer_model());
  EXPECT_FALSE(recorded_event.has_usb_vendor_id());
  EXPECT_FALSE(recorded_event.has_usb_model_id());
}
} // namespace
} // namespace chromeos
|
yukarinoki/docplex-examples
|
examples/mp/modeling/production.py
|
<filename>examples/mp/modeling/production.py
# --------------------------------------------------------------------------
# Source file provided under Apache License, Version 2.0, January 2004,
# http://www.apache.org/licenses/
# (c) Copyright IBM Corp. 2015, 2018
# --------------------------------------------------------------------------
"""The model aims at minimizing the production cost for a number of products
while satisfying customer demand. Each product can be produced either inside
the company or outside, at a higher cost.
The inside production is constrained by the company's resources, while outside
production is considered unlimited.
The model first declares the products and the resources.
The data consists of the description of the products (the demand, the inside
and outside costs, and the resource consumption) and the capacity of the
various resources.
The variables for this problem are the inside and outside production for each
product.
"""
from docplex.mp.model import Model
from docplex.util.environment import get_environment
# ----------------------------------------------------------------------------
# Initialize the problem data
# ----------------------------------------------------------------------------
PRODUCTS = [("kluski", 100, 0.6, 0.8),
("capellini", 200, 0.8, 0.9),
("fettucine", 300, 0.3, 0.4)]
# resources are a list of simple tuples (name, capacity)
RESOURCES = [("flour", 20),
("eggs", 40)]
CONSUMPTIONS = {("kluski", "flour"): 0.5,
("kluski", "eggs"): 0.2,
("capellini", "flour"): 0.4,
("capellini", "eggs"): 0.4,
("fettucine", "flour"): 0.3,
("fettucine", "eggs"): 0.6}
# ----------------------------------------------------------------------------
# Build the model
# ----------------------------------------------------------------------------
def build_production_problem(products, resources, consumptions, **kwargs):
    """Build the production-planning model.

    Args:
        products: list of (name, demand, inside_cost, outside_cost) tuples.
        resources: list of (name, capacity) tuples.
        consumptions: dict mapping (product_name, resource_name) -> amount
            of the resource consumed per unit produced inside.
        **kwargs: forwarded to the docplex ``Model`` constructor.

    Returns:
        A docplex Model with inside/outside production variables, demand and
        capacity constraints, KPIs for each cost component, and a total-cost
        minimization objective.
    """
    mdl = Model(name='production', **kwargs)

    # Decision variables: inside and outside production levels per product.
    mdl.inside_vars = mdl.continuous_var_dict(products, name=lambda p: 'inside_%s' % p[0])
    mdl.outside_vars = mdl.continuous_var_dict(products, name=lambda p: 'outside_%s' % p[0])

    # Demand satisfaction: inside + outside production covers each demand.
    demand_cts = []
    for prod in products:
        total_supply = mdl.inside_vars[prod] + mdl.outside_vars[prod]
        demand_cts.append((total_supply >= prod[1], 'ct_demand_%s' % prod[0]))
    mdl.add_constraints(demand_cts)

    # Resource capacity: inside production consumption within each capacity.
    capacity_cts = []
    for res in resources:
        used = mdl.sum(mdl.inside_vars[p] * consumptions[p[0], res[0]] for p in products)
        capacity_cts.append((used <= res[1], 'ct_res_%s' % res[0]))
    mdl.add_constraints(capacity_cts)

    # Objective: minimize total inside + outside cost; expose both as KPIs.
    mdl.total_inside_cost = mdl.sum(mdl.inside_vars[p] * p[2] for p in products)
    mdl.add_kpi(mdl.total_inside_cost, "inside cost")
    mdl.total_outside_cost = mdl.sum(mdl.outside_vars[p] * p[3] for p in products)
    mdl.add_kpi(mdl.total_outside_cost, "outside cost")
    mdl.minimize(mdl.total_inside_cost + mdl.total_outside_cost)
    return mdl
def print_production_solution(mdl, products):
    """Print the objective value plus per-product inside/outside production."""
    print("* Production model solved with objective: {:g}".format(mdl.objective_value))
    print("* Total inside cost=%g" % mdl.total_inside_cost.solution_value)
    for prod in products:
        print("Inside production of {product}: {ins_var}".format(
            product=prod[0], ins_var=mdl.inside_vars[prod].solution_value))
    print("* Total outside cost=%g" % mdl.total_outside_cost.solution_value)
    for prod in products:
        print("Outside production of {product}: {out_var}".format(
            product=prod[0], out_var=mdl.outside_vars[prod].solution_value))
def build_default_production_problem(**kwargs):
    """Build the production model using this module's default data sets."""
    return build_production_problem(PRODUCTS, RESOURCES, CONSUMPTIONS, **kwargs)
# ----------------------------------------------------------------------------
# Solve the model and display the result
# ----------------------------------------------------------------------------
if __name__ == '__main__':
    # Build the model from the module-level default data.
    model = build_production_problem(PRODUCTS, RESOURCES, CONSUMPTIONS)
    model.print_information()
    # Solve the model; solve() returns a falsy value when infeasible.
    if model.solve():
        print_production_solution(model, PRODUCTS)
        # Save the CPLEX solution as "solution.json" program output.
        with get_environment().get_output_stream("solution.json") as fp:
            model.solution.export(fp, "json")
    else:
        print("Problem has no solution")
|
niekvandael/Framework
|
Frontend/modules/common/commonModels.js
|
<filename>Frontend/modules/common/commonModels.js<gh_stars>0
/**
* CREATED BY NVD ON 1-apr.-2014 13:57:41
*
* Package : modules/common Filename : commonModels.js
*/
/**
 * Client-side model container holding framework records, application
 * settings, and the list of settings pending commit to the server.
 */
function commonModels() {
    // Framework record objects (constructors defined elsewhere).
    this.CH000S0W_COMMON = new CH000S0W_COMMON();
    this.CH000SEW_SYS_ERROR = new CH000SEW_SYS_ERROR();
    this.CH000S0W_COMM = new CH000S0W_COMM();
    this.PROGRAM_RETURN_REC = new PROGRAM_RETURN_REC();
    // Settings keyed by their full key (e.g. "entity.name"); settingsToCommit
    // accumulates changed settings until saveSuccessfull() clears it.
    this.settings = {};
    this.settingsToCommit = [];

    // Returns the subset of settings whose key prefix (before the first '.')
    // matches the given entity name, keyed by each setting's own `key`.
    this.getSettingsForEntity = function(entity){
        var entitySettings = {};
        for (var setting in this.settings) {
            if(setting.split(".")[0] == entity){
                entitySettings[this.settings[setting].key] = this.settings[setting];
            }
        }
        return entitySettings;
    };

    // Called after a successful save: nothing is pending any more.
    this.saveSuccessfull = function(){
        this.settingsToCommit = [];
    };

    this.getSettingsToCommit = function(){
        return this.settingsToCommit;
    };

    this.getProgramReturnRec = function() {
        return this.PROGRAM_RETURN_REC;
    };

    // Stores credentials, encrypting the password.
    // NOTE(review): encrypt() is assumed to be a global helper — confirm.
    this.setCredentials = function(username, password) {
        this.PROGRAM_RETURN_REC.username = username;
        this.PROGRAM_RETURN_REC.password = encrypt(password);
    };

    // Stores an already-encrypted password as-is.
    this.setEncryptedCredentials = function(username, password) {
        this.PROGRAM_RETURN_REC.username = username;
        this.PROGRAM_RETURN_REC.password = password;
    };

    this.getusername = function() {
        return (this.PROGRAM_RETURN_REC.username);
    };

    // NOTE(review): this.settings is initialized to {} above, so the
    // `== null` branch below appears unreachable unless settings is reset
    // to null elsewhere — confirm intent; currently every setSettings call
    // routes through updateSettings (marking everything changed on first load).
    this.setSettings = function(settings) {
        if (this.settings == null) {
            var temp = [];
            for (var i = 0; i < settings.length; i++){
                temp[settings[i].key] = settings[i];
            }
            this.settings = temp;
        } else {
            this.updateSettings(settings);
        }
    };

    // Merges incoming settings; new or changed values are queued for commit.
    this.updateSettings = function(settings) {
        for (var i = 0; i < settings.length; i++){
            if(this.settings[settings[i].key] == undefined || this.settings[settings[i].key].value != settings[i].value){
                this.settingsToCommit.push(settings[i]);
            }
            this.settings[settings[i].key] = settings[i];
        }
    };

    this.getSettings = function() {
        return this.settings;
    };

    // Returns a single setting's value, or undefined when absent.
    this.getSetting = function(id) {
        if (this.settings != undefined && this.settings[id] != null) {
            return this.settings[id].value;
        }
        return undefined;
    };
};

// Install a single shared instance on window unless one already exists.
if (typeof window.models === 'undefined') {
    window.models = new commonModels();
};
|
Sam-2019/Okukus
|
src/OkukusPages/Container/View/View.js
|
import React from "react";
import { NavLink } from "react-router-dom";
import PropTypes from "prop-types";
import { okukus } from "../../endpoints";
import "./view.css";
// Product card: links to the product detail page and shows the cover photo,
// name, and unit price (in Ghanaian cedi).
const View = ({ cover_photo_url, product_name, unit_price, unique_id }) => {
  return (
    <NavLink to={`/product/${unique_id}`} className="view_wrapper">
      <div className="item_wrapper ">
        <div className=" item_image_wrapper ">
          <img
            src={`${okukus}/${cover_photo_url}`}
            alt=""
            className="item_image"
          />
        </div>
        <div className="name_price ">
          <div className="item_name_wrapper">
            <span className="item_name">{product_name}</span>
          </div>
          <div className=" item_price_wrapper ">
            <span className="item_price ">₵{unit_price}</span>
            {/* <span className="discount item">-5%</span> */}
          </div>
        </div>
      </div>
    </NavLink>
  );
};

export default View;

// NOTE(review): unit_price is typed as string — presumably pre-formatted by
// the API; confirm before doing arithmetic on it anywhere.
View.propTypes = {
  cover_photo_url: PropTypes.string,
  product_name: PropTypes.string,
  unit_price: PropTypes.string,
  unique_id: PropTypes.string,
};
|
yasyf/elasticsearch-ruby
|
elasticsearch-dsl/spec/elasticsearch/dsl/search/aggregations/geo_grid_spec.rb
|
# Licensed to Elasticsearch B.V under one or more agreements.
# Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information
require 'spec_helper'
# Specs for the geohash_grid aggregation DSL wrapper.
describe Elasticsearch::DSL::Search::Aggregations::GeohashGrid do

  let(:search) do
    described_class.new
  end

  describe '#to_hash' do
    # An empty aggregation serializes to a bare geohash_grid hash.
    it 'can be converted to a hash' do
      expect(search.to_hash).to eq(geohash_grid: {})
    end
  end

  context 'when options methods are called' do

    let(:search) do
      described_class.new(:foo)
    end

    # Each option method stores its value under the named aggregation key.
    describe '#field' do
      before do
        search.field('bar')
      end

      it 'applies the option' do
        expect(search.to_hash[:geohash_grid][:foo][:field]).to eq('bar')
      end
    end

    describe '#precision' do
      before do
        search.precision('bar')
      end

      it 'applies the option' do
        expect(search.to_hash[:geohash_grid][:foo][:precision]).to eq('bar')
      end
    end

    describe '#size' do
      before do
        search.size('bar')
      end

      it 'applies the option' do
        expect(search.to_hash[:geohash_grid][:foo][:size]).to eq('bar')
      end
    end

    describe '#shard_size' do
      before do
        search.shard_size('bar')
      end

      it 'applies the option' do
        expect(search.to_hash[:geohash_grid][:foo][:shard_size]).to eq('bar')
      end
    end
  end

  describe '#initialize' do

    # Options may also be supplied via an instance-eval'd block.
    context 'when a block is provided' do
      let(:search) do
        described_class.new do
          field 'bar'
          precision 5
        end
      end

      it 'executes the block' do
        expect(search.to_hash).to eq(geohash_grid: { field: 'bar', precision: 5 })
      end
    end
  end
end
|
godontop/python-work
|
loop_return.py
|
def sum(x):
    """Return the sum of the integers 0 .. x-1 (i.e. sum(range(x))).

    NOTE: this deliberately shadows the builtin ``sum`` — it is a teaching
    example about return placement, so the name is kept for compatibility.
    """
    res = 0
    for i in range(x):
        res += i
        # A `return res` here would exit after the first iteration —
        # keep the return outside the loop.
    return res
# Don't put the return in for loop, or the for loop will just run one time,
# then quit.
print(sum(10))
|
zephraph/saasify
|
packages/saasify-openapi-utils/lib/openapi-service-params-to-json-schema.js
|
<filename>packages/saasify-openapi-utils/lib/openapi-service-params-to-json-schema.js
'use strict'
const cloneDeep = require('clone-deep')
const createError = require('http-errors')
const refParser = require('json-schema-ref-parser')
const serviceToPathItem = require('./service-to-path-item')
// TODO: use https://github.com/openapi-contrib/openapi-schema-to-json-schema
/**
* Converts an OpenAPI-based Service's input parameters into a corresponding
* JSON Schema.
*
* @param {object} service - Service to convert.
* @param {object} openapi - OpenAPI spec for the parent deployment.
*
* @return {Promise}
*/
module.exports = async (service, openapi) => {
  const { name } = service
  // Find the OpenAPI PathItem corresponding to this service's path.
  const pathItem = serviceToPathItem(service, openapi)
  let schema

  if (!pathItem) {
    throw createError(
      400,
      `Error service [${name}] unable to find matching OpenAPI PathItem for path "${service.path}"`
    )
  }

  // TODO: handle other http methods
  if (pathItem.post || pathItem.put) {
    // Convert OpenAPI POST PathItem to JSON Schema
    const op = pathItem.post || pathItem.put
    // TODO: this will not be robust against arbitrary OpenAPI specs
    // NOTE(review): assumes the request body always has an
    // 'application/json' content entry with a schema — confirm upstream.
    schema = cloneDeep(op.requestBody.content['application/json'].schema)
    // Carry the spec's components along so $refs can be dereferenced below.
    schema.components = openapi.components
    // we're ignoring the possibility of op.parameters because op.requestBody
    // should always take precedence
  } else if (pathItem.get) {
    // Convert OpenAPI GET PathItem to JSON Schema: each query/path parameter
    // becomes an object property; required params go into `required`.
    const op = pathItem.get
    schema = {
      additionalProperties: false,
      type: 'object',
      properties: {},
      required: [],
      components: openapi.components
    }

    for (const param of op.parameters) {
      schema.properties[param.name] = {
        name: param.name,
        ...param.schema
      }

      if (param.required) {
        schema.required.push(param.name)
      }
    }
  } else {
    throw createError(
      400,
      `Error service [${name}] matches invalid OpenAPI path item "${service.path}" which doesn't support POST or GET`
    )
  }

  if (!schema) {
    throw createError(
      400,
      `Error service [${name}] matches invalid OpenAPI path item "${service.path}" - JSON schema conversion failed`
    )
  }

  // ensure schema is clean and fully dereferenced, then strip the
  // bookkeeping keys that only existed to enable dereferencing
  schema = await refParser.dereference(schema)
  delete schema.$ref
  delete schema.components

  // console.log(JSON.stringify(schema, null, 2))
  return schema
}
|
pirxpilot/mapbox-gl-js
|
src/symbol/projection.js
|
'use strict';
const Point = require('@mapbox/point-geometry');
const { mat4, vec4 } = require('@mapbox/gl-matrix');
const symbolSize = require('./symbol_size');
const { addDynamicAttributes } = require('../data/bucket/symbol_bucket');
const properties = require('../style/style_layer/symbol_style_layer_properties');
const symbolLayoutProperties = properties.layout;
const { WritingMode } = require('../symbol/shaping');
module.exports = { updateLineLabels, getLabelPlaneMatrix, getGlCoordMatrix, project, placeFirstAndLastGlyph, xyTransformMat4 };
/*
* # Overview of coordinate spaces
*
* ## Tile coordinate spaces
* Each label has an anchor. Some labels have corresponding line geometries.
* The points for both anchors and lines are stored in tile units. Each tile has it's own
* coordinate space going from (0, 0) at the top left to (EXTENT, EXTENT) at the bottom right.
*
* ## GL coordinate space
* At the end of everything, the vertex shader needs to produce a position in GL coordinate space,
* which is (-1, 1) at the top left and (1, -1) in the bottom right.
*
* ## Map pixel coordinate spaces
* Each tile has a pixel coordinate space. It's just the tile units scaled so that one unit is
* whatever counts as 1 pixel at the current zoom.
* This space is used for pitch-alignment=map, rotation-alignment=map
*
* ## Rotated map pixel coordinate spaces
* Like the above, but rotated so axis of the space are aligned with the viewport instead of the tile.
* This space is used for pitch-alignment=map, rotation-alignment=viewport
*
* ## Viewport pixel coordinate space
* (0, 0) is at the top left of the canvas and (pixelWidth, pixelHeight) is at the bottom right corner
* of the canvas. This space is used for pitch-alignment=viewport
*
*
* # Vertex projection
* It goes roughly like this:
* 1. project the anchor and line from tile units into the correct label coordinate space
* - map pixel space pitch-alignment=map rotation-alignment=map
* - rotated map pixel space pitch-alignment=map rotation-alignment=viewport
* - viewport pixel space pitch-alignment=viewport rotation-alignment=*
* 2. if the label follows a line, find the point along the line that is the correct distance from the anchor.
* 3. add the glyph's corner offset to the point from step 3
* 4. convert from the label coordinate space to gl coordinates
*
* For horizontal labels we want to do step 1 in the shader for performance reasons (no cpu work).
* This is what `u_label_plane_matrix` is used for.
* For labels aligned with lines we have to steps 1 and 2 on the cpu since we need access to the line geometry.
* This is what `updateLineLabels(...)` does.
* Since the conversion is handled on the cpu we just set `u_label_plane_matrix` to an identity matrix.
*
* Steps 3 and 4 are done in the shaders for all labels.
*/
/*
* Returns a matrix for converting from tile units to the correct label coordinate space.
*/
function getLabelPlaneMatrix(posMatrix,
                             pitchWithMap,
                             rotateWithMap,
                             transform,
                             pixelsToTileUnits) {
    // `m` is already the identity here; the original code called
    // `mat4.identity(m)` a second time inside the pitchWithMap branch, which
    // was a redundant no-op and has been removed.
    const m = mat4.identity(new Float32Array(16));
    if (pitchWithMap) {
        // pitch-alignment=map: scale tile units down to map pixels at the
        // current zoom.
        mat4.scale(m, m, [1 / pixelsToTileUnits, 1 / pixelsToTileUnits, 1]);
        if (!rotateWithMap) {
            // rotation-alignment=viewport: rotate so the axes align with the
            // viewport instead of the tile.
            mat4.rotateZ(m, m, transform.angle);
        }
    } else {
        // pitch-alignment=viewport: go all the way to viewport pixel space by
        // applying the tile's projection matrix then mapping clip space onto
        // (0,0)..(width,height).
        mat4.scale(m, m, [transform.width / 2, -transform.height / 2, 1]);
        mat4.translate(m, m, [1, -1, 0]);
        mat4.multiply(m, m, posMatrix);
    }
    return m;
}
/*
* Returns a matrix for converting from the correct label coordinate space to gl coords.
*/
function getGlCoordMatrix(posMatrix,
                          pitchWithMap,
                          rotateWithMap,
                          transform,
                          pixelsToTileUnits) {
    const m = mat4.identity(new Float32Array(16));
    if (pitchWithMap) {
        // Labels were laid out in (possibly rotated) map pixel space. Because
        // mat4 post-multiplies, a vector is first un-rotated (if needed),
        // scaled back up to tile units, and finally projected by posMatrix.
        mat4.multiply(m, m, posMatrix);
        mat4.scale(m, m, [pixelsToTileUnits, pixelsToTileUnits, 1]);
        if (!rotateWithMap) {
            mat4.rotateZ(m, m, -transform.angle);
        }
    } else {
        // Labels were laid out in viewport pixel space: map
        // (0,0)..(width,height) onto GL clip space (-1,1)..(1,-1).
        mat4.scale(m, m, [1, -1, 1]);
        mat4.translate(m, m, [-1, -1, 0]);
        mat4.scale(m, m, [2 / transform.width, 2 / transform.height, 1]);
    }
    return m;
}
// Projects `point` through `matrix`, returning the perspective-divided 2D
// point plus the w component (the signed distance from the camera plane).
function project(point, matrix) {
    const vec = [point.x, point.y, 0, 1];
    xyTransformMat4(vec, vec, matrix);
    const [px, py, , w] = vec;
    return {
        point: new Point(px / w, py / w),
        signedDistanceFromCamera: w
    };
}
// True when the perspective-divided anchor lands inside the padded viewport
// described by `clippingBuffer` (half-extents in clip-space units).
function isVisible(anchorPos,
                   clippingBuffer) {
    const w = anchorPos[3];
    const x = anchorPos[0] / w;
    const y = anchorPos[1] / w;
    return Math.abs(x) <= clippingBuffer[0] &&
        Math.abs(y) <= clippingBuffer[1];
}
/*
* Update the `dynamicLayoutVertexBuffer` for the buffer with the correct glyph positions for the current map view.
* This is only run on labels that are aligned with lines. Horizontal labels are handled entirely in the shader.
*/
/*
 * Recomputes per-glyph positions for every line-following symbol in `bucket`
 * for the current camera, writing the results into the symbol's dynamic
 * layout vertex buffer. Symbols that don't fit or are offscreen get their
 * glyphs hidden instead.
 */
function updateLineLabels(bucket,
                          posMatrix,
                          painter,
                          isText,
                          labelPlaneMatrix,
                          glCoordMatrix,
                          pitchWithMap,
                          keepUpright) {
    const sizeData = isText ? bucket.textSizeData : bucket.iconSizeData;
    const partiallyEvaluatedSize = symbolSize.evaluateSizeForZoom(sizeData, painter.transform.zoom,
        symbolLayoutProperties.properties[isText ? 'text-size' : 'icon-size']);

    // Padded viewport bounds (clip-space half extents): 256 extra pixels on
    // each side so labels just outside the screen are still laid out.
    const clippingBuffer = [256 / painter.width * 2 + 1, 256 / painter.height * 2 + 1];

    const dynamicLayoutVertexArray = isText ?
        bucket.text.dynamicLayoutVertexArray :
        bucket.icon.dynamicLayoutVertexArray;
    dynamicLayoutVertexArray.clear();

    const lineVertexArray = bucket.lineVertexArray;
    const placedSymbols = isText ? bucket.text.placedSymbolArray : bucket.icon.placedSymbolArray;

    const aspectRatio = painter.transform.width / painter.transform.height;

    let useVertical = false;

    for (let s = 0; s < placedSymbols.length; s++) {
        const symbol = placedSymbols.get(s);
        // Don't do calculations for vertical glyphs unless the previous symbol was horizontal
        // and we determined that vertical glyphs were necessary.
        // Also don't do calculations for symbols that are collided and fully faded out
        if (symbol.hidden || symbol.writingMode === WritingMode.vertical && !useVertical) {
            hideGlyphs(symbol.numGlyphs, dynamicLayoutVertexArray);
            continue;
        }
        // Awkward... but we're counting on the paired "vertical" symbol coming immediately after its horizontal counterpart
        useVertical = false;

        const anchorPos = [symbol.anchorX, symbol.anchorY, 0, 1];
        vec4.transformMat4(anchorPos, anchorPos, posMatrix);

        // Don't bother calculating the correct point for invisible labels.
        if (!isVisible(anchorPos, clippingBuffer)) {
            hideGlyphs(symbol.numGlyphs, dynamicLayoutVertexArray);
            continue;
        }

        // w component of the projected anchor acts as the distance from the
        // camera; used to scale label size with perspective.
        const cameraToAnchorDistance = anchorPos[3];
        const perspectiveRatio = 0.5 + 0.5 * (cameraToAnchorDistance / painter.transform.cameraToCenterDistance);

        const fontSize = symbolSize.evaluateSizeForFeature(sizeData, partiallyEvaluatedSize, symbol);
        const pitchScaledFontSize = pitchWithMap ?
            fontSize * perspectiveRatio :
            fontSize / perspectiveRatio;

        const tileAnchorPoint = new Point(symbol.anchorX, symbol.anchorY);
        const anchorPoint = project(tileAnchorPoint, labelPlaneMatrix).point;
        // Shared cache of projected line vertices so the flipped retry below
        // doesn't re-project the same vertices.
        const projectionCache = {};

        const placeUnflipped = placeGlyphsAlongLine(symbol, pitchScaledFontSize, false /*unflipped*/, keepUpright, posMatrix, labelPlaneMatrix, glCoordMatrix,
            bucket.glyphOffsetArray, lineVertexArray, dynamicLayoutVertexArray, anchorPoint, tileAnchorPoint, projectionCache, aspectRatio);

        useVertical = placeUnflipped.useVertical;

        // Retry flipped when needed; hide the glyphs entirely when neither
        // orientation fits, or when deferring to the paired vertical symbol.
        if (placeUnflipped.notEnoughRoom || useVertical ||
            (placeUnflipped.needsFlipping &&
             placeGlyphsAlongLine(symbol, pitchScaledFontSize, true /*flipped*/, keepUpright, posMatrix, labelPlaneMatrix, glCoordMatrix,
                 bucket.glyphOffsetArray, lineVertexArray, dynamicLayoutVertexArray, anchorPoint, tileAnchorPoint, projectionCache, aspectRatio).notEnoughRoom)) {
            hideGlyphs(symbol.numGlyphs, dynamicLayoutVertexArray);
        }
    }

    if (isText) {
        bucket.text.dynamicLayoutVertexBuffer.updateData(dynamicLayoutVertexArray);
    } else {
        bucket.icon.dynamicLayoutVertexBuffer.updateData(dynamicLayoutVertexArray);
    }
}
/*
 * Places only the two extreme glyphs of the label. If both fit on the line,
 * callers rely on every glyph between them fitting too. Returns null as soon
 * as either end runs off the line.
 */
function placeFirstAndLastGlyph(fontScale, glyphOffsetArray, lineOffsetX, lineOffsetY, flip, anchorPoint, tileAnchorPoint, symbol, lineVertexArray, labelPlaneMatrix, projectionCache, returnTileDistance) {
    const glyphEndIndex = symbol.glyphStartIndex + symbol.numGlyphs;
    const lineStartIndex = symbol.lineStartIndex;
    const lineEndIndex = symbol.lineStartIndex + symbol.lineLength;

    const firstGlyphOffset = glyphOffsetArray.getoffsetX(symbol.glyphStartIndex);
    const lastGlyphOffset = glyphOffsetArray.getoffsetX(glyphEndIndex - 1);

    const firstPlacedGlyph = placeGlyphAlongLine(fontScale * firstGlyphOffset, lineOffsetX, lineOffsetY, flip, anchorPoint, tileAnchorPoint, symbol.segment,
        lineStartIndex, lineEndIndex, lineVertexArray, labelPlaneMatrix, projectionCache, returnTileDistance);
    if (!firstPlacedGlyph)
        return null;

    const lastPlacedGlyph = placeGlyphAlongLine(fontScale * lastGlyphOffset, lineOffsetX, lineOffsetY, flip, anchorPoint, tileAnchorPoint, symbol.segment,
        lineStartIndex, lineEndIndex, lineVertexArray, labelPlaneMatrix, projectionCache, returnTileDistance);
    if (!lastPlacedGlyph)
        return null;

    return { first: firstPlacedGlyph, last: lastPlacedGlyph };
}
// Given the projected endpoints of a placed label, decides whether the
// renderer should switch to the vertical glyph variant or flip the label to
// keep it upright. Returns null when no change is needed.
function requiresOrientationChange(writingMode, firstPoint, lastPoint, aspectRatio) {
    const dx = lastPoint.x - firstPoint.x;
    const dy = lastPoint.y - firstPoint.y;

    if (writingMode === WritingMode.horizontal) {
        // On top of choosing whether to flip, choose whether to render this version of the glyphs or the alternate
        // vertical glyphs. We can't just filter out vertical glyphs in the horizontal range because the horizontal
        // and vertical versions can have slightly different projections which could lead to angles where both or
        // neither showed.
        if (Math.abs(dy) > Math.abs(dx) * aspectRatio) {
            return { useVertical: true };
        }
    }

    // Includes the "horizontalOnly" case for labels without vertical glyphs.
    const upsideDown = writingMode === WritingMode.vertical ? dy > 0 : dx < 0;
    return upsideDown ? { needsFlipping: true } : null;
}
/*
 * Places every glyph of one symbol along its line in label-plane space and
 * appends the results to `dynamicLayoutVertexArray`. May instead return
 * { notEnoughRoom }, { useVertical } or { needsFlipping } so the caller can
 * hide the label or retry with a different orientation.
 */
function placeGlyphsAlongLine(symbol, fontSize, flip, keepUpright, posMatrix, labelPlaneMatrix, glCoordMatrix, glyphOffsetArray, lineVertexArray, dynamicLayoutVertexArray, anchorPoint, tileAnchorPoint, projectionCache, aspectRatio) {
    // Glyph offsets are authored relative to a 24px base font size.
    const fontScale = fontSize / 24;
    const lineOffsetX = symbol.lineOffsetX * fontSize;
    const lineOffsetY = symbol.lineOffsetY * fontSize;

    let placedGlyphs;
    if (symbol.numGlyphs > 1) {
        const glyphEndIndex = symbol.glyphStartIndex + symbol.numGlyphs;
        const lineStartIndex = symbol.lineStartIndex;
        const lineEndIndex = symbol.lineStartIndex + symbol.lineLength;

        // Place the first and the last glyph in the label first, so we can figure out
        // the overall orientation of the label and determine whether it needs to be flipped in keepUpright mode
        const firstAndLastGlyph = placeFirstAndLastGlyph(fontScale, glyphOffsetArray, lineOffsetX, lineOffsetY, flip, anchorPoint, tileAnchorPoint, symbol, lineVertexArray, labelPlaneMatrix, projectionCache, false);
        if (!firstAndLastGlyph) {
            return { notEnoughRoom: true };
        }

        const firstPoint = project(firstAndLastGlyph.first.point, glCoordMatrix).point;
        const lastPoint = project(firstAndLastGlyph.last.point, glCoordMatrix).point;

        if (keepUpright && !flip) {
            // Orientation is checked only on the unflipped pass; the caller
            // retries with flip=true when needsFlipping comes back.
            const orientationChange = requiresOrientationChange(symbol.writingMode, firstPoint, lastPoint, aspectRatio);
            if (orientationChange) {
                return orientationChange;
            }
        }

        placedGlyphs = [firstAndLastGlyph.first];
        for (let glyphIndex = symbol.glyphStartIndex + 1; glyphIndex < glyphEndIndex - 1; glyphIndex++) {
            // Since first and last glyph fit on the line, we're sure that the rest of the glyphs can be placed
            // $FlowFixMe
            placedGlyphs.push(placeGlyphAlongLine(fontScale * glyphOffsetArray.getoffsetX(glyphIndex), lineOffsetX, lineOffsetY, flip, anchorPoint, tileAnchorPoint, symbol.segment,
                lineStartIndex, lineEndIndex, lineVertexArray, labelPlaneMatrix, projectionCache, false));
        }
        placedGlyphs.push(firstAndLastGlyph.last);
    } else {
        // Only a single glyph to place
        // So, determine whether to flip based on projected angle of the line segment it's on
        if (keepUpright && !flip) {
            const a = project(tileAnchorPoint, posMatrix).point;
            const tileVertexIndex = (symbol.lineStartIndex + symbol.segment + 1);
            // $FlowFixMe
            const tileSegmentEnd = new Point(lineVertexArray.getx(tileVertexIndex), lineVertexArray.gety(tileVertexIndex));
            const projectedVertex = project(tileSegmentEnd, posMatrix);
            // We know the anchor will be in the viewport, but the end of the line segment may be
            // behind the plane of the camera, in which case we can use a point at any arbitrary (closer)
            // point on the segment.
            const b = (projectedVertex.signedDistanceFromCamera > 0) ?
                projectedVertex.point :
                projectTruncatedLineSegment(tileAnchorPoint, tileSegmentEnd, a, 1, posMatrix);

            const orientationChange = requiresOrientationChange(symbol.writingMode, a, b, aspectRatio);
            if (orientationChange) {
                return orientationChange;
            }
        }
        // $FlowFixMe
        const singleGlyph = placeGlyphAlongLine(fontScale * glyphOffsetArray.getoffsetX(symbol.glyphStartIndex), lineOffsetX, lineOffsetY, flip, anchorPoint, tileAnchorPoint, symbol.segment,
            symbol.lineStartIndex, symbol.lineStartIndex + symbol.lineLength, lineVertexArray, labelPlaneMatrix, projectionCache, false);
        if (!singleGlyph)
            return { notEnoughRoom: true };

        placedGlyphs = [singleGlyph];
    }

    // Write the final position and angle of every glyph into the dynamic buffer.
    for (const glyph of placedGlyphs) {
        addDynamicAttributes(dynamicLayoutVertexArray, glyph.point, glyph.angle);
    }
    return {};
}
/*
 * Projects a point one tile unit beyond `previousTilePoint` (away from
 * `currentTilePoint`) to recover the projected direction of the segment, then
 * walks `minimumLength` along that direction from `previousProjectedPoint`.
 * Used when the real segment end is behind the camera plane and can't be
 * projected directly.
 */
function projectTruncatedLineSegment(previousTilePoint, currentTilePoint, previousProjectedPoint, minimumLength, projectionMatrix) {
    // We are assuming "previousTilePoint" won't project to a point within one unit of the camera plane
    // If it did, that would mean our label extended all the way out from within the viewport to a (very distant)
    // point near the plane of the camera. We wouldn't be able to render the label anyway once it crossed the
    // plane of the camera.
    const projectedUnitVertex = project(previousTilePoint.add(previousTilePoint.sub(currentTilePoint)._unit()), projectionMatrix).point;
    const projectedUnitSegment = previousProjectedPoint.sub(projectedUnitVertex);

    return previousProjectedPoint.add(projectedUnitSegment._mult(minimumLength / projectedUnitSegment.mag()));
}
/*
 * Walks along the label-plane projection of the line, starting at the anchor
 * segment, until it has covered `offsetX` (+/- line offsets), then
 * interpolates the exact glyph position and angle on the final segment.
 * Returns null when the offset runs off either end of the line.
 */
function placeGlyphAlongLine(offsetX,
                             lineOffsetX,
                             lineOffsetY,
                             flip,
                             anchorPoint,
                             tileAnchorPoint,
                             anchorSegment,
                             lineStartIndex,
                             lineEndIndex,
                             lineVertexArray,
                             labelPlaneMatrix,
                             projectionCache,
                             returnTileDistance) {

    const combinedOffsetX = flip ?
        offsetX - lineOffsetX :
        offsetX + lineOffsetX;

    // Walk forwards (+1) or backwards (-1) along the line from the anchor.
    let dir = combinedOffsetX > 0 ? 1 : -1;

    let angle = 0;
    if (flip) {
        // The label needs to be flipped to keep text upright.
        // Iterate in the reverse direction.
        dir *= -1;
        angle = Math.PI;
    }

    if (dir < 0) angle += Math.PI;

    let currentIndex = dir > 0 ?
        lineStartIndex + anchorSegment :
        lineStartIndex + anchorSegment + 1;

    const initialIndex = currentIndex;
    let current = anchorPoint;
    let prev = anchorPoint;
    let distanceToPrev = 0;
    let currentSegmentDistance = 0;
    const absOffsetX = Math.abs(combinedOffsetX);

    // Advance vertex by vertex until the target offset falls inside the
    // current segment.
    while (distanceToPrev + currentSegmentDistance <= absOffsetX) {
        currentIndex += dir;

        // offset does not fit on the projected line
        if (currentIndex < lineStartIndex || currentIndex >= lineEndIndex)
            return null;

        prev = current;

        // Reuse the cached projection of this line vertex when available.
        current = projectionCache[currentIndex];
        if (current === undefined) {
            const currentVertex = new Point(lineVertexArray.getx(currentIndex), lineVertexArray.gety(currentIndex));
            const projection = project(currentVertex, labelPlaneMatrix);
            if (projection.signedDistanceFromCamera > 0) {
                current = projectionCache[currentIndex] = projection.point;
            } else {
                // The vertex is behind the plane of the camera, so we can't project it
                // Instead, we'll create a vertex along the line that's far enough to include the glyph
                const previousLineVertexIndex = currentIndex - dir;
                const previousTilePoint = distanceToPrev === 0 ?
                    tileAnchorPoint :
                    new Point(lineVertexArray.getx(previousLineVertexIndex), lineVertexArray.gety(previousLineVertexIndex));
                // Don't cache because the new vertex might not be far enough out for future glyphs on the same segment
                current = projectTruncatedLineSegment(previousTilePoint, currentVertex, prev, absOffsetX - distanceToPrev + 1, labelPlaneMatrix);
            }
        }

        distanceToPrev += currentSegmentDistance;
        currentSegmentDistance = prev.dist(current);
    }

    // The point is on the current segment. Interpolate to find it.
    const segmentInterpolationT = (absOffsetX - distanceToPrev) / currentSegmentDistance;
    const prevToCurrent = current.sub(prev);
    const p = prevToCurrent.mult(segmentInterpolationT)._add(prev);

    // offset the point from the line to text-offset and icon-offset
    p._add(prevToCurrent._unit()._perp()._mult(lineOffsetY * dir));

    const segmentAngle = angle + Math.atan2(current.y - prev.y, current.x - prev.x);

    return {
        point: p,
        angle: segmentAngle,
        tileDistance: returnTileDistance ?
            {
                prevTileDistance: (currentIndex - dir) === initialIndex ? 0 : lineVertexArray.gettileUnitDistanceFromAnchor(currentIndex - dir),
                lastSegmentViewportDistance: absOffsetX - distanceToPrev
            } : null
    };
}
// Attributes for one fully-hidden glyph quad: four vertices at
// (-Infinity, -Infinity) with angle 0 (three floats per vertex).
const hiddenGlyphAttributes = new Float32Array([-Infinity, -Infinity, 0, -Infinity, -Infinity, 0, -Infinity, -Infinity, 0, -Infinity, -Infinity, 0]);

// Hide them by moving them offscreen. We still need to add them to the buffer
// because the dynamic buffer is paired with a static buffer that doesn't get updated.
function hideGlyphs(num, dynamicLayoutVertexArray) {
    for (let i = 0; i < num; i++) {
        const offset = dynamicLayoutVertexArray.length;
        dynamicLayoutVertexArray.resize(offset + 4);
        // Since all hidden glyphs have the same attributes, we can build up the array faster with a single call to Float32Array.set
        // for each set of four vertices, instead of calling addDynamicAttributes for each vertex.
        dynamicLayoutVertexArray.float32.set(hiddenGlyphAttributes, offset * 3);
    }
}
// For line label layout, we're not using z output and our w input is always 1
// This custom matrix transformation ignores those components to make projection faster
// For line label layout we never use the z output and the w input is always 1,
// so this trimmed mat4 transform skips those components for speed. Writes the
// x, y and w results into `out` (index 2 is left untouched) and returns it.
function xyTransformMat4(out, a, m) {
    const [x, y] = a;
    const tx = m[0] * x + m[4] * y + m[12];
    const ty = m[1] * x + m[5] * y + m[13];
    const tw = m[3] * x + m[7] * y + m[15];
    out[0] = tx;
    out[1] = ty;
    out[3] = tw;
    return out;
}
|
juseongkr/BOJ
|
acmicpc/3172.cpp
|
<reponame>juseongkr/BOJ
#include <iostream>
#include <algorithm>
#include <vector>
using namespace std;
#define MAX 100001
int n;                          // number of input strings
int tree[MAX*4];                // Fenwick (binary indexed) tree, 1-indexed
vector<string> vec;             // input strings; sorted lexicographically in main()
vector<pair<string, int>> rev;  // (reversed string, 1-based lexicographic rank)
string s;                       // scratch buffer for reading one string
// Adds `diff` at position `i` of the Fenwick tree, propagating the change to
// every range that covers `i`.
void update(int i, int diff)
{
    for (; i <= n; i += i & -i)
        tree[i] += diff;
}
// Prefix sum of positions 1..i of the Fenwick tree.
int sum(int i)
{
    int total = 0;
    for (; i > 0; i -= i & -i)
        total += tree[i];
    return total;
}

// Inclusive range sum over positions l..r.
int sum(int l, int r) { return sum(r) - sum(l - 1); }
int main()
{
    // Fast I/O: detach C++ streams from C stdio and untie cin/cout.
    ios_base::sync_with_stdio(0);
    cout.tie(0);
    cin.tie(0);
    cin >> n;
    for (int i=0; i<n; ++i) {
        cin >> s;
        vec.push_back(s);
    }
    // Rank every string by lexicographic order of the original text.
    sort(vec.begin(), vec.end());
    for (int i=0; i<n; ++i) {
        // Pair each reversed string with the 1-based rank of its original.
        reverse(vec[i].begin(), vec[i].end());
        rev.push_back({vec[i], i+1});
    }
    // Re-sort by the reversed strings, then count inversions between the two
    // orderings with the Fenwick tree: for each item, how many already-seen
    // items carry a larger original rank.
    sort(rev.begin(), rev.end());
    long long ans = 0;
    for (int i=0; i<n; ++i) {
        int idx = rev[i].second;
        ans += sum(idx, n);
        update(idx, 1);
    }
    cout << ans << '\n';
    return 0;
}
|
lcsm29/project-euler
|
py/py_0380_amazing_mazes!.py
|
<filename>py/py_0380_amazing_mazes!.py<gh_stars>0
# Solution of;
# Project Euler Problem 380: Amazing Mazes!
# https://projecteuler.net/problem=380
#
# An m×n maze is an m×n rectangular grid with walls placed between grid cells
# such that there is exactly one path from the top-left square to any other
# square. The following are examples of a 9×12 maze and a 15×20 maze:Let
# C(m,n) be the number of distinct m×n mazes. Mazes which can be formed by
# rotation and reflection from another maze are considered distinct. It can be
# verified that C(1,1) = 1, C(2,2) = 4, C(3,4) = 2415, and C(9,12) =
# 2.5720e46 (in scientific notation rounded to 5 significant digits). Find
# C(100,500) and write your answer in scientific notation rounded to 5
# significant digits. When giving your answer, use a lowercase e to separate
# mantissa and exponent. E.g. if the answer is 1234567891011 then the answer
# format would be 1.2346e12.
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
def dummy(n):
    """Placeholder solver for Problem 380; no solution implemented yet."""
    return None
if __name__ == '__main__':
    # NOTE(review): `timed.caller` appears to benchmark dummy(n) over `i`
    # iterations and report under problem id `prob_id` — confirm against the
    # project's `timed` module.
    n = 1000
    i = 10000
    prob_id = 380
    timed.caller(dummy, n, i, prob_id)
|
wtrocki/aerogear-android-sdk
|
example/src/main/java/org/aerogear/mobile/example/ui/BaseFragment.java
|
<gh_stars>0
package org.aerogear.mobile.example.ui;
import android.os.Bundle;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import butterknife.ButterKnife;
/**
 * Base class for the example app's fragments: inflates the layout supplied by
 * the subclass, binds ButterKnife-annotated views, and caches the hosting
 * {@link BaseActivity} in {@link #activity}.
 */
public abstract class BaseFragment extends Fragment {

    // Hosting activity; assigned in onCreateView().
    protected BaseActivity activity;

    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater,
                             @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        // Inflate the subclass layout and let ButterKnife wire annotated views.
        View view = inflater.inflate(getLayoutResId(), container, false);
        ButterKnife.bind(this, view);
        activity = (BaseActivity) getActivity();
        return view;
    }

    /** Layout resource each concrete fragment supplies for inflation. */
    abstract @LayoutRes
    int getLayoutResId();
}
|
nabils/jackrabbit
|
jackrabbit-core/src/test/java/org/apache/jackrabbit/core/lock/ExtendedLockingTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.lock;
import org.apache.jackrabbit.test.AbstractJCRTest;
import javax.jcr.RepositoryException;
import javax.jcr.Node;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.lock.Lock;
/**
* <code>ExtendedLockingTest</code>...
*/
public class ExtendedLockingTest extends AbstractJCRTest {

    /**
     * Removing mix:lockable from a node that currently holds a lock must be
     * rejected with a ConstraintViolationException.
     */
    public void testRemoveMixLockableFromLockedNode() throws RepositoryException {
        Node n = testRootNode.addNode(nodeName1);
        n.addMixin(mixLockable);
        testRootNode.save();
        // lock(isDeep, isSessionScoped) — acquire a deep, session-scoped lock.
        Lock l = n.lock(true, true);
        try {
            n.removeMixin(mixLockable);
            n.save();
            fail("Removing mix:lockable from a locked node must fail.");
        } catch (ConstraintViolationException e) {
            // success: the repository enforced the constraint
        } finally {
            // Always release the lock so later tests see a clean state.
            if (n.isLocked()) {
                n.unlock();
            }
        }
    }
}
|
DistributedMonitoringSystem/open-falcon-backend
|
common/gin/config.go
|
// Some convenient utility for usage of gin framework
//
// JSON service
//
// ginConfig := &GinConfig{
// Mode: gin.ReleaseMode,
// Host: "localhost",
// Port: 8080,
// }
// engine := NewDefaultJsonEngine(ginConfig)
//
// // Start service
// // StartServiceOrExit(engine, ginConfig)
//
// // Binds the engine into existing HTTP service
// http.Handle("/root-service", engine)
//
// Panic in Code
//
// By using of "NewDefaultJsonEngine()", any panic code would be output as:
//
// {
// "http_status": 500,
// "error_code": -1,
// "error_message": fmt.Sprintf("%v", panicObject),
// }
//
// And the HTTP engine would keep running.
//
// Special type of panic object
//
// By "DefaultPanicProcessor()", some types of object, which is panic, would be treat specially:
//
// ValidationError - Generated by "ConformAndValidateStruct()", gives "400 Bad Request" and JSON:
// {
// "http_status": 400,
// "error_code": -1,
// "error_message": errObject.Error(),
// }
//
// BindJsonError - Generated by "BindJson()", gives "400 Bad Request" and JSON:
// {
// "http_status": 400,
// "error_code": -101,
// "error_message": errObject.Error(),
// }
//
// DataConflictError - You could panic DataConflictError, which would be output as JSON:
//
// {
// "http_status": 409
// "error_code": errObject.ErrorCode,
// "error_message": errObject.ErrorMessage,
// }
//
// NotFound 404
//
// When not found occurs, output following JSON:
// {
// "http_status": 404,
// "error_code": -1,
// "uri": c.Request.RequestURI,
// }
package gin
import (
"fmt"
"os"
"time"
"github.com/gin-contrib/cors"
"github.com/gin-gonic/gin"
)
// GinConfig defines the properties used to configure the gin framework.
type GinConfig struct {
	// The mode of gin framework
	// const (
	// 	DebugMode string = "debug"
	// 	ReleaseMode string = "release"
	// 	TestMode string = "test"
	// )
	Mode string
	// The host could be used to start service(optional)
	Host string
	// The port could be used to start service(optional)
	Port uint16
}
// GetAddress returns the "host:port" listen address built from Host and Port.
func (config *GinConfig) GetAddress() string {
	return fmt.Sprintf("%s:%d", config.Host, config.Port)
}

// String implements fmt.Stringer; same as "GetAddress()".
func (config *GinConfig) String() string {
	return config.GetAddress()
}
// corsConfig is the package-wide CORS policy installed by NewDefaultJsonEngine.
var corsConfig cors.Config

func init() {
	// The same header list is both accepted from clients (AllowHeaders) and
	// exposed to them (ExposeHeaders); the page-* entries carry pagination
	// metadata.
	headers := []string{
		"Content-Type", "Content-Length", "Accept-Encoding", "X-CSRF-Token", "Authorization", "Cache-Control", "X-Requested-With",
		"accept", "origin", "Apitoken",
		"page-size", "page-pos", "order-by", "page-ptr", "total-count", "page-more", "previous-page", "next-page",
	}

	// NOTE(review): "UPDATE" is not a standard HTTP method — confirm intent.
	corsConfig = cors.Config{
		AllowMethods: []string{"POST", "OPTIONS", "GET", "PUT", "DELETE", "UPDATE"},
		AllowHeaders: headers,
		ExposeHeaders: headers,
		AllowCredentials: true,
		MaxAge: 12 * time.Hour,
	}
	// Allow any origin; set via the field rather than an AllowOrigins list.
	corsConfig.AllowAllOrigins = true
}
// Initialize a router with default JSON response
//
// 1. The panic code would not cause process to dead
// 2. Use gin-contrib/cors as middleware for cross-site issue
// 3. Change (*gin.Engine).NoRoute() with JSON output
// 4. Change (*gin.Engine).NoMethod() with JSON output
//
// CORS Setting
//
//	Access-Control-Allow-Origin: *
//	Access-Control-Allow-Credentials: true
//	Access-Control-Allow-Headers: Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, Cache-Control, X-Requested-With,
//		accept, origin,
//		page-size, page-pos, order-by, page-ptr, previous-page, next-page, page-more, total-count
//	Access-Control-Allow-Methods: POST, OPTIONS, GET, PUT
//	Access-Control-Max-Age": "43200"
func NewDefaultJsonEngine(config *GinConfig) *gin.Engine {
	// gin.SetMode configures the global framework mode (debug/release/test)
	// and must run before the engine is created.
	gin.SetMode(config.Mode)

	router := gin.New()
	router.Use(cors.New(corsConfig))
	// JsonNoRouteHandler/JsonNoMethodHandler/BuildJsonPanicProcessor are
	// package-local helpers (defined elsewhere in this package) that emit the
	// JSON error bodies documented in the package comment.
	router.NoRoute(JsonNoRouteHandler)
	router.NoMethod(JsonNoMethodHandler)
	router.Use(BuildJsonPanicProcessor(DefaultPanicProcessor))

	return router
}
// Try to start the engine with configuration of gin
//
// If some error happened, exit application with "os.Exit(1)"
func StartServiceOrExit(router *gin.Engine, config *GinConfig) {
	// Run blocks while serving; a non-nil error means the listener failed.
	// `logger` is a package-level logger defined elsewhere in this package.
	if err := router.Run(config.GetAddress()); err != nil {
		logger.Errorf("Cannot start web service: %v", err)
		os.Exit(1)
	}
}
|
wh00sh/Tanda-DAPP
|
node_modules/mdi-material-ui/ChessBishop.js
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var _createIcon = _interopRequireDefault(require("./util/createIcon"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
var _default = (0, _createIcon["default"])('M19,22H5V20H19V22M17.16,8.26C18.22,9.63 18.86,11.28 19,13C19,15.76 15.87,18 12,18C8.13,18 5,15.76 5,13C5,10.62 7.33,6.39 10.46,5.27C10.16,4.91 10,4.46 10,4A2,2 0 0,1 12,2A2,2 0 0,1 14,4C14,4.46 13.84,4.91 13.54,5.27C14.4,5.6 15.18,6.1 15.84,6.74L11.29,11.29L12.71,12.71L17.16,8.26Z');
exports["default"] = _default;
|
frostingwolf/saluki
|
saluki-core/src/main/java/io/github/saluki/grpc/client/internal/unary/GrpcHystrixCommand.java
|
<gh_stars>10-100
/*
* Copyright 1999-2012 DianRong.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.github.saluki.grpc.client.internal.unary;
import java.net.InetSocketAddress;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.lang3.tuple.ImmutableTriple;
import org.apache.commons.lang3.tuple.Triple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.google.protobuf.Message;
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;
import com.netflix.hystrix.HystrixCommandKey;
import com.netflix.hystrix.HystrixCommandProperties;
import com.netflix.hystrix.HystrixThreadPoolProperties;
import io.github.saluki.common.Constants;
import io.github.saluki.common.GrpcURL;
import io.github.saluki.common.NamedThreadFactory;
import io.github.saluki.common.RpcContext;
import io.github.saluki.grpc.client.GrpcRequest;
import io.github.saluki.grpc.client.GrpcResponse;
import io.github.saluki.grpc.client.internal.GrpcCallOptions;
import io.github.saluki.grpc.exception.RpcFrameworkException;
import io.github.saluki.grpc.service.ClientServerMonitor;
import io.github.saluki.grpc.service.MonitorService;
import io.github.saluki.grpc.util.GrpcUtil;
import io.github.saluki.grpc.util.SerializerUtil;
import io.github.saluki.serializer.exception.ProtobufException;
import io.grpc.MethodDescriptor;
/**
* @author liushiming 2017年4月26日 下午6:16:32
* @version $Id: GrpcHystrixObservableCommand.java, v 0.0.1 2017年4月26日 下午6:16:32 liushiming
*/
@SuppressWarnings("rawtypes")
public abstract class GrpcHystrixCommand extends HystrixCommand<Object> {
private static final Logger logger = LoggerFactory.getLogger(GrpcHystrixCommand.class);

// In-flight call counters keyed by "serviceName:methodName".
private static final ConcurrentMap<String, AtomicInteger> concurrents = Maps.newConcurrentMap();

private final String serviceName;
private final String methodName;
// Command creation time (epoch millis); presumably used when collecting
// metrics — confirm against collect().
private final long start;
// Snapshot of the caller's RpcContext (attachments, values, holden groups),
// restored in run() because Hystrix executes on its own thread.
private final Triple<Map<String, String>, Map<String, Object>, Set<Class>> rpcContext;

private GrpcRequest request;
private GrpcUnaryClientCall clientCall;
private ClientServerMonitor clientServerMonitor;

// Single background thread that records request/response logs off the caller path.
private static final ExecutorService collectLogExecutor =
    Executors.newSingleThreadExecutor(new NamedThreadFactory("salukiCollectTask", true));
/**
 * Configures the Hystrix command for one gRPC service method and snapshots
 * the caller's RpcContext so it can be re-applied on the Hystrix worker
 * thread in run().
 *
 * @param serviceName       service name, used as the Hystrix group key
 * @param methodName        method name, combined with serviceName as command key
 * @param isEnabledFallBack whether getFallback() may supply a default response
 */
public GrpcHystrixCommand(String serviceName, String methodName, Boolean isEnabledFallBack) {
  super(Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey(serviceName))//
      .andCommandKey(HystrixCommandKey.Factory.asKey(serviceName + ":" + methodName))//
      .andCommandPropertiesDefaults(
          HystrixCommandProperties.Setter().withCircuitBreakerRequestVolumeThreshold(20)// require at least 20 requests in the 10s window before the breaker can act
              .withCircuitBreakerSleepWindowInMilliseconds(30000)// after tripping, go half-open after 30s and let some traffic retry
              .withCircuitBreakerErrorThresholdPercentage(50)// trip the breaker when the error rate reaches 50%
              .withExecutionTimeoutEnabled(false)// disable Hystrix's own execution timeout here
              .withFallbackEnabled(isEnabledFallBack))//
      .andThreadPoolPropertiesDefaults(HystrixThreadPoolProperties.Setter().withCoreSize(100)
          .withAllowMaximumSizeToDivergeFromCoreSize(true).withMaximumSize(Integer.MAX_VALUE)));
  this.serviceName = serviceName;
  this.methodName = methodName;
  this.start = System.currentTimeMillis();
  // Snapshot the caller's RpcContext, then clear it from the calling thread.
  this.rpcContext = new ImmutableTriple<Map<String, String>, Map<String, Object>, Set<Class>>(
      RpcContext.getContext().getAttachments(), RpcContext.getContext().get(),
      RpcContext.getContext().getHoldenGroups());
  RpcContext.removeContext();
}
public void setRequest(GrpcRequest request) {
this.request = request;
}
public void setClientCall(GrpcUnaryClientCall clientCall) {
this.clientCall = clientCall;
}
public void setClientServerMonitor(ClientServerMonitor clientServerMonitor) {
this.clientServerMonitor = clientServerMonitor;
}
@Override
public Object execute() {
try {
currentConcurrent(this.serviceName, this.methodName).incrementAndGet();
return super.execute();
} finally {
currentConcurrent(this.serviceName, this.methodName).decrementAndGet();
}
}
@Override
protected Object run() throws Exception {
try {
RpcContext.getContext().setAttachments(rpcContext.getLeft());
RpcContext.getContext().set(rpcContext.getMiddle());
RpcContext.getContext().setHoldenGroups(rpcContext.getRight());
MethodDescriptor<Message, Message> methodDesc = this.request.getMethodDescriptor();
Integer timeOut = this.request.getCallTimeout();
Message request = getRequestMessage();
Message response = this.run0(request, methodDesc, timeOut, clientCall);
Object obj = this.transformMessage(response);
collectLogExecutor.execute(new Runnable() {
@Override
public void run() {
collect(serviceName, methodName, request, response, false);
}
});
return obj;
} finally {
RpcContext.removeContext();
}
}
@Override
protected Object getFallback() {
Class<?> responseType = this.request.getResponseType();
Message response = GrpcUtil.createDefaultInstance(responseType);
Object obj = this.transformMessage(response);
collectLogExecutor.execute(new Runnable() {
@Override
public void run() {
collect(serviceName, methodName, getRequestMessage(), response, true);
}
});
return obj;
}
protected AtomicInteger currentConcurrent(String serviceName, String methodName) {
String key = serviceName + ":" + methodName;
AtomicInteger concurrent = concurrents.get(key);
if (concurrent == null) {
concurrents.putIfAbsent(key, new AtomicInteger());
concurrent = concurrents.get(key);
}
return concurrent;
}
private Message getRequestMessage() {
try {
Object param = this.request.getRequestParam();
return SerializerUtil.pojo2Protobuf(param);
} catch (ProtobufException e) {
RpcFrameworkException rpcFramwork = new RpcFrameworkException(e);
throw rpcFramwork;
}
}
private Object transformMessage(Message message) {
Class<?> respPojoType = request.getResponseType();
GrpcResponse response = new GrpcResponse.Default(message, respPojoType);
try {
return response.getResponseArg();
} catch (ProtobufException e) {
RpcFrameworkException rpcFramwork = new RpcFrameworkException(e);
throw rpcFramwork;
}
}
private void collect(String serviceName, String methodName, Message request, Message response,
boolean error) {
try {
InetSocketAddress provider = (InetSocketAddress) GrpcCallOptions
.getAffinity(this.request.getRefUrl()).get(GrpcCallOptions.GRPC_CURRENT_ADDR_KEY);
if (request == null || response == null || provider == null) {
return;
}
long elapsed = System.currentTimeMillis() - this.start; // 计算调用耗时
int concurrent = this.currentConcurrent(serviceName, methodName).get(); // 当前并发数
String service = serviceName; // 获取服务名称
String method = methodName; // 获取方法名
GrpcURL refUrl = this.request.getRefUrl();
String host = refUrl.getHost();
Integer port = refUrl.getPort();
clientServerMonitor.collect(new GrpcURL(Constants.MONITOR_PROTOCOL, host, port, //
service + "/" + method, //
MonitorService.TIMESTAMP, String.valueOf(start), //
MonitorService.APPLICATION, refUrl.getParameter(Constants.APPLICATION_NAME), //
MonitorService.INTERFACE, service, //
MonitorService.METHOD, method, //
MonitorService.PROVIDER, provider.getHostName(), //
error ? MonitorService.FAILURE : MonitorService.SUCCESS, "1", //
MonitorService.ELAPSED, String.valueOf(elapsed), //
MonitorService.CONCURRENT, String.valueOf(concurrent), //
MonitorService.INPUT, String.valueOf(request.getSerializedSize()), //
MonitorService.OUTPUT, String.valueOf(response.getSerializedSize())));
} catch (Throwable t) {
logger.warn("Failed to monitor count service " + serviceName + ", cause: " + t.getMessage());
}
}
protected abstract Message run0(Message req, MethodDescriptor<Message, Message> methodDesc,
Integer timeOut, GrpcUnaryClientCall clientCall);
protected void cacheCurrentServer() {
Object obj = GrpcCallOptions.getAffinity(this.request.getRefUrl())
.get(GrpcCallOptions.GRPC_CURRENT_ADDR_KEY);
if (obj != null) {
InetSocketAddress currentServer = (InetSocketAddress) obj;
RpcContext.getContext().setAttachment(Constants.REMOTE_ADDRESS, currentServer.getHostName());
}
}
}
|
toddstrader/slang
|
source/binding/SystemSubroutine.cpp
|
<reponame>toddstrader/slang<filename>source/binding/SystemSubroutine.cpp<gh_stars>0
//------------------------------------------------------------------------------
// SystemSubroutine.cpp
// System-defined subroutine handling.
//
// File is under the MIT license; see LICENSE for details.
//------------------------------------------------------------------------------
#include "slang/binding/SystemSubroutine.h"
#include "slang/binding/BindContext.h"
#include "slang/binding/Expressions.h"
#include "slang/compilation/Compilation.h"
#include "slang/diagnostics/ExpressionsDiags.h"
#include "slang/diagnostics/SysFuncsDiags.h"
#include "slang/syntax/AllSyntax.h"
#include "slang/text/SFormat.h"
namespace slang {
// Default argument binding: bind with no expected type. Subclasses override to
// apply argument-specific rules; the argument index is unused here.
const Expression& SystemSubroutine::bindArgument(size_t, const BindContext& context,
                                                 const ExpressionSyntax& syntax) const {
    return Expression::bind(syntax, context);
}
// Human-readable name of the subroutine kind, used when building diagnostics.
string_view SystemSubroutine::kindStr() const {
    if (kind == SubroutineKind::Task)
        return "task"sv;
    return "function"sv;
}
bool SystemSubroutine::checkArgCount(const BindContext& context, bool isMethod, const Args& args,
SourceRange callRange, size_t min, size_t max) {
size_t provided = args.size();
if (isMethod) {
ASSERT(provided);
provided--;
}
if (provided < min) {
context.addDiag(diag::TooFewArguments, callRange) << min << provided;
return false;
}
if (provided > max) {
context.addDiag(diag::TooManyArguments, args[max]->sourceRange) << max << provided;
return false;
}
for (auto arg : args) {
if (arg->bad())
return false;
}
return true;
}
// Type-checks the arguments of a $display-style call. Walks the argument list;
// each string-literal argument (re)fills `specs` with the conversion
// specifiers it contains, and subsequent arguments are matched against those
// specifiers in order.
bool SystemSubroutine::checkFormatArgs(const BindContext& context, const Args& args) {
    // TODO: empty args
    SmallVectorSized<SFormat::Arg, 8> specs;
    auto specIt = specs.begin();

    auto argIt = args.begin();
    while (argIt != args.end()) {
        auto arg = *argIt++;
        if (arg->bad())
            return false;

        const Type& type = *arg->type;
        if (specIt == specs.end()) {
            // No pending specifier: this argument either starts a new format
            // string or must be a printable (non-aggregate) value.
            if (arg->kind == ExpressionKind::StringLiteral) {
                specs.clear();
                auto& lit = arg->as<StringLiteral>();

                Diagnostics diags;
                if (!SFormat::parseArgs(lit.getRawValue(), arg->sourceRange.start(), specs,
                                        diags)) {
                    context.scope.addDiags(diags);
                    return false;
                }
                specIt = specs.begin();
            }
            else if (type.isAggregate() && !type.isByteArray()) {
                context.addDiag(diag::FormatUnspecifiedType, arg->sourceRange) << type;
                return false;
            }
        }
        else {
            // Consume the next specifier and verify the argument's type matches it.
            SFormat::Arg fmtArg = *specIt++;
            if (!SFormat::isArgTypeValid(fmtArg.type, type)) {
                context.addDiag(diag::FormatMismatchedType, arg->sourceRange)
                    << type << fmtArg.spec;
                return false;
            }
        }
    }

    // TODO: check for left over specifiers
    return true;
}
// Binds argument `argIndex` against its declared type. Arguments beyond the
// declared list fall back to the default (untyped) binding in the base class.
const Expression& SimpleSystemSubroutine::bindArgument(size_t argIndex, const BindContext& context,
                                                       const ExpressionSyntax& syntax) const {
    if (argIndex >= argTypes.size())
        return SystemSubroutine::bindArgument(argIndex, context, syntax);

    return Expression::bind(*argTypes[argIndex], syntax, syntax.getFirstToken().location(),
                            context);
}
// Validates arity against the declared argument list. On failure the call gets
// the compilation's error type so downstream checks stay quiet.
const Type& SimpleSystemSubroutine::checkArguments(const BindContext& context, const Args& args,
                                                   SourceRange range) const {
    auto& comp = context.getCompilation();
    return checkArgCount(context, isMethod, args, range, requiredArgs, argTypes.size())
               ? *returnType
               : comp.getErrorType();
}
} // namespace slang
|
sejust/cubefs-blobstore
|
blobnode/chunk.go
|
// Copyright 2022 The CubeFS Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
package blobnode
import (
bnapi "github.com/cubefs/blobstore/api/blobnode"
"github.com/cubefs/blobstore/blobnode/core"
"github.com/cubefs/blobstore/blobnode/core/disk"
bloberr "github.com/cubefs/blobstore/common/errors"
"github.com/cubefs/blobstore/common/rpc"
"github.com/cubefs/blobstore/common/trace"
)
/*
* method: POST
* url: /chunk/create/diskid/{diskid}/vuid/{vuid}?chunksize={chunksize}
 * request body: json.Marshal(bnapi.CreateChunkArgs)
*/
// ChunkCreate_ handles POST /chunk/create/diskid/{diskid}/vuid/{vuid}.
// It validates the arguments, serializes concurrent creations of the same
// vuid, and creates the chunk on the target disk.
func (s *Service) ChunkCreate_(c *rpc.Context) {
	args := new(bnapi.CreateChunkArgs)
	if err := c.ParseArgs(args); err != nil {
		c.RespondError(err)
		return
	}
	ctx := c.Request.Context()
	span := trace.SpanFromContextSafe(ctx)
	span.Infof("chunk create args:%v", args)
	// Reject nonsensical sizes up front; zero means "use the default" below.
	if args.ChunkSize < 0 || args.ChunkSize > disk.MaxChunkSize {
		span.Debugf("args:%v", args)
		c.RespondError(bloberr.ErrInvalidParam)
		return
	}
	if !bnapi.IsValidDiskID(args.DiskID) {
		span.Debugf("args:%v", args)
		c.RespondError(bloberr.ErrInvalidDiskId)
		return
	}
	if args.ChunkSize == 0 {
		args.ChunkSize = core.DefaultChunkSize
	}
	// Only one create per vuid may be in flight at a time.
	limitKey := args.Vuid
	err := s.ChunkLimitPerVuid.Acquire(limitKey)
	if err != nil {
		span.Errorf("can not create chunk with same vuid(%v) at the same time", args.Vuid)
		c.RespondError(bloberr.ErrOutOfLimit)
		return
	}
	defer s.ChunkLimitPerVuid.Release(limitKey)
	// Hold the read lock only for the map lookup.
	s.lock.RLock()
	ds, exist := s.Disks[args.DiskID]
	s.lock.RUnlock()
	if !exist {
		span.Errorf("diskId:%d not exist", args.DiskID)
		c.RespondError(bloberr.ErrNoSuchDisk)
		return
	}
	cs, err := ds.CreateChunk(ctx, args.Vuid, args.ChunkSize)
	if err != nil {
		// NOTE(review): message says "vuid" but logs args.DiskID — confirm intent.
		span.Errorf("Failed register vuid:%v, err:%v", args.DiskID, err)
		c.RespondError(err)
		return
	}
	span.Infof("create vuid:%d success, bind chunkId:%s", args.Vuid, cs.ID())
}
/*
* method: POST
* url: /chunk/release/diskid/{diskid}/vuid/{vuid}
* request body: json.Marshal(ChunkArgs)
*/
// ChunkRelease_ handles POST /chunk/release/diskid/{diskid}/vuid/{vuid}.
// Unless Force is set, only a readonly chunk may be released.
func (s *Service) ChunkRelease_(c *rpc.Context) {
	args := new(bnapi.ChangeChunkStatusArgs)
	if err := c.ParseArgs(args); err != nil {
		c.RespondError(err)
		return
	}
	ctx := c.Request.Context()
	span := trace.SpanFromContextSafe(ctx)
	span.Debugf("args: %v", args)
	if !bnapi.IsValidDiskID(args.DiskID) {
		span.Debugf("args:%v", args)
		c.RespondError(bloberr.ErrInvalidDiskId)
		return
	}
	// Serialize status changes of the same vuid.
	limitKey := args.Vuid
	err := s.ChunkLimitPerVuid.Acquire(limitKey)
	if err != nil {
		span.Errorf("vuid(%v) status concurry conflict", args.Vuid)
		c.RespondError(bloberr.ErrOverload)
		return
	}
	defer s.ChunkLimitPerVuid.Release(limitKey)
	s.lock.RLock()
	ds, exist := s.Disks[args.DiskID]
	s.lock.RUnlock()
	if !exist {
		span.Errorf("disk:%v not found", args.DiskID)
		c.RespondError(bloberr.ErrNoSuchDisk)
		return
	}
	cs, exist := ds.GetChunkStorage(args.Vuid)
	if !exist {
		span.Errorf("vuid:%v not found", args.Vuid)
		c.RespondError(bloberr.ErrNoSuchVuid)
		return
	}
	// only readonly chunk can be release
	if !args.Force && cs.Status() != bnapi.ChunkStatusReadOnly {
		span.Errorf("vuid:%v/chunk:%s (status:%v) not readonly", args.Vuid, cs.ID(), cs.Status())
		c.RespondError(bloberr.ErrChunkNotReadonly)
		return
	}
	err = ds.ReleaseChunk(ctx, args.Vuid, args.Force)
	if err != nil {
		span.Errorf("release args:(%v) failed: %v", args, err)
		c.RespondError(err)
		return
	}
	span.Infof("disk release vuid:%v success", args.Vuid)
}
/*
* method: POST
* url: /chunk/readonly/diskid/{diskid}/vuid/{vuid}/
* request body: json.Marshal(ChunkArgs)
*/
// ChunkReadonly_ handles POST /chunk/readonly/diskid/{diskid}/vuid/{vuid}.
// Only the normal -> readonly transition is allowed; an already-readonly chunk
// is treated as success (idempotent).
func (s *Service) ChunkReadonly_(c *rpc.Context) {
	args := new(bnapi.ChangeChunkStatusArgs)
	if err := c.ParseArgs(args); err != nil {
		c.RespondError(err)
		return
	}
	ctx := c.Request.Context()
	span := trace.SpanFromContextSafe(ctx)
	span.Debugf("args: %v", args)
	if !bnapi.IsValidDiskID(args.DiskID) {
		span.Debugf("args:%v", args)
		c.RespondError(bloberr.ErrInvalidDiskId)
		return
	}
	// Serialize status changes of the same vuid.
	limitKey := args.Vuid
	err := s.ChunkLimitPerVuid.Acquire(limitKey)
	if err != nil {
		span.Errorf("vuid(%v) status concurry conflict", args.Vuid)
		c.RespondError(bloberr.ErrOverload)
		return
	}
	defer s.ChunkLimitPerVuid.Release(limitKey)
	s.lock.RLock()
	ds, exist := s.Disks[args.DiskID]
	s.lock.RUnlock()
	if !exist {
		span.Errorf("disk:%v not found", args.DiskID)
		c.RespondError(bloberr.ErrNoSuchDisk)
		return
	}
	cs, exist := ds.GetChunkStorage(args.Vuid)
	if !exist {
		span.Errorf("vuid:%v not found", args.Vuid)
		c.RespondError(bloberr.ErrNoSuchVuid)
		return
	}
	if cs.Status() == bnapi.ChunkStatusReadOnly {
		span.Warnf("chunk(%s) already in readonly", cs.ID())
		return
	}
	if cs.Status() != bnapi.ChunkStatusNormal {
		span.Warnf("chunk(%s) status no normal", cs.ID())
		c.RespondError(bloberr.ErrChunkNotNormal)
		return
	}
	// change persistence status
	err = ds.UpdateChunkStatus(ctx, args.Vuid, bnapi.ChunkStatusReadOnly)
	if err != nil {
		// NOTE(review): %s on args (a struct pointer) likely renders poorly; %v is
		// used elsewhere in this file — confirm before changing.
		span.Errorf("set args:(%s) readOnly failed: %v", args, err)
		c.RespondError(err)
		return
	}
	span.Debugf("update diskid: %v vuid:%v readonly success", args.DiskID, args.Vuid)
}
/*
* method: POST
* url: /chunk/readwrite/diskid/{diskid}/vuid/{vuid}
* request body: json.Marshal(ChunkArgs)
*/
func (s *Service) ChunkReadwrite_(c *rpc.Context) {
args := new(bnapi.ChangeChunkStatusArgs)
if err := c.ParseArgs(args); err != nil {
c.RespondError(err)
return
}
ctx := c.Request.Context()
span := trace.SpanFromContextSafe(ctx)
span.Debugf("args: %v", args)
if !bnapi.IsValidDiskID(args.DiskID) {
span.Debugf("args:%v", args)
c.RespondError(bloberr.ErrInvalidDiskId)
return
}
limitKey := <KEY>
err := s.ChunkLimitPerVuid.Acquire(limitKey)
if err != nil {
span.Errorf("vuid(%v) status concurry conflict", args.Vuid)
c.RespondError(bloberr.ErrOverload)
return
}
defer s.ChunkLimitPerVuid.Release(limitKey)
s.lock.RLock()
ds, exist := s.Disks[args.DiskID]
s.lock.RUnlock()
if !exist {
span.Errorf("disk:%v not found", args.DiskID)
c.RespondError(bloberr.ErrNoSuchDisk)
return
}
cs, exist := ds.GetChunkStorage(args.Vuid)
if !exist {
span.Errorf("vuid:%v not found", args.Vuid)
c.RespondError(bloberr.ErrNoSuchVuid)
return
}
if cs.Status() == bnapi.ChunkStatusNormal {
span.Warnf("chunk(%s) already normal", cs.ID())
return
}
// only readonly -> normal
if cs.Status() != bnapi.ChunkStatusReadOnly {
span.Warnf("chunk(%s) status no readonly", cs.ID())
c.RespondError(bloberr.ErrChunkNotReadonly)
return
}
// change persistence status
err = ds.UpdateChunkStatus(ctx, args.Vuid, bnapi.ChunkStatusNormal)
if err != nil {
span.Errorf("set args:(%s) readWrite failed: %v", args, err)
c.RespondError(err)
return
}
span.Infof("update disk:%v vuid:%v normal success", args.DiskID, args.Vuid)
}
/*
* method: GET
* url: /chunk/list/diskid/{diskid}
*/
// ChunkList_ handles GET /chunk/list/diskid/{diskid}, returning the info of
// every chunk on the disk.
func (s *Service) ChunkList_(c *rpc.Context) {
	args := new(bnapi.ListChunkArgs)
	if err := c.ParseArgs(args); err != nil {
		c.RespondError(err)
		return
	}
	ctx := c.Request.Context()
	span := trace.SpanFromContextSafe(ctx)
	span.Infof("chunk list args: %v", args)
	if !bnapi.IsValidDiskID(args.DiskID) {
		span.Debugf("args:%v", args)
		c.RespondError(bloberr.ErrInvalidDiskId)
		return
	}
	s.lock.RLock()
	ds, exist := s.Disks[args.DiskID]
	s.lock.RUnlock()
	if !exist {
		span.Errorf("diskid(%v) no such disk", args.DiskID)
		c.RespondError(bloberr.ErrNoSuchDisk)
		return
	}
	// Two phases: gather chunk handles under the walk's lock, then build the
	// info list outside it.
	chunks := make([]core.ChunkAPI, 0)
	_ = ds.WalkChunksWithLock(ctx, func(cs core.ChunkAPI) (err error) {
		chunks = append(chunks, cs)
		return nil
	})
	infos := make([]*bnapi.ChunkInfo, 0)
	for _, cs := range chunks {
		info := cs.ChunkInfo(ctx)
		infos = append(infos, &info)
	}
	ret := bnapi.ListChunkRet{
		ChunkInfos: infos,
	}
	c.RespondJSON(ret)
}
/*
* method: GET
* url: /chunk/stat/diskid/{diskid}/vuid/{vuid}
* response body: json.Marshal(ChunkInfo)
*/
// ChunkStat_ handles GET /chunk/stat/diskid/{diskid}/vuid/{vuid}, responding
// with the ChunkInfo of one chunk as JSON.
func (s *Service) ChunkStat_(c *rpc.Context) {
	args := new(bnapi.StatChunkArgs)
	if err := c.ParseArgs(args); err != nil {
		c.RespondError(err)
		return
	}
	ctx := c.Request.Context()
	span := trace.SpanFromContextSafe(ctx)
	span.Infof("chunk stat args:%v", args)
	if !bnapi.IsValidDiskID(args.DiskID) {
		span.Debugf("args:%v", args)
		c.RespondError(bloberr.ErrInvalidDiskId)
		return
	}
	s.lock.RLock()
	ds, exist := s.Disks[args.DiskID]
	s.lock.RUnlock()
	if !exist {
		span.Errorf("disk:%v not found", args.DiskID)
		c.RespondError(bloberr.ErrNoSuchDisk)
		return
	}
	cs, exist := ds.GetChunkStorage(args.Vuid)
	if !exist {
		span.Errorf("no such vuid, args:%v", args)
		c.RespondError(bloberr.ErrNoSuchVuid)
		return
	}
	chunk := cs.ChunkInfo(ctx)
	c.RespondJSON(&chunk)
}
|
ntiufalara/openerp7
|
openerp/tests/__init__.py
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Tests for the OpenERP library.
This module groups a few sub-modules containing unittest2 test cases.
Tests can be explicitly added to the `fast_suite` or `checks` lists or not.
See the :ref:`test-framework` section in the :ref:`features` list.
"""
import test_acl
import test_basecase
import test_db_cursor
import test_expression
import test_fields
import test_ir_filters
import test_ir_sequence
import test_mail
import test_misc
import test_orm
import test_osv
import test_translate
import test_uninstall
import test_view_validation
# Test modules explicitly included in the fast suite (see module docstring).
fast_suite = [
    test_ir_sequence,
    test_ir_filters
]

# Remaining test modules, grouped under `checks`.
checks = [
    test_acl,
    test_expression,
    test_mail,
    test_db_cursor,
    test_orm,
    test_fields,
    test_basecase,
    test_view_validation,
    test_misc,
    test_osv,
    test_translate,
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ZieIony/Ghurund
|
engine/Engine.UI/src/ui/widget/button/CheckBoxRadio.cpp
|
<gh_stars>10-100
#include "ghuipch.h"
#include "CheckBoxRadio.h"
#include "ui/control/ImageView.h"
#include "ui/widget/StateIndicator.h"
namespace Ghurund::UI {
// Re-tints the check box / radio image to match the control's interaction
// state. Fix: the original's hovered/focused branch was byte-identical to its
// final else branch, so the two are collapsed into one path; behavior is
// unchanged.
void CheckBoxRadioStyle::onStateChanged(Control& control) const {
    Widget<CheckBoxBinding>& checkBoxRadio = (Widget<CheckBoxBinding>&)control;
    CheckBoxBinding* layout = checkBoxRadio.Layout;
    if (!layout)
        return;
    if (!control.Enabled) {
        layout->Image->Tint = std::unique_ptr<ColorAttr>(ghnew ColorRef(Theme::COLOR_DISABLED_ONBACKGROUND));
    } else if (layout->Selectable->Pressed) {
        if (layout->Selectable->Selected) {
            //imageView->Image->Tint = theme->getColorAccentDark();
            layout->Image->Tint = std::unique_ptr<ColorAttr>(ghnew ColorRef(Theme::COLOR_ACCENT));
        } else {
            layout->Image->Tint = std::unique_ptr<ColorAttr>(ghnew ColorRef(Theme::COLOR_PRIMARY_ONBACKGROUND));
        }
    } else {
        // Hovered, focused and idle states all use the same tint.
        if (layout->Selectable->Selected) {
            layout->Image->Tint = std::unique_ptr<ColorAttr>(ghnew ColorRef(Theme::COLOR_ACCENT));
        } else {
            layout->Image->Tint = std::unique_ptr<ColorAttr>(ghnew ColorRef(Theme::COLOR_SECONDARY_ONBACKGROUND));
        }
    }
}
}
|
shwetasshinde24/Panoply
|
case-studies/tor/src/App/IO/sysctl_ocall.cpp
|
#include <sys/sysctl.h>
#include <TorEnclave_u.h>
// OCALL bridge executed outside the enclave on behalf of TorEnclave: simply
// forwards to the untrusted host's sysctl(2) and returns its result.
int ocall_sysctl (int *name, int nlen, void *oldval, size_t *oldlenp, void *newval, size_t newlen)
{
	return sysctl(name, nlen, oldval, oldlenp, newval, newlen);
}
|
devs-7/bible-projector-python
|
src/widgets/chapter_widget/chapter_verse_widget/__init__.py
|
from PyQt5 import QtGui, QtWidgets
from PyQt5.QtCore import pyqtSignal
from src.models.verse import Verse
from .container import Container
from .verse_number_label import VerseNumberLabel
from .verse_text_label import VerseTextLabel
class ChapterVerseWidget(QtWidgets.QWidget):
    """List-item widget showing one verse of a chapter: its number and text.

    Emits ``clicked(Verse)`` when the widget is released with the mouse.
    """

    verse: Verse
    list_widget_item: QtWidgets.QListWidgetItem
    list_widget: QtWidgets.QListWidget
    __selected: bool

    clicked = pyqtSignal(Verse)

    def __init__(
        self, parent=None, *,
        verse: Verse,
        list_widget_item: QtWidgets.QListWidgetItem,
    ):
        super(ChapterVerseWidget, self).__init__(parent)
        self.verse = verse
        self.list_widget_item = list_widget_item
        self.__selected = False

        # Build the row: verse number on the left, verse text on the right.
        self.container = Container()
        self.verse_text_label = VerseTextLabel()
        self.verse_number_label = VerseNumberLabel()
        self.verse_text_label.setText(verse.text)
        self.verse_number_label.setText(str(verse.verse_number))
        self.container.addWidget(self.verse_number_label)
        self.container.addWidget(self.verse_text_label)
        self.setLayout(self.container)

        # Keep the hosting list item tall enough for this widget.
        list_widget_item.setSizeHint(self.sizeHint())
        self.configure_events()

    def configure_events(self):
        # Route mouse releases to the click handler (event attribute override).
        self.mouseReleaseEvent = self.__on_click

    def __on_click(self, event: QtGui.QMouseEvent):
        self.clicked.emit(self.verse)

    def select(self):
        """Highlight the verse text; no-op when already selected."""
        if self.__selected:
            return
        self.verse_text_label.select()
        self.__selected = True

    def unselect(self):
        """Remove the highlight; no-op when not selected."""
        if not self.__selected:
            return
        self.verse_text_label.unselect()
        self.__selected = False
|
moensun/rpc
|
src/test/java/com/linda/framework/rpc/aio/SocketAcceptHandler.java
|
package com.linda.framework.rpc.aio;
import java.nio.channels.CompletionHandler;
// No-op CompletionHandler for asynchronous socket accepts: both callbacks are
// intentionally empty; users override whichever behavior they need.
public class SocketAcceptHandler<V,A> implements CompletionHandler<V,A>{

  @Override
  public void completed(V result, A attachment) {
    // Intentionally empty.
  }

  @Override
  public void failed(Throwable exc, A attachment) {
    // Intentionally empty.
  }
}
|
Edraak/code-dot-or
|
apps/src/sites/studio/pages/init/loadFish.js
|
<filename>apps/src/sites/studio/pages/init/loadFish.js
import appMain from '@cdo/apps/appMain';
import {singleton as studioApp} from '@cdo/apps/StudioApp';
import Fish from '@cdo/apps/fish/Fish';
import skins from '@cdo/apps/skins';
import levels from '@cdo/apps/fish/levels';
// Entry point for the editorless Fish app: stamps the Fish-specific options
// onto the supplied config and boots through the shared appMain entry point
// with the StudioApp singleton.
export default function loadFish(options) {
  Object.assign(options, {
    skinsModule: skins,
    isEditorless: true,
    skinId: 'fish',
  });
  const fish = new Fish();
  fish.injectStudioApp(studioApp());
  appMain(fish, levels, options);
}
|
Karumi/FlowUpAndroidSDK
|
flowup/src/main/java/io/flowup/collectors/CPUUsageCollector.java
|
/*
* Copyright (C) 2016 <NAME>.
*/
package io.flowup.collectors;
import com.codahale.metrics.CachedGauge;
import com.codahale.metrics.MetricRegistry;
import io.flowup.android.App;
import io.flowup.android.CPU;
import io.flowup.metricnames.MetricNamesGenerator;
import java.util.concurrent.TimeUnit;
// Registers two cached CPU-usage gauges — one sampled while the app is in the
// foreground, one while it is in the background — refreshed at most once per
// sampling interval.
class CPUUsageCollector implements Collector {

  private final MetricNamesGenerator metricNamesGenerator;
  private final long samplingInterval;
  private final TimeUnit timeUnit;
  private final CPU cpu;
  private final App app;

  CPUUsageCollector(MetricNamesGenerator metricNamesGenerator, long samplingInterval,
      TimeUnit timeUnit, CPU cpu, App app) {
    this.metricNamesGenerator = metricNamesGenerator;
    this.samplingInterval = samplingInterval;
    this.timeUnit = timeUnit;
    this.cpu = cpu;
    this.app = app;
  }

  @Override public void initialize(MetricRegistry registry) {
    // One gauge per app state: foreground (false) and background (true).
    registerCachedGauge(registry, false);
    registerCachedGauge(registry, true);
  }

  private void registerCachedGauge(MetricRegistry registry, final boolean isInBackground) {
    registry.register(metricNamesGenerator.getCPUUsageMetricName(isInBackground),
        new CachedGauge<Long>(samplingInterval, timeUnit) {
          @Override protected Long loadValue() {
            // Return null (skip the sample) when the app is not in the state
            // this gauge measures.
            // NOTE(review): "isApplicaitonInForeground" is misspelled; it
            // presumably mirrors the method declared on App — confirm before
            // renaming either side.
            if ((isInBackground && app.isApplicaitonInForeground()) || (!isInBackground
                && app.isApplicationInBackground())) {
              return null;
            }
            return Long.valueOf(cpu.getUsage());
          }
        });
  }
}
|
twothinkinc/node-celery-ts
|
dist/utility.js
|
<filename>dist/utility.js
"use strict";
// TypeScript-emitted async/await down-level helper: drives a generator
// produced from an async function and adapts it to a Promise. Generated code —
// do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createTimerPromise = exports.createTimeoutPromise = exports.filterMapEvent = exports.promisifyEvent = exports.toCamelCase = exports.isUndefined = exports.isNull = exports.isNullOrUndefined = exports.parseBoolean = exports.parseInteger = void 0;
const errors_1 = require("./errors");
// Parse an integer literal with an optional radix prefix (0x hex, 0b binary,
// leading-0 octal, otherwise decimal). Throws ParseError when the input does
// not look like an integer at all.
const parseInteger = (maybeInt) => {
    const radixAndDigits = getRadix(maybeInt);
    if ((0, exports.isNullOrUndefined)(radixAndDigits)) {
        throw new errors_1.ParseError("Celery.Utility.parseInteger: could not "
            + `parse ${maybeInt} as integer`);
    }
    const [radix, toParse] = radixAndDigits;
    return Number.parseInt(toParse, radix);
};
exports.parseInteger = parseInteger;
// Parse the common textual spellings of a boolean (case-insensitive,
// whitespace-tolerant). Throws ParseError for anything unrecognized.
const parseBoolean = (maybeBoolean) => {
    const normalized = maybeBoolean.toLowerCase().trim();
    if (normalized === "true" || normalized === "1"
        || normalized === "on" || normalized === "yes") {
        return true;
    }
    if (normalized === "false" || normalized === "0"
        || normalized === "off" || normalized === "no") {
        return false;
    }
    throw new errors_1.ParseError("Celery.Utility.parseBoolean: could not parse "
        + `${maybeBoolean} as a boolean`);
};
exports.parseBoolean = parseBoolean;
const isNullOrUndefined = (value) => (0, exports.isNull)(value) || (0, exports.isUndefined)(value);
exports.isNullOrUndefined = isNullOrUndefined;
const isNull = (value) => value === null;
exports.isNull = isNull;
const isUndefined = (value) => typeof value === "undefined";
exports.isUndefined = isUndefined;
// Convert a snake_case identifier to camelCase.
// Fix: the original regex lacked the global flag, so only the FIRST underscore
// was converted (e.g. "a_b_c" -> "aB_c"); /g makes the conversion apply to
// every "_x" pair.
const toCamelCase = (toConvert) => toConvert.replace(/_([a-z])/g, (_, match) => match.toUpperCase());
exports.toCamelCase = toCamelCase;
// Resolve with the first emission of `name` (single-shot listener via once).
const promisifyEvent = (emitter, name) => __awaiter(void 0, void 0, void 0, function* () { return new Promise((resolve) => emitter.once(name, resolve)); });
exports.promisifyEvent = promisifyEvent;
// Resolve with the first filterMap(...) result that is neither null nor
// undefined; non-matching emissions are ignored and the listener is removed
// once a match is found.
const filterMapEvent = ({ emitter, filterMap, name }) => __awaiter(void 0, void 0, void 0, function* () {
    return new Promise((resolve) => {
        // Guards against emissions delivered after settling, since the
        // listener stays attached until a match arrives.
        let resolved = false;
        const onEvent = (...values) => {
            if (resolved) {
                return;
            }
            const maybeMapped = filterMap(...values);
            if (!(0, exports.isNullOrUndefined)(maybeMapped)) {
                emitter.removeListener(name, onEvent);
                resolve(maybeMapped);
                resolved = true;
            }
        };
        emitter.addListener(name, onEvent);
    });
});
exports.filterMapEvent = filterMapEvent;
// Race `promise` against a timer; with a null/undefined timeout the promise is
// returned unchanged (no timeout applied).
const createTimeoutPromise = (promise, timeout) => __awaiter(void 0, void 0, void 0, function* () {
    if ((0, exports.isNullOrUndefined)(timeout)) {
        return promise;
    }
    return Promise.race([promise, (0, exports.createTimerPromise)(timeout)]);
});
exports.createTimeoutPromise = createTimeoutPromise;
// Promise that rejects with Error("timed out") after `timeout` milliseconds.
// NOTE(review): the setTimeout handle is never cleared, so the rejection still
// fires even when a race against it has already been won — confirm call sites
// tolerate the late rejection.
const createTimerPromise = (timeout) => __awaiter(void 0, void 0, void 0, function* () {
    return new Promise((_, reject) => setTimeout(() => reject(new Error("timed out")), timeout));
});
exports.createTimerPromise = createTimerPromise;
// Classify the numeric literal `maybeNumber` and return [radix, digits] ready
// for Number.parseInt, or undefined when it is not a valid octal (leading 0),
// hex (0x), binary (0b) or decimal literal.
const getRadix = (maybeNumber) => {
    const REGEX = /^(?:(0[0-7]*)|(?:0x([\da-f]+))|(?:0b([01]+))|([1-9][\d]*))$/;
    const matches = REGEX.exec(maybeNumber.toLowerCase().trim());
    if (matches === null) {
        return undefined;
    }
    // Capture groups in regex order: octal, hex, binary, decimal.
    const GROUPS = [[1, 8], [2, 16], [3, 2], [4, 10]];
    for (const [index, radix] of GROUPS) {
        if (typeof matches[index] !== "undefined") {
            return [radix, matches[index]];
        }
    }
    // Unreachable: a successful match always fills exactly one group.
    return undefined;
};
//# sourceMappingURL=utility.js.map
|
DEFRA/gwa-web
|
server/lib/data/validate-users.js
|
const schema = require('./user-schema')
/**
* Validate a list of users against a [Joi](https://joi.dev/) schema.
*
* @param {Array} users
* @return {object} consisting of two arrays, `valid` and `nonValid`. `valid`
* contains the original value of the user. `nonValid` contains a
* [Joi validate result object](https://joi.dev/api/?v=17.4.0#anyvalidatevalue-options).
*/
module.exports = (users) => {
const nonValid = []
const valid = users.filter(user => {
const result = schema.validate(user)
if (result.error) {
nonValid.push(result)
return false
}
return true
})
return {
nonValid,
valid
}
}
|
Thav/freehub
|
test/functional/reports_controller_test.rb
|
require 'test_helper'
class ReportsControllerTest < ActionController::TestCase
def setup
super
login_as 'greeter'
end
def test_index
get :index, :organization_key => 'sfbk'
assert_response :success
end
def test_visits_report
get :visits, :organization_key => 'sfbk',
:report => { :after => '2006-01-01', :before => '2008-01-01'},
:page => 2
assert_response :success
assert_not_nil assigns(:report)
assert_not_nil assigns(:visits)
assert_equal 103, assigns(:visits).size
assert_equal 20, assigns(:visits).to_a.size
assert_equal 2, assigns(:visits).page
end
def test_visits_report_default
get :visits, :organization_key => 'sfbk'
assert_response :success
assert_not_nil assigns(:report)
assert_not_nil assigns(:visits)
end
def test_visits_report_csv
get :visits, :organization_key => 'sfbk',
:report => { :after => '2006-01-01', :before => '2008-01-01' },
:format => 'csv'
assert_response :success
assert_not_nil assigns(:visits)
assert_equal 103, assigns(:visits).size
output = StringIO.new
output.binmode
assert_nothing_raised { @response.body.call(@response, output) }
lines = output.string.split("\n")
assert_equal assigns(:visits).size + 1, lines.size
assert_equal Visit.csv_header, lines[0]+"\n"
assert_equal "attachment; filename=\"sfbk_visits_2006-01-01_2008-01-01.csv\"", @response.headers['Content-Disposition']
end
def test_services_report
get :services, :organization_key => 'sfbk',
:report => { :end_after => '2006-01-01', :end_before => '2009-01-01',
:for_service_types => ['MEMBERSHIP', 'CLASS'] },
:page => 2
assert_response :success
assert_not_nil assigns(:report)
assert_not_nil assigns(:services)
assert_equal 39, assigns(:services).size
assert_equal 19, assigns(:services).to_a.size
assert_equal 2, assigns(:services).page
assert_select "input[type=checkbox]", 3
assert_select "input[type=checkbox][checked=checked]", 2
end
def test_services_report_default
get :services, :organization_key => 'sfbk'
assert_response :success
assert_not_nil assigns(:report)
assert_not_nil assigns(:services)
end
def test_services_report_csv
get :services, :organization_key => 'sfbk',
:report => { :end_after => '2006-01-01', :end_before => '2009-01-01',
:for_service_types => ['MEMBERSHIP', 'CLASS'] },
:format => 'csv'
assert_response :success
assert_not_nil assigns(:services)
assert_equal 39, assigns(:services).size
output = StringIO.new
output.binmode
assert_nothing_raised { @response.body.call(@response, output) }
lines = output.string.split("\n")
assert_equal assigns(:services).size + 1, lines.size
assert_equal Service.csv_header, lines[0]+"\n"
assert_equal "attachment; filename=\"sfbk_services_2006-01-01_2009-01-01.csv\"", @response.headers['Content-Disposition']
end
def test_people_report
get :people, :organization_key => 'sfbk',
:report => { :after => '2008-01-01', :before => '2008-01-05' }
assert_response :success
assert_not_nil assigns(:report)
assert_not_nil assigns(:people)
assert_equal 5, assigns(:people).size
assert_equal 5, assigns(:people).to_a.size
assert_equal 1, assigns(:people).page
end
def test_people_report_default
get :people, :organization_key => 'sfbk'
assert_response :success
assert_not_nil assigns(:report)
assert_not_nil assigns(:people)
end
def test_people_report_csv
get :people, :organization_key => 'sfbk',
:report => { :after => '2008-01-01', :before => '2008-01-05',
:matching_name => 'mar' },
:format => 'csv'
assert_response :success
assert_not_nil assigns(:people)
assert_equal 2, assigns(:people).size
Time.zone = ENV['TIMEZONE_DEFAULT']
output = StringIO.new
output.binmode
assert_nothing_raised { @response.body.call(@response, output) }
lines = output.string.split("\n")
assert_equal assigns(:people).size + 1, lines.size
assert_equal Person.csv_header, lines[0]+"\n"
assert_equal "attachment; filename=\"sfbk_people_2008-01-01_2008-01-05.csv\"", @response.headers['Content-Disposition']
end
# Summary report over a two-year window: renders and builds both the
# report and its chart.
def test_summary_report
  criteria = { :from => '2006-01-01', :to => '2008-01-01' }
  get :summary, :organization_key => 'sfbk', :criteria => criteria
  assert_response :success
  assert assigns(:report)
  assert assigns(:gchart)
end
end
|
RayDeeA/rbm2014
|
CBIRProjekt/CBIR_Project/src/de/htw/iconn/views/InImageDetectorController.java
|
package de.htw.iconn.views;
import de.htw.iconn.main.AController;
import de.htw.iconn.main.BenchmarkModel;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.TreeMap;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.control.TextArea;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;
import javafx.scene.shape.RectangleBuilder;
/**
 * JavaFX controller for the in-image detector view: loads an image into the
 * ImageView, runs the model's detection, and on mouse-over shows per-class
 * probabilities for the 28x28 patch under the cursor.
 */
public class InImageDetectorController extends AController {
    @FXML
    private ImageView imgv_Image;
    @FXML
    private Label lbl_Recognition;
    @FXML
    private TextArea txt_Probabilities;
    // Backing model; created in initialize().
    private InImageDetectorModel model;
    // Marker rectangle drawn under the cursor; replaced on every mouse move.
    private Rectangle rect;
    @FXML
    private AnchorPane view;

    /**
     * Initializes the controller class.
     * @param url
     * @param rb
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        this.model = new InImageDetectorModel(this);
        this.update();
    }

    /** Loads an image scaled to the ImageView and immediately runs detection. */
    @FXML
    private void btn_loadImageAction(ActionEvent event) {
        Image image = this.model.loadImage((int) imgv_Image.getFitWidth(), (int) imgv_Image.getFitHeight());
        if (!image.isError()) {
            this.imgv_Image.setImage(image);
            this.model.detection();
        } else {
            System.out.println("error");
        }
    }

    /** Re-runs detection on the currently loaded image. */
    @FXML
    private void btn_runDetectionAction(ActionEvent event) {
        this.model.detection();
    }

    /**
     * On mouse move: prints class probabilities for the patch under the
     * cursor (highest first) and moves the red marker rectangle there.
     * Does nothing until an image has been loaded.
     */
    @FXML
    private void imgv_ImageMouseMovedAction(MouseEvent event) {
        if(this.model.getImageData() == null) return;
        // Map is keyed by probability; NOTE(review): two classes with the
        // exact same probability would collide in the TreeMap — confirm
        // this is acceptable for the model's output.
        TreeMap<Double, String> probabilityMap = this.model.getProbabilityMap(event.getX(), event.getY());
        String probabilitiesText = "";
        List<Double> distances = new ArrayList(probabilityMap.keySet());
        // Iterate backwards over the sorted keys so the highest probability
        // is printed first.
        ListIterator<Double> distanceIterator = distances.listIterator(distances.size());
        while(distanceIterator.hasPrevious()) {
            Double distance = distanceIterator.previous();
            probabilitiesText += probabilityMap.get(distance) + " " + (int)( distance * 100 ) + "%" + '\n';
        }
        this.txt_Probabilities.setText(probabilitiesText);
        // Remove the previous marker before drawing the new one.
        if(rect != null) {
            view.getChildren().remove(rect);
        }
        // 28x28 marker; the +30 y-offset presumably compensates for the
        // image's vertical position inside the pane — TODO confirm.
        rect = RectangleBuilder.create()
                .x(event.getX())
                .y(event.getY() + 30)
                .width(28)
                .height(28)
                .stroke(Color.RED)
                .fill(Color.TRANSPARENT)
                .build();
        view.getChildren().add(rect);
    }

    /** @return the root node of this controller's view */
    @Override
    public Node getView() {
        return this.view;
    }

    // No-op: this view has no state to refresh on update().
    @Override
    public void update(){
    }

    /** Forwards the shared benchmark model to this controller's model. */
    public void setBenchmarkModel(BenchmarkModel benchmarkModel) {
        this.model.setBenchmarkModel(benchmarkModel);
    }
}
|
qq573011406/FASTBuild_UnrealEngine
|
Code/Tools/FBuild/FBuildTest/Data/TestCache/LightCache_IncludeUsingMacro2/header1.h
|
// define macro for use by file that includes this header
#define INCLUDE_VIA_MACRO "header2.h"
|
PlayWithSanLei/JavaProjects
|
OnJava8-Examples/housekeeping/SimpleConstructor2.java
|
<reponame>PlayWithSanLei/JavaProjects
// housekeeping/SimpleConstructor2.java
// (c)2021 MindView LLC: see Copyright.txt
// We make no guarantees that this code is fit for any purpose.
// Visit http://OnJava8.com for more book information.
// Constructors can have parameters
// A class whose constructor takes an argument and prints a tag, making
// construction order observable on stdout.
class Rock2 {
    Rock2(int marker) {
        System.out.printf("Rock %d ", marker);
    }
}
// Demonstrates constructors with parameters: builds eight Rock2 objects,
// each printing its index.
public class SimpleConstructor2 {
    public static void main(String[] args) {
        int count = 0;
        while (count < 8) {
            new Rock2(count);
            count++;
        }
    }
}
/* Output:
Rock 0 Rock 1 Rock 2 Rock 3 Rock 4 Rock 5 Rock 6 Rock 7
*/
|
EVATool/evatool-backend
|
src/test/java/com/evatool/domain/repository/DataTest.java
|
<gh_stars>0
package com.evatool.domain.repository;
import com.evatool.application.dto.*;
import com.evatool.application.mapper.*;
import com.evatool.application.service.impl.*;
import com.evatool.common.enums.StakeholderLevel;
import com.evatool.common.enums.StakeholderPriority;
import com.evatool.domain.entity.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.repository.CrudRepository;
import java.lang.reflect.ParameterizedType;
/**
 * Base class for repository/service/mapper round-trip tests of one
 * entity/DTO pair.
 *
 * A concrete subclass fixes the generic parameters (e.g.
 * {@code class AnalysisTest extends DataTest<Analysis, AnalysisDto>}); this
 * class then resolves the matching repository, mapper and service via the
 * if/else dispatch below, and offers persisted/floating DTO factories plus
 * mutation helpers used to verify updates. The *Repository fields and the
 * entity factory methods (getPersistedAnalysis() etc.) are presumably
 * inherited from SuperEntityTest — TODO confirm.
 *
 * @param <S> entity type under test
 * @param <T> DTO type corresponding to {@code S}
 */
@SpringBootTest
public abstract class DataTest<S extends SuperEntity, T extends SuperDto> extends SuperEntityTest {

    /**
     * Resolves {@code T} from the concrete subclass' generic superclass
     * declaration (type-argument index 1; index 0 is the entity type).
     * The cast is unchecked by nature of the reflective lookup.
     */
    public Class<T> getDtoClass() {
        return (Class<T>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[1];
    }

    /** Returns the repository matching the entity class, or throws. */
    public CrudRepository getRepository(){
        var type = getEntityClass();
        if (type == Analysis.class) {
            return analysisRepository;
        } else if (type == Impact.class) {
            return impactRepository;
        } else if (type == Requirement.class) {
            return requirementRepository;
        } else if (type == RequirementDelta.class) {
            return requirementDeltaRepository;
        } else if (type == Stakeholder.class) {
            return stakeholderRepository;
        } else if (type == Value.class) {
            return valueRepository;
        } else if (type == Variant.class) {
            return variantRepository;
        } else if (type == VariantType.class) {
            return variantTypeRepository;
        } else if (type == ValueType.class) {
            return valueTypeRepository;
        } else {
            throw new IllegalArgumentException("No repository found for type " + type.getSimpleName());
        }
    }

    /** Returns the mapper matching the DTO class, or throws. */
    protected SuperMapper getMapper() {
        var type = getDtoClass();
        if (type == AnalysisDto.class) {
            return analysisMapper;
        } else if (type == ImpactDto.class) {
            return impactMapper;
        } else if (type == RequirementDto.class) {
            return requirementMapper;
        } else if (type == RequirementDeltaDto.class) {
            return requirementDeltaMapper;
        } else if (type == StakeholderDto.class) {
            return stakeholderMapper;
        } else if (type == ValueDto.class) {
            return valueMapper;
        } else if (type == VariantDto.class) {
            return variantMapper;
        } else if (type == VariantTypeDto.class) {
            return variantTypeMapper;
        } else if (type == ValueTypeDto.class) {
            return valueTypeMapper;
        } else {
            throw new IllegalArgumentException("No service found for type " + type.getSimpleName());
        }
    }

    // Mapper beans, one per entity/DTO pair, targeted by getMapper().
    @Autowired
    protected AnalysisMapper analysisMapper;
    @Autowired
    protected ImpactMapper impactMapper;
    @Autowired
    protected RequirementMapper requirementMapper;
    @Autowired
    protected RequirementDeltaMapper requirementDeltaMapper;
    @Autowired
    protected StakeholderMapper stakeholderMapper;
    @Autowired
    protected ValueTypeMapper valueTypeMapper;
    @Autowired
    protected ValueMapper valueMapper;
    @Autowired
    protected VariantTypeMapper variantTypeMapper;
    @Autowired
    protected VariantMapper variantMapper;

    /** Returns a DTO of type T backed by an already-persisted entity. */
    public T getPersistedDto() {
        var type = getDtoClass();
        if (type == AnalysisDto.class) {
            return (T) getPersistedAnalysisDto();
        } else if (type == ImpactDto.class) {
            return (T) getPersistedImpactDto();
        } else if (type == RequirementDto.class) {
            return (T) getPersistedRequirementDto();
        } else if (type == RequirementDeltaDto.class) {
            return (T) getPersistedRequirementDeltaDto();
        } else if (type == StakeholderDto.class) {
            return (T) getPersistedStakeholderDto();
        } else if (type == ValueDto.class) {
            return (T) getPersistedValueDto();
        } else if (type == VariantDto.class) {
            return (T) getPersistedVariantDto();
        } else if (type == VariantTypeDto.class) {
            return (T) getPersistedVariantTypeDto();
        } else if (type == ValueTypeDto.class) {
            return (T) getPersistedValueTypeDto();
        } else {
            throw new IllegalArgumentException("No method found for type " + type.getSimpleName());
        }
    }

    /** Returns a DTO of type T for an entity that is NOT yet persisted. */
    public T getFloatingDto() {
        var type = getDtoClass();
        if (type == AnalysisDto.class) {
            return (T) getFloatingAnalysisDto();
        } else if (type == ImpactDto.class) {
            return (T) getFloatingImpactDto();
        } else if (type == RequirementDto.class) {
            return (T) getFloatingRequirementDto();
        } else if (type == RequirementDeltaDto.class) {
            return (T) getFloatingRequirementDeltaDto();
        } else if (type == StakeholderDto.class) {
            return (T) getFloatingStakeholderDto();
        } else if (type == ValueDto.class) {
            return (T) getFloatingValueDto();
        } else if (type == VariantDto.class) {
            return (T) getFloatingVariantDto();
        } else if (type == VariantTypeDto.class) {
            return (T) getFloatingVariantTypeDto();
        } else if (type == ValueTypeDto.class) {
            return (T) getFloatingValueTypeDto();
        } else {
            throw new IllegalArgumentException("No method found for type " + type.getSimpleName());
        }
    }

    /**
     * Mutates writable fields of the entity so update tests can detect the
     * change. (The local is named {@code dto} even though it holds an
     * entity — naming slip, harmless.)
     */
    public void changeEntity(S _entity) {
        var type = getEntityClass();
        if (type == Analysis.class) {
            var dto = (Analysis) _entity;
            dto.setName("updated");
            dto.setDescription("updated");
            dto.setImageUrl("updated");
            dto.setIsTemplate(true);
        } else if (type == Impact.class) {
            var dto = (Impact) _entity;
            dto.setMerit(0.6f);
            dto.setDescription("updated description");
        } else if (type == Requirement.class) {
            var dto = (Requirement) _entity;
            dto.setDescription("updated");
        } else if (type == RequirementDelta.class) {
            var dto = (RequirementDelta) _entity;
            dto.setOverwriteMerit(0.3f);
        } else if (type == Stakeholder.class) {
            var dto = (Stakeholder) _entity;
            dto.setName("updated");
            dto.setPriority(StakeholderPriority.TWO);
            dto.setLevel(StakeholderLevel.ORGANIZATION);
        } else if (type == Value.class) {
            var dto = (Value) _entity;
            dto.setName("updated");
            dto.setDescription("updated");
            dto.setArchived(true);
        } else if (type == Variant.class) {
            var dto = (Variant) _entity;
            dto.setName("updated");
            dto.setDescription("updated");
            dto.setArchived(true);
        } else if (type == VariantType.class) {
            var dto = (VariantType) _entity;
            dto.setName("updated");
            dto.setDescription("updated");
        } else if (type == ValueType.class) {
            var dto = (ValueType) _entity;
            dto.setName("updated");
            dto.setDescription("updated");
        } else {
            throw new IllegalArgumentException("No method found for type " + type.getSimpleName());
        }
    }

    /** DTO counterpart of {@link #changeEntity}: mutates the same fields. */
    public void changeDto(T _dto) {
        var type = getDtoClass();
        if (type == AnalysisDto.class) {
            var dto = (AnalysisDto) _dto;
            dto.setName("updated");
            dto.setDescription("updated");
            dto.setImageUrl("updated");
            dto.setIsTemplate(true);
        } else if (type == ImpactDto.class) {
            var dto = (ImpactDto) _dto;
            dto.setMerit(0.6f);
            dto.setDescription("updated description");
        } else if (type == RequirementDto.class) {
            var dto = (RequirementDto) _dto;
            dto.setDescription("updated");
        } else if (type == RequirementDeltaDto.class) {
            var dto = (RequirementDeltaDto) _dto;
            dto.setOverwriteMerit(0.3f);
        } else if (type == StakeholderDto.class) {
            var dto = (StakeholderDto) _dto;
            dto.setName("updated");
            dto.setPriority(StakeholderPriority.TWO);
            dto.setLevel(StakeholderLevel.ORGANIZATION);
        } else if (type == ValueDto.class) {
            var dto = (ValueDto) _dto;
            dto.setName("updated");
            dto.setDescription("updated");
            dto.setArchived(true);
        } else if (type == VariantDto.class) {
            var dto = (VariantDto) _dto;
            dto.setName("updated");
            dto.setDescription("updated");
            dto.setArchived(true);
        } else if (type == VariantTypeDto.class) {
            var dto = (VariantTypeDto) _dto;
            dto.setName("updated");
            dto.setDescription("updated");
        } else if (type == ValueTypeDto.class) {
            var dto = (ValueTypeDto) _dto;
            dto.setName("updated");
            dto.setDescription("updated");
        } else {
            throw new IllegalArgumentException("No method found for type " + type.getSimpleName());
        }
    }

    /** Returns the CRUD service matching the DTO class, or throws. */
    public CrudServiceImpl getService() {
        var type = getDtoClass();
        if (type == AnalysisDto.class) {
            return analysisService;
        } else if (type == ImpactDto.class) {
            return impactService;
        } else if (type == RequirementDto.class) {
            return requirementService;
        } else if (type == RequirementDeltaDto.class) {
            return requirementDeltaService;
        } else if (type == StakeholderDto.class) {
            return stakeholderService;
        } else if (type == ValueDto.class) {
            return valueService;
        } else if (type == VariantDto.class) {
            return variantService;
        } else if (type == VariantTypeDto.class) {
            return variantTypeService;
        } else if (type == ValueTypeDto.class) {
            return valueTypeService;
        } else {
            throw new IllegalArgumentException("No service found for type " + type.getSimpleName());
        }
    }

    // Service beans, one per DTO type, targeted by getService().
    @Autowired
    private AnalysisServiceImpl analysisService;
    @Autowired
    private ImpactServiceImpl impactService;
    @Autowired
    private RequirementServiceImpl requirementService;
    @Autowired
    private RequirementDeltaServiceImpl requirementDeltaService;
    @Autowired
    private StakeholderServiceImpl stakeholderService;
    @Autowired
    private ValueTypeServiceImpl valueTypeService;
    @Autowired
    private ValueServiceImpl valueService;
    @Autowired
    private VariantTypeServiceImpl variantTypeService;
    @Autowired
    private VariantServiceImpl variantService;

    // Per-type DTO factories: each maps the corresponding floating or
    // persisted entity (from SuperEntityTest) through its mapper.
    protected AnalysisDto getFloatingAnalysisDto() {
        return analysisMapper.toDto(getFloatingAnalysis());
    }
    protected AnalysisDto getPersistedAnalysisDto() {
        return analysisMapper.toDto(getPersistedAnalysis());
    }
    protected ImpactDto getFloatingImpactDto() {
        return impactMapper.toDto(getFloatingImpact());
    }
    protected ImpactDto getPersistedImpactDto() {
        return impactMapper.toDto(getPersistedImpact());
    }
    protected RequirementDto getFloatingRequirementDto() {
        return requirementMapper.toDto(getFloatingRequirement());
    }
    protected RequirementDto getPersistedRequirementDto() {
        return requirementMapper.toDto(getPersistedRequirement());
    }
    protected RequirementDeltaDto getFloatingRequirementDeltaDto() {
        return requirementDeltaMapper.toDto(getFloatingRequirementDelta());
    }
    protected RequirementDeltaDto getPersistedRequirementDeltaDto() {
        return requirementDeltaMapper.toDto(getPersistedRequirementDelta());
    }
    protected StakeholderDto getFloatingStakeholderDto() {
        return stakeholderMapper.toDto(getFloatingStakeholder());
    }
    protected StakeholderDto getPersistedStakeholderDto() {
        return stakeholderMapper.toDto(getPersistedStakeholder());
    }
    protected ValueTypeDto getFloatingValueTypeDto() {
        return valueTypeMapper.toDto(getFloatingValueType());
    }
    protected ValueTypeDto getPersistedValueTypeDto() {
        return valueTypeMapper.toDto(getPersistedValueType());
    }
    protected ValueDto getFloatingValueDto() {
        return valueMapper.toDto(getFloatingValue());
    }
    protected ValueDto getPersistedValueDto() {
        return valueMapper.toDto(getPersistedValue());
    }
    protected VariantTypeDto getFloatingVariantTypeDto() {
        return variantTypeMapper.toDto(getFloatingVariantType());
    }
    protected VariantTypeDto getPersistedVariantTypeDto() {
        return variantTypeMapper.toDto(getPersistedVariantType());
    }
    protected VariantDto getFloatingVariantDto() {
        return variantMapper.toDto(getFloatingVariant());
    }
    protected VariantDto getPersistedVariantDto() {
        return variantMapper.toDto(getPersistedVariant());
    }
}
|
debesheedas/MyMargdarshaka-2
|
backend/models/feedback.model.js
|
//@ts-check
/*
* The feedback collection is used to store all issues submitted by users as feedback
* They will be fetched and available for the admin to resolve them
*/
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
// Schema for one feedback/issue entry submitted by a user.
let Feedback = new Schema({
  // Submitter's phone number (stored as a string; the only required field
  // besides the subject).
  phone: {
    type: String,
    required: true,
  },
  // Short subject line for the issue.
  issueSubject: {
    type: String,
    required: true,
  },
  // Category of the issue; restricted to the enum values below.
  issueType: {
    type: String,
    enum: ["Report Abuse", "Platform Issue", "Question", "Other"],
  },
  // Free-text description (optional).
  issueBody: {
    type: String,
  },
  // Submitter's username (optional).
  username: {
    type: String,
  },
  // Admin/user the issue is assigned to (optional).
  assignedTo: {
    type: String,
  },
  // Resolution status code; numeric — the meaning of each value is defined
  // by the application layer, not here — TODO confirm against callers.
  status: {
    type: Number,
  },
  // Submission time; note it is stored as a String, not a Date.
  timestamp: {
    type: String,
  },
});
module.exports = mongoose.model("Feedback", Feedback);
|
RuiDTLima/diffuzz
|
evaluation/themis_tourplanner_unsafe/src/main/java/com/graphhopper/storage/LockFactory.java
|
package com.graphhopper.storage;
import com.graphhopper.storage.Lock;
import java.io.File;
/**
 * Factory for locks guarding a storage directory.
 * (Parameter names var1/var2 look decompiler-generated; their meanings
 * below are inferred — TODO confirm against implementations.)
 */
public interface LockFactory {
    /** Sets the directory in which locks are managed. */
    void setLockDir(File var1);

    /** Creates a lock with the given name; the boolean flag's semantics are
     * implementation-defined and not visible here — TODO confirm. */
    Lock create(String var1, boolean var2);

    /** Forcibly removes the named lock; boolean flag as in {@link #create}. */
    void forceRemove(String var1, boolean var2);
}
|
Charismara/BlutmondRPG
|
src/main/java/de/blutmondgilde/blutmondrpg/interfaces/IClassWeapon.java
|
<gh_stars>1-10
package de.blutmondgilde.blutmondrpg.interfaces;
import de.blutmondgilde.blutmondrpg.enums.ItemRarities;
import de.blutmondgilde.blutmondrpg.enums.WeaponAttributeTypes;
import de.blutmondgilde.blutmondrpg.enums.WeaponTypes;
import java.util.Map;
/**
 * Contract for a class-specific weapon definition: rarity, weapon type,
 * per-attribute modifiers, base strength and item level.
 */
public interface IClassWeapon {
    /** @return the item's rarity tier */
    ItemRarities getRarity();

    /** @return the weapon category */
    WeaponTypes getWeaponType();

    /** @return map of attribute type to its modifier value */
    Map<WeaponAttributeTypes, Float> getAttributeTypes();

    /** @return the weapon's base strength value */
    float getDefaultStrength();

    /** @return the weapon's item level */
    int getItemLevel();
}
|
wujia28762/Tmate
|
App/src/main/java/com/honyum/elevatorMan/utils/SQLiteUtils.java
|
package com.honyum.elevatorMan.utils;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
/**
 * Helper routines for working with raw SQLite database files.
 *
 * Created by chang on 2016/3/4.
 */
public class SQLiteUtils {
    private static final String TAG = "SQLiteUtils";

    /**
     * Opens (creating if necessary) the private database file at
     * {@code dbpath}.
     *
     * @param context Android context used to resolve the database path
     * @param dbpath  database file name
     * @return the opened database handle
     */
    public static SQLiteDatabase opendb(Context context, String dbpath) {
        return context.openOrCreateDatabase(dbpath, Context.MODE_PRIVATE, null);
    }

    /**
     * Checks whether a table with the given name exists in the database.
     *
     * @param db    an open database handle
     * @param table exact table name to look for
     * @return true if the table exists; false if it does not, if the
     *         database is closed, or if the query fails
     */
    public static boolean isTableExist(SQLiteDatabase db, String table) {
        if (!db.isOpen()) {
            Log.e(TAG, "db is not open");
            return false;
        }
        boolean result = false;
        String sql = "select name from sqlite_master where type='table'";
        Cursor cursor = null;
        try {
            cursor = db.rawQuery(sql, null);
            while (cursor.moveToNext()) {
                // table.equals(...) also guards against a null column value.
                if (table.equals(cursor.getString(0))) {
                    result = true;
                    break;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: rawQuery may throw before `cursor` is assigned; the
            // original unconditionally called cursor.close() here and would
            // mask the real error with a NullPointerException.
            if (cursor != null) {
                cursor.close();
            }
        }
        return result;
    }
}
|
loye168/tddl5
|
tddl-repo-hbase/src/main/java/com/taobao/ustore/repo/hbase/RowCoderSample3.java
|
package com.taobao.ustore.repo.hbase;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Sample row coder mapping between a column-value map and an HBase row key
 * laid out as: HOST \x04 GMT_CREATE(yyyy-MM-dd) \x04 TYPE \x04 URL.
 * Missing string fields are encoded as a single NUL ('\0') placeholder.
 */
public class RowCoderSample3 extends AbstractRowCoder {

    /** Field separator used inside the row key (ASCII 0x04). */
    private static final char SEPARATOR = 4;

    public RowCoderSample3(TablePhysicalSchema schema){
        super(schema);
    }

    /**
     * Decodes a row key back into its column values.
     *
     * @param rowKey raw row key bytes (layout in the class comment)
     * @return map with keys HOST, URL, TYPE, GMT_CREATE; GMT_CREATE is null
     *         when the embedded date field cannot be parsed
     */
    @Override
    public Map<String, Object> decodeRowKey(byte[] rowKey) {
        String[] fields = new String(rowKey).split("\4");
        Date gmtCreate = null;
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        try {
            gmtCreate = sdf.parse(fields[1]);
        } catch (ParseException e) {
            // Malformed or placeholder date: fall through with null,
            // mirroring encodeRowKey's handling of a missing GMT_CREATE.
        }
        Map<String, Object> rowKeyColumnValues = new HashMap<String, Object>();
        rowKeyColumnValues.put("HOST", fields[0]);
        rowKeyColumnValues.put("URL", fields[3]);
        rowKeyColumnValues.put("TYPE", fields[2]);
        rowKeyColumnValues.put("GMT_CREATE", gmtCreate);
        return rowKeyColumnValues;
    }

    /**
     * Encodes column values into a row key (layout in the class comment).
     *
     * @param rowKeyColumnValues map that may contain HOST, GMT_CREATE,
     *                           TYPE and URL; absent values become '\0'
     * @return encoded row key bytes
     */
    @Override
    public byte[] encodeRowKey(Map<String, Object> rowKeyColumnValues) {
        StringBuilder rowKeyStr = new StringBuilder();
        String host = (String) rowKeyColumnValues.get("HOST");
        Date gmt_create = (Date) rowKeyColumnValues.get("GMT_CREATE");
        String type = (String) rowKeyColumnValues.get("TYPE");
        String url = (String) rowKeyColumnValues.get("URL");
        if (host != null) rowKeyStr.append(host);
        else rowKeyStr.append('\0');
        rowKeyStr.append(SEPARATOR);
        if (gmt_create != null) {
            // BUG FIX: the original called sdf.format(gmt_create) BEFORE the
            // null check, throwing NullPointerException whenever GMT_CREATE
            // was absent even though the else-branch intended to handle it.
            rowKeyStr.append(new SimpleDateFormat("yyyy-MM-dd").format(gmt_create));
        } else {
            rowKeyStr.append('\0');
        }
        rowKeyStr.append(SEPARATOR);
        if (type != null) rowKeyStr.append(type);
        else rowKeyStr.append('\0');
        rowKeyStr.append(SEPARATOR);
        if (url != null) rowKeyStr.append(url);
        else rowKeyStr.append('\0');
        return rowKeyStr.toString().getBytes();
    }
}
|
HeadassHouse/loremaster-backend
|
src/resolvers/utils/__tests__/hash.test.js
|
const bcrypt = require('bcrypt');
const { hash } = require('..');
jest.mock('bcrypt');
// Unit tests for the hash helpers. bcrypt is mocked (jest.mock above), so
// no real hashing happens — the stubs make results deterministic.
describe('hashing functions', () => {
  beforeEach(() => {
    // Stub bcrypt so compare always matches and hash is a fixed value.
    bcrypt.compare.mockReturnValue(true);
    bcrypt.hash.mockReturnValue('abcdef');
  });
  it('should return a hashed value', async () => {
    const hashedData = await hash.encrypt('hi');
    expect(hashedData).toEqual('abcdef');
  });
  it('should return a boolean value', async () => {
    const hashedData = await hash.compare('abcdef', 'hi');
    expect(hashedData).toBeTruthy();
  });
});
|
Minecraftian14/Webicity
|
src/main/java/everyos.browser.javadom/everyos/browser/javadom/intf/CharacterData.java
|
<reponame>Minecraftian14/Webicity<filename>src/main/java/everyos.browser.javadom/everyos/browser/javadom/intf/CharacterData.java
package everyos.browser.javadom.intf;
/**
 * DOM CharacterData node: a Node carrying a mutable string payload.
 */
public interface CharacterData extends Node {
    /** Appends the given string to this node's data. */
    void appendData(String data);

    /** Convenience overload: appends a single character. */
    default void appendData(char data) {
        appendData(String.valueOf(data));
    }

    /** @return this node's current character data */
    String getData();
}
|
Antolin1/TCRMG-GNN
|
java/rdsSimplified/src-gen/rdsSimplified/impl/RdsSimplifiedPackageImpl.java
|
/**
*/
package rdsSimplified.impl;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.impl.EPackageImpl;
import rdsSimplified.Column;
import rdsSimplified.Database;
import rdsSimplified.Element;
import rdsSimplified.Index;
import rdsSimplified.IndexColumn;
import rdsSimplified.RdsSimplifiedFactory;
import rdsSimplified.RdsSimplifiedPackage;
import rdsSimplified.Reference;
import rdsSimplified.Table;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Package</b>: creates, registers and
 * wires the EClass/EReference meta-objects for the rdsSimplified metamodel
 * (Database, Element, Column, Table, Reference, Index, IndexColumn).
 * This file is EMF-generated; the @generated tags below mark members the
 * code generator owns.
 * <!-- end-user-doc -->
 * @generated
 */
public class RdsSimplifiedPackageImpl extends EPackageImpl implements RdsSimplifiedPackage {
    /** Meta-class for Database. @generated */
    private EClass databaseEClass = null;

    /** Meta-class for Element. @generated */
    private EClass elementEClass = null;

    /** Meta-class for Column. @generated */
    private EClass columnEClass = null;

    /** Meta-class for Table. @generated */
    private EClass tableEClass = null;

    /** Meta-class for Reference. @generated */
    private EClass referenceEClass = null;

    /** Meta-class for Index. @generated */
    private EClass indexEClass = null;

    /** Meta-class for IndexColumn. @generated */
    private EClass indexColumnEClass = null;

    /**
     * Creates an instance of the model <b>Package</b>, registered with
     * {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package
     * package URI value.
     * <p>Note: the correct way to create the package is via the static
     * factory method {@link #init init()}, which also performs
     * initialization of the package, or returns the registered package,
     * if one already exists.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see org.eclipse.emf.ecore.EPackage.Registry
     * @see rdsSimplified.RdsSimplifiedPackage#eNS_URI
     * @see #init()
     * @generated
     */
    private RdsSimplifiedPackageImpl() {
        super(eNS_URI, RdsSimplifiedFactory.eINSTANCE);
    }

    /** Guards {@link #init()} against repeated initialization. @generated */
    private static boolean isInited = false;

    /**
     * Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
     *
     * <p>This method is used to initialize {@link RdsSimplifiedPackage#eINSTANCE} when that field is accessed.
     * Clients should not invoke it directly. Instead, they should simply access that field to obtain the package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #eNS_URI
     * @see #createPackageContents()
     * @see #initializePackageContents()
     * @generated
     */
    public static RdsSimplifiedPackage init() {
        if (isInited)
            return (RdsSimplifiedPackage) EPackage.Registry.INSTANCE.getEPackage(RdsSimplifiedPackage.eNS_URI);

        // Obtain or create and register package
        Object registeredRdsSimplifiedPackage = EPackage.Registry.INSTANCE.get(eNS_URI);
        RdsSimplifiedPackageImpl theRdsSimplifiedPackage = registeredRdsSimplifiedPackage instanceof RdsSimplifiedPackageImpl
                ? (RdsSimplifiedPackageImpl) registeredRdsSimplifiedPackage
                : new RdsSimplifiedPackageImpl();

        isInited = true;

        // Create package meta-data objects
        theRdsSimplifiedPackage.createPackageContents();

        // Initialize created meta-data
        theRdsSimplifiedPackage.initializePackageContents();

        // Mark meta-data to indicate it can't be changed
        theRdsSimplifiedPackage.freeze();

        // Update the registry and return the package
        EPackage.Registry.INSTANCE.put(RdsSimplifiedPackage.eNS_URI, theRdsSimplifiedPackage);
        return theRdsSimplifiedPackage;
    }

    /** @return the Database meta-class. @generated */
    public EClass getDatabase() {
        return databaseEClass;
    }

    /** @return the Database.elements reference. @generated */
    public EReference getDatabase_Elements() {
        return (EReference) databaseEClass.getEStructuralFeatures().get(0);
    }

    /** @return the Element meta-class. @generated */
    public EClass getElement() {
        return elementEClass;
    }

    /** @return the Column meta-class. @generated */
    public EClass getColumn() {
        return columnEClass;
    }

    /** @return the Column.foreignReferences reference. @generated */
    public EReference getColumn_ForeignReferences() {
        return (EReference) columnEClass.getEStructuralFeatures().get(0);
    }

    /** @return the Column.primaryReferences reference. @generated */
    public EReference getColumn_PrimaryReferences() {
        return (EReference) columnEClass.getEStructuralFeatures().get(1);
    }

    /** @return the Table meta-class. @generated */
    public EClass getTable() {
        return tableEClass;
    }

    /** @return the Table.columns reference. @generated */
    public EReference getTable_Columns() {
        return (EReference) tableEClass.getEStructuralFeatures().get(0);
    }

    /** @return the Table.indexes reference. @generated */
    public EReference getTable_Indexes() {
        return (EReference) tableEClass.getEStructuralFeatures().get(1);
    }

    /** @return the Reference meta-class. @generated */
    public EClass getReference() {
        return referenceEClass;
    }

    /** @return the Reference.foreignKeyColumns reference. @generated */
    public EReference getReference_ForeignKeyColumns() {
        return (EReference) referenceEClass.getEStructuralFeatures().get(0);
    }

    /** @return the Reference.primaryKeyColumns reference. @generated */
    public EReference getReference_PrimaryKeyColumns() {
        return (EReference) referenceEClass.getEStructuralFeatures().get(1);
    }

    /** @return the Index meta-class. @generated */
    public EClass getIndex() {
        return indexEClass;
    }

    /** @return the Index.indexColumns reference. @generated */
    public EReference getIndex_IndexColumns() {
        return (EReference) indexEClass.getEStructuralFeatures().get(0);
    }

    /** @return the IndexColumn meta-class. @generated */
    public EClass getIndexColumn() {
        return indexColumnEClass;
    }

    /** @return the IndexColumn.column reference. @generated */
    public EReference getIndexColumn_Column() {
        return (EReference) indexColumnEClass.getEStructuralFeatures().get(0);
    }

    /** @return the factory that creates model instances. @generated */
    public RdsSimplifiedFactory getRdsSimplifiedFactory() {
        return (RdsSimplifiedFactory) getEFactoryInstance();
    }

    /** Guards {@link #createPackageContents()}. @generated */
    private boolean isCreated = false;

    /**
     * Creates the meta-model objects for the package. This method is
     * guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void createPackageContents() {
        if (isCreated)
            return;
        isCreated = true;

        // Create classes and their features
        databaseEClass = createEClass(DATABASE);
        createEReference(databaseEClass, DATABASE__ELEMENTS);

        elementEClass = createEClass(ELEMENT);

        columnEClass = createEClass(COLUMN);
        createEReference(columnEClass, COLUMN__FOREIGN_REFERENCES);
        createEReference(columnEClass, COLUMN__PRIMARY_REFERENCES);

        tableEClass = createEClass(TABLE);
        createEReference(tableEClass, TABLE__COLUMNS);
        createEReference(tableEClass, TABLE__INDEXES);

        referenceEClass = createEClass(REFERENCE);
        createEReference(referenceEClass, REFERENCE__FOREIGN_KEY_COLUMNS);
        createEReference(referenceEClass, REFERENCE__PRIMARY_KEY_COLUMNS);

        indexEClass = createEClass(INDEX);
        createEReference(indexEClass, INDEX__INDEX_COLUMNS);

        indexColumnEClass = createEClass(INDEX_COLUMN);
        createEReference(indexColumnEClass, INDEX_COLUMN__COLUMN);
    }

    /** Guards {@link #initializePackageContents()}. @generated */
    private boolean isInitialized = false;

    /**
     * Complete the initialization of the package and its meta-model. This
     * method is guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void initializePackageContents() {
        if (isInitialized)
            return;
        isInitialized = true;

        // Initialize package
        setName(eNAME);
        setNsPrefix(eNS_PREFIX);
        setNsURI(eNS_URI);

        // Create type parameters

        // Set bounds for type parameters

        // Add supertypes to classes
        tableEClass.getESuperTypes().add(this.getElement());
        referenceEClass.getESuperTypes().add(this.getElement());

        // Initialize classes, features, and operations; add parameters
        initEClass(databaseEClass, Database.class, "Database", !IS_ABSTRACT, !IS_INTERFACE,
                IS_GENERATED_INSTANCE_CLASS);
        initEReference(getDatabase_Elements(), this.getElement(), null, "elements", null, 0, -1, Database.class,
                !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE,
                IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        initEClass(elementEClass, Element.class, "Element", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);

        initEClass(columnEClass, Column.class, "Column", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEReference(getColumn_ForeignReferences(), this.getReference(), this.getReference_ForeignKeyColumns(),
                "foreignReferences", null, 0, -1, Column.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE,
                !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEReference(getColumn_PrimaryReferences(), this.getReference(), this.getReference_PrimaryKeyColumns(),
                "primaryReferences", null, 0, -1, Column.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE,
                !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        initEClass(tableEClass, Table.class, "Table", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEReference(getTable_Columns(), this.getColumn(), null, "columns", null, 0, -1, Table.class, !IS_TRANSIENT,
                !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED,
                IS_ORDERED);
        initEReference(getTable_Indexes(), this.getIndex(), null, "indexes", null, 0, -1, Table.class, !IS_TRANSIENT,
                !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED,
                IS_ORDERED);

        initEClass(referenceEClass, Reference.class, "Reference", !IS_ABSTRACT, !IS_INTERFACE,
                IS_GENERATED_INSTANCE_CLASS);
        initEReference(getReference_ForeignKeyColumns(), this.getColumn(), this.getColumn_ForeignReferences(),
                "foreignKeyColumns", null, 1, 1, Reference.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE,
                !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEReference(getReference_PrimaryKeyColumns(), this.getColumn(), this.getColumn_PrimaryReferences(),
                "primaryKeyColumns", null, 1, 1, Reference.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE,
                !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        initEClass(indexEClass, Index.class, "Index", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEReference(getIndex_IndexColumns(), this.getIndexColumn(), null, "indexColumns", null, 0, -1, Index.class,
                !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE,
                IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        initEClass(indexColumnEClass, IndexColumn.class, "IndexColumn", !IS_ABSTRACT, !IS_INTERFACE,
                IS_GENERATED_INSTANCE_CLASS);
        initEReference(getIndexColumn_Column(), this.getColumn(), null, "column", null, 0, 1, IndexColumn.class,
                !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE,
                IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        // Create resource
        createResource(eNS_URI);
    }
} //RdsSimplifiedPackageImpl
|
DearbhailRicePA/gateway-web
|
test/Loading.test.js
|
<gh_stars>1-10
import React from 'react';
import Loading from '../src/pages/commonComponents/Loading';
// Smoke test: the Loading component renders without throwing.
// NOTE(review): `shallow` is not imported in this file — presumably exposed
// as a global by the jest/enzyme setup; confirm, otherwise this test fails
// with a ReferenceError rather than testing rendering.
describe('<Loading /> rendering', () => {
  it('renders without crashing', () => {
    const wrapper = shallow(<Loading />);
  });
});
|
gputtley/cmssw
|
DataFormats/ParticleFlowReco/interface/ConvBremSeed.h
|
#ifndef ConvBremSeed_h
#define ConvBremSeed_h 1
/** \class reco::ConvBremSeed
 *
 * ConvBremSeed is a seed object constructed from a supercluster and 2 PixelRecHits
 *
 * \author <NAME>
 *
 * \version 1st Version Oct 6, 2008
 *
 ************************************************************/
#include "DataFormats/ParticleFlowReco/interface/GsfPFRecTrackFwd.h"
#include "DataFormats/TrajectorySeed/interface/TrajectorySeed.h"
#include "DataFormats/TrackingRecHit/interface/TrackingRecHit.h"
#include "DataFormats/Common/interface/Ref.h"
#include <vector>
namespace reco {
  class ConvBremSeed : public TrajectorySeed {
  public:
    typedef edm::OwnVector<TrackingRecHit> recHitContainer;
    /// Default constructor (required for ROOT/EDM persistency).
    ConvBremSeed() {}
    ~ConvBremSeed() override {}
    /// Constructor from a TrajectorySeed plus a reference to the
    /// GsfPFRecTrack the seed was built from.
    ConvBremSeed(const TrajectorySeed& seed, edm::Ref<GsfPFRecTrackCollection>& pfgsf)
        : TrajectorySeed(seed), pfGsf_(pfgsf) {}
    /// Reference to the parent GsfPFRecTrack.
    GsfPFRecTrackRef GsfPFTrack() const { return pfGsf_; }
    /// Polymorphic copy with covariant return, overriding TrajectorySeed::clone().
    ConvBremSeed* clone() const override { return new ConvBremSeed(*this); }
  private:
    //! Reference to the GsfPFRecTrack this seed belongs to.
    //! NOTE(review): the old comment called this a super-cluster pointer,
    //! but the member type is GsfPFRecTrackRef.
    GsfPFRecTrackRef pfGsf_;
  };
  // Class ConvBremSeed
} // namespace reco
#endif
|
cas-bigdatalab/vdb-4.0
|
src/main/java/vdb/mydb/jsp/RequestHandler.java
|
<reponame>cas-bigdatalab/vdb-4.0
package vdb.mydb.jsp;
import java.io.IOException;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * One step in the JSP request-handling pipeline (invoked from a servlet filter).
 */
public interface RequestHandler
{
/**
 * Handles a single HTTP request.
 *
 * @return {@code true} when this handler fully handled the request and the
 *         remaining handlers / filter chain should NOT be invoked;
 *         {@code false} to pass control to the next handler.
 */
boolean handle(HttpServletRequest request, HttpServletResponse response,
FilterChain chain) throws ServletException, IOException;
}
|
cuebook/turnilo
|
build/common/utils/plywood/duration.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var chronoshift_1 = require("chronoshift");
/** True when `input` parses as a chronoshift Duration, false otherwise. */
function isValidDuration(input) {
    try {
        chronoshift_1.Duration.fromJS(input);
    }
    catch (_a) {
        return false;
    }
    return true;
}
exports.isValidDuration = isValidDuration;
/** True when `input` parses as a chronoshift Duration that can be floored. */
function isFloorableDuration(input) {
    var floorable = false;
    try {
        floorable = chronoshift_1.Duration.fromJS(input).isFloorable();
    }
    catch (_a) { }
    return floorable;
}
exports.isFloorableDuration = isFloorableDuration;
//# sourceMappingURL=duration.js.map
|
fuzzdota/chromeapp-native-messaging-sample
|
app/bower_components/neon-animation/demo/grid/index.html.0.js
|
<reponame>fuzzdota/chromeapp-native-messaging-sample
// Wire up the neon-animation grid demo on the auto-binding template.
var scope = document.querySelector('template[is="dom-bind"]');
// Tile click: copy the clicked tile's color onto the fullsize card, then
// switch the animated pages to the detail view (index 1).
scope._onTileClick = function(event) {
  this.$['fullsize-card'].color = event.detail.data.color;
  this.$.pages.selected = 1;
};
// Fullsize-card click: return to the grid view (index 0).
scope._onFullsizeClick = function(event) {
  this.$.pages.selected = 0;
};
|
alexras/boomslang
|
BrokenAxisPlot.py
|
import matplotlib.axes
import matplotlib.transforms
import matplotlib.ticker as mticker
from boomslang import Plot
import numpy as np
class BrokenAxesPiece(matplotlib.axes.Axes):
    """One half of a broken-axis pair.

    Behaves like a normal ``Axes`` except that its *active* position is run
    through ``xform``, which squeezes the axes into the top or bottom part
    of the original subplot slot.
    """

    def __init__(self, fig, rect, axisbg=None, frameon=True,
                 sharex=None, sharey=None, label='', xscale=None,
                 yscale=None,
                 xform=None,
                 **kwargs):
        """Create an Axes piece.

        xform: callable taking ``(l, b, w, h)`` and returning the transformed
            4-tuple; identity when omitted.  All other arguments are passed
            straight to ``matplotlib.axes.Axes``.
        """
        if xform is None:
            # BUG FIX: the old default was `lambda x: x`, which takes a
            # single argument, but set_position() below calls
            # self.xform(l, b, w, h) with four -- the identity default must
            # accept and return all four values.
            self.xform = lambda l, b, w, h: (l, b, w, h)
        else:
            self.xform = xform
        super(BrokenAxesPiece, self).__init__(fig, rect, axisbg, frameon,
            sharex, sharey, label, xscale, yscale, **kwargs)

    def set_position(self, pos, which='both'):
        # 'original' requests are forwarded as 'both'; 'active' requests are
        # squeezed through xform first.
        if which == 'both' or which == 'original':
            super(BrokenAxesPiece, self).set_position(pos, 'both')
        elif which == 'active':
            l, b, w, h = pos.get_points().flatten().tolist()
            l, b, w, h = self.xform(l, b, w, h)
            # NOTE(review): get_points() yields [[x0, y0], [x1, y1]], so the
            # names (w, h) actually hold the upper-right corner -- consistent
            # with the Bbox corner form used below, but misleadingly named.
            new_pos = matplotlib.transforms.Bbox([[l, b], [w, h]])
            super(BrokenAxesPiece, self).set_position(new_pos, 'active')
class BrokenAxisPlot(Plot):
    """
    Represents a single broken-axis plot. Not embeddable into nested
    PlotLayouts at the moment.
    """
    def __init__(self,
                 break_points,
                 break_line_size = 0.015,
                 break_hspace = 0.05,
                 break_ratio = 0.5):
        super(BrokenAxisPlot,self).__init__()
        self.break_points = break_points
        """
        A 2-tuple that defines the start and end points of the gap on the y-axis
        """
        self.break_line_size = break_line_size
        """
        The size of the broken axis lines
        """
        self.break_hspace = break_hspace
        """
        The amount of horizontal space to put between the break points
        """
        self.break_ratio = break_ratio
        """
        Defines how to divvy up space between the two broken parts of the plot
        """
        # Tight layout is not supported: two stacked axes share one subplot
        # slot, which tight layout does not understand.
        self.tight = False
        self.allows_tight = False
    def subplot(self, fig, row, column, position, projection):
        """Draw this plot into ``fig`` as two stacked axes separated by a gap.

        The plot is first rendered on a throwaway axes to learn the natural
        y-tick count, then redrawn on two ``BrokenAxesPiece`` axes (bottom
        and top halves) whose y-limits exclude the
        ``(break_points[0], break_points[1])`` gap.  Returns the legend
        handles from the final draw.
        """
        # Step 0, plot in normal space to build up base fig and stats
        orig_ax = fig.add_subplot(row, column, position)
        # Save legend and set it to None
        orig_legend = self.legend
        self.legend = None
        # Throwaway draw: only used to learn how many y ticks a normal
        # rendering would use; the axes is cleared again just below.
        handles = self.drawPlot(fig, orig_ax)
        nticks = len(orig_ax.get_yticks())
        # Strip the placeholder axes down; it remains in the figure only to
        # reserve the subplot slot and to carry the shared y label.
        if True:
            for side in ['top','bottom','left','right']:
                orig_ax.spines[side].set_visible(False)
        orig_ax.grid(False)
        orig_ax.cla()
        orig_ax.xaxis.set_visible(False)
        orig_ax.set_yticks([0])
        orig_ax.set_yticklabels([" "])
        hs = self.break_hspace * 1
        # Position transforms for the two pieces: top_xform raises the bottom
        # edge, bot_xform lowers the top edge, leaving `hs` of vertical gap.
        # (l, b) is the lower-left and (w, h) the upper-right corner here.
        def top_xform(l,b,w,h):
            return (l,b + (h-b+hs) * self.break_ratio,w,h)
        def bot_xform(l,b,w,h):
            return (l,b,w,h-(h-b+hs) * (1-self.break_ratio))
        # Hork
        # NOTE(review): digs into private matplotlib internals to recover the
        # placeholder's figure-space rectangle -- brittle across versions.
        r = orig_ax.transAxes._boxout._bbox
        # Bottom piece, drawn with the legend suppressed.
        ax2 = BrokenAxesPiece(fig, r, sharex=orig_ax, xform=bot_xform)
        fig.add_axes(ax2)
        handles = self.drawPlot(fig, ax2)
        # Re-enable the legend
        self.legend = orig_legend
        # Top piece; this draw carries the legend.
        ax = BrokenAxesPiece(fig, r, sharex=orig_ax, xform=top_xform)
        fig.add_axes(ax)
        handles = self.drawPlot(fig, ax)
        # Set Limits
        # Bottom shows [data bottom, gap start]; top shows [gap end, data top].
        (y_bot, y_top) = ax.get_ylim()
        ax2.set_ylim(y_bot, self.break_points[0])
        ax.set_ylim(self.break_points[1], y_top)
        # Re-tick things
        # Split the original tick budget between the pieces in proportion to
        # break_ratio, with a floor of 2 ticks each.
        # NOTE(review): these values are floats; newer matplotlib
        # LinearLocator requires an int numticks -- confirm against the
        # pinned matplotlib version.
        numticks2 = (self.break_ratio * nticks * (1 / (1+hs))) - 1
        numticks = ((1 - self.break_ratio) * nticks * (1/(1+hs)))
        numticks2 = max(numticks2, 2)
        numticks = max(numticks, 2)
        ax2.yaxis.set_major_locator(mticker.LinearLocator(numticks=numticks2))
        ax.yaxis.set_major_locator(mticker.LinearLocator(numticks=numticks))
        ax2.set_xticks(orig_ax.get_xticks())
        ax2.set_xticklabels([t.get_text() for t in orig_ax.get_xticklabels()])
        # Hide the spines that face the gap; keep x ticks on the outer edges
        # only (top labels suppressed on the upper piece).
        ax.spines['bottom'].set_visible(False)
        ax2.spines['top'].set_visible(False)
        ax.xaxis.tick_top()
        ax.tick_params(labeltop='off')
        ax2.xaxis.tick_bottom()
        # The placeholder axes carries the single shared y label.
        orig_ax.set_ylabel(ax.get_ylabel())
        ax.set_ylabel("")
        ax2.set_ylabel("")
        ax.set_xlabel("")
        # Draw the little diagonal "break" marks at both ends of the gap, in
        # each piece's axes coordinates (clip_on=False so they may poke
        # outside the axes box).  Slopes are rescaled by each piece's height
        # so the marks look identical on both pieces.
        kwargs = dict(transform=ax.transAxes, color='k', clip_on=False)
        ax.plot((-self.break_line_size,
                 self.break_line_size),
                (-self.break_line_size/(1-self.break_ratio),
                 self.break_line_size/(1-self.break_ratio)),
                **kwargs) # top-left
        ax.plot((1-self.break_line_size,
                 1+self.break_line_size),
                (-self.break_line_size/(1-self.break_ratio),
                 self.break_line_size/(1-self.break_ratio)),
                **kwargs) # top-right
        kwargs = dict(transform=ax2.transAxes, color='k', clip_on=False)
        ax2.plot((-self.break_line_size,
                  self.break_line_size),
                 (1-self.break_line_size/(self.break_ratio),
                  1+self.break_line_size/(self.break_ratio)),
                 **kwargs) # bottom-left
        ax2.plot((1-self.break_line_size,
                  1+self.break_line_size),
                 (1-self.break_line_size/(self.break_ratio),
                  1+self.break_line_size/(self.break_ratio)),
                 **kwargs) # bottom-right
        ax2.set_xlim(*ax.get_xlim())
        return handles
|
user1095108/generic
|
obsolete/arraydelegate.hpp
|
#ifndef ARRAYDELEGATE_HPP
# define ARRAYDELEGATE_HPP
# pragma once
#include <cassert>
#include <cstddef>
#include <new>
#include <type_traits>
#include <utility>
namespace generic
{

template <typename T> class arraydelegate;

// Type-erasing delegate with inline storage: bound callables are placed
// directly into store_ (no heap allocation) and dispatched through a stub
// function pointer.  Supports free functions, member functions (via
// object/member pairs) and arbitrary functors up to max_store_size bytes.
template<class R, class ...A>
class arraydelegate<R (A...)>
{
  // Size of the inline functor buffer.
  static constexpr auto max_store_size = 10 * sizeof(::std::size_t);

  using stub_ptr_type = R (*)(void*, A&&...);

  arraydelegate(void* const o, stub_ptr_type const m) noexcept :
    object_ptr_(o),
    stub_ptr_(m)
  {
  }

public:
  arraydelegate() = default;

  arraydelegate(arraydelegate const& other) { *this = other; }

  arraydelegate(arraydelegate&& other) { *this = ::std::move(other); }

  arraydelegate(::std::nullptr_t const) noexcept : arraydelegate() { }

  template <class C, typename =
    typename ::std::enable_if< ::std::is_class<C>{}>::type>
  explicit arraydelegate(C const* const o) noexcept :
    object_ptr_(const_cast<C*>(o))
  {
  }

  template <class C, typename =
    typename ::std::enable_if< ::std::is_class<C>{}>::type>
  explicit arraydelegate(C const& o) noexcept :
    object_ptr_(const_cast<C*>(&o))
  {
  }

  template <class C>
  arraydelegate(C* const object_ptr, R (C::* const method_ptr)(A...))
  {
    *this = from(object_ptr, method_ptr);
  }

  template <class C>
  arraydelegate(C* const object_ptr, R (C::* const method_ptr)(A...) const)
  {
    *this = from(object_ptr, method_ptr);
  }

  template <class C>
  arraydelegate(C& object, R (C::* const method_ptr)(A...))
  {
    *this = from(object, method_ptr);
  }

  template <class C>
  arraydelegate(C const& object, R (C::* const method_ptr)(A...) const)
  {
    *this = from(object, method_ptr);
  }

  // Functor constructor: copies/moves f into the inline buffer and records
  // type-specific stubs for call, destroy, copy and move.
  template <
    typename T,
    typename = typename ::std::enable_if<
      !::std::is_same<arraydelegate, typename ::std::decay<T>::type>{}
    >::type
  >
  arraydelegate(T&& f)
  {
    using functor_type = typename ::std::decay<T>::type;
    static_assert(sizeof(functor_type) <= sizeof(store_),
      "increase store_ size");
    new (store_) functor_type(::std::forward<T>(f));
    object_ptr_ = store_;

    stub_ptr_ = functor_stub<functor_type>;
    deleter_ = deleter_stub<functor_type>;
    copier_ = copier_stub<functor_type>;
    mover_ = mover_stub<functor_type>;
  }

  // BUG FIX: a stored functor lives at store_, not at `this`.  The old code
  // called deleter_(this), running the functor's destructor on the wrong
  // address (operator=(T&&) below already destroys via store_).
  ~arraydelegate() { deleter_(store_); }

  // NOTE(review): copy/move assignment overwrite store_ without destroying
  // a previously stored functor first, leaking any resources it owns --
  // confirm and fix separately (self-assignment also needs a guard).
  arraydelegate& operator=(arraydelegate const& rhs)
  {
    rhs.copier_(*this, const_cast<arraydelegate&>(rhs));

    return *this;
  }

  arraydelegate& operator=(arraydelegate&& rhs)
  {
    rhs.mover_(*this, rhs);

    return *this;
  }

  template <class C>
  arraydelegate& operator=(R (C::* const rhs)(A...))
  {
    return *this = from(static_cast<C*>(object_ptr_), rhs);
  }

  template <class C>
  arraydelegate& operator=(R (C::* const rhs)(A...) const)
  {
    return *this = from(static_cast<C const*>(object_ptr_), rhs);
  }

  template <
    typename T,
    typename = typename ::std::enable_if<
      !::std::is_same<arraydelegate, typename ::std::decay<T>::type>{}
    >::type
  >
  arraydelegate& operator=(T&& f)
  {
    using functor_type = typename ::std::decay<T>::type;

    // Destroy the previously stored functor before reusing the buffer.
    deleter_(store_);

    static_assert(sizeof(functor_type) <= sizeof(store_),
      "increase store_ size");
    new (store_) functor_type(::std::forward<T>(f));
    object_ptr_ = store_;

    stub_ptr_ = functor_stub<functor_type>;
    deleter_ = deleter_stub<functor_type>;
    copier_ = copier_stub<functor_type>;
    mover_ = mover_stub<functor_type>;

    return *this;
  }

  template <R (* const function_ptr)(A...)>
  static arraydelegate from() noexcept
  {
    return { nullptr, function_stub<function_ptr> };
  }

  template <class C, R (C::* const method_ptr)(A...)>
  static arraydelegate from(C* const object_ptr) noexcept
  {
    return { object_ptr, method_stub<C, method_ptr> };
  }

  template <class C, R (C::* const method_ptr)(A...) const>
  static arraydelegate from(C const* const object_ptr) noexcept
  {
    return { const_cast<C*>(object_ptr), const_method_stub<C, method_ptr> };
  }

  template <class C, R (C::* const method_ptr)(A...)>
  static arraydelegate from(C& object) noexcept
  {
    return { &object, method_stub<C, method_ptr> };
  }

  template <class C, R (C::* const method_ptr)(A...) const>
  static arraydelegate from(C const& object) noexcept
  {
    return { const_cast<C*>(&object), const_method_stub<C, method_ptr> };
  }

  template <typename T>
  static arraydelegate from(T&& f)
  {
    return ::std::forward<T>(f);
  }

  static arraydelegate from(R (* const function_ptr)(A...))
  {
    return function_ptr;
  }

  template <class C>
  using member_pair =
    ::std::pair<C* const, R (C::* const)(A...)>;

  template <class C>
  using const_member_pair =
    ::std::pair<C const* const, R (C::* const)(A...) const>;

  // Runtime object/method bindings are stored as pairs in the inline buffer
  // and dispatched via the pair specialization of functor_stub.
  template <class C>
  static arraydelegate from(C* const object_ptr,
    R (C::* const method_ptr)(A...)) noexcept
  {
    return member_pair<C>(object_ptr, method_ptr);
  }

  template <class C>
  static arraydelegate from(C const* const object_ptr,
    R (C::* const method_ptr)(A...) const) noexcept
  {
    return const_member_pair<C>(object_ptr, method_ptr);
  }

  template <class C>
  static arraydelegate from(C& object,
    R (C::* const method_ptr)(A...)) noexcept
  {
    return member_pair<C>(&object, method_ptr);
  }

  template <class C>
  static arraydelegate from(C const& object,
    R (C::* const method_ptr)(A...) const) noexcept
  {
    return const_member_pair<C>(&object, method_ptr);
  }

  void reset() noexcept { stub_ptr_ = nullptr; }

  void reset_stub() noexcept { stub_ptr_ = nullptr; }

  void swap(arraydelegate& other) noexcept { ::std::swap(*this, other); }

  bool operator==(arraydelegate const& rhs) const noexcept
  {
    return (object_ptr_ == rhs.object_ptr_) && (stub_ptr_ == rhs.stub_ptr_);
  }

  bool operator!=(arraydelegate const& rhs) const noexcept
  {
    return !operator==(rhs);
  }

  bool operator<(arraydelegate const& rhs) const noexcept
  {
    return (object_ptr_ < rhs.object_ptr_) ||
      ((object_ptr_ == rhs.object_ptr_) && (stub_ptr_ < rhs.stub_ptr_));
  }

  bool operator==(::std::nullptr_t const) const noexcept
  {
    return !stub_ptr_;
  }

  bool operator!=(::std::nullptr_t const) const noexcept
  {
    return stub_ptr_;
  }

  explicit operator bool() const noexcept { return stub_ptr_; }

  // Invoke the bound callable.
  R operator()(A... args) const
  {
    // assert(stub_ptr);
    return stub_ptr_(object_ptr_, ::std::forward<A>(args)...);
  }

private:
  static void default_deleter_stub(void* const) { }

  template <class T>
  static void deleter_stub(void* const p)
  {
    static_cast<T*>(p)->~T();
  }

  // Copier used when no functor is stored: just replicate the pointers and
  // reset the lifecycle stubs to their trivial defaults.
  static void default_copier_stub(arraydelegate& dst,
    arraydelegate& src) noexcept
  {
    dst.object_ptr_ = src.object_ptr_;
    dst.stub_ptr_ = src.stub_ptr_;

    dst.deleter_ = default_deleter_stub;
    dst.copier_ = default_copier_stub;
    dst.mover_ = default_copier_stub;
  }

  template <typename T>
  static void copier_stub(arraydelegate& dst,
    arraydelegate& src)
  {
    new (dst.store_) T(*static_cast<T const*>(
      static_cast<void const*>(src.store_)));

    dst.stub_ptr_ = src.stub_ptr_;
    dst.object_ptr_ = dst.store_;

    dst.deleter_ = src.deleter_;
    dst.copier_ = src.copier_;
    dst.mover_ = src.mover_;
  }

  template <typename T>
  static void mover_stub(arraydelegate& dst,
    arraydelegate& src)
  {
    new (dst.store_) T(::std::move(*static_cast<T*>(
      static_cast<void*>(src.store_))));

    dst.stub_ptr_ = src.stub_ptr_;
    dst.object_ptr_ = dst.store_;

    dst.deleter_ = src.deleter_;
    dst.copier_ = src.copier_;
    dst.mover_ = src.mover_;
  }

private:
  friend struct ::std::hash<arraydelegate>;

  using deleter_type = void (*)(void*);
  using copier_type = void (*)(arraydelegate&, arraydelegate&);
  using mover_type = void (*)(arraydelegate&, arraydelegate&);

  void* object_ptr_;
  stub_ptr_type stub_ptr_{};

  deleter_type deleter_{default_deleter_stub};
  copier_type copier_{default_copier_stub};
  mover_type mover_{default_copier_stub};

  // PORTABILITY FIX: <cstddef> only guarantees ::std::max_align_t; plain
  // ::max_align_t is not required to exist.
  alignas(::std::max_align_t) char store_[max_store_size];

  template <R (*function_ptr)(A...)>
  static R function_stub(void* const, A&&... args)
  {
    return function_ptr(::std::forward<A>(args)...);
  }

  template <class C, R (C::*method_ptr)(A...)>
  static R method_stub(void* const object_ptr, A&&... args)
  {
    return (static_cast<C*>(object_ptr)->*method_ptr)(
      ::std::forward<A>(args)...);
  }

  template <class C, R (C::*method_ptr)(A...) const>
  static R const_method_stub(void* const object_ptr, A&&... args)
  {
    return (static_cast<C const*>(object_ptr)->*method_ptr)(
      ::std::forward<A>(args)...);
  }

  template <typename>
  struct is_member_pair : std::false_type { };

  template <class C>
  struct is_member_pair< ::std::pair<C* const,
    R (C::* const)(A...)> > : std::true_type
  {
  };

  template <typename>
  struct is_const_member_pair : std::false_type { };

  template <class C>
  struct is_const_member_pair< ::std::pair<C const* const,
    R (C::* const)(A...) const> > : std::true_type
  {
  };

  // Dispatch for plain functors stored in the buffer.
  template <typename T>
  static typename ::std::enable_if<
    !(is_member_pair<T>{} ||
    is_const_member_pair<T>{}),
    R
  >::type
  functor_stub(void* const object_ptr, A&&... args)
  {
    return (*static_cast<T*>(object_ptr))(::std::forward<A>(args)...);
  }

  // Dispatch for object/method pairs stored in the buffer.
  template <typename T>
  static typename ::std::enable_if<
    is_member_pair<T>{} ||
    is_const_member_pair<T>{},
    R
  >::type
  functor_stub(void* const object_ptr, A&&... args)
  {
    return (static_cast<T*>(object_ptr)->first->*
      static_cast<T*>(object_ptr)->second)(::std::forward<A>(args)...);
  }
};

}
namespace std
{
// Hash support: combines hashes of the target object pointer and the stub
// pointer using the boost-style hash_combine constant (0x9e3779b9).
template <typename R, typename ...A>
struct hash<::generic::arraydelegate<R (A...)> >
{
  size_t operator()(
    ::generic::arraydelegate<R (A...)> const& d) const noexcept
  {
    auto const seed(hash<void*>()(d.object_ptr_));

    return hash<decltype(d.stub_ptr_)>()(d.stub_ptr_) +
      0x9e3779b9 + (seed << 6) + (seed >> 2);
  }
};
}
#endif // ARRAYDELEGATE_HPP
|
mitodl/ol-django
|
tests/mitol/common/utils/test_currency.py
|
<filename>tests/mitol/common/utils/test_currency.py<gh_stars>1-10
"""Utils tests"""
from decimal import Decimal
import pytest
from mitol.common.utils.currency import format_price
@pytest.mark.parametrize(
    "price,expected",
    [
        [Decimal("0"), "$0.00"],
        [Decimal("1234567.89"), "$1,234,567.89"],
    ],
)
def test_format_price(price, expected):
    """format_price renders a Decimal as a $-prefixed, comma-grouped price string."""
    assert format_price(price) == expected
|
nathan-slv017/Aulas
|
estruturas de dados e alg em js/cap2/index.js
|
// Module-level binding; reassigned later by marvelFan() (not shadowed).
let movie = 'Lord of the Rings'
// Demonstrates `const` block scoping: the local constant shadows the
// module-level `movie`, which is left untouched.
function startWarsFan(){
    const favourite = 'Star War';
    return favourite;
}
// Demonstrates assignment without a declaration keyword: this reassigns the
// module-level `movie` binding rather than creating a local one.
function marvelFan(){
    movie = 'The avengers';
    return movie
}
// Demonstrates `let` block scoping: the inner `let phase` shadows the outer
// one, so the outer `phase` keeps its value after the if-block.
function blizzardFan(){
    const isFan = true
    let phase = 'Warcraft';
    console.log('Before if ' + phase);
    if(isFan){
        // New block-scoped binding -- shadows, does not overwrite, the outer `phase`.
        let phase = 'initial text';
        phase = 'For the Holde !'
        console.log('inside if: ' + phase);
    }
}
// Exercise the demos: the global `movie` only changes after marvelFan()
// runs, because marvelFan assigns to the module-level binding.
console.log(movie)
console.log(startWarsFan());
console.log(marvelFan());
console.log(movie);
blizzardFan();
|
Vishal1003/vHelp
|
Backend/models/item.js
|
<gh_stars>1-10
const mongoose = require("mongoose");
// Mongoose schema for a marketplace item listed for sale by a vendor.
const itemSchema = mongoose.Schema({
    // Display name of the item.
    name: {
        type: String,
        required: true
    },
    // Price of the item. Currency unit is not encoded here -- presumably
    // the app-wide default; confirm against the frontend.
    cost: {
        type: Number,
        required: true
    },
    // Reference to the Category document this item belongs to.
    category: {
        type: mongoose.Schema.Types.ObjectId,
        ref: "Category",
        required: true
    },
    description: {
        type: String,
        required: true
    },
    // Public URL of the item image.
    imageUrl: {
        type: String,
        required: true
    },
    // Cloudinary asset id for the image (optional; presumably used for
    // image deletes/replacements -- verify against the upload code).
    cloudinary_id: {
        type: String
    },
    // Reference to the Vendor document that listed this item.
    seller: {
        type: mongoose.Schema.Types.ObjectId,
        required: true,
        ref: "Vendor"
    }
});
const Item = mongoose.model("Item", itemSchema);
module.exports = Item;
|
Duli1986/coreui-free-react-admin-template
|
modules/customer-crm-app/node_modules/ramda-extension/es/__tests__/noop-test.js
|
<gh_stars>0
import { noop } from '../';
describe('noop', function () {
it('returns null', function () {
expect(noop()).toBe(null);
});
});
|
feihua666/feihua-framework
|
feihua-framework-scheduler-service/feihua-framework-scheduler-service-impl/src/main/java/com/feihua/framework/scheduler/impl/ApiSchedulerServiceImpl.java
|
package com.feihua.framework.scheduler.impl;
import com.feihua.exception.DataNotFoundException;
import com.feihua.framework.scheduler.QuartzSchedulerUtils;
import com.feihua.framework.scheduler.api.ApiBaseQuartzJobManager;
import com.feihua.framework.scheduler.api.ApiSchedulerPoService;
import com.feihua.framework.scheduler.api.ApiSchedulerService;
import com.feihua.framework.scheduler.dto.JobAndTriggerWrapperDto;
import com.feihua.framework.scheduler.dto.SchedulerDto;
import com.feihua.framework.scheduler.dto.TriggerDetailDto;
import com.feihua.framework.scheduler.po.SchedulerPo;
import com.feihua.utils.string.StringUtils;
import org.quartz.JobDetail;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* Created by yangwei
* Created at 2019/4/19 10:52
*/
@Service
public class ApiSchedulerServiceImpl implements ApiSchedulerService {

    @Autowired
    private ApiSchedulerPoService apiSchedulerPoService;

    @Autowired
    private ApiBaseQuartzJobManager apiBaseQuartzJobManager;

    /** Loads the scheduler DTO for the given id; throws when no such record exists. */
    private SchedulerDto getScheduler(String schedulerId){
        SchedulerDto scheduler = apiSchedulerPoService.selectByPrimaryKey(schedulerId, false);
        if (scheduler == null) {
            throw new DataNotFoundException(StringUtils.messageFormat("schedulerNotFound with id={0}",schedulerId));
        }
        return scheduler;
    }

    /** Loads the bare scheduler PO for the given id; throws when no such record exists. */
    private SchedulerPo getSchedulerPo(String schedulerId){
        SchedulerPo schedulerPo = apiSchedulerPoService.selectByPrimaryKeySimple(schedulerId, false);
        if (schedulerPo == null) {
            throw new DataNotFoundException(StringUtils.messageFormat("schedulerNotFound with id={0}",schedulerId));
        }
        return schedulerPo;
    }

    /**
     * Registers the scheduler's job with Quartz using its stored cron trigger.
     *
     * @param startNow whether the trigger should fire immediately
     */
    @Override
    public void publish(String schedulerId, boolean startNow) throws ClassNotFoundException, SchedulerException, DataNotFoundException {
        SchedulerDto scheduler = getScheduler(schedulerId);
        Trigger trigger = apiBaseQuartzJobManager.newCronTrigger(
                scheduler.getTriggerName(), scheduler.getTriggerGroup(),
                scheduler.getTriggerCronExpression(), startNow);
        JobDetail jobDetail = apiBaseQuartzJobManager.newJob(
                scheduler.getJobName(), scheduler.getJobGroup(), scheduler.getJobClass());
        apiBaseQuartzJobManager.schedulerStart(jobDetail, trigger);
    }

    /** Pauses the scheduler's Quartz trigger. */
    @Override
    public void pauseTrigger(String schedulerId) throws SchedulerException, DataNotFoundException {
        SchedulerDto scheduler = getScheduler(schedulerId);
        apiBaseQuartzJobManager.pauseTrigger(
                QuartzSchedulerUtils.getTriggerKey(scheduler.getTriggerName(), scheduler.getTriggerGroup()));
    }

    /** Resumes the scheduler's previously paused Quartz trigger. */
    @Override
    public void resumeTrigger(String schedulerId) throws SchedulerException, DataNotFoundException {
        SchedulerDto scheduler = getScheduler(schedulerId);
        apiBaseQuartzJobManager.resumeTrigger(
                QuartzSchedulerUtils.getTriggerKey(scheduler.getTriggerName(), scheduler.getTriggerGroup()));
    }

    /** Removes the scheduler's job and trigger from Quartz. */
    @Override
    public void removeJob(String schedulerId) throws SchedulerException, DataNotFoundException {
        SchedulerDto scheduler = getScheduler(schedulerId);
        apiBaseQuartzJobManager.removeJob(
                scheduler.getJobName(), scheduler.getJobGroup(),
                scheduler.getTriggerName(), scheduler.getTriggerGroup());
    }

    /**
     * Persists a new cron expression for the scheduler and re-times the live
     * Quartz trigger accordingly.
     */
    @Override
    public void modifyTriggerTime(String schedulerId,String cron,String cronDesc,boolean startNow,String currentUserId) throws SchedulerException, DataNotFoundException {
        SchedulerPo schedulerPo = getSchedulerPo(schedulerId);
        schedulerPo.setTriggerCronExpression(cron);
        schedulerPo.setTriggerCronExpressionDesc(cronDesc);
        apiSchedulerPoService.preUpdate(schedulerPo, currentUserId);
        apiSchedulerPoService.updateByPrimaryKeySelective(schedulerPo);
        apiBaseQuartzJobManager.modifyJobTime(
                schedulerPo.getTriggerName(), schedulerPo.getTriggerGroup(), cron, startNow);
    }

    /** Returns trigger details, or {@code null} when the trigger does not exist. */
    @Override
    public TriggerDetailDto getTriggerDetail(String triggerName, String triggerGroupName) throws SchedulerException {
        Trigger trigger = apiBaseQuartzJobManager.getTrigger(triggerName, triggerGroupName);
        return trigger == null ? null : new TriggerDetailDto(trigger);
    }

    /** Maps the Quartz trigger state to this service's scheduler-status string. */
    @Override
    public String getSchedulerStatus(String triggerName, String triggerGroupName) throws SchedulerException {
        return QuartzSchedulerUtils.triggerStateToSchedulerStatus(
                apiBaseQuartzJobManager.getTriggerState(triggerName, triggerGroupName));
    }

    /** Lists every job currently registered with Quartz. */
    @Override
    public List<JobAndTriggerWrapperDto> getAllJobs() throws SchedulerException {
        return apiBaseQuartzJobManager.getAllJobs();
    }
}
|
bertt1t11/Java
|
Set 2/MyCanvas.java
|
import java.awt.*;
import javax.swing.JFrame;
public class MyCanvas extends Canvas{
    /**
     * Paints the demo image at (120, 100); the file path is resolved
     * relative to the working directory.
     */
    public void paint(Graphics g) {
        Toolkit toolkit = Toolkit.getDefaultToolkit();
        Image image = toolkit.getImage("mobile-communication-home.png");
        g.drawImage(image, 120, 100, this);
    }

    /** Demo entry point: shows the canvas in a 400x400 frame. */
    public static void main(String[] args) {
        JFrame frame = new JFrame();
        frame.add(new MyCanvas());
        frame.setSize(400, 400);
        frame.setVisible(true);
    }
}
|
akrisiun/vscode
|
out/vs/workbench/contrib/tasks/common/taskTemplates.js
|
<reponame>akrisiun/vscode<gh_stars>1-10
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
define(["require", "exports", "vs/nls"], function (require, exports, nls) {
    "use strict";
    Object.defineProperty(exports, "__esModule", { value: true });
    // tasks.json (v2.0.0) template for .NET Core builds.
    // `sort` overrides `label` when ordering templates in getTemplates().
    const dotnetBuild = {
        id: 'dotnetCore',
        label: '.NET Core',
        sort: 'NET Core',
        autoDetect: false,
        description: nls.localize('dotnetCore', 'Executes .NET Core build command'),
        content: [
            '{',
            '\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
            '\t// for the documentation about the tasks.json format',
            '\t"version": "2.0.0",',
            '\t"tasks": [',
            '\t\t{',
            '\t\t\t"label": "build",',
            '\t\t\t"command": "dotnet",',
            '\t\t\t"type": "shell",',
            '\t\t\t"args": [',
            '\t\t\t\t"build",',
            '\t\t\t\t// Ask dotnet build to generate full paths for file names.',
            '\t\t\t\t"/property:GenerateFullPaths=true",',
            '\t\t\t\t// Do not generate summary otherwise it leads to duplicate errors in Problems panel',
            '\t\t\t\t"/consoleloggerparameters:NoSummary"',
            '\t\t\t],',
            '\t\t\t"group": "build",',
            '\t\t\t"presentation": {',
            '\t\t\t\t"reveal": "silent"',
            '\t\t\t},',
            '\t\t\t"problemMatcher": "$msCompile"',
            '\t\t}',
            '\t]',
            '}'
        ].join('\n')
    };
    // tasks.json template invoking MSBuild's "build" target.
    const msbuild = {
        id: 'msbuild',
        label: 'MSBuild',
        autoDetect: false,
        description: nls.localize('msbuild', 'Executes the build target'),
        content: [
            '{',
            '\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
            '\t// for the documentation about the tasks.json format',
            '\t"version": "2.0.0",',
            '\t"tasks": [',
            '\t\t{',
            '\t\t\t"label": "build",',
            '\t\t\t"type": "shell",',
            '\t\t\t"command": "msbuild",',
            '\t\t\t"args": [',
            '\t\t\t\t// Ask msbuild to generate full paths for file names.',
            '\t\t\t\t"/property:GenerateFullPaths=true",',
            '\t\t\t\t"/t:build",',
            '\t\t\t\t// Do not generate summary otherwise it leads to duplicate errors in Problems panel',
            '\t\t\t\t"/consoleloggerparameters:NoSummary"',
            '\t\t\t],',
            '\t\t\t"group": "build",',
            '\t\t\t"presentation": {',
            '\t\t\t\t// Reveal the output only if unrecognized errors occur.',
            '\t\t\t\t"reveal": "silent"',
            '\t\t\t},',
            '\t\t\t// Use the standard MS compiler pattern to detect errors, warnings and infos',
            '\t\t\t"problemMatcher": "$msCompile"',
            '\t\t}',
            '\t]',
            '}'
        ].join('\n')
    };
    // Catch-all template: a minimal shell-command example ("Others").
    const command = {
        id: 'externalCommand',
        label: 'Others',
        autoDetect: false,
        description: nls.localize('externalCommand', 'Example to run an arbitrary external command'),
        content: [
            '{',
            '\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
            '\t// for the documentation about the tasks.json format',
            '\t"version": "2.0.0",',
            '\t"tasks": [',
            '\t\t{',
            '\t\t\t"label": "echo",',
            '\t\t\t"type": "shell",',
            '\t\t\t"command": "echo Hello"',
            '\t\t}',
            '\t]',
            '}'
        ].join('\n')
    };
    // tasks.json template with common Maven verify/test tasks.
    const maven = {
        id: 'maven',
        label: 'maven',
        sort: 'MVN',
        autoDetect: false,
        description: nls.localize('Maven', 'Executes common maven commands'),
        content: [
            '{',
            '\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
            '\t// for the documentation about the tasks.json format',
            '\t"version": "2.0.0",',
            '\t"tasks": [',
            '\t\t{',
            '\t\t\t"label": "verify",',
            '\t\t\t"type": "shell",',
            '\t\t\t"command": "mvn -B verify",',
            '\t\t\t"group": "build"',
            '\t\t},',
            '\t\t{',
            '\t\t\t"label": "test",',
            '\t\t\t"type": "shell",',
            '\t\t\t"command": "mvn -B test",',
            '\t\t\t"group": "test"',
            '\t\t}',
            '\t]',
            '}'
        ].join('\n')
    };
    // Lazily built, cached template list: sorted by `sort` (falling back to
    // `label`), with the catch-all 'Others' entry always appended last.
    let _templates = null;
    function getTemplates() {
        if (!_templates) {
            _templates = [dotnetBuild, msbuild, maven].sort((a, b) => {
                return (a.sort || a.label).localeCompare(b.sort || b.label);
            });
            _templates.push(command);
        }
        return _templates;
    }
    exports.getTemplates = getTemplates;
});
/** Version 1.0 templates
*
const gulp: TaskEntry = {
id: 'gulp',
label: 'Gulp',
autoDetect: true,
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "gulp",',
'\t"isShellCommand": true,',
'\t"args": ["--no-color"],',
'\t"showOutput": "always"',
'}'
].join('\n')
};
const grunt: TaskEntry = {
id: 'grunt',
label: 'Grunt',
autoDetect: true,
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "grunt",',
'\t"isShellCommand": true,',
'\t"args": ["--no-color"],',
'\t"showOutput": "always"',
'}'
].join('\n')
};
const npm: TaskEntry = {
id: 'npm',
label: 'npm',
sort: 'NPM',
autoDetect: false,
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "npm",',
'\t"isShellCommand": true,',
'\t"showOutput": "always",',
'\t"suppressTaskName": true,',
'\t"tasks": [',
'\t\t{',
'\t\t\t"taskName": "install",',
'\t\t\t"args": ["install"]',
'\t\t},',
'\t\t{',
'\t\t\t"taskName": "update",',
'\t\t\t"args": ["update"]',
'\t\t},',
'\t\t{',
'\t\t\t"taskName": "test",',
'\t\t\t"args": ["run", "test"]',
'\t\t}',
'\t]',
'}'
].join('\n')
};
const tscConfig: TaskEntry = {
id: 'tsc.config',
label: 'TypeScript - tsconfig.json',
autoDetect: false,
description: nls.localize('tsc.config', 'Compiles a TypeScript project'),
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "tsc",',
'\t"isShellCommand": true,',
'\t"args": ["-p", "."],',
'\t"showOutput": "silent",',
'\t"problemMatcher": "$tsc"',
'}'
].join('\n')
};
const tscWatch: TaskEntry = {
id: 'tsc.watch',
label: 'TypeScript - Watch Mode',
autoDetect: false,
description: nls.localize('tsc.watch', 'Compiles a TypeScript project in watch mode'),
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "tsc",',
'\t"isShellCommand": true,',
'\t"args": ["-w", "-p", "."],',
'\t"showOutput": "silent",',
'\t"isBackground": true,',
'\t"problemMatcher": "$tsc-watch"',
'}'
].join('\n')
};
const dotnetBuild: TaskEntry = {
id: 'dotnetCore',
label: '.NET Core',
sort: 'NET Core',
autoDetect: false,
description: nls.localize('dotnetCore', 'Executes .NET Core build command'),
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "dotnet",',
'\t"isShellCommand": true,',
'\t"args": [],',
'\t"tasks": [',
'\t\t{',
'\t\t\t"taskName": "build",',
'\t\t\t"args": [ ],',
'\t\t\t"isBuildCommand": true,',
'\t\t\t"showOutput": "silent",',
'\t\t\t"problemMatcher": "$msCompile"',
'\t\t}',
'\t]',
'}'
].join('\n')
};
const msbuild: TaskEntry = {
id: 'msbuild',
label: 'MSBuild',
autoDetect: false,
description: nls.localize('msbuild', 'Executes the build target'),
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "msbuild",',
'\t"args": [',
'\t\t// Ask msbuild to generate full paths for file names.',
'\t\t"/property:GenerateFullPaths=true"',
'\t],',
'\t"taskSelector": "/t:",',
'\t"showOutput": "silent",',
'\t"tasks": [',
'\t\t{',
'\t\t\t"taskName": "build",',
'\t\t\t// Show the output window only if unrecognized errors occur.',
'\t\t\t"showOutput": "silent",',
'\t\t\t// Use the standard MS compiler pattern to detect errors, warnings and infos',
'\t\t\t"problemMatcher": "$msCompile"',
'\t\t}',
'\t]',
'}'
].join('\n')
};
const command: TaskEntry = {
id: 'externalCommand',
label: 'Others',
autoDetect: false,
description: nls.localize('externalCommand', 'Example to run an arbitrary external command'),
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "echo",',
'\t"isShellCommand": true,',
'\t"args": ["Hello World"],',
'\t"showOutput": "always"',
'}'
].join('\n')
};
const maven: TaskEntry = {
id: 'maven',
label: 'maven',
sort: 'MVN',
autoDetect: false,
description: nls.localize('Maven', 'Executes common maven commands'),
content: [
'{',
'\t// See https://go.microsoft.com/fwlink/?LinkId=733558',
'\t// for the documentation about the tasks.json format',
'\t"version": "0.1.0",',
'\t"command": "mvn",',
'\t"isShellCommand": true,',
'\t"showOutput": "always",',
'\t"suppressTaskName": true,',
'\t"tasks": [',
'\t\t{',
'\t\t\t"taskName": "verify",',
'\t\t\t"args": ["-B", "verify"],',
'\t\t\t"isBuildCommand": true',
'\t\t},',
'\t\t{',
'\t\t\t"taskName": "test",',
'\t\t\t"args": ["-B", "test"],',
'\t\t\t"isTestCommand": true',
'\t\t}',
'\t]',
'}'
].join('\n')
};
export let templates: TaskEntry[] = [gulp, grunt, tscConfig, tscWatch, dotnetBuild, msbuild, npm, maven].sort((a, b) => {
return (a.sort || a.label).localeCompare(b.sort || b.label);
});
templates.push(command);
*/
//# sourceMappingURL=taskTemplates.js.map
|
banzsolt/wot-api
|
lib/wargaming_api/wargaming_net.rb
|
<reponame>banzsolt/wot-api
require 'wargaming_api/request'
module WargamingApi
  # Wargaming.net endpoint wrapper built on the shared Request base class.
  # Announces itself on load (side effect preserved from the original).
  class WargamingNet < Request
    puts 'WargamingNet loaded.'
  end
end
|
hispindia/BIHAR-2.7
|
dhis-2/dhis-api/src/main/java/org/hisp/dhis/message/MessageConversation.java
|
<gh_stars>0
package org.hisp.dhis.message;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.Dxf2Namespace;
import org.hisp.dhis.common.adapter.UserXmlAdapter;
import org.hisp.dhis.user.User;
import javax.xml.bind.annotation.*;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import java.util.*;
/**
 * A message thread between a set of users: a subject, the messages posted to
 * the thread, and per-participant state (read / follow-up) held in
 * {@link UserMessage} entries.
 *
 * @author <NAME>
 */
@XmlRootElement( name = "messageConversation", namespace = Dxf2Namespace.NAMESPACE )
@XmlAccessorType( value = XmlAccessType.NONE )
public class MessageConversation
    extends BaseIdentifiableObject
{
    private String subject;

    private Set<UserMessage> userMessages = new HashSet<UserMessage>();

    private List<Message> messages = new ArrayList<Message>();

    private User lastSender;

    private Date lastMessage;

    // Transient presentation state for the current user -- not persisted.

    private transient boolean read;

    private transient boolean followUp;

    private transient String lastSenderSurname;

    private transient String lastSenderFirstname;

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    public MessageConversation()
    {
    }

    public MessageConversation( String subject, User lastSender )
    {
        this.subject = subject;
        this.lastSender = lastSender;
        this.lastMessage = new Date();
    }

    // -------------------------------------------------------------------------
    // Logic
    // -------------------------------------------------------------------------

    /**
     * The display name of a conversation is its subject.
     */
    @Override
    public String getName()
    {
        return subject;
    }

    public void addUserMessage( UserMessage userMessage )
    {
        this.userMessages.add( userMessage );
    }

    /**
     * Adds a message to this conversation and stamps its auto fields.
     * Null arguments are ignored; previously a null message was still
     * appended to the list (only setAutoFields was guarded), which could
     * cause an NPE later during traversal or persistence.
     */
    public void addMessage( Message message )
    {
        if ( message != null )
        {
            message.setAutoFields();

            this.messages.add( message );
        }
    }

    /**
     * Toggles the follow-up flag for the given user.
     *
     * @return the new follow-up state, or false when the user is not a
     *         participant of this conversation.
     */
    public boolean toggleFollowUp( User user )
    {
        for ( UserMessage userMessage : userMessages )
        {
            if ( userMessage.getUser() != null && userMessage.getUser().equals( user ) )
            {
                userMessage.setFollowUp( !userMessage.isFollowUp() );

                return userMessage.isFollowUp();
            }
        }

        return false;
    }

    /**
     * Marks this conversation as read for the given user.
     *
     * @return true when the state changed, i.e. it was previously unread.
     */
    public boolean markRead( User user )
    {
        for ( UserMessage userMessage : userMessages )
        {
            if ( userMessage.getUser() != null && userMessage.getUser().equals( user ) )
            {
                boolean wasRead = userMessage.isRead();

                userMessage.setRead( true );

                return !wasRead;
            }
        }

        return false;
    }

    /**
     * Marks this conversation as unread for the given user.
     *
     * @return true when the state changed, i.e. it was previously read.
     */
    public boolean markUnread( User user )
    {
        for ( UserMessage userMessage : userMessages )
        {
            if ( userMessage.getUser() != null && userMessage.getUser().equals( user ) )
            {
                boolean wasRead = userMessage.isRead();

                userMessage.setRead( false );

                return wasRead;
            }
        }

        return false;
    }

    /**
     * Registers a reply: marks the conversation unread for every participant
     * except the sender, appends the message and updates the last-sender and
     * last-message timestamp.
     */
    public void markReplied( User sender, Message message )
    {
        for ( UserMessage userMessage : userMessages )
        {
            if ( userMessage.getUser() != null && !userMessage.getUser().equals( sender ) )
            {
                userMessage.setRead( false );
            }
        }

        addMessage( message );

        this.lastSender = sender;
        this.setLastMessage( new Date() );
    }

    /**
     * Removes the given user from the participants of this conversation.
     * Only the first matching entry is removed.
     */
    public void remove( User user )
    {
        Iterator<UserMessage> iterator = userMessages.iterator();

        while ( iterator.hasNext() )
        {
            UserMessage userMessage = iterator.next();

            if ( userMessage.getUser() != null && userMessage.getUser().equals( user ) )
            {
                iterator.remove();

                return;
            }
        }
    }

    /**
     * Returns all participants of this conversation.
     */
    public Set<User> getUsers()
    {
        Set<User> users = new HashSet<User>();

        for ( UserMessage userMessage : userMessages )
        {
            users.add( userMessage.getUser() );
        }

        return users;
    }

    // -------------------------------------------------------------------------
    // Getters and setters
    // -------------------------------------------------------------------------

    @XmlElement
    @JsonProperty
    public String getSubject()
    {
        return subject;
    }

    public void setSubject( String subject )
    {
        this.subject = subject;
    }

    public Set<UserMessage> getUserMessages()
    {
        return userMessages;
    }

    public void setUserMessages( Set<UserMessage> userMessages )
    {
        this.userMessages = userMessages;
    }

    @XmlElementWrapper( name = "messages" )
    @XmlElement( name = "message" )
    @JsonProperty
    public List<Message> getMessages()
    {
        return messages;
    }

    public void setMessages( List<Message> messages )
    {
        this.messages = messages;
    }

    @XmlElement
    @XmlJavaTypeAdapter( UserXmlAdapter.class )
    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    public User getLastSender()
    {
        return lastSender;
    }

    public void setLastSender( User lastSender )
    {
        this.lastSender = lastSender;
    }

    @XmlElement
    @JsonProperty
    public Date getLastMessage()
    {
        return lastMessage;
    }

    public void setLastMessage( Date lastMessage )
    {
        this.lastMessage = lastMessage;
    }

    public boolean isRead()
    {
        return read;
    }

    public void setRead( boolean read )
    {
        this.read = read;
    }

    public boolean isFollowUp()
    {
        return followUp;
    }

    public void setFollowUp( boolean followUp )
    {
        this.followUp = followUp;
    }

    public String getLastSenderName()
    {
        return lastSenderFirstname + " " + lastSenderSurname;
    }

    public String getLastSenderSurname()
    {
        return lastSenderSurname;
    }

    public void setLastSenderSurname( String lastSenderSurname )
    {
        this.lastSenderSurname = lastSenderSurname;
    }

    public String getLastSenderFirstname()
    {
        return lastSenderFirstname;
    }

    public void setLastSenderFirstname( String lastSenderFirstname )
    {
        this.lastSenderFirstname = lastSenderFirstname;
    }

    // -------------------------------------------------------------------------
    // hashCode, equals, toString
    // -------------------------------------------------------------------------

    // NOTE(review): identity is based solely on uid; assumes uid is assigned
    // before the object is hashed -- confirm BaseIdentifiableObject guarantees.
    @Override
    public int hashCode()
    {
        return uid.hashCode();
    }

    @Override
    public boolean equals( Object object )
    {
        if ( this == object )
        {
            return true;
        }

        if ( object == null )
        {
            return false;
        }

        if ( getClass() != object.getClass() )
        {
            return false;
        }

        final MessageConversation other = (MessageConversation) object;

        return uid.equals( other.uid );
    }

    @Override
    public String toString()
    {
        return "[" + subject + "]";
    }
}
|
eyedeekay/rain
|
internal/pexlist/pexlist.go
|
<reponame>eyedeekay/rain
package pexlist
import (
"net"
"strings"
"github.com/cenkalti/rain/internal/tracker"
)
const (
	// BEP 11: Except for the initial PEX message the combined amount of added v4/v6 contacts should not exceed 50 entries.
	// The same applies to dropped entries.
	maxPeers = 50
)

// PEXList accumulates peer-exchange deltas (peers added and dropped since
// the last flush) to be sent to a remote peer.
type PEXList struct {
	added   map[tracker.CompactPeer]struct{}
	dropped map[tracker.CompactPeer]struct{}
	// flushed is false until the first Flush call; the initial PEX message
	// is not subject to the maxPeers cap (see flush).
	flushed bool
}
// New returns an empty PEXList ready to record added and dropped peers.
func New() *PEXList {
	l := &PEXList{}
	l.added = make(map[tracker.CompactPeer]struct{})
	l.dropped = make(map[tracker.CompactPeer]struct{})
	return l
}
// Add records addr as newly added and cancels any pending drop for it.
func (l *PEXList) Add(addr *net.TCPAddr) {
	cp := tracker.NewCompactPeer(addr)
	delete(l.dropped, cp)
	l.added[cp] = struct{}{}
}
// Drop records addr as dropped and cancels any pending add for it.
func (l *PEXList) Drop(addr *net.TCPAddr) {
	cp := tracker.NewCompactPeer(addr)
	delete(l.added, cp)
	l.dropped[cp] = struct{}{}
}
// Flush drains both delta sets into wire-format strings and marks the list
// as flushed. The maxPeers cap is applied only after the first flush, per
// BEP 11's allowance for a larger initial PEX message.
func (l *PEXList) Flush() (added, dropped string) {
	limit := l.flushed
	l.flushed = true
	added = l.flush(l.added, limit)
	dropped = l.flush(l.dropped, limit)
	return added, dropped
}
// flush serializes up to maxPeers entries of m (when limit is true) into a
// single string and removes every serialized entry from the map. Entries
// left behind when the cap is hit are reported on a later flush.
func (l *PEXList) flush(m map[tracker.CompactPeer]struct{}, limit bool) string {
	count := len(m)
	if limit && count > maxPeers {
		count = maxPeers
	}
	var s strings.Builder
	// 6 bytes per entry -- presumably the compact IPv4 addr+port encoding;
	// confirm against tracker.CompactPeer.MarshalBinary.
	s.Grow(count * 6)
	for p := range m {
		if count == 0 {
			break
		}
		count--
		b, err := p.MarshalBinary()
		if err != nil {
			panic(err)
		}
		s.Write(b)
		// Deleting while ranging over a map is safe in Go.
		delete(m, p)
	}
	return s.String()
}
|
BlueCodeSystems/opensrp-client-chw
|
opensrp-chw/src/test/java/org/smartregister/chw/actionhelper/ANCCardActionTest.java
|
package org.smartregister.chw.actionhelper;
import android.content.Context;
import org.json.JSONObject;
import org.junit.Assert;
import org.junit.Test;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.util.ReflectionHelpers;
import org.smartregister.chw.BaseUnitTest;
import org.smartregister.chw.R;
import org.smartregister.chw.anc.model.BaseAncHomeVisitAction;
import org.smartregister.chw.core.utils.NativeFormProcessor;
import java.util.HashMap;
import java.util.Map;
// Unit tests for ANCCardAction (the "ANC card received" home-visit action).
public class ANCCardActionTest extends BaseUnitTest {

    // onPayloadReceived should copy the "anc_card" answer out of the filled
    // form JSON into the action's private anc_card field.
    @Test
    public void testOnPayloadReceive() throws Exception {
        ANCCardAction ancCardAction = new ANCCardAction();
        Context context = RuntimeEnvironment.application;
        String formName = "anc_hv_anc_card_received";
        Map<String, Object> values = new HashMap<>();
        values.put("anc_card", "Yes");
        JSONObject jsonObject = ReadFormHelper.getFormJson(context, formName);
        NativeFormProcessor.createInstance(jsonObject)
                .populateValues(values);
        ancCardAction.onPayloadReceived(jsonObject.toString());
        Assert.assertEquals("Yes", ReflectionHelpers.getField(ancCardAction, "anc_card"));
    }

    // evaluateSubTitle should render the stored anc_card answer as a
    // localized string.
    @Test
    public void testEvaluateSubTitle() {
        ANCCardAction ancCardAction = new ANCCardAction();
        Context context = RuntimeEnvironment.application;
        ReflectionHelpers.setField(ancCardAction, "context", context);
        ReflectionHelpers.setField(ancCardAction, "anc_card", "No");
        Assert.assertEquals(context.getString(R.string.no), ancCardAction.evaluateSubTitle());
    }

    // Status mapping: no answer -> PENDING, "Yes" -> COMPLETED,
    // "No" -> PARTIALLY_COMPLETED.
    @Test
    public void testEvaluateStatusOnPayload() {
        ANCCardAction ancCardAction = new ANCCardAction();
        Assert.assertEquals(ancCardAction.evaluateStatusOnPayload(), BaseAncHomeVisitAction.Status.PENDING);
        ReflectionHelpers.setField(ancCardAction, "anc_card", "Yes");
        Assert.assertEquals(ancCardAction.evaluateStatusOnPayload(), BaseAncHomeVisitAction.Status.COMPLETED);
        ReflectionHelpers.setField(ancCardAction, "anc_card", "No");
        Assert.assertEquals(ancCardAction.evaluateStatusOnPayload(), BaseAncHomeVisitAction.Status.PARTIALLY_COMPLETED);
    }
}
|
BestJex/redragon-erp
|
erp-parent/erp-masterData/src/main/java/com/erp/masterdata/vendor/dao/model/MdVendorLicenseCO.java
|
<reponame>BestJex/redragon-erp
package com.erp.masterdata.vendor.dao.model;
import com.erp.masterdata.vendor.dao.model.MdVendorLicense;
/**
 * Condition/query object (CO) variant of {@link MdVendorLicense}. Adds no
 * fields of its own; exists as a distinct type for query-layer usage.
 * NOTE(review): Serializable is declared explicitly here -- confirm whether
 * the parent entity already implements it.
 */
public class MdVendorLicenseCO extends MdVendorLicense implements java.io.Serializable {
    // serialVersionUID
    private static final long serialVersionUID = 1L;
}
|
labs12-section-calendar-template/Front-End
|
calendr/src/components/login/Register.js
|
<reponame>labs12-section-calendar-template/Front-End<filename>calendr/src/components/login/Register.js
import React, { Component } from 'react'
import axios from 'axios'
import styled from 'styled-components';
// Currently not being used. Meant for manual registration.
// Backend base URL; note the trailing slash -- paths are appended directly.
const url = "https://calendrserver.herokuapp.com/"
export class Register extends Component {
state = {
fullname: '',
username: '',
email: '',
password: '',
verifypassword: ''
}
handleChanges = event => {
event.preventDefault();
this.setState({
[event.target.name]: event.target.value
})
}
register = event => {
event.preventDefault();
let { fullname, username, email, password } = this.state
if(this.state.password === this.state.verifypassword){
axios
.post(`${url}auth/register`, {
fullname: fullname,
username: username,
email: email,
password: password
})
.then(res => {
console.log('Its working', res)
alert('Account registration is successful')
})
.catch( error => console.log('OH NO', error));
this.setState({
fullname: '',
username: '',
email: '',
password: '',
verifypassword: ''
})} else {
alert('Password does not match!')
this.setState({
fullname: '',
username: '',
email: '',
password: '',
verifypassword: ''
})
}
}
render() {
return (
<div>
<h2>Welcome, Please Register</h2>
<FormContainer>
<Form type = 'submit'>
<Div>
<H4>Fullname:</H4>
<Input
type = "text"
value = {this.props.fullName}
name = "fullName"
onChange = {this.props.handleChanges}
placeholder = "Full Name..."
/>
</Div>
<Div>
<H4>Email:</H4>
<Input
type = "email"
value = {this.props.email}
name = "email"
onChange = {this.props.handleChanges}
placeholder = "Email..."
/>
</Div>
<Div>
<H4>Username:</H4>
<Input
type = "text"
value = {this.props.username}
name = "username"
onChange = {this.props.handleChanges}
placeholder = "username..."
/>
</Div>
<Div>
<H4>Password:</H4>
<Input
type = "password"
value = {this.props.password}
name = "password"
onChange = {this.props.handleChanges}
placeholder = "Password..."
/>
</Div>
<Div>
<H4>Verify Password:</H4>
<Input
type = "verifypassword"
value = {this.props.verifypassword}
name = "verifypassword"
onChange = {this.props.handleChanges}
placeholder = "verifypassword..."
/>
</Div>
</Form>
</FormContainer>
<Button onClick = {this.props.register}>Register</Button>
</div>
)
}
}
// Styled-components for the registration form layout.
const FormContainer = styled.div`
width:24rem;
margin:auto;
background-color:lightgrey;
border-radius:10px;
padding:2rem;
`;
const Form = styled.form`
display: flex;
flex-direction: column;
align-items:flex-end;
`;
const Div = styled.div`
display: flex;
justify-content:center;
align-items:center;
`;
const H4 = styled.h4`
margin:0 0 .5rem 0;
`;
const Input = styled.input`
margin: 1rem;
height:3rem;
background: white;
`;
const Button = styled.button`
margin: 1rem;
`;
export default Register
|
getsproud/react-app
|
src/contexts/SettingsContext.js
|
<reponame>getsproud/react-app
import { createContext, useEffect, useState } from 'react';
import PropTypes from 'prop-types';
import { THEMES } from '../constants';
// Defaults used before any persisted settings have been restored.
const initialSettings = {
  compact: true,
  responsiveFontSizes: true,
  roundedCorners: true,
  theme: THEMES.DARK
};
// Restores settings from localStorage. Falls back to OS color-scheme-based
// defaults when nothing is stored, and returns null on any failure
// (e.g. stored data that is not valid JSON).
export const restoreSettings = () => {
  try {
    const raw = window.localStorage.getItem('settings');
    if (raw) {
      return JSON.parse(raw);
    }
    const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
    return {
      compact: true,
      responsiveFontSizes: true,
      roundedCorners: true,
      theme: prefersDark ? THEMES.DARK : THEMES.LIGHT
    };
  } catch (err) {
    console.error(err);
    return null;
  }
};
// Persists the settings object to localStorage as JSON.
export const storeSettings = (settings) => {
  const serialized = JSON.stringify(settings);
  window.localStorage.setItem('settings', serialized);
};
// Context default: initial settings plus a no-op saver, so consumers still
// work when rendered without a SettingsProvider above them.
const SettingsContext = createContext({
  settings: initialSettings,
  saveSettings: () => { }
});
// Provides settings state to the tree; restores persisted settings on mount
// and persists every save.
export const SettingsProvider = (props) => {
  const { children } = props;
  const [settings, setSettings] = useState(initialSettings);

  // Restore persisted settings once on mount.
  useEffect(() => {
    const restoredSettings = restoreSettings();
    if (restoredSettings) {
      setSettings(restoredSettings);
    }
  }, []);

  // Update React state and localStorage in one call.
  const saveSettings = (updatedSettings) => {
    setSettings(updatedSettings);
    storeSettings(updatedSettings);
  };

  return (
    <SettingsContext.Provider
      value={{
        settings,
        saveSettings
      }}
    >
      {children}
    </SettingsContext.Provider>
  );
};
SettingsProvider.propTypes = {
  children: PropTypes.node.isRequired
};

// Consumer export for class components that cannot use the hook/context API.
export const SettingsConsumer = SettingsContext.Consumer;

export default SettingsContext;
|
cityofaustin/joplin
|
joplin/users/fixtures/administrative/superadmin.py
|
import os
from django.conf import settings
from django.core.exceptions import ValidationError
from users.fixtures.helpers.create_fixture import create_fixture
# A user fixture for API testing.
# Should be loaded on the "test" Joplin branch only.
def superadmin():
    """Build and load the superadmin user fixture for API testing.

    Returns the result of create_fixture() for the user data below, or
    raises ValidationError when run against staging/production.
    """
    # NOTE(review): "<EMAIL>", "<PASSWORD>" and "<NAME>" look like
    # anonymization placeholders, not real values -- restore the actual
    # email, env-var name and first name before using this fixture.
    user_data = {
        "email": "<EMAIL>",
        "is_superuser": True,
        "password": os.getenv("<PASSWORD>"),
        "first_name": "<NAME>",
        "last_name": "Austin",
        "is_active": True,
        "is_staff": True,
    }
    # Extra safety check: never create this privileged user outside test envs.
    if settings.IS_STAGING or settings.IS_PRODUCTION:
        raise ValidationError("Do not load superadmin onto staging or production")
    else:
        return create_fixture(user_data, os.path.basename(__file__))
|
goline/lapi
|
body_test.go
|
package lapi
import (
"github.com/goline/errors"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"strings"
)
var _ = Describe("Body", func() {
	// NOTE(review): the description says "NewBody" but the assertion calls
	// NewBag() -- confirm which constructor this spec is meant to cover.
	It("NewBody should return an instance of Body", func() {
		Expect(NewBag()).NotTo(BeNil())
	})
})
// sampleBodyItem is a minimal payload type used to exercise decoding.
type sampleBodyItem struct {
	Price float64 `json:"price"`
}

// sampleParserForBody reports JSON content type but always fails to
// encode/decode, used to drive the parser error paths.
type sampleParserForBody struct{}

func (p *sampleParserForBody) Encode(v interface{}) ([]byte, error) {
	return make([]byte, 0), errors.New("UNABLE_TO_ENCODE", "")
}

func (p *sampleParserForBody) Decode(data []byte, v interface{}) error {
	return errors.New("UNABLE_TO_DECODE", "")
}

func (p *sampleParserForBody) ContentType() string {
	return CONTENT_TYPE_JSON
}
// sampleIOReader always fails, used to drive Body read-failure paths.
type sampleIOReader struct{}

func (r *sampleIOReader) Read(p []byte) (n int, err error) {
	return 0, errors.New("", "")
}

// sampleIOWriter always fails, used to drive Body write-failure paths.
type sampleIOWriter struct{}

func (r *sampleIOWriter) Write(p []byte) (n int, err error) {
	return 0, errors.New("", "")
}
// sampleBodyParser decodes successfully but always fails to encode,
// used to drive the encode-failure path of Body.Write.
type sampleBodyParser struct{}

func (p *sampleBodyParser) ContentType() string {
	return CONTENT_TYPE_JSON
}

func (p *sampleBodyParser) Decode(data []byte, v interface{}) error {
	return nil
}

func (p *sampleBodyParser) Encode(v interface{}) ([]byte, error) {
	return []byte(""), errors.New("", "")
}
// Specs for FactoryBody: charset/content-type accessors plus the error and
// success paths of Read, Write and Flush.
var _ = Describe("FactoryBody", func() {
	It("Charset should return UTF-8", func() {
		b := &FactoryBody{}
		b.charset = "UTF-8"
		Expect(b.Charset()).To(Equal("UTF-8"))
	})
	It("WithCharset should allow to set charset", func() {
		b := &FactoryBody{}
		b.WithCharset("UTF-8")
		Expect(b.charset).To(Equal("UTF-8"))
	})
	It("ContentType should return a string represents for content-type", func() {
		b := &FactoryBody{}
		b.contentType = CONTENT_TYPE_XML
		Expect(b.ContentType()).To(Equal(CONTENT_TYPE_XML))
	})
	It("WithContentType should allow to set content-type", func() {
		b := &FactoryBody{}
		Expect(b.WithContentType(CONTENT_TYPE_XML).ContentType()).To(Equal(CONTENT_TYPE_XML))
	})
	// Read error paths: missing reader, failing reader, no parser, bad payload.
	It("Read should return error code ERR_BODY_READER_MISSING", func() {
		b := &FactoryBody{}
		err := b.Read(nil)
		Expect(err).NotTo(BeNil())
		Expect(err.Code()).To(Equal(ERR_BODY_READER_MISSING))
	})
	It("Read should return error code ERR_BODY_READER_FAILURE", func() {
		b := &FactoryBody{reader: new(sampleIOReader)}
		err := b.Read(nil)
		Expect(err).NotTo(BeNil())
		Expect(err.Code()).To(Equal(ERR_BODY_READER_FAILURE))
	})
	It("Read should return error code ERR_NO_PARSER_FOUND", func() {
		b := &FactoryBody{reader: strings.NewReader(`{"status": true}`), ParserManager: NewParserManager()}
		err := b.Read(nil)
		Expect(err).NotTo(BeNil())
		Expect(err.Code()).To(Equal(ERR_NO_PARSER_FOUND))
	})
	It("Read should return error code ERR_PARSE_DECODE_FAILURE", func() {
		b := &FactoryBody{reader: strings.NewReader(`{"status": true}`), ParserManager: NewParserManager()}
		b.WithParser(new(JsonParser))
		b.WithContentType(CONTENT_TYPE_JSON)
		err := b.Read(nil)
		Expect(err).NotTo(BeNil())
		Expect(err.Code()).To(Equal(ERR_PARSE_DECODE_FAILURE))
	})
	It("Read should return nil", func() {
		b := &FactoryBody{reader: strings.NewReader(`{"price": 10.2}`), ParserManager: NewParserManager()}
		b.WithParser(new(JsonParser))
		b.WithContentType(CONTENT_TYPE_JSON)
		i := new(sampleBodyItem)
		err := b.Read(i)
		Expect(err).To(BeNil())
		Expect(i.Price).To(Equal(float64(10.2)))
	})
	// Write paths: nil, raw string and []byte are stored directly; anything
	// else goes through the registered parser.
	It("Write should return nil when writing nil", func() {
		b := &FactoryBody{ParserManager: NewParserManager()}
		err := b.Write(nil)
		Expect(err).To(BeNil())
	})
	It("Write should return nil when writing string", func() {
		b := &FactoryBody{ParserManager: NewParserManager()}
		err := b.Write("a string")
		Expect(err).To(BeNil())
		Expect(string(b.contentBytes)).To(Equal("a string"))
	})
	It("Write should return error code ERR_NO_PARSER_FOUND", func() {
		b := &FactoryBody{ParserManager: NewParserManager()}
		err := b.Write(new(sampleBodyItem))
		Expect(err).NotTo(BeNil())
		Expect(err.Code()).To(Equal(ERR_NO_PARSER_FOUND))
	})
	It("Write should return error code ERR_PARSE_ENCODE_FAILURE", func() {
		b := &FactoryBody{ParserManager: NewParserManager()}
		b.WithParser(new(sampleBodyParser))
		b.WithContentType(CONTENT_TYPE_JSON)
		err := b.Write(new(sampleBodyItem))
		Expect(err).NotTo(BeNil())
		Expect(err.Code()).To(Equal(ERR_PARSE_ENCODE_FAILURE))
	})
	It("Write should return nil when writing bytes", func() {
		b := &FactoryBody{ParserManager: NewParserManager()}
		err := b.Write([]byte("sample string"))
		Expect(err).To(BeNil())
		Expect(string(b.contentBytes)).To(Equal("sample string"))
	})
	It("Flush should return error code ERR_BODY_WRITER_MISSING", func() {
		b := &FactoryBody{}
		err := b.Flush()
		Expect(err).NotTo(BeNil())
		Expect(err.Code()).To(Equal(ERR_BODY_WRITER_MISSING))
	})
})
|
micromall-team/micromall
|
micromall-service/src/main/java/run/micromall/micromall/service/shop/service/MicromallCommentService.java
|
<filename>micromall-service/src/main/java/run/micromall/micromall/service/shop/service/MicromallCommentService.java
package run.micromall.micromall.service.shop.service;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import run.micromall.micromall.db.shop.mapper.MicromallCommentMapper;
import run.micromall.micromall.db.shop.model.entity.MicromallComment;
/**
 * <pre>
 * Service class for the comment table
 * </pre>
 *
 * @author songhaozhi
 * @since 2021-07-05
 */
@Slf4j
@Service
@RequiredArgsConstructor(onConstructor_ = @Autowired)
public class MicromallCommentService {

    private final MicromallCommentMapper commentMapper;

    /**
     * Save a comment record.
     *
     * @author songhaozhi
     * @param micromallComment
     */
    public int insert(MicromallComment micromallComment){
        return commentMapper.insert(micromallComment);
    }

    /**
     * Update a comment record by ID.
     *
     * @author songhaozhi
     * @param micromallComment
     */
    public int updateById(MicromallComment micromallComment){
        return commentMapper.updateById(micromallComment);
    }

    /**
     * Delete a comment record by ID.
     *
     * @author songhaozhi
     * @param id
     */
    public int deleteById(Long id){
        return commentMapper.deleteById(id);
    }

    /**
     * Fetch a comment record by ID.
     *
     * @author songhaozhi
     * @param id
     */
    public MicromallComment selectById(Long id){
        return commentMapper.selectById(id);
    }

    /**
     * Page through comment records.
     *
     * @author songhaozhi
     */
    public PageInfo<MicromallComment> list(Integer page, Integer limit, String sort, String order){
        PageHelper.startPage(page, limit);
        QueryWrapper<MicromallComment> wrapper = Wrappers.query();
        // Apply ordering only when both sort column and order are provided;
        // any value other than "desc" sorts ascending.
        wrapper.orderBy(StrUtil.isNotBlank(sort) && StrUtil.isNotBlank(order), !"desc".equals(order), sort);
        // Exclude logically deleted rows.
        wrapper.eq("deleted", false);
        return new PageInfo<>(commentMapper.list(wrapper));
    }
}
|
jefbed/jbwm
|
title_bar.h
|
<reponame>jefbed/jbwm
// jbwm - Minimalist Window Manager for X
// Copyright 2008-2020, <NAME> <<EMAIL>>
// Copyright 1999-2015, <NAME> <<EMAIL>>
// See README for license and other details.
#ifndef JBWM_TITLE_BAR_H
#define JBWM_TITLE_BAR_H
struct JBWMClient;
/* Toggle the shade state of client c -- presumably roll-up/unroll of the
 * window to its title bar; confirm in title_bar.c. */
void jbwm_toggle_shade(struct JBWMClient * c);
/* Redraw/refresh the title bar of client c. */
void jbwm_update_title_bar(struct JBWMClient * c);
#endif /* JBWM_TITLE_BAR_H */
|
chenjiaxing6/pocket-shop
|
pocket-shop-web-admin/src/main/java/cn/ishangit/shop/web/admin/abstracts/AbstractBaseController.java
|
<filename>pocket-shop-web-admin/src/main/java/cn/ishangit/shop/web/admin/abstracts/AbstractBaseController.java
package cn.ishangit.shop.web.admin.abstracts;
import cn.ishangit.shop.commons.dto.BaseResult;
import cn.ishangit.shop.commons.dto.PageInfo;
import cn.ishangit.shop.commons.persistence.BaseEntity;
import cn.ishangit.shop.commons.persistence.BaseService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.ui.Model;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import javax.servlet.http.HttpServletRequest;
/**
 * @description: Generic base Controller encapsulation for CRUD pages
 * @author: Chen
 * @create: 2019-05-27 22:19
 **/
public abstract class AbstractBaseController<T extends BaseEntity,S extends BaseService<T>> {

    @Autowired
    protected S service;

    /**
     * Navigate to the list page.
     * @param model
     * @return
     */
    public abstract String list(Model model);

    /**
     * Navigate to the form page.
     * @return
     */
    public abstract String form();

    /**
     * Save the entity.
     * @param entity
     * @return
     */
    public abstract String save(T entity, RedirectAttributes redirectAttributes, Model model);

    /**
     * Batch delete by a string of ids.
     * @param ids
     * @return
     */
    public abstract BaseResult delete(String ids);

    /**
     * Paged query.
     * @param request
     * @param entity
     * @return
     */
    public abstract PageInfo<T> page(HttpServletRequest request, T entity);

    /**
     * Detail view.
     * @param entity
     * @return
     */
    public abstract String detail(T entity);
}
|
Embodimentgeniuslm3/BDB11A0E2DE062D2E39E4C5301B2FE5E
|
script/lib/update-dependency/spec/pull-request-spec.js
|
const nock = require('nock');
const { createPR, findPR } = require('../pull-request');
const createPrResponse = require('./fixtures/create-pr-response.json');
const searchResponse = require('./fixtures/search-response.json');
// Specs for the update-dependency PR helpers; GitHub's API is mocked with
// nock, and scope.done() asserts that each mock endpoint was actually hit.
describe('Pull Request', () => {
  it('Should create a pull request', async () => {
    const scope = nock('https://api.github.com')
      .post('/repos/atom/atom/pulls', {
        title: '⬆️ octocat@2.0.0',
        body: 'Bumps octocat from 1.0.0 to 2.0.0',
        head: 'octocat-2.0.0',
        base: 'master'
      })
      .reply(200, createPrResponse);
    const response = await createPR(
      {
        moduleName: 'octocat',
        installed: '1.0.0',
        latest: '2.0.0',
        isCorePackage: false
      },
      'octocat-2.0.0'
    );
    scope.done();
    expect(response.data).toEqual(createPrResponse);
  });

  it('Should search for a pull request', async () => {
    const scope = nock('https://api.github.com')
      .get('/search/issues')
      .query({
        q:
          'octocat type:pr octocat@2.0.0 in:title repo:atom/atom head:octocat-2.0.0 state:open',
        owner: 'atom',
        repo: 'atom'
      })
      .reply(200, searchResponse);
    const response = await findPR(
      {
        moduleName: 'octocat',
        installed: '1.0.0',
        latest: '2.0.0'
      },
      'octocat-2.0.0'
    );
    scope.done();
    expect(response.data).toEqual(searchResponse);
  });
});
|
bkachinthay/squash-ladder
|
app/containers/App/AlertContainer.js
|
import React from 'react';
import { connect } from 'react-redux';
import { createStructuredSelector } from 'reselect';
import Alert from 'components/Alert';
import { makeSelectAlertText } from './selectors';
import { setAlertMessage } from './actions';
// Thin presentational wrapper: renders the shared Alert component.
const AlertContainer = (props) => (<Alert {...props} />);

// Maps the global alert message into the Alert's `alertMessage` prop.
const mapStateToProps = createStructuredSelector({
  alertMessage: makeSelectAlertText(),
});

export default connect(
  mapStateToProps, {
    // Closing the alert clears the message in the store.
    handleClose: () => setAlertMessage(''),
  }
)(AlertContainer);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.