repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
kudocc/CCKit
CCKit/util/NSObject+CCKit.h
// // NSObject+CCKit.h // demo // // Created by KudoCC on 16/5/28. // Copyright © 2016年 KudoCC. All rights reserved. // #import <Foundation/Foundation.h> @interface NSObject (CCKit) - (id)cc_deepCopy; + (void)logMethodNames; @end
trevorcarlson/Graphite
contrib/orion/Buffer/WordlineUnit.cc
#include "WordlineUnit.h" #include "SRAM.h" #include "TechParameter.h" WordlineUnit::WordlineUnit( const string& wl_model_str_, const SRAM* sram_ptr_, const TechParameter* tech_param_ptr_ ) { if (wl_model_str_ == string("RW_WORDLINE")) { m_wl_model = RW_WORDLINE; } else if (wl_model_str_ == string("WO_WORDLINE")) { m_wl_model = WO_WORDLINE; } else { m_wl_model = NO_MODEL; } if (m_wl_model != NO_MODEL) { m_sram_ptr = sram_ptr_; m_tech_param_ptr = tech_param_ptr_; init(); } } WordlineUnit::~WordlineUnit() {} void WordlineUnit::init() { uint32_t num_port = m_sram_ptr->get_num_port(); uint32_t num_read_port = m_sram_ptr->get_num_read_port(); uint32_t num_col = m_sram_ptr->get_num_col(); uint32_t num_data_end = m_sram_ptr->get_num_data_end(); double RegCellWidth = m_tech_param_ptr->get_RegCellWidth(); double BitlineSpacing = m_tech_param_ptr->get_BitlineSpacing(); if (num_data_end == 2) { m_wl_len = num_col*(RegCellWidth + 2*num_port*BitlineSpacing); } else { m_wl_len = num_col*(RegCellWidth + (2*num_port-num_read_port)*BitlineSpacing); } double wl_cmetal; if (num_port > 1) { wl_cmetal = m_tech_param_ptr->get_CC3M3metal(); } else { wl_cmetal = m_tech_param_ptr->get_CM3metal(); } m_wl_wire_cap = m_wl_len*wl_cmetal; double e_factor = m_tech_param_ptr->get_EnergyFactor(); double Wmemcellr = m_tech_param_ptr->get_Wmemcellr(); double Wmemcellw = m_tech_param_ptr->get_Wmemcellw(); double Woutdrivern = m_tech_param_ptr->get_Woutdrivern(); double Woutdriverp = m_tech_param_ptr->get_Woutdriverp(); double NMOS_TAB_0 = m_tech_param_ptr->get_NMOS_TAB(0); double PMOS_TAB_0 = m_tech_param_ptr->get_PMOS_TAB(0); switch(m_wl_model) { case RW_WORDLINE: m_e_read = calc_wordline_cap(num_col*num_data_end, Wmemcellr) * e_factor; m_e_write = calc_wordline_cap(num_col*2, Wmemcellw) * e_factor; m_i_static = (Woutdrivern*NMOS_TAB_0 + Woutdriverp*PMOS_TAB_0); break; case WO_WORDLINE: m_e_read = 0; m_e_write = calc_wordline_cap(num_col*2, Wmemcellw)*e_factor; m_i_static = 0; break; default: 
printf("error\n"); } return; } double WordlineUnit::calc_wordline_cap( uint32_t num_mos_, double mos_width_ ) const { double total_cap; // part 1: line cap, including gate cap of pass tx's and metal cap double BitWidth = m_tech_param_ptr->get_BitWidth(); total_cap = m_tech_param_ptr->calc_gatecappass(mos_width_, BitWidth/2.0-mos_width_)*num_mos_ + m_wl_wire_cap; // part 2: input driver double period = m_tech_param_ptr->get_period(); double psize, nsize; psize = m_tech_param_ptr->calc_driver_psize(total_cap, period/16.0); double Wdecinvn = m_tech_param_ptr->get_Wdecinvn(); double Wdecinvp = m_tech_param_ptr->get_Wdecinvp(); nsize = psize*Wdecinvn/Wdecinvp; // WHS: 20 should go to PARM total_cap += m_tech_param_ptr->calc_draincap(nsize, TechParameter::NCH, 1) + m_tech_param_ptr->calc_draincap(psize, TechParameter::PCH, 1) + m_tech_param_ptr->calc_gatecap(psize+nsize, 20); return total_cap; }
Mastergatto/cen64-rsp
Interface.h
/* ============================================================================ * Interface.h: Reality Shader Processor (RSP) Interface. * * RSPSIM: Reality Signal Processor SIMulator. * Copyright (C) 2013, <NAME>. * All rights reserved. * * This file is subject to the terms and conditions defined in * file 'LICENSE', which is part of this source code package. * ========================================================================= */ #ifndef __RSP__INTERFACE_H__ #define __RSP__INTERFACE_H__ #include "Common.h" #include "CPU.h" int SPRegRead(void *, uint32_t, void *); int SPRegWrite(void *, uint32_t, void *); #endif
hdm/mac-tracker
data/js/8c/1f/64/7a/a0/00.36.js
macDetailCallback("8c1f647aa000/36",[{"d":"2022-02-09","t":"add","s":"ieee-oui36.csv","a":"133 12 Ave SE Calgary Alberta CA T2G 0Z9","c":"CA","o":"XSENSOR Technology Corp."}]);
ankitsumitg/python-projects
dnaSequence/test_candidateOverlapsTarget.py
""" Do Not Edit this file. You may and are encouraged to look at it for reference. """ import unittest import dnaSequencing class TestCandidateOverlapsTarget(unittest.TestCase): def test001_candidateOverlapsTargetExists(self): self.assertTrue('candidateOverlapsTarget' in dir(dnaSequencing), 'Function "candidateOverlapsTarget" was not defined, check your spelling') def test002_candidateOverlapsTarget(self): from dnaSequencing import candidateOverlapsTarget target = 'ABBBBBA' candidate = 'BABBBAA' overlaps = [False, True, False, False, False, False, False] for i in range(len(overlaps)): expected = overlaps[i] overlap = i + 1 actual = candidateOverlapsTarget(target, candidate, overlap) if expected: message = 'The last %s characters of the target strand DO NOT overlap the first %s characters of the candidate strand' % (overlap, overlap) else: message = 'The last %s characters of the target strand and the first %s characters of the candidate strand DO overlap' % (overlap, overlap) self.assertEqual(expected, actual, message) def test003_candidateOverlapsTarget(self): from dnaSequencing import candidateOverlapsTarget target = 'ABABABAB' candidate = 'BABABABA' overlaps = [True, False, True, False, True, False, True, False] for i in range(len(overlaps)): expected = overlaps[i] overlap = i + 1 actual = candidateOverlapsTarget(target, candidate, overlap) if expected: message = 'The last %s characters of the target strand DO NOT overlap the first %s characters of the candidate strand' % (overlap, overlap) else: message = 'The last %s characters of the target strand and the first %s characters of the candidate strand DO overlap' % (overlap, overlap) self.assertEqual(expected, actual, message) if __name__ == '__main__': unittest.main()
brittsikora/clinical-timeline
src/defaults/index.js
import settings from './settings'; import syncSettings from './syncSettings'; import controls from './controls'; import syncControls from './syncControls'; export default { settings: settings, syncSettings: syncSettings, controls: controls, syncControls: syncControls };
reneelpetit/codestream-server
api_server/modules/streams/test/put_stream/message_to_team_test.js
<filename>api_server/modules/streams/test/put_stream/message_to_team_test.js 'use strict'; const MessageToStreamTest = require('./message_to_stream_test'); class MessageToTeamTest extends MessageToStreamTest { get description () { return 'members of the team should receive a message with the stream when a public channel stream is updated'; } // set the name of the channel we expect to receive a message on setChannelName (callback) { // since it is a public stream, the channel will be the team channel this.channelName = `team-${this.team.id}`; callback(); } setTestOptions (callback) { super.setTestOptions(() => { this.streamOptions.privacy = 'public'; callback(); }); } } module.exports = MessageToTeamTest;
minimalsm/js-ipfs-repo
packages/ipfs-repo/test/options-test.js
/* eslint-env mocha */ import { expect } from 'aegir/utils/chai.js' import sinon from 'sinon' import tempDir from 'ipfs-utils/src/temp-dir.js' import rimraf from 'rimraf' import { createRepo } from '../src/index.js' import { loadCodec } from './fixtures/load-codec.js' import { createBackend } from './fixtures/create-backend.js' if (!rimraf.sync) { // browser rimraf.sync = noop } export default () => { describe('custom options tests', () => { const repoPath = tempDir() after(() => { rimraf.sync(repoPath) }) it('missing repoPath', () => { expect( // @ts-expect-error () => createRepo() ).to.throw('missing repo path') }) it('allows for a custom lock', async () => { const release = { close () { return Promise.resolve() } } const lock = { /** * @param {string} path */ lock: (path) => { return Promise.resolve(release) }, /** * @param {string} path */ locked: (path) => { return Promise.resolve(true) } } const lockSpy = sinon.spy(lock, 'lock') const releaseSpy = sinon.spy(release, 'close') const repo = createRepo(repoPath, loadCodec, createBackend(), { repoLock: lock }) await repo.init({}) await repo.open() await repo.close() expect(lockSpy.callCount).to.equal(1) expect(releaseSpy.callCount).to.equal(1) }) it('ensures a custom lock has a .close method', async () => { const lock = { /** * @param {any} path */ lock: async (path) => { return Promise.resolve({ shouldBeCalledClose () { return Promise.resolve() } }) }, /** * @param {any} path */ locked: async (path) => { return Promise.resolve(true) } } const repo = createRepo(repoPath, loadCodec, createBackend(), { // @ts-expect-error lock closer types are wrong repoLock: lock }) let error try { // @ts-ignore we should not be using private methods await repo._openLock(repo.path) } catch (/** @type {any} */ err) { error = err } expect(error.code).to.equal('ERR_NO_CLOSE_FUNCTION') }) }) } function noop () {}
jazzfool/Xu
docs/html/search/all_f.js
var searchData= [ ['theme_97',['Theme',['../classxu_1_1_theme.html',1,'xu']]], ['type_98',['type',['../structxu_1_1_draw_command.html#a115fae8e0556a13e6542f899a35cd365',1,'xu::DrawCommand']]] ];
huerta2502/DAVPD
Davpd/Time.java
import javax.swing.JPanel; public class Time extends Thread { private boolean stop; private int s; private Contest contest; public Time(JPanel contest) { stop = false; this.contest = (Contest) contest; } @Override public void run(){ while(!stop){ try { sleep(1000); s++; contest.timerJTextField.setText(String.valueOf(s)); } catch(InterruptedException ie) { System.out.println(ie); } } } public void setStop(boolean stop){ this.stop = stop; s = 0; } }
Diez-Canseco-Ramirez/bazil
util/edtls/client.go
package edtls import ( "crypto/subtle" "crypto/tls" "errors" "fmt" "net" "github.com/agl/ed25519" ) var ( // ErrNotEdTLS is returned if the TLS peer does not support edtls. ErrNotEdTLS = errors.New("peer does not support edtls") ) // WrongPublicKeyError is returned if the server public key did not // match. type WrongPublicKeyError struct { Pub *[ed25519.PublicKeySize]byte } var _ error = (*WrongPublicKeyError)(nil) func (e *WrongPublicKeyError) Error() string { return fmt.Sprintf("wrong public key: %x", e.Pub[:]) } func NewClient(rawConn net.Conn, config *tls.Config, peerPub *[ed25519.PublicKeySize]byte) (*tls.Conn, error) { c := tls.Client(rawConn, config) if err := c.Handshake(); err != nil { _ = c.Close() return nil, err } s := c.ConnectionState() if len(s.PeerCertificates) == 0 { // servers are not supposed to be able to do that _ = c.Close() return nil, ErrNotEdTLS } pub, ok := Verify(s.PeerCertificates[0]) if !ok { _ = c.Close() return nil, ErrNotEdTLS } if subtle.ConstantTimeCompare(pub[:], peerPub[:]) != 1 { _ = c.Close() return nil, &WrongPublicKeyError{Pub: pub} } return c, nil }
uk-gov-mirror/ministryofjustice.disclosure-checker
app/errors/errors.rb
module Errors class ResultsNotFound < StandardError; end class InvalidSession < StandardError; end class ReportCompleted < StandardError; end end
spadapet/ff_game_library
source/ff.dx12/source/access.h
<gh_stars>0 #pragma once namespace ff::dx12 { class commands; class fence; class gpu_descriptor_allocator; class heap; class queue; class resource; ID3D12GraphicsCommandListX* get_command_list(const ff::dx12::commands& obj); ID3D12CommandAllocatorX* get_command_allocator(const ff::dx12::commands& obj); std::unique_ptr<ff::dx12::fence>&& move_fence(ff::dx12::commands& obj); ID3D12CommandQueueX* get_command_queue(const ff::dx12::queue& obj); ID3D12DescriptorHeapX* get_descriptor_heap(const ff::dx12::gpu_descriptor_allocator& obj); ID3D12FenceX* get_fence(const ff::dx12::fence& obj); ID3D12HeapX* get_heap(const ff::dx12::heap& obj); ID3D12ResourceX* get_resource(ff::dx12::heap& obj); ID3D12ResourceX* get_resource(const ff::dx12::resource& obj); }
billyfrost418/james-project
server/queue/queue-activemq/src/main/java/org/apache/james/queue/activemq/ActiveMQMailQueueItem.java
<filename>server/queue/queue-activemq/src/main/java/org/apache/james/queue/activemq/ActiveMQMailQueueItem.java /**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.queue.activemq; import java.io.IOException; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.Session; import org.apache.activemq.command.ActiveMQBlobMessage; import org.apache.james.queue.api.MailQueue.MailQueueException; import org.apache.james.queue.api.MailQueue.MailQueueItem; import org.apache.james.queue.jms.JMSMailQueueItem; import org.apache.mailet.Mail; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * ActiveMQ {@link MailQueueItem} implementation which handles Blob-Messages as * well */ public class ActiveMQMailQueueItem extends JMSMailQueueItem implements ActiveMQSupport { private static final Logger LOGGER = LoggerFactory.getLogger(ActiveMQMailQueueItem.class); private final Message message; public ActiveMQMailQueueItem(Mail mail, Session session, MessageConsumer consumer, Message message) { super(mail, session, consumer); 
this.message = message; } @Override public void done(boolean success) throws MailQueueException { super.done(success); if (success) { if (message instanceof ActiveMQBlobMessage && !getMail().getAttribute(JAMES_REUSE_BLOB_URL).isPresent()) { // This should get removed once this jira issue was fixed // https://issues.apache.org/activemq/browse/AMQ-1529 try { ((ActiveMQBlobMessage) message).deleteFile(); } catch (IOException | JMSException e) { LOGGER.warn("Unable to delete blob message file for mail {}", getMail().getName()); } } getMail().removeAttribute(JAMES_REUSE_BLOB_URL); } } }
RHINO-VIP/RHINO-VIP.github.io
docs/html/search/files_3.js
var searchData= [ ['main_2ecpp',['main.cpp',['../main_8cpp.html',1,'']]], ['main_5fip_5ftest_2ecpp',['main_ip_test.cpp',['../main__ip__test_8cpp.html',1,'']]], ['main_5fip_5ftest_5fread_2ecpp',['main_ip_test_read.cpp',['../main__ip__test__read_8cpp.html',1,'']]], ['message_2ecpp',['Message.cpp',['../Message_8cpp.html',1,'']]], ['message_2eh',['Message.h',['../Message_8h.html',1,'']]], ['messagequeue_2ecpp',['MessageQueue.cpp',['../MessageQueue_8cpp.html',1,'']]], ['messagequeue_2eh',['MessageQueue.h',['../MessageQueue_8h.html',1,'']]] ];
kantega/Flyt-cms
modules/core/src/java/no/kantega/publishing/admin/taglib/PrintContentNavigatorTag.java
<gh_stars>1-10 /* * Copyright 2009 Kantega AS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package no.kantega.publishing.admin.taglib; import no.kantega.commons.util.LocaleLabels; import no.kantega.publishing.admin.AdminRequestParameters; import no.kantega.publishing.admin.util.NavigatorUtil; import no.kantega.publishing.common.Aksess; import no.kantega.publishing.common.data.NavigationMapEntry; import no.kantega.publishing.common.data.SiteMapEntry; import no.kantega.publishing.common.data.enums.ContentType; import javax.servlet.jsp.JspWriter; import java.io.IOException; public class PrintContentNavigatorTag extends PrintNavigatorTag { @Override protected void printBody(NavigationMapEntry item) throws IOException { SiteMapEntry currentItem = (SiteMapEntry)item; JspWriter out = getJspContext().getOut(); StringBuilder href = new StringBuilder(); href.append("/content.ap?"); href.append(AdminRequestParameters.THIS_ID).append("=").append(currentItem.getId()).append("&amp;"); href.append(AdminRequestParameters.CONTENT_ID).append("=").append(currentItem.getContentId()); if (currentItem.isHasChildren()) { String openState = currentItem.isOpen()? 
"open": "closed"; out.write("<span class=\"openState\"><a href=\"" + href + "\" class=\"" + openState + "\"></a></span>"); } else { out.write("<span class=\"openState\"><span class=\"noChildren\"></span></span>"); } ContentType type = currentItem.getType(); String title = NavigatorUtil.getNavigatorTitle(type, currentItem.getTitle()); String iconText; String iconClass; int visibilityStatus = currentItem.getVisibilityStatus(); if (currentItem.getParentId() == 0) { iconClass = "root"; iconText = title; } else { iconClass = NavigatorUtil.getIcon(type, visibilityStatus, currentItem.getStatus()); iconText = NavigatorUtil.getIconText(type, visibilityStatus, currentItem.getStatus()); } out.write("<span class=\"icon\"><a href=\"" + href + "\" class=\""+iconClass+"\" title=\""+iconText+"\"></a></span>"); boolean isSelected = false; // Mark object (shortcuts will not be marked since you go directly to the object) if (currentItem.getId() == getCurrentId() && type != ContentType.SHORTCUT) { isSelected = true; } String titleClass = NavigatorUtil.getContextMenuType(type, visibilityStatus, currentItem.getStatus()); if (isSelected && highlightCurrent) { titleClass += " selected"; } out.write("<span class=\"title\"><a href=\""+ href +"\" class=\""+ titleClass +"\" title=\"" + title + "\">" + title +"</a></span>"); if (currentItem.getNumberOfNotes() > 0) { String notesTxt = LocaleLabels.getLabel("aksess.navigator.notes", Aksess.getDefaultAdminLocale()); out.write("<span class=\"notes\"><a href=\"" + href + "\" title=\"" + + currentItem.getNumberOfNotes() + " " + notesTxt + "\"></a></span>"); } } }
rpuntaie/c-examples
cpp/language_default_comparisons_8.cpp
<gh_stars>0 /* g++ --std=c++20 -pthread -o ../_build/cpp/language_default_comparisons_8.exe ./cpp/language_default_comparisons_8.cpp && (cd ../_build/cpp/;./language_default_comparisons_8.exe) https://en.cppreference.com/w/cpp/language/default_comparisons */ #include <compare> #include <string> struct Base { std::string zip; auto operator<=>(const Base&) const = default; }; struct TotallyOrdered : Base { std::string tax_id; std::string first_name; std::string last_name; public: // custom operator<=> because we want to compare last names first: std::strong_ordering operator<=>(const TotallyOrdered& that) const { if (auto cmp = (Base&)(*this) <=> (Base&)that; cmp != 0) return cmp; if (auto cmp = last_name <=> that.last_name; cmp != 0) return cmp; if (auto cmp = first_name <=> that.first_name; cmp != 0) return cmp; return tax_id <=> that.tax_id; } // ... non-comparison functions ... }; // compiler generates all four relational operators #include <cassert> #include <set> int main() { TotallyOrdered to1{"a","b","c","d"}, to2{"a","b","d","c"}; std::set<TotallyOrdered> s; // ok s.insert(to1); // ok assert(to2 <= to1); // ok, single call to <=> }
node-dot-cpp/safe-memory
checker/test/instrument/fix_Z2_args.cpp
// RUN: %check_safememory_instrument --fix-only %s %t %p #include <safememory/safe_ptr.h> #include <safe_types.h> using namespace safememory; struct UnsafeType { int call(int i) { return i; } UnsafeType& operator<<(int) { return *this; } }; struct Bad { SafeType StMember; owning_ptr<SafeType> StPtr; owning_ptr<UnsafeType> UPtr; Bad() { StPtr = make_owning<SafeType>(); UPtr = make_owning<UnsafeType>(); } int release() { StPtr.reset(); UPtr.reset(); return 0; } void otherMethod(int i) {} SafeType& getSt() { return *StPtr; } UnsafeType& getU() { return *UPtr; } void verifyZombieArgs() { //both args may be zombie safeFunction(getSt(), getSt()); // CHECK-FIXES: { auto& nodecpp_0 = getSt(); auto& nodecpp_1 = getSt(); safeFunction(nodecpp_0, nodecpp_1); }; // literal can't zombie // nothing to do here safeFunction(getSt(), "hello!"); } };
mehdilaktaf/twitter-clone
client/components/Modal.js
<filename>client/components/Modal.js export default ({ onClose, children }) => ( <div> <div className="overlay" onClick={onClose} /> <div className="modal"> {children} </div> <style jsx>{` .overlay { background-color: rgba(65,65,85,0.58); position: fixed; top: 0; bottom: 0; left: 0; right: 0; z-index: 110; } .modal { position: fixed; left: 50%; top: 50%; transform: translateX(-50%) translateY(-50%); width: 538px; background-color: white; box-shadow: 0 1px 27px 0 rgba(0,0,0,0.19); border-radius: 3px; padding: 14px; z-index: 120; } .modal :global(h3) { text-align: center; margin: 10px 0; } `}</style> </div> )
gwr3n/jsdp
jsdp/src/main/java/jsdp/app/inventory/capital/CF_StateSpace.java
package jsdp.app.inventory.capital; import java.util.ArrayList; import java.util.Iterator; import java.util.function.Function; import jsdp.sdp.Action; import jsdp.sdp.HashType; import jsdp.sdp.State; import jsdp.sdp.StateSpace; public class CF_StateSpace extends StateSpace<CF_StateDescriptor> { public CF_StateSpace(int period, Function<State, ArrayList<Action>> buildActionList, HashType hashType, int stateSpaceSizeLowerBound, float loadFactor){ super(period, hashType, stateSpaceSizeLowerBound, loadFactor); CF_StateSpace.buildActionList = buildActionList; } public CF_StateSpace(int period, Function<State, ArrayList<Action>> buildActionList, HashType hashType){ super(period, hashType); CF_StateSpace.buildActionList = buildActionList; } public boolean exists(CF_StateDescriptor descriptor){ return states.get(descriptor) != null; } public State getState(CF_StateDescriptor descriptor){ State value = states.get(descriptor); if(value == null){ State state = new CF_State(descriptor); this.states.put(descriptor, state); return state; }else return (CF_State) value; } public Iterator<State> iterator() { throw new NullPointerException("Method not implemented"); } }
LarmIg/Algoritmos-Python
CAPITULO 7/Exercicio C.py
<reponame>LarmIg/Algoritmos-Python<gh_stars>1-10 # Elaborar um programa que leia oito elementos numéricos inteiros em uma matriz A de uma dimensão do tipo vetor. Construir uma matriz B de mesma dimensão e tipo com os elementos da matriz A multiplicados por 5. Montar uma rotina de pesquisa binária, para pesquisar os elementos armazenados na matriz B. A = [] B = [] for i in range(0, 8): A.append(int(input('Informe {}° valor de A: '.format(i + 1)))) for i in range(0, 8): B.append(A[i] * 5) # Pesquisa Binaria for i in range(0, 7): for j in range(0, 8): if(B[i] > B[j]): X = B[i] B[i] = B[j] B[j] = X R = "SIM" while(R == "SIM"): Pesq = input('Pesquisa: ') Com = 1 Fin = 8 Ach = False while(Com <= Fin) and (Ach == False): Mei = (Con + Fin) / 2 if(Pesq == B[Mei]): Ach = True else: if(Pesq < B[Mei]): Fin = Mei - 1 else: Fin = Mei + 1 if(Ach == True): print('{} foi localizado na posição {}'.format(Pesq, Mei)) else: print('{} não foi localizado'.format(Pesq)) R = input('R: ')a# Construir um programa que leia 15 elementos numéricos inteiros em uma matriz A de uma dimensão do tipo vetor. Construir uma matriz B de mesmo tipo e dimensão, em que cada elemento seja o fatorial do elemento correspondente armazenado na matriz A. Apresentar os elementos da matriz B ordenados de forma crescente. A = [] B = [] for i in range(0, 15): A.append(int(input('Informe o {}° Valor da Matriz A: '.format(i + 1)))) for i in range(0, 15): B.append(1) for j in range(0, A[i]): B[j] = B[j] * j B.append(A[i]) for i in range(0, 15): print('B[{}] = {}'.format(i + 1, B[i]))
quanticc/sentry
src/main/java/top/quantic/sentry/service/FlowService.java
package top.quantic.sentry.service;

import org.reflections.Reflections;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import sx.blah.discord.api.internal.json.objects.EmbedObject;
import top.quantic.sentry.config.Constants;
import top.quantic.sentry.domain.Flow;
import top.quantic.sentry.event.ContentSupplier;
import top.quantic.sentry.event.SentryEvent;
import top.quantic.sentry.event.SentryReadyEvent;
import top.quantic.sentry.repository.FlowRepository;
import top.quantic.sentry.service.dto.FlowDTO;
import top.quantic.sentry.service.mapper.FlowMapper;
import top.quantic.sentry.service.util.TaskException;
import top.quantic.sentry.web.rest.vm.DatadogDowntime;
import top.quantic.sentry.web.rest.vm.DatadogEvent;
import top.quantic.sentry.web.rest.vm.DatadogPayload;
import top.quantic.sentry.web.rest.vm.DiscordWebhook;

import java.util.*;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isBlank;

/**
 * Service Implementation for managing Flow.
 *
 * A Flow routes an incoming message (a SentryEvent or an inbound webhook) through
 * a "translator" that converts it to an outbound payload (Discord message/embed/webhook
 * or Datadog event/downtime), which is then published via the SubscriberService.
 */
@Service
public class FlowService implements InitializingBean {

    private static final Logger log = LoggerFactory.getLogger(FlowService.class);

    // Flow "input" discriminators persisted on Flow entities.
    private static final String SENTRY_EVENT = "sentryEvent";
    private static final String INBOUND_WEBHOOK = "inboundWebhook";

    private final FlowRepository flowRepository;
    private final FlowMapper flowMapper;
    private final SubscriberService subscriberService;
    // All concrete SentryEvent subclasses, discovered once by classpath scanning;
    // used to validate Flow.message when saving a sentryEvent flow.
    private final Set<Class<? extends SentryEvent>> eventTypeSet;

    @Autowired
    public FlowService(FlowRepository flowRepository, FlowMapper flowMapper, SubscriberService subscriberService) {
        this.flowRepository = flowRepository;
        this.flowMapper = flowMapper;
        this.subscriberService = subscriberService;
        this.eventTypeSet = new Reflections(Constants.EVENTS_PACKAGE).getSubTypesOf(SentryEvent.class);
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // Log the discovered event types once the bean is fully wired.
        log.info("Event types available: {}", eventTypeSet.stream()
            .map(Class::getSimpleName)
            .collect(Collectors.joining(", ")));
    }

    /**
     * When the application is ready, emit a SentryReadyEvent.
     * Spring republishes a non-null return value of an @EventListener as a new event.
     */
    @EventListener
    public SentryReadyEvent onApplicationReady(ApplicationReadyEvent event) {
        return new SentryReadyEvent(event);
    }

    /**
     * Entry point for internal events: executes every enabled flow whose input is
     * "sentryEvent" and whose message matches the event's class name.
     */
    @EventListener
    public void onSentryEvent(SentryEvent event) {
        String className = event.getClass().getSimpleName();
        String content = event.asContent(new LinkedHashMap<>());
        log.debug("[{}] {}", className, content != null ? content : "");
        flowRepository.findByEnabledIsTrueAndInputAndMessage(SENTRY_EVENT, className)
            .forEach(flow -> executeEventFlow(flow, event));
    }

    /**
     * Entry point for inbound Datadog webhooks: executes every enabled
     * "inboundWebhook" flow whose configured variable "key" matches.
     */
    public void executeDatadogFlowsByKey(String key, DatadogPayload event) {
        flowRepository.findByEnabledIsTrueAndInput(INBOUND_WEBHOOK).stream()
            .filter(flow -> key.equals(flow.getVariables().get("key")))
            .forEach(flow -> executeEventFlow(flow, event));
    }

    // Dispatch on the flow's translator type: convert the supplier's content to
    // the target payload shape and publish it to the flow's output.
    private void executeEventFlow(Flow flow, ContentSupplier supplier) {
        log.info("Executing {} flow: {}", supplier.getClass().getSimpleName(), flow);
        String translatorType = flow.getTranslator();
        switch (translatorType) {
            case "DiscordWebhook":
                publish(flow, supplier.getContentId(), asDiscordWebhook(flow, supplier));
                break;
            case "DiscordMessage":
                publish(flow, supplier.getContentId(), supplier.asContent(flow.getVariables()));
                break;
            case "DiscordEmbed":
                publish(flow, supplier.getContentId(), asDiscordEmbed(flow, supplier));
                break;
            case "DiscordMessageEmbed":
                publish(flow, supplier.getContentId(), supplier.asContent(flow.getVariables()), asDiscordEmbed(flow, supplier));
                break;
            case "DatadogEvent":
                publish(flow, supplier.getContentId(), asDatadogEvent(flow, supplier));
                break;
            case "DatadogDowntime":
                publish(flow, supplier.getContentId(), asDatadogDowntime(flow, supplier));
                break;
            default:
                log.warn("Unknown translator type for this flow: {}", translatorType);
                break;
        }
    }

    private EmbedObject asDiscordEmbed(Flow flow, ContentSupplier supplier) {
        return supplier.asEmbed(flow.getVariables());
    }

    // Build a Discord webhook payload; "username"/"avatarUrl" flow variables
    // override the webhook defaults when present.
    private DiscordWebhook asDiscordWebhook(Flow flow, ContentSupplier supplier) {
        Map<String, Object> variables = flow.getVariables();
        String content = supplier.asContent(variables);
        DiscordWebhook webhook = new DiscordWebhook();
        Object username = variables.get("username");
        Object avatarUrl = variables.get("avatarUrl");
        if (username != null) {
            webhook.setUsername((String) username);
        }
        if (avatarUrl != null) {
            webhook.setAvatarUrl((String) avatarUrl);
        }
        webhook.setContent(content);
        // TODO: webhook with embeds
        return webhook;
    }

    // Build a Datadog event from the supplier's map; flow variables take
    // precedence over supplier values (see getFromMap), tags are merged.
    @SuppressWarnings("unchecked")
    private DatadogEvent asDatadogEvent(Flow flow, ContentSupplier supplier) {
        Map<String, Object> variables = flow.getVariables();
        Map<String, Object> map = supplier.asMap(variables);
        String title = (String) getFromMap("title", variables, map);
        String text = (String) getFromMap("text", variables, map);
        String alertType = (String) getFromMap("alert_type", variables, map);
        String aggregationKey = (String) getFromMap("aggregation_key", variables, map);
        String priority = (String) getFromMap("priority", variables, map);
        String host = (String) getFromMap("host", variables, map);
        String sourceTypeName = (String) getFromMap("source_type_name", variables, map);
        Long dateHappened = (Long) getFromMap("date_happened", variables, map);
        // decorate with markdown markers if needed
        Object markdown = getFromMap("markdown", variables, map);
        if (markdown != null && (boolean) markdown) {
            text = "%%%\n" + text + "\n%%%";
        }
        // combine tags
        List<String> tags = (List<String>) variables.getOrDefault("tags", new ArrayList<String>());
        tags.addAll((List<String>) map.getOrDefault("tags", new ArrayList<String>()));
        return new DatadogEvent(title, text, dateHappened, priority, host, tags, alertType, aggregationKey, sourceTypeName);
    }

    private DatadogDowntime asDatadogDowntime(Flow flow, ContentSupplier supplier) {
        Map<String, Object> variables = flow.getVariables();
        Map<String, Object> map = supplier.asMap(variables);
        String scope = (String) getFromMap("scope", variables, map);
        String message = (String) getFromMap("message", variables, map);
        Long end = (Long) getFromMap("end", variables, map);
        return new DatadogDowntime(scope, null, end, message, null);
    }

    // Lookup with precedence: flow variables first, then the supplier's map.
    private Object getFromMap(String key, Map<String, Object> first, Map<String, Object> second) {
        return first.getOrDefault(key, second.get(key));
    }

    // The publish overloads below validate the minimum payload for each target
    // before delegating to the SubscriberService.

    private void publish(Flow flow, String id, String content) {
        if (isBlank(content)) {
            log.info("[{}] Not publishing blank content to {}", flow.getName(), id);
        } else {
            subscriberService.publish(flow.getOutput(), id, content);
        }
    }

    private void publish(Flow flow, String id, DiscordWebhook webhook) {
        if (isBlank(webhook.getContent())) {
            log.info("[{}] Not publishing blank webhook to {}", flow.getName(), id);
        } else {
            subscriberService.publish(flow.getOutput(), id, webhook);
        }
    }

    private void publish(Flow flow, String id, DatadogEvent event) {
        if (isBlank(event.getText()) || isBlank(event.getTitle())) {
            log.info("[{}] Missing title and text - Not publishing event to {}", flow.getName(), id);
        } else {
            subscriberService.publish(flow.getOutput(), id, event);
        }
    }

    private void publish(Flow flow, String id, DatadogDowntime event) {
        if (isBlank(event.getScope())) {
            log.info("[{}] Missing scope - Not publishing event to {}", flow.getName(), id);
        } else {
            subscriberService.publish(flow.getOutput(), id, event);
        }
    }

    private void publish(Flow flow, String id, EmbedObject embed) {
        if (embed == null) {
            log.info("[{}] Not publishing null embed to {}", flow.getName(), id);
        } else {
            subscriberService.publish(flow.getOutput(), id, embed);
        }
    }

    private void publish(Flow flow, String id, String content, EmbedObject embed) {
        if (embed == null && isBlank(content)) {
            log.info("[{}] Not publishing null and/or blank content/embed to {}", flow.getName(), id);
        } else {
            subscriberService.publish(flow.getOutput(), id, content, embed);
        }
    }

    /**
     * Save a flow.
     *
     * @param flowDTO the entity to save
     * @return the persisted entity
     * @throws TaskException if the flow listens for a sentryEvent whose type
     *                       does not match any discovered SentryEvent subclass
     */
    public FlowDTO save(FlowDTO flowDTO) throws TaskException {
        log.debug("Request to save Flow : {}", flowDTO);
        Flow flow = flowMapper.flowDTOToFlow(flowDTO);
        if (flowDTO.getInput().equals(SENTRY_EVENT)
            && eventTypeSet.stream()
            .map(Class::getSimpleName)
            .noneMatch(name -> name.equals(flowDTO.getMessage()))) {
            throw new TaskException("Invalid event type: " + flow.getMessage());
        }
        flow = flowRepository.save(flow);
        FlowDTO result = flowMapper.flowToFlowDTO(flow);
        return result;
    }

    /**
     * Get all the flows.
     *
     * @param pageable the pagination information
     * @return the list of entities
     */
    public Page<FlowDTO> findAll(Pageable pageable) {
        log.debug("Request to get all Flows");
        Page<Flow> result = flowRepository.findAll(pageable);
        return result.map(flow -> flowMapper.flowToFlowDTO(flow));
    }

    /**
     * Get one flow by id.
     *
     * @param id the id of the entity
     * @return the entity
     */
    public FlowDTO findOne(String id) {
        log.debug("Request to get Flow : {}", id);
        Flow flow = flowRepository.findOne(id);
        FlowDTO flowDTO = flowMapper.flowToFlowDTO(flow);
        return flowDTO;
    }

    /**
     * Delete the flow by id.
     *
     * @param id the id of the entity
     */
    public void delete(String id) {
        log.debug("Request to delete Flow : {}", id);
        flowRepository.delete(id);
    }
}
phpyandong/opentaobao
model/bus/TaobaoBusBusnumberGetResultSet.go
package bus

// TaobaoBusBusnumberGetResultSet is the result envelope returned by the
// Taobao bus-number query API.
type TaobaoBusBusnumberGetResultSet struct {
	// ErrCode is the API error code; e.g. BUSNUMBER_SEARCH_NOBUS means no bus
	// trip was found, POWER_D indicates a permission problem.
	ErrCode string `json:"err_code,omitempty" xml:"err_code,omitempty"`
	// ErrMsg is the human-readable error message accompanying ErrCode.
	ErrMsg string `json:"err_msg,omitempty" xml:"err_msg,omitempty"`
	// Module carries the response payload.
	Module string `json:"module,omitempty" xml:"module,omitempty"`
	// Success reports whether the call succeeded.
	Success bool `json:"success,omitempty" xml:"success,omitempty"`
}
kmestry/PROBLEM_SOLVING_HACKERRANK_LEETCODE
src/com/geeksforgeeks/CountDigitsMaths.java
package com.geeksforgeeks;

/**
 * Demonstrates three ways to count the decimal digits of an int:
 * recursion, iterative division, and base-10 logarithm.
 *
 * All three return 0 for an input of 0 (kept from the original convention)
 * and now agree on negative inputs as well.
 */
public class CountDigitsMaths {

    public static void main(String[] args) {
        int ans = new CountDigitsMaths().countDigitsLogarithmic(-2);
        System.out.println("ans = " + ans);
        int ans1 = new CountDigitsMaths().countDigits(-2121212187);
        System.out.println("ans = " + ans1);
        int ans2 = new CountDigitsMaths().countDigitsApproach2(2121212187);
        System.out.println("ans = " + ans2);
    }

    /**
     * Recursive digit count. Java integer division truncates toward zero,
     * so the recursion also terminates for negative inputs (e.g. -2 / 10 == 0).
     *
     * @param number value whose digits are counted
     * @return number of decimal digits; 0 for input 0
     */
    private int countDigits(int number) {
        if (number == 0) {
            return 0;
        }
        return 1 + countDigits(number / 10);
    }

    /**
     * Iterative digit count.
     * Fixed: the original loop condition {@code num > 0} returned 0 for every
     * negative input; we now count the magnitude. The widening cast to long
     * before {@code Math.abs} avoids overflow for {@code Integer.MIN_VALUE}.
     *
     * @param num value whose digits are counted
     * @return number of decimal digits; 0 for input 0
     */
    private int countDigitsApproach2(int num) {
        long magnitude = Math.abs((long) num);
        int count = 0;
        while (magnitude > 0) {
            magnitude /= 10;
            count++;
        }
        return count;
    }

    /**
     * Logarithmic digit count.
     * Fixed: {@code Math.log10} of a negative value is NaN, so the original
     * returned 0 for all negatives; the magnitude is used instead.
     *
     * @param number value whose digits are counted
     * @return number of decimal digits; 0 for input 0
     */
    private int countDigitsLogarithmic(int number) {
        if (number == 0) return 0;
        return (int) Math.floor(Math.log10(Math.abs((double) number)) + 1);
    }
}
MikeFalowski/taurus
lib/taurus/test/__init__.py
<reponame>MikeFalowski/taurus #!/usr/bin/env python ############################################################################# ## # This file is part of Taurus ## # http://taurus-scada.org ## # Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain ## # Taurus is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. ## # Taurus is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. ## # You should have received a copy of the GNU Lesser General Public License # along with Taurus. If not, see <http://www.gnu.org/licenses/>. ## ############################################################################# """ Taurus provides a framework for testing. This framework intends to facilitate evaluation, bug finding and integration of contributed code/patches, as well as to promote test driven development in Taurus. The first implementation of this Framework is an outcome of the [Sardana Enhancement Proposal 5 (SEP5)](http://sourceforge.net/p/sardana/wiki/SEP5/) Ideally, bug reports should be accompanied by a test revealing the bug, whenever possible. The first tests implemented are focused on Unit Tests, but the same framework should be used for integration and system tests as well. The taurus.test.testsuite module provides an autodiscovered suite for all tests implemented in Taurus. The following are some key points to keep in mind when using this framework: - The Taurus test framework is based on :mod:`unittest` which should be imported from :mod:`taurus.external` in order to be compatible with all versions of python supported by Taurus. 
- all test-related code is contained in submodules named `test` which appear in any module of taurus. - test-related code falls in one of these three categories: - actual test code (classes that derive from unittest.TestCase) - utility classes/functions (code to simplify development of test code) - resources (accessory files required by some test). They are located in subdirectories named `res` For a more complete description of the conventions on how to write tests with the Taurus testing framework, please refer to the [SEP5](http://sourceforge.net/p/sardana/wiki/SEP5/). """ from .moduleexplorer import ModuleExplorer from .resource import getResourcePath from .base import insertTest from .fuzzytest import calculateTestFuzziness, loopSubprocess, loopTest
binghuang2018/aqml
io2/orca.py
#!/usr/bin/env python
"""Helpers for writing ORCA quantum-chemistry input files and parsing ORCA output.

NOTE(review): this module contains several apparent defects; they are flagged
inline with NOTE(review) comments rather than fixed, because the intended
behaviour cannot be confirmed from this file alone.
"""

import re, os, sys
import aqml.io2 as io2
import aqml.cheminfo as co
import aqml.cheminfo.core as cc
import numpy as np

# Shorthand boolean aliases used throughout this codebase.
T, F = True, False

uc = io2.Units()

# Cardinal number (zeta level) of each correlation-consistent basis set.
cardinal = {'vdz':2, 'vtz':3, 'vqz':4}

# Full basis-set names and their short aliases (two-way lookup tables below).
bsts = ['aug-cc-pvdz', 'aug-cc-pvtz', 'aug-cc-pvqz',
        'cc-pvdz', 'cc-pvtz', 'cc-pvqz',
        'def2-sv(p)', 'def2-svp', 'def2-tzvp', 'def2-qzvp']
bsts_short = ['avdz', 'avtz', 'avqz',
              'vdz', 'vtz', 'vqz',
              'def2sv-p', 'def2svp', 'def2tzvp', 'def2qzvp']
dctbo = dict(zip(bsts, bsts_short))   # full name -> short alias
dctbi = dict(zip(bsts_short, bsts))   # short alias -> full name

# Hamiltonian (method) short names and ORCA keywords.
# NOTE(review): hs_short has 6 entries but hs has 5, so zip() silently drops
# 'cc2' from dcthi — presumably a missing entry in hs; confirm.
hs_short = ['mp2', 'lmp2', 'lcc', 'lcc2', 'cc', 'cc2']  # Hamitonian
hs = ['RI-MP2', 'DLPNO-MP2', 'DLPNO-CCSD', 'CCSD', 'CCSD(T)']
dcthi = dict(zip(hs_short, hs))
dctho = dict(zip(hs, hs_short))

# Run a shell command and return its stripped stdout (or stdout split by lines).
cmdout = lambda cmd: os.popen(cmd).read().strip()
cmdout1 = lambda cmd: os.popen(cmd).read().strip().split('\n')

# Format a molecule's geometry as ORCA xyz lines; the "_cp" variant marks
# ghost atoms (symbol followed by ':') for counterpoise calculations.
sgeom = lambda m: '\n'.join(['{:2s} {:12.6f} {:12.6f} {:12.6f}'.format(_, x, y, z)
                             for _, (x, y, z) in zip(m.symbols, m.coords)])
sgeom_cp = lambda m: '\n'.join(['{:2s} : {:12.6f} {:12.6f} {:12.6f}'.format(_, x, y, z)
                                for _, (x, y, z) in zip(m.symbols, m.coords)])


class orca(cc.molecule):
    """A molecule plus machinery to write an ORCA input file and run ORCA."""

    # NOTE(review): '__int__' is almost certainly a typo for '__init__' — as
    # written this method is never invoked by orca(f, param); construction
    # falls through to cc.molecule's own __init__ instead.
    def __int__(self, f, param):
        cc.molecule.__init__(self, f)
        label = f[:-4]  # input basename (without the 3-letter extension)
        self.lb = label
        assert f[-3:] in ['sdf', 'xyz', 'mol', 'pdb']
        # task: energy, opt, copt, gdiis-copt, ...
        # NOTE(review): the 'param' argument is immediately overwritten by this
        # literal, so caller-supplied settings are discarded — confirm intent.
        param = {'nproc':1, 'mem': 1000, 'method':'b3lyp', 'basis':'Def2-TZVP', 'tightscf':T,
                 'charge':None, 'mult':None, 'df':T, 'task':'opt', 'disp':F, 'wc':F, 'maxit':60,
                 'hess':0, 'gc':'default'}
        self.param = param

    def write_input(self):
        """ write orca input file """
        param = self.param
        nproc = param['nproc']
        mem = param['mem']  # memory per core
        bst = param['basis']
        task = param['task']
        _charge = param['charge']
        charge = _charge if _charge else 0
        _mult = param['mult']
        # Parallelism / memory header.
        sc = "%pal nprocs %d end\n%maxcore %d\n" % (nproc, mem)
        # NOTE(review): the defaults dict above uses the key 'method', so
        # param['meth'] raises KeyError here — one of the two spellings is wrong.
        meth = param['meth']
        if meth in ['hf-3c', 'pbeh-3c', 'hf3c', 'pbeh3c']:
            # Composite "-3c" methods carry their own basis; normalise the name.
            if meth[-3] != '-': meth = meth[:-2] + '-3c'
            sm = '! %s nopop\n' % meth
            sb = ''
        elif meth in ['pbe', 'tpss', 'bp86', 'b3lyp', 'wb97x']:
            # Plain DFT: method line plus an explicit basis line.
            sm = '! %s TIGHTSCF nopop' % meth
            # if grid: sm += ' Grid5 FinalGrid6'
            # NOTE(review): 'disp' is undefined here — presumably param['disp'].
            sd3 = ' D3BJ\n' if disp else '\n'
            sm += sd3
            xtra = ''
            if meth in ['b3lyp', 'wb97x']:
                xtra = ' RIJCOSX'  # for hybrid df
            if bst in ['sv(p)', 'svp', 'tzvp',]:
                sb = '! def2-%s def2/J%s\n' % (bst, xtra)
            elif bst in ['vdz', 'vtz',]:
                sb = '! cc-p%s def2/J%s\n' % (bst, xtra)
            else:
                raise Exception('Todo')
        elif meth in hs_short:
            # Wavefunction methods (MP2 / coupled-cluster families).
            if 'cbs' in bst:  # ['ano-cbs-ep2','ano-cbs-ep3','cc-cbs-ep2','cc-cbs-ep3', 'cbs2','cbs3']:
                # Complete-basis-set extrapolation variants.
                # NOTE(review): 'idf' is undefined — presumably param['df'].
                df = 'RI RIJCOSX ' if idf else 'Conv '
                mb = {'lccsd(t)': 'DLPNO-CCSD(T)', 'mp2': 'MP2', 'ccsd(t)': 'CCSD(T)'}[meth]
                #dctbb = {'ano-cbs':'ano', 'cc-cbs':'cc', 'cbs':'cc'}
                ssb = bst.split('-')
                nc = len(ssb)
                if bst == 'cbs3':
                    sm = '! RHF ExtrapolateEP3(CC) TightSCF Conv'
                elif bst == 'cbs2':
                    bb = 'cc'
                    sm = '! %sRHF ExtrapolateEP2(2/3,%s,%s) TightSCF' % (df, bb, mb)
                elif bst == 'cbs':
                    sm = '! %s%s Extrapolate(2/3) TightSCF' % (df, mb)
                else:
                    raise Exception('Todo')
                sm += ' nopop\n'
                sb = ''
            else:
                sm = '! ' + dcthi[meth] + ' TightSCF\n'
                if bst in ['tzvp',]:
                    sb = '! def2-TZVP def2/J def2-TZVP/C RIJCOSX\n'
                elif bst in ['vdz', 'avdz', 'vtz', 'avtz']:
                    bn = {'vdz': 'cc-pVDZ', 'avdz': 'aug-cc-pVDZ', 'vtz': 'cc-pVTZ', 'avtz': 'aug-cc-pVTZ'}[bst]
                    sb = '! %s %s/C def2/J RIJCOSX\n' % (bn, bn)  #
                    #sb = '! %s %s/C def2/J\n'%(bn,bn) #
                else:
                    raise Exception('Todo')
        else:
            raise Exception('Todo')

        # Task keyword line. NOTE(review): the default task in __int__ is
        # 'opt', which is NOT a key of dctj ('optg' is) — the assert below
        # would fire for the default configuration.
        dctj = {'e': '', 'optg': '! Opt\n', 'tightopt': '! TightOpt\n', 'force': '! ENGRAD\n'}
        assert task in dctj, '#ERROR: task not supported!'
        st = dctj[task]
        if task in ['optg',]:
            st += '\n%%geom\nmaxiter %s\n' % param['maxit']
            nhess = param['hess']
            if nhess:
                st += 'calc_hess true\nrecalc_hess %s\n' % nhess  # calc Hess after `nhess ionic cycles
            if param['gc'] in ['tight']:
                st += 'TolE 1e-6\nTolRMSG 2e-4\nTolMaxG 3e-4\nTolRMSD 2e-4\nTolMaxD 3e-4\n'
            elif param['gc'] in ['loose']:
                st += 'TolE 1e-4\nTolRMSG 3e-4\nTolMaxG 4.5e-4\nTolRMSD 2e-2\nTolMaxD 3e-2\n'
            st += 'end'
        # NormalOpt (default) TolE=5e-6, TolRMSG=1e-4, TolMaxG=3e-4, TolRMSD=2e-3, TolMaxD=4e-3
        # TIGHTOPT            TolE=1e-6, TolRMSG=3e-5, TolMaxG=1e-4, TolRMSD=6e-4, TolMaxD=1e-3
        # GAU (G09 default)   3e-4 4.5e-4 1.2e-3 1.8e-3
        # GAU_LOOSE           1.7e-3 2.5e-3 6.7e-3 1.0e-2

        # Spin multiplicity: explicit value, per-element table for single
        # atoms, or parity of the total electron count otherwise.
        if _mult:
            mult = _mult
        else:
            if self.na == 1:
                mult = {1:2, 3:2, 4:1, 5:2, 6:3, 7:4, 8:3, 9:2,
                        11:2, 12:0, 13:2, 14:3, 15:4, 16:3, 17:2,
                        33:4, 34:3, 35:2, 53:2}[self.zs[0]]
            else:
                mult = np.mod(np.sum(self.zs), 2) + 1

        # NOTE(review): 'icp' is undefined (no param key either) and 'cmc' is
        # never imported — this whole counterpoise branch cannot currently run.
        if icp:  # calculate CP-corrected energy
            rawm = cmc.RawMol(self)
            mols = rawm.monomers
            n = len(mols)
            assert task in ['e',]
            if n == 1:
                so = sc + sm + sb + st + '\n'
                s = so + '* xyzfile 0 %d %s.xyz' % (mult, self.lb)
                with open(self.lb + '.com', 'w') as fid: fid.write(s)
            elif n == 2:
                _sm = sm.strip() + ' PModel\n'
                so = sc + _sm + sb + st + '\n'
                _header = '$new_job\n'
                # monomers: each computed in the full dimer basis (ghost atoms).
                s1 = ''   # monomer at dimer basis
                scp = ''  # '%id "monomer_2"\n'
                mult = np.mod(np.sum(mols[0].zs), 2) + 1
                s1_2 = so + scp + '*xyz 0 %d\n' % mult + sgeom(mols[0]) + '\n' + sgeom_cp(mols[1]) + '\n*\n\n\n'
                mult = np.mod(np.sum(mols[1].zs), 2) + 1
                s2_2 = _header + so + scp + '*xyz 0 %d\n' % mult + sgeom_cp(mols[0]) + '\n' + sgeom(mols[1]) + '\n*\n\n\n'
                # dimer
                s = s1_2 + s2_2
                with open(self.lb + '_cp.com', 'w') as fid: fid.write(s)
            else:
                raise Exception('Todo')
        else:
            so = sc + sm + sb + st + '\n\n'
            # NOTE(review): 'chg' and 'mol' are undefined in this scope —
            # presumably 'charge' and 'self'; confirm.
            if self.param['wc']:
                # write coord to orca input file
                scoord = '*xyz %d %d\n' % (chg, mult)
                scoord += ''.join(['{si} {ci[0]} {ci[1]} {ci[2]}\n'.format(si=mol.symbols[ia], ci=mol.coords[ia]) for ia in range(mol.na)])
                scoord += '*\n'
            else:
                scoord = '* xyzfile %d %d %s.xyz\n' % (chg, mult, self.lb)
            s = so + scoord + '\n'
            with open(self.lb + '.com', 'w') as fid: fid.write(s)

    def run(self):
        """Invoke ORCA (path taken from the 'orca4' env var) on the .com input."""
        assert 'orca4' in os.environ, '#ERROR: please specify env var "orca4"!'
        cmd = '$orca4 {lb}.com >{lb}.out'.format(lb=self.lb)
        iok = os.system(cmd)
        # NOTE(review): os.system returns 0 on SUCCESS, so 'if not iok' exits
        # with "Job failed" precisely when the job succeeded — inverted check.
        if not iok:
            sys.exit('Job failed')
        # NOTE(review): 'lb' is undefined here (should be self.lb), and
        # re-running orcajob.__init__ on an orca instance looks unintended.
        orcajob.__init__(self, lb)


class orcajob(object):
    """Parser for an existing ORCA output file '<label>.out'.

    Most parsing shells out to grep/sed/awk via cmdout/cmdout1, so this class
    only works on POSIX systems with those tools available.
    """

    def __init__(self, lb):
        # Accept either '<label>' or '<label>.<ext>'.
        if lb[-4] == '.': lb = lb[:-4]
        self.f = lb + '.out'
        assert os.path.exists(self.f)
        self.label = lb

    @property
    def icbs(self):
        # Cached: was a basis-set (CBS) extrapolation performed in this run?
        if not hasattr(self, '_icbs'):
            self._icbs = self.get_icbs()
        return self._icbs

    def get_icbs(self):
        # Presence of the 'Extrapolate' keyword marks a CBS run.
        key = 'Extrapolate'
        icbs = F
        if cmdout('grep "%s" %s' % (key, self.f)) != '':
            icbs = T
        return icbs

    @property
    def atoms(self):
        if not hasattr(self, '_atoms'):
            self._atoms = self.get_atoms()
        return self._atoms

    def get_atoms(self):
        """Extract the final Cartesian geometry from the output file."""
        cmd = "grep 'basis set group' %s | tail -1 | awk '{print $2}' | grep -o '[0-9]*'" % self.f
        na = int(cmdout(cmd)) + 1  # atom idx in orca starts from 0
        cmd = "grep -n 'CARTESIAN COORDINATES (ANGSTROEM)' %s | tail -1 | sed 's/:/ /g' | awk '{print $1}'" % self.f
        ln1 = 2 + int(cmdout(cmd))  # coordinates start two lines below the header
        ln2 = ln1 + na - 1
        cmd = 'sed -n "%s,%sp" %s' % (ln1, ln2, self.f)
        zs = []; coords = []
        for li in cmdout1(cmd):
            tsi = li.strip().split()[:4]
            zs.append(co.chemical_symbols.index(tsi[0]))
            # NOTE(review): eval() on file-derived text — float(vi) would be safer.
            coords.append([eval(vi) for vi in tsi[1:4]])
        return co.atoms(zs, coords)

    @property
    def meth(self):
        if not hasattr(self, '_meth'):
            self._meth = self.get_meth()
        return self._meth

    def get_meth(self):
        """Infer the electronic-structure method from keywords in the output."""
        _meth = None
        idft = F
        ifd = F
        for meth in ['b3lyp', 'wb97x', 'tpss', 'pbe0', 'pbe']:
            if cmdout('grep -i %s %s' % (meth, self.f)):
                _meth = meth
                idft = T
                ifd = T
                break
        #if not ifd:
        meths_i = ['dlpno-mp2', 'ri-mp2', 'mp2']
        meths_o = ['lmp2', 'mp2', 'mp2']
        dct = dict(zip(meths_i, meths_o))
        # NOTE(review): this loop iterates the module-level 'hs' (ORCA keyword
        # spellings like 'RI-MP2'), but 'dct' is keyed by meths_i above, so a
        # match raises KeyError — probably meant 'for meth in meths_i'.
        for meth in hs:
            ot = cmdout('grep -i "%s" %s' % (meth, self.f))
            if ot and ('!' in ot):
                _meth = dct[meth]
                ifd = T
                break
        #if nof ifd:
        meths_i = ['dlpno-ccsd', 'dlpno-ccsd(t)', 'ccsd', 'ccsd(t)']
        meths_o = ['lcc', 'lcc2', 'cc', 'cc2']
        dct = dict(zip(meths_i, meths_o))
        for meth in meths_i:
            ot = cmdout('grep -i "%s" %s' % (meth, self.f))
            if ot and ('!' in ot):
                _meth = dct[meth]
                ifd = T
                break
        assert _meth
        # dispersion interaction?
        if idft:
            ot = cmdout('grep D3BJ %s' % self.f)
            if ot and ('!' in ot):
                _meth += 'd3'
        return _meth

    @property
    def basis(self):
        if not hasattr(self, '_basis'):
            self._basis = self.get_basis()
        return self._basis

    def get_basis(self):
        # First full basis-set name found in the output wins.
        _bst = None
        for b in bsts:
            if cmdout('grep -i %s %s' % (b, self.f)):
                _bst = dctbo[b]
                break
        assert _bst, 'Plz add more reference basis'
        return _bst

    @property
    def method(self):
        # Combined method+basis tag, e.g. 'b3lypvtz'.
        if not hasattr(self, '_method'):
            self._method = self.meth + self.basis
        return self._method

    @property
    def e(self):
        if not hasattr(self, '_e'):
            self._e = self.get_energy()
        return self._e

    def get_energy(self):
        """Collect total energies, keyed by method+basis tag, from the output.

        NOTE(review): the CBS branch below uses the bare name 'f' (should be
        self.f) and the undefined 'bdct' (presumably dctbo/dctbi) — it cannot
        run as written; only the non-CBS branch is exercised.
        """
        if not self.icbs:
            # Single-point energy (no CBS extrapolation).
            cmd = "grep 'FINAL SINGLE POINT' %s | tail -n 1 | awk '{print $NF}'" % self.f
            #print(cmd)
            e = eval(cmdout(cmd))  # in hartree
            es = {self.method: e}  #, 'e':e}
        else:
            # CBS run: gather per-basis SCF, MP2 and coupled-cluster energies
            # plus the extrapolated CBS totals.
            cmd = "grep 'SCF energy with basis' %s | awk '{print $5}'" % f
            bsts = [bdct[si[:-1].lower()] for si in cmdout1(cmd)]; #print(bsts)
            scfmeths = ['hf' + si for si in bsts]
            n1, n2 = [cardinal[bst] for bst in bsts]
            scfcbsmeth = 'hfcbsv%d%dz' % (n1, n2)
            cmd = "grep 'SCF energy with basis' %s | awk '{print $NF}'" % f
            t = cmdout(cmd); #print('t=',t)
            es_hf = [eval(ei) for ei in t.split('\n')]; #print(es_hf)
            dct = dict(zip(scfmeths, es_hf))
            cmd = "grep '^MP2 energy with basis' %s | awk '{print $6}'" % f
            t = cmdout(cmd)  # print('t=',t)
            imp2 = F
            if t:
                imp2 = T
                es_corr = t.split('\n'); #print(es_corr)
                smeths = ['mp2' + si for si in bsts]
                # MP2 total = SCF + correlation, per basis.
                dct.update(dict(zip(smeths, [eval(ei) + es_hf[i] for i, ei in enumerate(es_corr)])))
            cbsmeth = None
            icc2 = F
            cmd = "grep '^MDCI energy with basis ' %s" % f
            t = cmdout(cmd)
            if t:
                icc2 = T
                es_cc2 = []
                ts = t.split('\n')
                for i, ti in enumerate(ts):
                    #print('ti=',ti)
                    es_cc2.append(es_hf[i] + eval(re.findall('\-?[0-9][0-9]*\.[0-9][0-9]*', ti)[0]))
                smeths = ['cc2' + si for si in bsts]
                dct.update(dict(zip(smeths, es_cc2)))
                cbsmeth = 'cc2cbsv%d%dz' % (n1, n2)
            icmp2 = F  # corr from mp2?
            cmd = "grep 'CCSD(T) correlation energy with basis' %s | awk '{print $NF}'" % f
            t = cmdout(cmd)
            if t:
                icmp2 = T
                e_cc2_corr = eval(t.strip())
                bst1 = bsts[0]
                dct.update({'cc2%s' % bst1: es_hf[0] + e_cc2_corr})
                cbsmeth = 'cc2cbsmp2v%d%dz' % (n1, n2)
            assert cbsmeth, '#ERROR: `cbsmeth is None?'
            cmd = "grep ' CBS [Sct]' %s" % f
            #cmd = "grep 'Estimated CBS total energy' %s | awk '{print $NF}'"%f
            t = cmdout(cmd); #print('t=',t)
            e1, e1c, e2 = [eval(re.findall('\-?[0-9][0-9]*\.[0-9][0-9]*', ei)[0]) for ei in t.split('\n')]
            escbs = [e1, e2]; #print(escbs)
            meths = [scfcbsmeth, cbsmeth]  # 'cc2cbs34']
            dct.update(dict(zip(meths, escbs)))
            # NOTE(review): the trailing comma makes 'sdic' a TUPLE, so the
            # '+=' below raises TypeError — drop the comma.
            sdic = "'%s':{" % self.label,
            for k in dct:
                sdic += "'%s':%.8f, " % (k, dct[k])
            sdic += '}'
            print(sdic, ',')
            es = dct
        return es

    @property
    def grad(self):
        if not hasattr(self, '_grad'):
            self._grad = self.get_grad()
        return self._grad

    def get_grad(self):
        """Read Forces from ORCA output file.

        NOTE(review): several defects here — 'self.lb' is not set on orcajob
        (the attribute is 'label'); 'tempgrad' is used before assignment;
        'Hartree' and 'Bohr' are never defined/imported; 'getgrad' is dead
        leftover code. This method cannot run as written.
        """
        file = open('%s.engrad' % self.lb, 'r')
        lines = file.readlines()
        file.close()
        igrad = F
        for i, line in enumerate(lines):
            if line.find('# The current gradient') >= 0:
                gradients = []; igrad = T
                continue
            if igrad and "#" not in line:
                grad = line.split()[-1]
                tempgrad.append(float(grad))
                if len(tempgrad) == 3:
                    gradients.append(tempgrad)
                    tempgrad = []
            if '# The at' in line:
                getgrad = "no"
        return -np.array(gradients) * Hartree / Bohr

    def write(self, fo):
        """Attach parsed energies to the geometry and write it to file 'fo'."""
        e = self.e
        atoms = self.atoms
        # NOTE(review): 'es' is undefined here — presumably 'e' was intended.
        atoms.props.update(es)
        atoms.write(fo)


if __name__ == "__main__":
    import ase, sys
    fs = sys.argv[1:]
    for f in fs:
        fmt = f[-3:]
        if fmt in ['xyz']:
            # NOTE(review): 'param' is not defined at module scope, and (given
            # the '__int__' typo above) orca(f, param) would mismatch
            # cc.molecule's constructor signature anyway.
            obj = orca(f, param)
            obj.write_input()
            obj.run()
        elif fmt in ['out']:
            obj = orcajob(f)
            obj.write(f[:-4] + '.xyz')
        else:
            raise Exception('file format not supported')
cryptable/ejbca-rootca
ejbca_ce_6_15_3_0/modules/cesecore-common/src/org/cesecore/certificates/ca/ApprovalRequestType.java
/*************************************************************************
 *                                                                       *
 *  EJBCA: The OpenSource Certificate Authority                          *
 *                                                                       *
 *  This software is free software; you can redistribute it and/or       *
 *  modify it under the terms of the GNU Lesser General Public           *
 *  License as published by the Free Software Foundation; either         *
 *  version 2.1 of the License, or any later version.                    *
 *                                                                       *
 *  See terms of license at gnu.org.                                     *
 *                                                                       *
 *************************************************************************/
package org.cesecore.certificates.ca;

import java.util.HashMap;
import java.util.Map;

/**
 * Represents a type of approval request.
 *
 * Each constant carries a stable integer value (used for persistence and
 * reverse lookup) and the key of its localized display string.
 *
 * @version $Id: ApprovalRequestType.java 29813 2018-09-05 15:01:59Z bastianf $
 */
public enum ApprovalRequestType {
    ADDEDITENDENTITY(1, "APPROVEADDEDITENDENTITY"),
    KEYRECOVER(2, "APPROVEKEYRECOVER"),
    REVOCATION(3, "APPROVEREVOCATION"),
    ACTIVATECA(4, "APPROVEACTIVATECA");

    /** Integer value -> constant, for {@link #getFromIntegerValue(int)}. */
    private static final Map<Integer, ApprovalRequestType> reverseLookupMap = new HashMap<>();

    static {
        for (final ApprovalRequestType type : values()) {
            reverseLookupMap.put(type.getIntegerValue(), type);
        }
    }

    private final int integerValue;
    private final String languageString;

    ApprovalRequestType(final int integerValue, final String languageString) {
        this.integerValue = integerValue;
        this.languageString = languageString;
    }

    /**
     * Resolves a constant from its persisted integer value.
     *
     * @return the matching constant, or null if the value is unknown
     */
    public static ApprovalRequestType getFromIntegerValue(final int integerValue) {
        return reverseLookupMap.get(integerValue);
    }

    /** @return the stable integer value of this approval request type */
    public int getIntegerValue() {
        return integerValue;
    }

    /** @return the localization key for this approval request type */
    public String getLanguageString() {
        return languageString;
    }
}
raamatkeijo-test/rest-whois
app/models/domain.rb
# A registered domain as exposed by the WHOIS service.
# A domain is considered active unless it has never been registered or it
# currently carries one of the inactive statuses.
class Domain
  include ActiveModel::Model

  STATUS_BLOCKED = 'Blocked'.freeze
  STATUS_RESERVED = 'Reserved'.freeze
  STATUS_DISCARDED = 'deleteCandidate'.freeze
  STATUS_AT_AUCTION = 'AtAuction'.freeze
  STATUS_PENDING_REGISTRATION = 'PendingRegistration'.freeze
  STATUS_DISPUTED = 'Disputed'.freeze

  # Statuses that make a domain inactive regardless of anything else.
  INACTIVE_STATUSES = [
    STATUS_BLOCKED,
    STATUS_DISCARDED,
    STATUS_AT_AUCTION,
    STATUS_PENDING_REGISTRATION,
  ].freeze
  private_constant :INACTIVE_STATUSES

  attr_accessor :name, :statuses, :registered, :changed, :expire,
                :outzone, :delete, :registration_deadline

  # An unregistered domain is never active; otherwise the domain is active
  # when none of its statuses is in the inactive set.
  def active?
    return false unless registered.present?

    (statuses & INACTIVE_STATUSES).empty?
  end
end
samya-ak/sync-it
client/src/routes/Test.js
<reponame>samya-ak/sync-it const Test = () => { return <div>Test page</div>; }; export default Test;
SurveyTools/FlightLogger
src/com/vulcan/flightlogger/util/SystemUtils.java
package com.vulcan.flightlogger.util; import android.content.ContextWrapper; import android.content.pm.PackageInfo; import android.content.pm.PackageManager.NameNotFoundException; import android.util.Log; public class SystemUtils { public static String getVersionString(ContextWrapper contextWrapper) { try { PackageInfo pInfo = contextWrapper.getPackageManager().getPackageInfo(contextWrapper.getPackageName(), 0); return (pInfo == null) ? null : pInfo.versionName; } catch (NameNotFoundException e) { Log.e("SystemUtils", e.getLocalizedMessage()); } // default return null; } }
Golangltd/www.Xshooting.com.vender
vendor/src/github.com/LollipopGo/lollipopgo/network/tcp_server.go
package network

import (
	"FenDZ/glog-master"
	"net"
	"sync"
	"time"

	"github.com/LollipopGo/lollipopgo/log"
)

// TCPServer accepts TCP connections on Addr and wraps each accepted
// connection in an Agent produced by the NewAgent factory. Each agent runs
// in its own goroutine; Close performs an orderly shutdown.
type TCPServer struct {
	Addr            string               // listen address, e.g. ":8080"
	MaxConnNum      int                  // max simultaneous connections; extras are closed immediately
	PendingWriteNum int                  // per-connection buffered write queue length
	NewAgent        func(*TCPConn) Agent // factory wrapping an accepted connection
	ln              net.Listener
	conns           ConnSet        // live connections, guarded by mutexConns
	mutexConns      sync.Mutex     // protects conns
	wgLn            sync.WaitGroup // waits for the accept loop to exit
	wgConns         sync.WaitGroup // waits for all per-connection goroutines

	// msg parser
	LenMsgLen    int    // byte width of the length prefix
	MinMsgLen    uint32 // smallest accepted message body
	MaxMsgLen    uint32 // largest accepted message body
	LittleEndian bool   // byte order of the length prefix
	msgParser    *MsgParser
}

// Start initialises the listener and launches the accept loop in its own goroutine.
func (server *TCPServer) Start() {
	glog.Info("Entry network Start")
	server.init()
	go server.run()
}

// init opens the listener, falls back to defaults for missing configuration,
// and prepares the shared message parser. Fatal on an unusable configuration.
func (server *TCPServer) init() {
	glog.Info("Entry network Start", server.Addr)
	ln, err := net.Listen("tcp", server.Addr)
	if err != nil {
		log.Fatal("%v", err)
	}

	if server.MaxConnNum <= 0 {
		server.MaxConnNum = 100
		log.Release("invalid MaxConnNum, reset to %v", server.MaxConnNum)
	}
	if server.PendingWriteNum <= 0 {
		server.PendingWriteNum = 100
		log.Release("invalid PendingWriteNum, reset to %v", server.PendingWriteNum)
	}
	if server.NewAgent == nil {
		log.Fatal("NewAgent must not be nil")
	}

	server.ln = ln
	server.conns = make(ConnSet)

	// msg parser
	msgParser := NewMsgParser()
	msgParser.SetMsgLen(server.LenMsgLen, server.MinMsgLen, server.MaxMsgLen)
	msgParser.SetByteOrder(server.LittleEndian)
	server.msgParser = msgParser
}

// run is the accept loop: temporary accept errors are retried with
// exponential backoff (capped at 1s); any other error — typically the
// listener being closed — ends the loop.
func (server *TCPServer) run() {
	server.wgLn.Add(1)
	defer server.wgLn.Done()

	var tempDelay time.Duration // backoff delay for temporary accept errors
	for {
		conn, err := server.ln.Accept()
		if err != nil {
			if ne, ok := err.(net.Error); ok && ne.Temporary() {
				// Double the delay each time, starting at 5ms, capped at 1s.
				if tempDelay == 0 {
					tempDelay = 5 * time.Millisecond
				} else {
					tempDelay *= 2
				}
				if max := 1 * time.Second; tempDelay > max {
					tempDelay = max
				}
				log.Release("accept error: %v; retrying in %v", err, tempDelay)
				time.Sleep(tempDelay)
				continue
			}
			// Non-temporary error: stop accepting.
			return
		}
		tempDelay = 0

		// Enforce the connection cap before registering the connection.
		server.mutexConns.Lock()
		if len(server.conns) >= server.MaxConnNum {
			server.mutexConns.Unlock()
			conn.Close()
			log.Debug("too many connections")
			continue
		}
		server.conns[conn] = struct{}{}
		server.mutexConns.Unlock()

		server.wgConns.Add(1)

		tcpConn := newTCPConn(conn, server.PendingWriteNum, server.msgParser)
		agent := server.NewAgent(tcpConn)
		go func() {
			agent.Run() // enter the agent's main loop

			// cleanup: unregister the connection and notify the agent.
			tcpConn.Close()
			server.mutexConns.Lock()
			delete(server.conns, conn)
			server.mutexConns.Unlock()
			agent.OnClose()

			server.wgConns.Done()
		}()
	}
}

// Close shuts down the listener, waits for the accept loop to finish, then
// closes every live connection and waits for their goroutines to exit.
func (server *TCPServer) Close() {
	server.ln.Close()
	server.wgLn.Wait()

	server.mutexConns.Lock()
	for conn := range server.conns {
		conn.Close()
	}
	server.conns = nil
	server.mutexConns.Unlock()

	server.wgConns.Wait()
}
brightspace-bot/activities
scripts/remove_attest_from_project.js
import { readFileSync, writeFileSync } from 'fs'; //read in the package file const file_contents = JSON.parse(readFileSync('package.json')); if ('dependencies' in file_contents) { if ('attest' in file_contents.dependencies) { delete file_contents.dependencies.attest; } } writeFileSync('package.json', JSON.stringify(file_contents, null, '\t'));
sergey-shambir/cg_course_examples
chapter_1/lesson_03/DispatchEvent.cpp
#include "stdafx.h"
#include "DispatchEvent.h"

// Routes a raw SDL event to the matching handler on the acceptor.
// Event types without a dedicated handler are silently ignored.
void sdl::DispatchEvent(const SDL_Event &event, IInputEventAcceptor &acceptor)
{
    switch (event.type)
    {
    case SDL_KEYDOWN:
    {
        acceptor.OnKeyDown(event.key);
        break;
    }
    case SDL_KEYUP:
    {
        acceptor.OnKeyUp(event.key);
        break;
    }
    case SDL_MOUSEBUTTONDOWN:
    {
        acceptor.OnMouseDown(event.button);
        break;
    }
    case SDL_MOUSEBUTTONUP:
    {
        acceptor.OnMouseUp(event.button);
        break;
    }
    case SDL_MOUSEMOTION:
    {
        acceptor.OnMouseMotion(event.motion);
        break;
    }
    case SDL_MOUSEWHEEL:
    {
        acceptor.OnMouseWheel(event.wheel);
        break;
    }
    default:
        break; // unhandled event types are intentionally dropped
    }
}
vtex/react-jsonschema-table
src/components/endlessTable/views/Footer.react.js
<filename>src/components/endlessTable/views/Footer.react.js import React from 'react' import PropTypes from 'prop-types' import {FormattedMessage} from 'react-intl' const Footer = function(props) { return ( <div className="fixed bottom-0 f5 w-100 bt b--light-gray pa1 bg-navy"> {props.totalRows}&nbsp;<FormattedMessage id="Footer.records" /> </div> ) } Footer.propTypes = { totalRows: PropTypes.number, } export default Footer
davidraditya/OAI-Powder
cmake_targets/lte_build_oai/build/CMakeFiles/Rel14/NeighCellsPerBandclassCDMA2000.c
/*
 * Generated by asn1c-0.9.24 (http://lionet.info/asn1c)
 * From ASN.1 module "EUTRA-RRC-Definitions"
 * found in "fixed_grammar.asn"
 * `asn1c -gen-PER`
 */
/* NOTE: machine-generated file — regenerate with asn1c instead of hand-editing. */

#include "NeighCellsPerBandclassCDMA2000.h"

/* Member table for the two SEQUENCE fields: arfcn and physCellIdList. */
static asn_TYPE_member_t asn_MBR_NeighCellsPerBandclassCDMA2000_1[] = {
	{ ATF_NOFLAGS, 0, offsetof(struct NeighCellsPerBandclassCDMA2000, arfcn),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_ARFCN_ValueCDMA2000,
		0,	/* Defer constraints checking to the member type */
		0,	/* No PER visible constraints */
		0,
		"arfcn"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct NeighCellsPerBandclassCDMA2000, physCellIdList),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_PhysCellIdListCDMA2000,
		0,	/* Defer constraints checking to the member type */
		0,	/* No PER visible constraints */
		0,
		"physCellIdList"
		},
};
/* Outermost tag: UNIVERSAL SEQUENCE. */
static ber_tlv_tag_t asn_DEF_NeighCellsPerBandclassCDMA2000_tags_1[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member lookup used by the BER/DER decoder. */
static asn_TYPE_tag2member_t asn_MAP_NeighCellsPerBandclassCDMA2000_tag2el_1[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* arfcn at 3182 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* physCellIdList at 3184 */
};
static asn_SEQUENCE_specifics_t asn_SPC_NeighCellsPerBandclassCDMA2000_specs_1 = {
	sizeof(struct NeighCellsPerBandclassCDMA2000),
	offsetof(struct NeighCellsPerBandclassCDMA2000, _asn_ctx),
	asn_MAP_NeighCellsPerBandclassCDMA2000_tag2el_1,
	2,	/* Count of tags in the map */
	0, 0, 0,	/* Optional elements (not needed) */
	-1,	/* Start extensions */
	-1	/* Stop extensions */
};
/* Public type descriptor wiring the generic SEQUENCE codecs to the tables above. */
asn_TYPE_descriptor_t asn_DEF_NeighCellsPerBandclassCDMA2000 = {
	"NeighCellsPerBandclassCDMA2000",
	"NeighCellsPerBandclassCDMA2000",
	SEQUENCE_free,
	SEQUENCE_print,
	SEQUENCE_constraint,
	SEQUENCE_decode_ber,
	SEQUENCE_encode_der,
	SEQUENCE_decode_xer,
	SEQUENCE_encode_xer,
	SEQUENCE_decode_uper,
	SEQUENCE_encode_uper,
	SEQUENCE_decode_aper,
	SEQUENCE_encode_aper,
	SEQUENCE_compare,
	0,	/* Use generic outmost tag fetcher */
	asn_DEF_NeighCellsPerBandclassCDMA2000_tags_1,
	sizeof(asn_DEF_NeighCellsPerBandclassCDMA2000_tags_1)
		/sizeof(asn_DEF_NeighCellsPerBandclassCDMA2000_tags_1[0]), /* 1 */
	asn_DEF_NeighCellsPerBandclassCDMA2000_tags_1,	/* Same as above */
	sizeof(asn_DEF_NeighCellsPerBandclassCDMA2000_tags_1)
		/sizeof(asn_DEF_NeighCellsPerBandclassCDMA2000_tags_1[0]), /* 1 */
	0,	/* No PER visible constraints */
	asn_MBR_NeighCellsPerBandclassCDMA2000_1,
	2,	/* Elements count */
	&asn_SPC_NeighCellsPerBandclassCDMA2000_specs_1	/* Additional specs */
};
abdelqader-alomari/amman-401d4-java
class-06/Demo/src/main/java/demo/SpaceShuttle.java
<reponame>abdelqader-alomari/amman-401d4-java<filename>class-06/Demo/src/main/java/demo/SpaceShuttle.java package demo; public class SpaceShuttle extends TransportMachine implements Flight { public SpaceShuttle(int doors) { super(doors); } public void goToMoon() { System.out.println("go to moon"); } @Override public void fly() { System.out.println("Blast off"); } }
koladeg/Hire-dev
backend/node_modules/cloudinary/lib/utils/consts.js
const DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION = { width: "auto", crop: "limit", }; const DEFAULT_POSTER_OPTIONS = { format: 'jpg', resource_type: 'video', }; const DEFAULT_VIDEO_SOURCE_TYPES = ['webm', 'mp4', 'ogv']; const CONDITIONAL_OPERATORS = { "=": 'eq', "!=": 'ne', "<": 'lt', ">": 'gt', "<=": 'lte', ">=": 'gte', "&&": 'and', "||": 'or', "*": "mul", "/": "div", "+": "add", "-": "sub", "^": "pow", }; let SIMPLE_PARAMS = [ ["audio_codec", "ac"], ["audio_frequency", "af"], ["bit_rate", 'br'], ["color_space", "cs"], ["default_image", "d"], ["delay", "dl"], ["density", "dn"], ["duration", "du"], ["end_offset", "eo"], ["fetch_format", "f"], ["gravity", "g"], ["page", "pg"], ["prefix", "p"], ["start_offset", "so"], ["streaming_profile", "sp"], ["video_codec", "vc"], ["video_sampling", "vs"], ]; const PREDEFINED_VARS = { "aspect_ratio": "ar", "aspectRatio": "ar", "current_page": "cp", "currentPage": "cp", "duration": "du", "face_count": "fc", "faceCount": "fc", "height": "h", "initial_aspect_ratio": "iar", "initial_height": "ih", "initial_width": "iw", "initialAspectRatio": "iar", "initialHeight": "ih", "initialWidth": "iw", "initial_duration": "idu", "initialDuration": "idu", "page_count": "pc", "page_x": "px", "page_y": "py", "pageCount": "pc", "pageX": "px", "pageY": "py", "tags": "tags", "width": "w", }; const TRANSFORMATION_PARAMS = [ 'angle', 'aspect_ratio', 'audio_codec', 'audio_frequency', 'background', 'bit_rate', 'border', 'color', 'color_space', 'crop', 'default_image', 'delay', 'density', 'dpr', 'duration', 'effect', 'end_offset', 'fetch_format', 'flags', 'fps', 'gravity', 'height', 'if', 'keyframe_interval', 'offset', 'opacity', 'overlay', 'page', 'prefix', 'quality', 'radius', 'raw_transformation', 'responsive_width', 'size', 'start_offset', 'streaming_profile', 'transformation', 'underlay', 'variables', 'video_codec', 'video_sampling', 'width', 'x', 'y', 'zoom', // + any key that starts with '$' ]; const LAYER_KEYWORD_PARAMS = { font_weight: "normal", 
font_style: "normal", text_decoration: "none", text_align: null, stroke: "none", }; module.exports = { DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION, DEFAULT_POSTER_OPTIONS, DEFAULT_VIDEO_SOURCE_TYPES, CONDITIONAL_OPERATORS, PREDEFINED_VARS, LAYER_KEYWORD_PARAMS, TRANSFORMATION_PARAMS, SIMPLE_PARAMS, };
CosmoConsole/PhotonicCraft
1.7/src/main/java/email/com/gmail/cosmoconsole/forge/photoniccraft/block/BlockMerger.java
package email.com.gmail.cosmoconsole.forge.photoniccraft.block;

import java.util.Random;

import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import email.com.gmail.cosmoconsole.forge.photoniccraft.LaserDirection;
import email.com.gmail.cosmoconsole.forge.photoniccraft.ModPhotonicCraft;
import email.com.gmail.cosmoconsole.forge.photoniccraft.tileentity.TileEntityLaser;
import email.com.gmail.cosmoconsole.forge.photoniccraft.tileentity.TileEntityLaserMerger;
import net.minecraft.block.Block;
import net.minecraft.block.BlockFurnace;
import net.minecraft.block.BlockPistonBase;
import net.minecraft.block.ITileEntityProvider;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.IIcon;
import net.minecraft.util.MathHelper;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;

/**
 * Laser-merger block (Minecraft 1.7 / Forge). A rock-material, pickaxe-
 * harvestable block backed by a {@link TileEntityLaserMerger} tile entity.
 * Block metadata encodes the horizontal facing chosen at placement time
 * (one of 2,3,4,5 — see onBlockPlacedBy).
 */
public class BlockMerger extends Block implements ITileEntityProvider {
    public BlockMerger() {
        super(Material.rock);
        this.setHardness(5.0F);
        this.setResistance(10.0F);
        this.setCreativeTab(CreativeTabs.tabMisc);
        this.setBlockName(ModPhotonicCraft.MODID + "_merger");
        this.setBlockTextureName(ModPhotonicCraft.MODID + ":merger_side");
        // NOTE(review): setCreativeTab is called a second time with the same
        // tab — harmless but redundant; kept as-is in this doc-only pass.
        this.setCreativeTab(CreativeTabs.tabMisc);
        this.setHarvestLevel("pickaxe", 0);
    }

    @Override
    public void onBlockAdded(World p_149726_1_, int p_149726_2_, int p_149726_3_, int p_149726_4_) {
        // No merger-specific setup on placement; defer to vanilla behaviour.
        super.onBlockAdded(p_149726_1_, p_149726_2_, p_149726_3_, p_149726_4_);
    }

    // Decodes the facing stored in the low 3 bits of a metadata value.
    public static EnumFacing func_149937_b(int p_149937_0_) {
        return EnumFacing.getFront(p_149937_0_ & 7);
    }

    /**
     * Whether a laser travelling in direction {@code dir} can enter a merger
     * whose metadata is {@code meta}: meta 2/3 accepts WEST/EAST beams,
     * meta 4/5 accepts NORTH/SOUTH beams; anything else is rejected.
     */
    public static boolean acceptableDirection(int meta, LaserDirection dir) {
        if (meta == 2 || meta == 3)
            return dir == LaserDirection.WEST || dir == LaserDirection.EAST;
        if (meta == 4 || meta == 5)
            return dir == LaserDirection.NORTH || dir == LaserDirection.SOUTH;
        return false;
    }

    /**
     * Returns a new instance of a block's tile entity class. Called on placing the block.
     */
    @Override
    public TileEntity createNewTileEntity(World p_149915_1_, int p_149915_2_) {
        return new TileEntityLaserMerger();
    }

    /**
     * Gets the block's texture. Args: side, meta
     */
    @Override
    @SideOnly(Side.CLIENT)
    public IIcon getIcon(int p_149691_1_, int p_149691_2_) {
        int k = p_149691_2_ & 7;
        // p_149691_1_ :
        // 0: down
        // 1: up
        // 2: north
        // 3: south
        // 4: west
        // 5: east
        // Facing side (or inventory rendering, meta -1): use the "N" face icon.
        if (k == p_149691_1_ || p_149691_2_ == -1) {
            return this.field_149935_N;
        }
        // Sides perpendicular to the facing axis get the merger side texture.
        if (p_149691_1_ == 2 || p_149691_1_ == 3)
            if (p_149691_2_ == 4 || p_149691_2_ == 5)
                return field_149936_O;
        if (p_149691_1_ == 4 || p_149691_1_ == 5)
            if (p_149691_2_ == 2 || p_149691_2_ == 3)
                return field_149936_O;
        // Everything else (top/bottom and the back face) uses the plain icon.
        return this.field_149934_M;
        //return p_149691_1_ == k ? (k != 1 && k != 0 ? this.field_149935_N : this.field_149936_O) : (k != 1 && k != 0 ? (p_149691_1_ != 1 && p_149691_1_ != 0 ? this.blockIcon : this.field_149934_M) : ((k == 1 && p_149691_1_ == 0) || (k == 0 && p_149691_1_ == 1) ? this.field_149934_M : this.blockIcon));
    }

    // Client-side icon handles (obfuscated Forge field names kept for parity).
    @SideOnly(Side.CLIENT)
    private IIcon field_149934_M; // generic face ("laser_top")
    @SideOnly(Side.CLIENT)
    private IIcon field_149935_N; // facing face ("laser_top_n")
    @SideOnly(Side.CLIENT)
    private IIcon field_149936_O; // merger side face ("merger_side")

    @SideOnly(Side.CLIENT)
    @Override
    public void registerBlockIcons(IIconRegister p_149651_1_) {
        this.blockIcon = p_149651_1_.registerIcon("photoniccraft:laser_top");
        this.field_149934_M = p_149651_1_.registerIcon("photoniccraft:laser_top");
        this.field_149935_N = p_149651_1_.registerIcon("photoniccraft:laser_top_n");
        this.field_149936_O = p_149651_1_.registerIcon("photoniccraft:merger_side");
    }

    /**
     * Called upon block activation (right click on the block.)
     */
    /*public void func_146104_a(EntityPlayer p_149727_5_, TileEntityLaser p_146104_1_)
    {
        if (p_149727_5_ instanceof EntityPlayerSP) {
            EntityPlayerSP p = (EntityPlayerSP) p_149727_5_;
            p.openGui(ModPhotonicCraft.instance, p.getEntityId(), p_146104_1_.getWorldObj(), p_146104_1_.xCoord, p_146104_1_.yCoord, p_146104_1_.zCoord);
            //FMLClientHandler.instance().displayGuiScreen(p, new GuiLaser(p.inventory, p_146104_1_));
        } else if (p_149727_5_ instanceof EntityPlayerMP) {
            EntityPlayerMP p = (EntityPlayerMP) p_149727_5_;
            p.getNextWindowId();
            p.playerNetServerHandler.sendPacket(new S2DPacketOpenWindow(p.currentWindowId, 7, p_146104_1_.getInventoryName(), p_146104_1_.getSizeInventory(), p_146104_1_.hasCustomInventoryName()));
            p.openContainer = new ContainerLaser(p.inventory, p_146104_1_);
            p.openContainer.windowId = p.currentWindowId;
            p.openContainer.addCraftingToCrafters(p);
        }
    * Called when the block is placed in the world.
    */
    @Override
    public void onBlockPlacedBy(World p_149689_1_, int p_149689_2_, int p_149689_3_, int p_149689_4_, EntityLivingBase p_149689_5_, ItemStack p_149689_6_) {
        // Quantise the placer's yaw into one of 4 horizontal directions (0..3),
        // then map it onto the metadata facing values {2,5,3,4}.
        int l = (int)Math.floor((double) (p_149689_5_.rotationYaw * 4.0F / 360.0F) + 0.5D) & 3;
        p_149689_1_.setBlockMetadataWithNotify(p_149689_2_, p_149689_3_, p_149689_4_, new int[]{2,5,3,4}[l], 2);
        super.onBlockPlacedBy(p_149689_1_, p_149689_2_, p_149689_3_, p_149689_4_, p_149689_5_, p_149689_6_);
    }
}
alif-munim/restoration-bruh
middleware/verificationCheck.js
<gh_stars>1-10 const mongoose = require("mongoose"); const User = mongoose.model("User"); const bcrypt = require("bcryptjs"); exports.verificationCheck = (req, res, next) => { User.aggregate([ { $match: { $or: [{ email: req.body.email }, { username: req.body.email }], }, }, { $project: { password: 1, activated: 1, }, }, ]) .then((users) => { if (users.length < 1) { return res.status(400).json({ message: "Incorrect credentials." }); } else { bcrypt.compare(req.body.password, users[0].password, (err, result) => { if (err) { return res.status(400).json({ message: "Incorrect credentials." }); } if (result) { if (!users[0].activated) { return res.status(400).json({ message: "Account not activated" }); } return next(); } return res.status(400).json({ message: "Incorrect credentials." }); }); } }) .catch((err) => { console.log(err); return res.status(500).json({ message: err }); }); };
hsxyl/CowPredict
src/main/java/com/example/demo/model/param/JudgeStatusParam.java
package com.example.demo.model.param;

import lombok.Data;

import java.time.LocalDateTime;
import java.util.List;

/**
 * Request parameters for a status-judging query: a time window plus the
 * series of values observed in it. Lombok's {@code @Data} generates the
 * getters/setters, equals/hashCode and toString.
 *
 * @author xushenbao
 * @desc (untranslated template placeholder in the original: "add description")
 * @create 2020/1/12
 */
@Data
public class JudgeStatusParam {
    // Start of the time window being evaluated.
    LocalDateTime beginTime;
    // End of the time window being evaluated.
    LocalDateTime endTime;
    // Values observed within the window — presumably the series to judge;
    // TODO(review): confirm semantics/ordering against the caller.
    List<Double> valueList;
}
NavigoSolutions/dry-api
dry-api-core/src/main/java/com/navigo3/dryapi/core/util/CollectionUtils.java
package com.navigo3.dryapi.core.util;

import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.stream.Stream;

/**
 * Small helpers for iterating collections/streams together with an element
 * index.
 */
public class CollectionUtils {

	/**
	 * Maps each stream element to {@code block.apply(element, index)}, where
	 * index counts up from 0 in encounter order.
	 * NOTE(review): the counter is shared mutable state — index/element pairing
	 * is only meaningful on sequential streams; confirm no parallel callers.
	 */
	public static <T, U> Stream<U> mapWithIndex(Stream<T> stream, BiFunction<T, Integer, U> block) {
		AtomicInteger counter = new AtomicInteger(0);
		return stream.map(element -> block.apply(element, counter.getAndIncrement()));
	}

	/**
	 * Invokes {@code block.accept(element, index)} for every element of the
	 * collection, with index counting up from 0 in iteration order.
	 */
	public static <T> void eachWithIndex(Collection<T> coll, BiConsumer<T, Integer> block) {
		int index = 0;
		for (T element : coll) {
			block.accept(element, index++);
		}
	}
}
hypocrite30/LeetCode-Daily
src/1342. Number of Steps to Reduce a Number to Zero/Solution.java
/*
 1342. Number of Steps to Reduce a Number to Zero
*/
public class Solution {
    /**
     * Counts the steps needed to reduce {@code num} to zero, halving when the
     * value is even and subtracting one when it is odd.
     *
     * Rewritten from the original one-liner whose loop condition contained a
     * side effect ({@code ++res > 0}) — same behaviour, explicit counting.
     *
     * @param num non-negative starting value
     * @return the number of reduction steps (0 when {@code num} is 0)
     */
    public int numberOfSteps(int num) {
        int steps = 0;
        while (num > 0) {
            num = (num % 2 == 0) ? num / 2 : num - 1;
            steps++;
        }
        return steps;
    }
}
gashirar/oci-cloud-controller-manager
vendor/github.com/oracle/oci-go-sdk/filestorage/update_mount_target_details.go
// Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
// Code generated. DO NOT EDIT.

// File Storage Service API
//
// The API for the File Storage Service.
//

package filestorage

import (
	"github.com/oracle/oci-go-sdk/common"
)

// UpdateMountTargetDetails The representation of UpdateMountTargetDetails
type UpdateMountTargetDetails struct {

	// A user-friendly name. Does not have to be unique, and it is changeable.
	// Avoid entering confidential information.
	// Example: `My mount target`
	DisplayName *string `mandatory:"false" json:"displayName"`
}

// String renders the struct with pointer fields dereferenced for readability.
func (m UpdateMountTargetDetails) String() string {
	return common.PointerString(m)
}
unibrew/jbpm-work-items
rest-service-workitem/src/test/java/org/jbpm/contrib/demoservices/dto/PreBuildRequest.java
package org.jbpm.contrib.demoservices.dto; /** * @author <a href="mailto:<EMAIL>"><NAME></a> * @author <NAME> */ public class PreBuildRequest { //protected static ObjectMapper objectMapper = new ObjectMapper(); private Scm scm; private Request callback; private Request heartBeat; private Boolean syncEnabled; public Scm getScm() { return scm; } public void setScm(Scm scm) { this.scm = scm; } public Request getCallback() { return callback; } public void setCallback(Request callback) { this.callback = callback; } public void setHeartBeat(Request heartBeat) { this.heartBeat = heartBeat; } public Request getHeartBeat() { return heartBeat; } public Boolean getSyncEnabled() { return syncEnabled; } public void setSyncEnabled(Boolean syncEnabled) { this.syncEnabled = syncEnabled; } }
l1uy0ng/sf_bmp
app/src/main/java/com/k2/mobile/app/model/cache/FileNameGenerator.java
/*
 * Copyright (c) 2015. OPPO Co., Ltd.
 */
package com.k2.mobile.app.model.cache;

/**
 * Strategy for deriving the on-disk file name used to store a cache entry
 * from its cache key.
 *
 * Author: wyouflf
 * Date: 14-5-16
 * Time: 11:25 AM
 */
public interface FileNameGenerator {

    /**
     * Produces the file name for the given cache key.
     *
     * @param key the cache key to derive a file name from
     * @return the file name under which the entry is stored
     */
    public String generate(String key);
}
gee-forr/morpheus-cli
lib/morpheus/cli/monitoring_incidents_command.rb
require 'morpheus/cli/cli_command' require 'morpheus/cli/mixins/monitoring_helper' class Morpheus::Cli::MonitoringIncidentsCommand include Morpheus::Cli::CliCommand include Morpheus::Cli::MonitoringHelper set_command_name :'monitor-incidents' register_subcommands :list, :stats, :get, :history, :notifications, :update, :close, :reopen, :mute, :unmute, :add register_subcommands :'mute-all' => :mute_all register_subcommands :'unmute-all' => :unmute_all def connect(opts) @api_client = establish_remote_appliance_connection(opts) @monitoring_interface = @api_client.monitoring @monitoring_incidents_interface = @api_client.monitoring.incidents end def handle(args) handle_subcommand(args) end def list(args) options = {} params = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage() opts.on('--status LIST', Array, "Filter by status. open, closed") do |list| params['status'] = list end opts.on('--severity LIST', Array, "Filter by severity. critical, warning, info") do |list| params['severity'] = list end build_common_options(opts, options, [:list, :query, :last_updated, :json, :yaml, :csv, :fields, :dry_run, :remote]) end optparse.parse!(args) connect(options) begin params.merge!(parse_list_options(options)) @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.list(params) return end json_response = @monitoring_incidents_interface.list(params) if options[:json] puts as_json(json_response, options, "incidents") return 0 elsif options[:csv] puts records_as_csv(json_response['incidents'], options) return 0 elsif options[:yaml] puts as_yaml(json_response, options, "incidents") return 0 end incidents = json_response['incidents'] title = "Morpheus Monitoring Incidents" subtitles = [] subtitles += parse_list_subtitles(options) print_h1 title, subtitles if incidents.empty? 
print cyan,"No incidents found.",reset,"\n" else print_incidents_table(incidents, options) print_results_pagination(json_response, {:label => "incident", :n_label => "incidents"}) end print reset,"\n" rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end # this show date range counts and current open incidents # it should be perhaps called 'summary' or 'dashboard' # it is not stats about a particular incident def stats(args) options = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage() #opts.on('-j','--json', "JSON Output") do opts.on( '-m', '--max MAX', "Max open incidents to display. Default is 25" ) do |max| if max.to_s == 'all' options[:max] = 10000 # 'all' else options[:max] = max.to_i end end opts.on( '-o', '--offset OFFSET', "Offset open incidents results for pagination." ) do |offset| options[:offset] = offset.to_i.abs end build_common_options(opts, options, [:list, :json, :yaml, :csv, :fields, :dry_run, :remote]) end optparse.parse!(args) connect(options) begin params = {} params.merge!(parse_list_options(options)) @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.stats(params) return end json_response = @monitoring_incidents_interface.stats(params) if options[:json] puts as_json(json_response, options, "openIncidents") return 0 elsif options[:csv] puts records_as_csv(json_response['openIncidents'], options) return 0 elsif options[:yaml] puts as_yaml(json_response, options, "openIncidents") return 0 end open_incidents = json_response['openIncidents'] open_incidents_count = json_response['openIncidentCount'] stats = json_response['incidentStats'] print_h1 "Morpheus Incidents: Stats" print cyan # print_h2 "Counts" # print_description_list({ # "Today" => 'today', # "Week" => 'week', # "Month" => 'month', # }, stats) if stats print justify_string("Today: #{stats['today']}", 20) print justify_string("Week: #{stats['week']}", 20) 
print justify_string("Month: #{stats['month']}", 20) print "\n" else puts "No stats" end if !open_incidents || open_incidents.size() == 0 print bold,green,"0 open incidents",reset,"\n" else if open_incidents.size() == 1 #print bold,yellow,"#{open_incidents.size()} open incident",reset,"\n" print_h2 "#{open_incidents.size()} open incident" else #print bold,yellow,"#{open_incidents.size()} open incidents",reset,"\n" print_h2 "#{open_incidents.size()} open incidents" end options[:max] ||= 20 print_incidents_table(open_incidents) if open_incidents.size > 0 print_results_pagination(size: open_incidents.size, total: open_incidents_count, offset: options[:offset]) end end print reset,"\n" rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def add(args) options = {} params = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id]") opts.on("-c", "--comment STRING", String, "Comment on this incident. Updates summary field.") do |val| params['comment'] = val == 'null' ? nil : val end opts.on("--resolution STRING", String, "Description of the resolution to this incident") do |val| params['resolution'] = val == 'null' ? nil : val end opts.on("--status STATUS", String, "Set status (open or closed)") do |val| params['status'] = val end opts.on("--severity STATUS", String, "Set severity (critical, warning or info)") do |val| params['severity'] = val end opts.on("--name STRING", String, "Set display name (subject)") do |val| params['name'] = val == 'null' ? nil : val end opts.on("--startDate TIME", String, "Set start time") do |val| begin params['startDate'] = parse_time(val).utc.iso8601 rescue => e raise OptionParser::InvalidArgument.new "Failed to parse --startDate '#{val}'. Error: #{e}" end end opts.on("--endDate TIME", String, "Set end time") do |val| begin params['endDate'] = parse_time(val).utc.iso8601 rescue => e raise OptionParser::InvalidArgument.new "Failed to parse --endDate '#{val}'. 
Error: #{e}" end end opts.on("--inUptime BOOL", String, "Set 'In Availability'") do |val| params['inUptime'] = ['true','on'].include?(val.to_s.strip) end build_common_options(opts, options, [:json, :dry_run, :quiet, :remote]) end optparse.parse!(args) connect(options) begin params['name'] = params['name'] || 'No subject' params['startDate'] = params['startDate'] || Time.now.utc.iso8601 payload = { 'incident' => params } @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.create(payload) return end json_response = @monitoring_incidents_interface.create(payload) if options[:json] puts as_json(json_response, options) elsif !options[:quiet] print_green_success "Created incident #{json_response['incident']['id']}" _get(json_response['incident']['id'], options) end rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def get(args) options = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id list]") opts.on(nil,'--history', "Display Incident History") do |val| options[:show_history] = true end opts.on(nil,'--notifications', "Display Incident Notifications") do |val| options[:show_notifications] = true end opts.on('-a','--all', "Display All Details (History, Notifications)") do options[:show_history] = true options[:show_notifications] = true end build_common_options(opts, options, [:json, :yaml, :csv, :fields, :query, :dry_run, :remote]) end optparse.parse!(args) if args.count < 1 puts optparse exit 1 end connect(options) id_list = parse_id_list(args) return run_command_for_each_arg(id_list) do |arg| _get(arg, options) end end def _get(id, options) begin incident = find_incident_by_id(id) @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.get(incident['id']) return end json_response = @monitoring_incidents_interface.get(incident['id']) incident = 
json_response['incident'] if options[:json] puts as_json(json_response, options, "incident") return 0 elsif options[:csv] puts records_as_csv([json_response['incident']], options) return 0 elsif options[:yaml] puts as_yaml(json_response, options, "incident") return 0 end print_h1 "Incident Details" print cyan description_cols = { "ID" => 'id', "Severity" => lambda {|it| format_severity(it['severity']) }, "Name" => lambda {|it| it['displayName'] || it['name'] || 'No Subject' }, "Start" => lambda {|it| format_local_dt(it['startDate']) }, "End" => lambda {|it| format_local_dt(it['endDate']) }, "Duration" => lambda {|it| format_duration(it['startDate'], it['endDate']) }, "Status" => lambda {|it| format_monitoring_issue_status(it) }, "Muted" => lambda {|it| it['inUptime'] ? 'No' : 'Yes' }, "Visibility" => 'visibility', "Last Check" => lambda {|it| format_local_dt(it['lastCheckTime']) }, "Last Error" => lambda {|it| it['lastError'] }, "Comment" => 'comment', "Resolution" => 'resolution' } # description_cols.delete("End") if incident['endDate'].nil? description_cols.delete("Comment") if incident['comment'].empty? description_cols.delete("Resolution") if incident['resolution'].empty? # description_cols.delete("Last Check") if incident['lastCheckTime'].empty? # description_cols.delete("Last Error") if incident['lastError'].empty? print_description_list(description_cols, incident) # puts as_vertical_table(incident, description_cols) ## Issues issues = json_response["issues"] if issues && !issues.empty? 
print_h2 "Issues" print_incident_issues_table(issues, options) else print "\n" puts "No checks involved in this incident" end ## History (MonitorIncidentEvent) if options[:show_history] # history_items = json_response["history"] # gotta go get it history_json_response = @monitoring_incidents_interface.history(incident["id"], {}) history_items = history_json_response["history"] || history_json_response["events"] || history_json_response["issues"] issues = history_items if history_items && !history_items.empty? print_h2 "History" print_incident_history_table(history_items, options) print_results_pagination(history_json_response, {:label => "event", :n_label => "events"}) else print "\n" puts "No history found for this incident" end end ## Members (MonitorIncidentNotifyEvent) if options[:show_notifications] # history_items = json_response["history"] # gotta go get it notifications_json_response = @monitoring_incidents_interface.notifications(incident["id"], {max: 10}) notification_items = notifications_json_response["notifications"] if notification_items && !notification_items.empty? print_h2 "Notifications" print_incident_notifications_table(notification_items, options) print_results_pagination(notifications_json_response, {:label => "notification", :n_label => "notifications"}) else print "\n" puts "Nobody has been notified about this incident." end end print reset,"\n" rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def history(args) options = {} params = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id] [options]") opts.on('--severity LIST', Array, "Filter by severity. 
critical, warning, info") do |list| params['severity'] = list end build_common_options(opts, options, [:list, :last_updated, :json, :csv, :yaml, :fields, :json, :dry_run, :remote]) end optparse.parse!(args) if args.count < 1 puts optparse exit 1 end connect(options) begin incident = find_incident_by_id(args[0]) return 1 if incident.nil? params.merge!(parse_list_options(options)) @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.history(incident['id'], params) return end json_response = @monitoring_incidents_interface.history(incident['id'], params) if options[:json] puts as_json(json_response, options, "history") return 0 elsif options[:csv] puts records_as_csv(json_response['history'], options) return 0 elsif options[:yaml] puts as_yaml(json_response, options, "history") return 0 end history_items = json_response['history'] title = "Incident History: #{incident['id']}: #{incident['displayName'] || incident['name']}" subtitles = [] subtitles += parse_list_subtitles(options) print_h1 title, subtitles if history_items.empty? print cyan,"No history found.",reset,"\n" else print_incident_history_table(history_items, options) print_results_pagination(json_response, {:label => "event", :n_label => "events"}) end print reset,"\n" rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def notifications(args) options = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id] [options]") build_common_options(opts, options, [:list, :json, :csv, :yaml, :fields, :json, :dry_run, :remote]) end optparse.parse!(args) if args.count < 1 puts optparse exit 1 end connect(options) begin incident = find_incident_by_id(args[0]) # return false if incident.nil? 
params = {} params.merge!(parse_list_options(options)) @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.notifications(incident['id'], params) return end json_response = @monitoring_incidents_interface.notifications(incident['id'], params) if options[:json] puts as_json(json_response, options, "notifications") return 0 elsif options[:csv] puts records_as_csv(json_response['notifications'], options) return 0 elsif options[:yaml] puts as_yaml(json_response, options, "notifications") return 0 end notification_items = json_response['notifications'] title = "Incident Notifications [#{incident['id']}] #{incident['displayName'] || incident['name']}" subtitles = [] subtitles += parse_list_subtitles(options) print_h1 title, subtitles if notification_items.empty? print cyan,"No notifications found.",reset,"\n" else print_incident_notifications_table(notification_items, options) print_results_pagination(json_response, {:label => "notification", :n_label => "notifications"}) end print reset,"\n" rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def update(args) options = {} params = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id]") opts.on("-c", "--comment STRING", String, "Comment on this incident. Updates summary field.") do |val| params['comment'] = val == 'null' ? nil : val end opts.on("--resolution STRING", String, "Description of the resolution to this incident") do |val| params['resolution'] = val == 'null' ? nil : val end opts.on("--status STATUS", String, "Set status (open or closed)") do |val| params['status'] = val end opts.on("--severity STATUS", String, "Set severity (critical, warning or info)") do |val| params['severity'] = val end opts.on("--name STRING", String, "Set display name (subject)") do |val| params['name'] = val == 'null' ? 
nil : val end opts.on("--startDate TIME", String, "Set start time") do |val| begin params['startDate'] = parse_time(val).utc.iso8601 rescue => e raise OptionParser::InvalidArgument.new "Failed to parse --startDate '#{val}'. Error: #{e}" end end opts.on("--endDate TIME", String, "Set end time") do |val| begin params['endDate'] = parse_time(val).utc.iso8601 rescue => e raise OptionParser::InvalidArgument.new "Failed to parse --endDate '#{val}'. Error: #{e}" end end opts.on("--inUptime BOOL", String, "Set 'In Availability'") do |val| params['inUptime'] = ['true','on'].include?(val.to_s.strip) end build_common_options(opts, options, [:json, :dry_run, :quiet, :remote]) end optparse.parse!(args) if args.count < 1 puts optparse exit 1 end connect(options) begin incident = find_incident_by_id(args[0]) if params['status'] == 'closed' unless options[:yes] || ::Morpheus::Cli::OptionTypes::confirm("Are you sure you would like to close the incident '#{incident['id']}'?", options) return false end end if params.empty? 
print_red_alert "Specify at least one option to update" puts optparse exit 1 end payload = { 'incident' => {id: incident["id"]} } payload['incident'].merge!(params) @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.update(incident["id"], payload) return end json_response = @monitoring_incidents_interface.update(incident["id"], payload) if options[:json] puts as_json(json_response, options) elsif !options[:quiet] print_green_success "Updated incident #{incident['id']}" _get(incident['id'], options) end rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def mute(args) options = {} params = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id]") opts.on(nil, "--disable", "Disable mute state instead, the same as unmute") do params['muted'] = false params['enabled'] = false end build_common_options(opts, options, [:options, :payload, :json, :dry_run, :quiet, :remote]) opts.footer = "Mute an incident." + "\n" + "[id] is required. This is the id of an incident." 
end optparse.parse!(args) if args.count != 1 puts optparse return 1 end connect(options) begin incident = find_incident_by_id(args[0]) # construct payload payload = nil if options[:payload] payload = options[:payload] else payload = params end @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.quarantine(incident["id"], payload) return 0 end json_response = @monitoring_incidents_interface.quarantine(incident["id"], payload) if options[:json] puts as_json(json_response, options) elsif !options[:quiet] if params['muted'] != false print_green_success "Muted incident #{incident['id']}" else print_green_success "Unmuted incident #{incident['id']}" end _get(incident['id'], options) end return 0 rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def unmute(args) options = {} params = {'muted' => false, 'enabled' => false} # enabled was used pre 3.6.5 optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id]") build_common_options(opts, options, [:payload, :json, :dry_run, :quiet, :remote]) opts.footer = "Unmute an incident." + "\n" + "[id] is required. This is the id of an incident." 
end optparse.parse!(args) if args.count != 1 puts optparse return 1 end connect(options) begin incident = find_incident_by_id(args[0]) # construct payload payload = nil if options[:payload] payload = options[:payload] else payload = params end @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.quarantine(incident["id"], payload) return 0 end json_response = @monitoring_incidents_interface.quarantine(incident["id"], payload) if options[:json] puts as_json(json_response, options) elsif !options[:quiet] print_green_success "Unmuted incident #{incident['id']}" _get(incident['id'], options) end return 0 rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def mute_all(args) options = {} params = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage() opts.on(nil, "--disable", "Disable mute state instead, the same as unmute-all") do params['muted'] = false params['enabled'] = false end build_common_options(opts, options, [:options, :payload, :json, :dry_run, :quiet, :remote]) opts.footer = "Mute all open incidents." 
end optparse.parse!(args) if args.count != 0 puts optparse return 1 end connect(options) begin # construct payload payload = nil if options[:payload] payload = options[:payload] else payload = params end @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.quarantine_all(payload) return 0 end json_response = @monitoring_incidents_interface.quarantine_all(payload) if options[:json] puts as_json(json_response, options) elsif !options[:quiet] num_updated = json_response['updated'] if params['muted'] != false print_green_success "Muted #{num_updated} open incidents" else print_green_success "Unmuted #{num_updated} open incidents" end end return 0 rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def unmute_all(args) options = {} params = {'muted' => false, 'enabled' => false} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage() build_common_options(opts, options, [:payload, :json, :dry_run, :quiet, :remote]) opts.footer = "Unmute all open incidents." 
end optparse.parse!(args) if args.count != 0 puts optparse return 1 end connect(options) begin # construct payload payload = nil if options[:payload] payload = options[:payload] else payload = params end @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.quarantine_all(payload) return 0 end json_response = @monitoring_incidents_interface.quarantine_all(payload) if options[:json] puts as_json(json_response, options) elsif !options[:quiet] num_updated = json_response['updated'] print_green_success "Unmuted #{num_updated} open incidents" end return 0 rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def close(args) options = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id list]") build_common_options(opts, options, [:auto_confirm, :quiet, :json, :dry_run, :remote]) end optparse.parse!(args) if args.count < 1 puts optparse exit 1 end connect(options) id_list = parse_id_list(args) unless options[:yes] || ::Morpheus::Cli::OptionTypes::confirm("Are you sure you would like to close #{id_list.size == 1 ? 
'incident' : 'incidents'} #{anded_list(id_list)}?", options) exit 1 end return run_command_for_each_arg(id_list) do |arg| _close(arg, options) end end def _close(id, options) begin incident = find_incident_by_id(id) already_closed = incident['status'] == 'closed' @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.close(incident['id']) return end json_response = @monitoring_incidents_interface.close(incident['id']) if options[:json] print JSON.pretty_generate(json_response) print "\n" elsif !options[:quiet] print_green_success json_response["msg"] || "Incident #{incident['id']} is now closed" # _get(incident['id'] options) end rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end def reopen(args) options = {} optparse = Morpheus::Cli::OptionParser.new do |opts| opts.banner = subcommand_usage("[id list]") build_common_options(opts, options, [:auto_confirm, :quiet, :json, :dry_run, :remote]) end optparse.parse!(args) if args.count < 1 puts optparse exit 1 end connect(options) id_list = parse_id_list(args) unless options[:yes] || ::Morpheus::Cli::OptionTypes::confirm("Are you sure you would like to reopen #{id_list.size == 1 ? 
'incident' : 'incidents'} #{anded_list(id_list)}?", options) exit 1 end return run_command_for_each_arg(id_list) do |arg| _reopen(arg, options) end end def _reopen(id, options) begin incident = find_incident_by_id(id) already_open = incident['status'] == 'open' if already_open print bold,yellow,"Incident #{incident['id']} is already open",reset,"\n" return false end @monitoring_incidents_interface.setopts(options) if options[:dry_run] print_dry_run @monitoring_incidents_interface.dry.reopen(incident['id']) return end json_response = @monitoring_incidents_interface.reopen(incident['id']) if options[:json] print JSON.pretty_generate(json_response) print "\n" elsif !options[:quiet] print_green_success json_response["msg"] || "Incident #{incident['id']} is now open" # _get(incident['id'] options) end rescue RestClient::Exception => e print_rest_exception(e, options) exit 1 end end private end
Devteamvietnam/iTap
src/main/java/com/devteam/core/util/dataformat/DataSerializer.java
package com.devteam.core.util.dataformat; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringReader; import java.io.StringWriter; import java.io.Writer; import java.nio.charset.Charset; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.List; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.MappingJsonFactory; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; public class DataSerializer { final static public Charset UTF8 = Charset.forName("UTF-8"); final static public DateFormat COMPACT_DATE_TIME = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss 'GMT'Z"); final static public DataSerializer JSON = new DataSerializer(new MappingJsonFactory()) ; final static public DataSerializer YAML = new DataSerializer(new YAMLFactory()) ; private ObjectMapper mapper ; public DataSerializer(JsonFactory factory) { mapper = new ObjectMapper(factory); // can reuse, share globally configure(mapper) ; } public DataSerializer(JsonFactory factory, Module ... 
module) { this(factory) ; for(Module selModule : module) { mapper.registerModule(selModule); } } public void register(Module module) { mapper.registerModule(module); } public <T> byte[] toBytes(T idoc) { try { ByteArrayOutputStream outStream = new ByteArrayOutputStream(); OutputStreamWriter writer = new OutputStreamWriter(outStream); mapper.writeValue(writer, idoc); writer.close() ; return outStream.toByteArray() ; } catch(IOException e) { throw new RuntimeException(e) ; } } public <T> T fromBytes(byte[] data, Class<T> type) { try { ByteArrayInputStream inStream = new ByteArrayInputStream(data); Reader reader = new InputStreamReader(inStream); return mapper.readValue(reader , type); } catch (IOException e) { throw new RuntimeException(e) ; } } public <T> T fromBytes(byte[] data, TypeReference<T> typeRef) { try { ByteArrayInputStream inStream = new ByteArrayInputStream(data); Reader reader = new InputStreamReader(inStream); return mapper.readValue(reader , typeRef); } catch (IOException e) { throw new RuntimeException(e) ; } } public <T> String toString(T idoc) { if(idoc == null) return "" ; try { ObjectWriter owriter = mapper.writerWithDefaultPrettyPrinter() ; return owriter.writeValueAsString(idoc) ; } catch(IOException ex) { throw new RuntimeException(ex) ; } } public <T> T fromString(String data, Class<T> type) { try { StringReader reader = new StringReader(data) ; return mapper.readValue(reader , type); } catch (IOException e) { throw new RuntimeException(e) ; } } public <T> T fromInputStream(InputStream is, String encoding, Class<T> type) { try { Reader reader = new InputStreamReader(is, encoding) ; T value = mapper.readValue(reader , type); is.close(); return value; } catch (IOException e) { throw new RuntimeException(e) ; } } public <T> T fromString(String data, TypeReference<T> typeRef) { try { StringReader reader = new StringReader(data) ; return mapper.readValue(reader , typeRef); } catch (IOException e) { throw new RuntimeException(e) ; } } public 
JsonNode fromString(String data) throws IOException { StringReader reader = new StringReader(data) ; return mapper.readTree(reader); } public <T> T clone(T obj) { return clone((Class<T>)obj.getClass(), obj); } public <T> T clone(Class<T> type, T obj) { try { Writer writer = new StringWriter() ; ObjectWriter owriter = mapper.writerWithDefaultPrettyPrinter() ; owriter.writeValue(writer, obj); String json = writer.toString() ; return (T) fromString(json, type) ; } catch(IOException ex) { throw new RuntimeException(ex) ; } } public <T> List<T> cloneList(List<T> list) { ArrayList<T> holder = new ArrayList<>(); for(T sel : list) { holder.add(clone(sel)); } return holder; } static public <T> T jsonClone(T obj) { return JSON.clone(obj); } static public <T> T jsonClone(Class<T> type, T obj) { return JSON.clone(type, obj); } static public void configure(ObjectMapper mapper) { mapper.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false) ; mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); mapper.setDateFormat(COMPACT_DATE_TIME) ; mapper.enable(SerializationFeature.INDENT_OUTPUT); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); } }
bbejeck/ksql
ksql-metastore/src/main/java/io/confluent/ksql/metastore/model/StructuredDataSource.java
<gh_stars>1-10 /* * Copyright 2018 Confluent Inc. * * Licensed under the Confluent Community License (the "License"); you may not use * this file except in compliance with the License. You may obtain a copy of the * License at * * http://www.confluent.io/confluent-community-license * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package io.confluent.ksql.metastore.model; import static java.util.Objects.requireNonNull; import com.google.common.collect.ImmutableSet; import com.google.errorprone.annotations.Immutable; import io.confluent.ksql.execution.ddl.commands.KsqlTopic; import io.confluent.ksql.execution.timestamp.TimestampColumn; import io.confluent.ksql.name.SourceName; import io.confluent.ksql.schema.ksql.LogicalSchema; import io.confluent.ksql.serde.SerdeOption; import io.confluent.ksql.util.SchemaUtil; import java.util.Optional; import java.util.Set; @Immutable abstract class StructuredDataSource<K> implements DataSource { private final SourceName dataSourceName; private final DataSourceType dataSourceType; private final LogicalSchema schema; private final KeyField keyField; private final Optional<TimestampColumn> timestampColumn; private final KsqlTopic ksqlTopic; private final String sqlExpression; private final ImmutableSet<SerdeOption> serdeOptions; private final boolean casTarget; StructuredDataSource( final String sqlExpression, final SourceName dataSourceName, final LogicalSchema schema, final Set<SerdeOption> serdeOptions, final KeyField keyField, final Optional<TimestampColumn> tsExtractionPolicy, final DataSourceType dataSourceType, final boolean casTarget, final KsqlTopic ksqlTopic ) { this.sqlExpression = requireNonNull(sqlExpression, "sqlExpression"); this.dataSourceName = 
requireNonNull(dataSourceName, "dataSourceName"); this.schema = requireNonNull(schema, "schema"); this.keyField = requireNonNull(keyField, "keyField") .validateKeyExistsIn(schema); this.timestampColumn = requireNonNull(tsExtractionPolicy, "tsExtractionPolicy"); this.dataSourceType = requireNonNull(dataSourceType, "dataSourceType"); this.ksqlTopic = requireNonNull(ksqlTopic, "ksqlTopic"); this.serdeOptions = ImmutableSet.copyOf(requireNonNull(serdeOptions, "serdeOptions")); this.casTarget = casTarget; if (schema.valueContainsAny(SchemaUtil.systemColumnNames())) { throw new IllegalArgumentException("Schema contains system columns in value schema"); } } @Override public SourceName getName() { return dataSourceName; } @Override public DataSourceType getDataSourceType() { return this.dataSourceType; } @Override public LogicalSchema getSchema() { return schema; } @Override public Set<SerdeOption> getSerdeOptions() { return serdeOptions; } @Override public KeyField getKeyField() { return keyField; } @Override public KsqlTopic getKsqlTopic() { return ksqlTopic; } @Override public boolean isCasTarget() { return casTarget; } public Optional<TimestampColumn> getTimestampColumn() { return timestampColumn; } @Override public String getKafkaTopicName() { return ksqlTopic.getKafkaTopicName(); } @Override public String getSqlExpression() { return sqlExpression; } @Override public String toString() { return getClass().getSimpleName() + " name:" + getName(); } }
wraseman/wtp-optimization
src/wtp/save_wtp.cpp
/* save_wtp.c */ #include "wtp.h" int save_wtp(char *file_name, struct ProcessTrain *train, FILE *ferr) /* * Purpose: Save process train data to a data file.. * * Input: * file_name : The calling routine is responsibal for insuring that the * file name is valid... such as having a .wtp extension. * * Return: * TRUE : file was successfuly saved. * FALSE: an error occoured. * * Note: * 1. This routine was orginally developed to support cli_wtp(), * specifically the -s<filename> switch. * * <NAME> * July 1993 */ { FILE *fp; int success = FALSE; if (file_name != NULL && strlen(file_name) > 0) { fp = fopen(file_name, "w"); if (fp == NULL) { if (ferr != NULL) fprintf(ferr, "Could not open %s\n", file_name); success = FALSE; } else { success = writewtp(fp, train, ferr); if (success == TRUE) { strncpy(train->file_name, file_name, sizeof(train->file_name) - 1); } fflush(fp); fclose(fp); } } return (success); }
JackChan1999/boohee_v5.6
src/main/java/com/boohee/user/view/UserHeightView.java
<reponame>JackChan1999/boohee_v5.6 package com.boohee.user.view; import android.content.Context; import android.util.AttributeSet; import android.view.LayoutInflater; import android.widget.FrameLayout; import android.widget.LinearLayout; import android.widget.TextView; import com.boohee.model.User; import com.boohee.myview.IntWheelView; import com.boohee.one.R; public class UserHeightView extends FrameLayout { static final String TAG = UserHeightView.class.getName(); Context ctx; private User user; IntWheelView wheelView; public UserHeightView(Context context) { super(context); init(); } public UserHeightView(Context context, User user) { super(context); this.user = user; init(); } public UserHeightView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); init(); } public UserHeightView(Context context, AttributeSet attrs) { super(context, attrs); init(); } private void init() { this.ctx = getContext(); addView(LayoutInflater.from(this.ctx).inflate(R.layout.nz, null)); setUserProperty(); setBottomPicker(); } private void setUserProperty() { ((TextView) findViewById(R.id.user_property_text)).setText(this.ctx.getString(R.string .o6) + "(cm)"); } private void setBottomPicker() { LinearLayout picker_layout = (LinearLayout) findViewById(R.id.picker_layout); if (this.user != null) { this.wheelView = new IntWheelView(this.ctx, (int) this.user.height); } else { this.wheelView = new IntWheelView(this.ctx); } picker_layout.addView(this.wheelView); } public int getUserHeight() { return this.wheelView.getIntNum(); } }
Hork-Engine/Hork-Source
Source/Runtime/PunctualLightComponent.cpp
<gh_stars>10-100 /* Hork Engine Source Code MIT License Copyright (C) 2017-2022 <NAME>. This file is part of the Hork Engine Source Code. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #include "PunctualLightComponent.h" #include "World.h" #include "DebugRenderer.h" HK_CLASS_META(APunctualLightComponent) APunctualLightComponent::APunctualLightComponent() { AABBWorldBounds.Clear(); OBBTransformInverse.Clear(); Primitive = AVisibilitySystem::AllocatePrimitive(); Primitive->Owner = this; Primitive->Type = VSD_PRIMITIVE_SPHERE; Primitive->VisGroup = VISIBILITY_GROUP_DEFAULT; Primitive->QueryGroup = VSD_QUERY_MASK_VISIBLE | VSD_QUERY_MASK_VISIBLE_IN_LIGHT_PASS; } APunctualLightComponent::~APunctualLightComponent() { AVisibilitySystem::DeallocatePrimitive(Primitive); } void APunctualLightComponent::InitializeComponent() { Super::InitializeComponent(); GetWorld()->VisibilitySystem.AddPrimitive(Primitive); } void APunctualLightComponent::DeinitializeComponent() { Super::DeinitializeComponent(); GetWorld()->VisibilitySystem.RemovePrimitive(Primitive); } void APunctualLightComponent::SetEnabled(bool _Enabled) { Super::SetEnabled(_Enabled); if (_Enabled) { Primitive->QueryGroup |= VSD_QUERY_MASK_VISIBLE; Primitive->QueryGroup &= ~VSD_QUERY_MASK_INVISIBLE; } else { Primitive->QueryGroup &= ~VSD_QUERY_MASK_VISIBLE; Primitive->QueryGroup |= VSD_QUERY_MASK_INVISIBLE; } }
Kvazikot/VideoProjects
VideoCube/VideoCubeCV/libs/app7-master/3PartyLibs/qwt-6.1.4/doc/html/class_qwt_painter_command.js
<filename>VideoCube/VideoCubeCV/libs/app7-master/3PartyLibs/qwt-6.1.4/doc/html/class_qwt_painter_command.js var class_qwt_painter_command = [ [ "Type", "class_qwt_painter_command.html#a6619a454c4332c02412611467935b7ba", [ [ "Invalid", "class_qwt_painter_command.html#a6619a454c4332c02412611467935b7baa501f44c9ca82165fd3c76fd3f50d07dd", null ], [ "Path", "class_qwt_painter_command.html#a6619a454c4332c02412611467935b7baa8f98e03699c40458ed0c2007dca698ca", null ], [ "Pixmap", "class_qwt_painter_command.html#a6619a454c4332c02412611467935b7baa02455f25a984a7dde5992e748af34487", null ], [ "Image", "class_qwt_painter_command.html#a6619a454c4332c02412611467935b7baab7dfdaa4ca3c9e6d57bdb27f5dd27669", null ], [ "State", "class_qwt_painter_command.html#a6619a454c4332c02412611467935b7baaecdaa394f26072749a5f2e1a41639bac", null ] ] ], [ "QwtPainterCommand", "class_qwt_painter_command.html#a0a3ce67b97475d9ff41c26542d216e22", null ], [ "QwtPainterCommand", "class_qwt_painter_command.html#aa67dd2e6a432635c101295de585ffdcd", null ], [ "QwtPainterCommand", "class_qwt_painter_command.html#a8648ff991175d5f06bae6b04df06bd03", null ], [ "QwtPainterCommand", "class_qwt_painter_command.html#a7dae6c078fdb8d173358e988f06e2163", null ], [ "QwtPainterCommand", "class_qwt_painter_command.html#a3830b0c0f920588107a3acc1ab05853b", null ], [ "QwtPainterCommand", "class_qwt_painter_command.html#adcd99c908be8b5e57dee2f7dbed73dc3", null ], [ "~QwtPainterCommand", "class_qwt_painter_command.html#af2b2cc7b6d5ce371b3d2456c231f846e", null ], [ "imageData", "class_qwt_painter_command.html#acb12c36d4b9df791bd4f2089e6c147d9", null ], [ "imageData", "class_qwt_painter_command.html#a273cecb4b3c0bb12e42ab1352be363b3", null ], [ "operator=", "class_qwt_painter_command.html#a08dc6d9612be3a2e3abf2366935e7370", null ], [ "path", "class_qwt_painter_command.html#a19fa09138a8775e721817d4ca309f5ac", null ], [ "path", "class_qwt_painter_command.html#a60bbfdef0e37450f9b5cb4489a1f4fa6", null ], [ "pixmapData", 
"class_qwt_painter_command.html#aa85782270cf4ba4c9c20036e5e9780b3", null ], [ "pixmapData", "class_qwt_painter_command.html#a8283879022dbb87496e8894da1694013", null ], [ "stateData", "class_qwt_painter_command.html#ae801f205610698ebc9f704cdcedafb68", null ], [ "stateData", "class_qwt_painter_command.html#afb99f1cae5d81177d8511f38c1390ed8", null ], [ "type", "class_qwt_painter_command.html#a9a916635d802e0906ac60d17585257d1", null ], [ "d_imageData", "class_qwt_painter_command.html#a2a3d3b501f944ceff338d954ee4faa9a", null ], [ "d_path", "class_qwt_painter_command.html#a906082589c9a078af1d31639822bf52d", null ], [ "d_pixmapData", "class_qwt_painter_command.html#a94f16765ac084ad858a2ecc5783c8d4e", null ], [ "d_stateData", "class_qwt_painter_command.html#a1fde3d07d05caa9b9c40944385feb3e0", null ] ];
adolci/nagios-plugins
check_atlas_status.py
<gh_stars>0 #!/usr/bin/env python # vim:ts=4:sts=4:sw=4:et # # Author: <NAME> # Date: 2017-06-20 17:24:55 +0200 (Tue, 20 Jun 2017) # # https://github.com/harisekhon/nagios-plugins # # License: see accompanying Hari Sekhon LICENSE file # # If you're using my code you're welcome to connect with me on LinkedIn # and optionally send me feedback # # https://www.linkedin.com/in/harisekhon # """ Nagios Plugin to check the status of an Atlas metadata server instance via the HTTP Rest API By default it expects Atlas to be in an active state, for --high-availability setups it will permit a passive state. If you want to ensure at least one of the Atlas servers is active you can either check a load balancer endpoint or combine this check with find_active_server.py from DevOps Python Tools (see project README.md for more details). This plugin will raise a Warning if the Atlas instance is transitioning between active and passive states as that means a failover is occurring. Tested on Atlas 0.8.0 on Hortonworks HDP 2.6.0 """ from __future__ import absolute_import from __future__ import division from __future__ import print_function #from __future__ import unicode_literals import json import os import sys import traceback libdir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'pylib')) sys.path.append(libdir) try: # pylint: disable=wrong-import-position from harisekhon.utils import UnknownError, support_msg_api from harisekhon.utils import isJson from harisekhon import RequestHandler from harisekhon import StatusNagiosPlugin except ImportError as _: print(traceback.format_exc(), end='') sys.exit(4) __author__ = '<NAME>' __version__ = '0.6.0' class CheckAtlasStatus(StatusNagiosPlugin): def __init__(self): # Python 2.x super(CheckAtlasStatus, self).__init__() # Python 3.x # super().__init__() self.name = 'Atlas' self.default_port = 21000 self.protocol = 'http' self.high_availability = False self.ok() def add_options(self): super(CheckAtlasStatus, self).add_options() 
self.add_opt('-S', '--ssl', action='store_true', help='Use SSL') self.add_opt('-A', '--high-availability', action='store_true', help='High Availability setup, allow either ACTIVE or PASSIVE status') def process_options(self): super(CheckAtlasStatus, self).process_options() self.high_availability = self.get_opt('high_availability') def get_status(self): if self.get_opt('ssl'): self.protocol = 'https' url = '%(protocol)s://%(host)s:%(port)s/api/atlas/admin/status' % self.__dict__ req = RequestHandler().get(url) return self.parse(req) def get_key(self, json_data, key): try: return json_data[key] except KeyError: raise UnknownError('\'{0}\' key was not returned in output from '.format(key) + 'Atlas metadata server instance at {0}:{1}. {2}'\ .format(self.host, self.port, support_msg_api())) def parse(self, req): if not isJson(req.content): raise UnknownError('non-JSON returned by Atlas metadata server instance at {0}:{1}'\ .format(self.host, self.port)) _ = json.loads(req.content) status = self.get_key(_, 'Status') if status == 'ACTIVE': pass elif self.high_availability and status == 'PASSIVE': pass elif status in ('BECOMING_ACTIVE', 'BECOMING_PASSIVE'): self.warning() else: self.critical() return status if __name__ == '__main__': CheckAtlasStatus().main()
PowerlessReimplemented/HarmonicsCore
src/main/java/powerlessri/harmonics/gui/widget/slot/ItemSlotPanel.java
<gh_stars>0 package powerlessri.harmonics.gui.widget.slot; import com.google.common.base.Preconditions; import net.minecraft.item.ItemStack; import powerlessri.harmonics.gui.debug.RenderEventDispatcher; import powerlessri.harmonics.gui.widget.AbstractContainer; import javax.annotation.Nonnull; import java.util.*; import java.util.function.Function; import java.util.function.Supplier; public class ItemSlotPanel extends AbstractContainer<AbstractItemSlot> { private int width; private int height; private List<AbstractItemSlot> children; public ItemSlotPanel(int width, int height) { this(width, height, DefaultSlot::new); } public ItemSlotPanel(int width, int height, Supplier<AbstractItemSlot> factory) { this.width = width; this.height = height; this.children = new ArrayList<>(); int size = width * height; for (int i = 0; i < size; i++) { addChildren(factory.get()); } adjustMinContent(); reflow(); } public ItemSlotPanel(int width, int height, List<ItemStack> stacks) { this(width, height, stacks, DefaultSlot::new); } public ItemSlotPanel(int width, int height, List<ItemStack> stacks, Function<ItemStack, AbstractItemSlot> factory) { int size = width * height; Preconditions.checkArgument(size == stacks.size()); this.width = width; this.height = height; this.children = new ArrayList<>(); for (int i = 0; i < size; i++) { addChildren(factory.apply(stacks.get(i))); } adjustMinContent(); reflow(); } @Override public List<AbstractItemSlot> getChildren() { return children; } @Override public ItemSlotPanel addChildren(AbstractItemSlot widget) { Preconditions.checkState(isValid()); children.add(widget); widget.attach(this); return this; } @Override public ItemSlotPanel addChildren(Collection<AbstractItemSlot> widgets) { Preconditions.checkState(isValid()); children.addAll(widgets); for (AbstractItemSlot widget : widgets) { widget.attach(this); } return this; } @Override public void reflow() { int x = 0; int y = 0; int i = 0; for (int yi = 0; yi < height; yi++) { int maxHeight = 0; 
for (int xi = 0; xi < width; xi++) { AbstractItemSlot slot = children.get(i); slot.setLocation(x, y); y += slot.getWidth(); maxHeight = Math.max(maxHeight, slot.getHeight()); i++; } y += maxHeight; } } @Override public void render(int mouseX, int mouseY, float particleTicks) { RenderEventDispatcher.onPreRender(this, mouseX, mouseY); renderChildren(mouseX, mouseY, particleTicks); RenderEventDispatcher.onPostRender(this, mouseX, mouseY); } static class DefaultSlot extends AbstractItemSlot { private ItemStack stack; public DefaultSlot() { this(ItemStack.EMPTY); } public DefaultSlot(ItemStack stack) { this.stack = stack; } @Override public ItemStack getRenderedStack() { return stack; } @Nonnull @Override public ItemSlotPanel getParent() { return (ItemSlotPanel) Objects.requireNonNull(super.getParent()); } } }
ParsIOT/Find_BLE
app/src/main/java/com/parsin/bletool/internal/Constants.java
<reponame>ParsIOT/Find_BLE<gh_stars>0
// Internal constant definitions for the BLE tool: shared-preference keys,
// default configuration values, broadcast tags and external web URLs.
package com.parsin.bletool.internal;

public class Constants {

    // Default signal-strength bar value.
    // NOTE(review): public mutable static — callers may reassign it at runtime;
    // confirm whether it should be final.
    public static int DEFAULT_BAR = 60;

    // Utility class: prevent instantiation.
    private Constants() {

    }

    // Weights applied to the strongest readings (descending). The longer
    // six-element variant was retired in favour of the top-3 weights.
    // public static double[] weightArr = {0.1995, 0.1760, 0.1210, 0.0648, 0.027, 0.005};
    public static double[] weightArr = {0.1995, 0.1760, 0.1210};

    // Prefix used to namespace every shared-preference key below.
    // NOTE(review): "om.find.wifitool" is missing the leading "c" ("com.") —
    // almost certainly a typo, but correcting it would change every stored
    // preference key and silently reset existing installs; confirm before fixing.
    private static final String PACKAGE_NAME = "om.find.wifitool";

    // Shared-preference keys
    public static final String PREFS_NAME = PACKAGE_NAME + "com.find.wifitool.Prefs";
    public static final String USER_NAME = PACKAGE_NAME + "user";
    public static final String GROUP_NAME = PACKAGE_NAME + "group";
    public static final String SERVER_NAME = PACKAGE_NAME + "server";
    public static final String LOCATION_NAME = PACKAGE_NAME + "location";
    public static final String TRACK_INTERVAL = PACKAGE_NAME + "trackInterval";
    public static final String LEARN_INTERVAL = PACKAGE_NAME + "learnInterval";
    public static final String LEARN_PERIOD = PACKAGE_NAME + "learnPeriod";
    public static final String IS_FIRST_RUN = PACKAGE_NAME + "isFirstRun";
    public static final String TrackCounterName = PACKAGE_NAME + "trackCounter";
    public static final String ONE_SCAN_PERIOD_NAME = PACKAGE_NAME + "oneScanPeriod";
    public static final String HOW_MANY_SCAN_NAME = PACKAGE_NAME + "howManyScan";
    public static final String HOW_MANY_LEARNING_NAME = PACKAGE_NAME + "howManyLearning";

    // Default values (non-final on purpose? NOTE(review): these are reassignable
    // public statics — verify whether any caller mutates them or they can be final).
    public static final String DEFAULT_USERNAME = "hadi";
    public static String DEFAULT_GROUP = "arman3";
    public static String DEFAULT_SERVER = "http://192.168.127.12:18003/";
    public static String DEFAULT_LOCATION_NAME = "location";
    public static int DEFAULT_TRACKING_INTERVAL = 3;
    public static int DEFAULT_LEARNING_INTERVAL = 3;
    public static int DEFAULT_TRACKING_COUNTER = 30;
    public static int DEFAULT_LEARNING_PERIOD =5;
    public static int ALT_BEACON_TRACK_INTERVAL_AMOUNT = 10;
    public static int ONE_SCAN_PERIOD = 300;       // milliseconds per scan window (presumably — confirm unit)
    public static int HOW_MANY_SCAN = 10;
    public static int HOW_MANY_LEARNING_DEFAULT = 100;
    public static int SEND_PAYLOAD_PERIOD = 3000;  // milliseconds between payload sends (presumably — confirm unit)

    // Broadcast message tags
    public static final String TRACK_BCAST = "com.find.wifitool.track";
    public static final String TRACK_TAG = "track";
    public static final String LEARN_TAG = "learn";

    // Web URLs
    public static final String FIND_GITHUB_URL = "https://github.com/schollz/find";
    // NOTE(review): FIND_APP_URL has a leading space in the literal — verify
    // whether consumers trim it before use.
    public static final String FIND_APP_URL = " https://github.com/uncleashi/find-client-android";
    public static final String FIND_WEB_URL = "https://www.internalpositioning.com/";
    public static final String FIND_ISSUES_URL = "https://github.com/schollz/find/issues";
}
stoyanov7/SoftwareUniversity
JavaScriptCore/JavaScriptApplications/AJAX-and-jQuery-AJAX/03.Github-Repos/loadRepos.js
<reponame>stoyanov7/SoftwareUniversity function loadRepos() { let reposUl = $("#repos"); reposUl.html(""); let username = $("#username").val(); $.ajax({ method: "GET", url: `https://api.github.com/users/${username}/repos`, dataType: "json", error: () => { reposUl.append("<li>Error</li>"); }, success: (repos) => { $(repos).each((index, element) => { reposUl.append("<li><a href='" + element.html_url + "'>" + element.full_name + "</a></li>"); }); } }); }
janpio/prisma-engines
query-engine/connector-test-kit/src/test/scala/queries/filters/ListFilterSpec.scala
<filename>query-engine/connector-test-kit/src/test/scala/queries/filters/ListFilterSpec.scala package queries.filters import org.scalatest.{FlatSpec, Matchers} import util.ConnectorCapability.JoinRelationLinksCapability import util.ConnectorTag.PostgresConnectorTag import util._ // RS: Ported class ListFilterSpec extends FlatSpec with Matchers with ApiSpecBase with ConnectorAwareTest { override def runOnlyForConnectors: Set[ConnectorTag] = Set(PostgresConnectorTag) val project: Project = ProjectDsl.fromString { """ |model Test { | id String @id | strList String[] | intList Int[] | floatList Float[] | bIntList BigInt[] | decList Decimal[] | dtList DateTime[] | boolList Boolean[] | jsonList Json[] | bytesList Bytes[] | enumList TestEnum[] |} | |enum TestEnum { | A | B |} """.stripMargin } override protected def beforeAll(): Unit = { super.beforeAll() database.setup(project) createTestData() } "The `equals` operation" should "work as expected" in { query("strList", "equals", """["a", "A", "c"]""", Some(1)) query("intList", "equals", """[1, 2, 3]""", Some(1)) query("floatList", "equals", """[1.1, 2.2, 3.3]""", Some(1)) query("bIntList", "equals", """["100", "200", "300"]""", Some(1)) query("decList", "equals", """["11.11", "22.22", "33.33"]""", Some(1)) query("dtList", "equals", """["1969-01-01T10:33:59.000Z", "2018-12-05T12:34:23.000Z"]""", Some(1)) query("boolList", "equals", """[true]""", Some(1)) query("jsonList", "equals", """["{}", "{\"int\":5}", "[1, 2, 3]"]""", Some(1)) query("bytesList", "equals", """["dGVzdA==", "dA=="]""", Some(1)) query("enumList", "equals", """[A, B, B, A]""", Some(1)) } "The `has` operation" should "work as expected" in { query("strList", "has", """"A"""", Some(1)) query("intList", "has", """2""", Some(1)) query("floatList", "has", """1.1""", Some(1)) query("bIntList", "has", """"200"""", Some(1)) query("decList", "has", """33.33""", Some(1)) query("dtList", "has", """"2018-12-05T12:34:23.000Z"""", Some(1)) query("boolList", "has", 
"""true""", Some(1)) query("jsonList", "has", """"[1, 2, 3]"""", Some(1)) query("bytesList", "has", """"dGVzdA=="""", Some(1)) query("enumList", "has", """A""", Some(1)) } "The `hasSome` operation" should "work as expected" in { query("strList", "hasSome", """["A", "c"]""", Some(1)) query("intList", "hasSome", """[2, 10]""", Some(1)) query("floatList", "hasSome", """[1.1, 5.5]""", Some(1)) query("bIntList", "hasSome", """["200", "5000"]""", Some(1)) query("decList", "hasSome", """[55.55, 33.33]""", Some(1)) query("dtList", "hasSome", """["2018-12-05T12:34:23.000Z", "2019-12-05T12:34:23.000Z"]""", Some(1)) query("boolList", "hasSome", """[true, false]""", Some(1)) query("jsonList", "hasSome", """["{}", "[1]"]""", Some(1)) query("bytesList", "hasSome", """["dGVzdA==", "bG9va2luZyBmb3Igc29tZXRoaW5nPw=="]""", Some(1)) query("enumList", "hasSome", """[A]""", Some(1)) query("strList", "hasSome", """[]""", None) } "The `hasEvery` operation" should "work as expected" in { query("strList", "hasEvery", """["A", "d"]""", None) query("strList", "hasEvery", """["A"]""", Some(1)) query("intList", "hasEvery", """[2, 10]""", None) query("intList", "hasEvery", """[2]""", Some(1)) query("floatList", "hasEvery", """[1.1, 5.5]""", None) query("floatList", "hasEvery", """[1.1]""", Some(1)) query("bIntList", "hasEvery", """["200", "5000"]""", None) query("bIntList", "hasEvery", """["200"]""", Some(1)) query("decList", "hasEvery", """[55.55, 33.33]""", None) query("decList", "hasEvery", """[33.33]""", Some(1)) query("dtList", "hasEvery", """["2018-12-05T12:34:23.000Z", "2019-12-05T12:34:23.000Z"]""", None) query("dtList", "hasEvery", """["2018-12-05T12:34:23.000Z"]""", Some(1)) query("boolList", "hasEvery", """[true, false]""", None) query("boolList", "hasEvery", """[true]""", Some(1)) query("jsonList", "hasEvery", """["{}", "[1]"]""", None) query("jsonList", "hasEvery", """["{}"]""", Some(1)) query("bytesList", "hasEvery", """["dGVzdA==", "bG9va2luZyBmb3Igc29tZXRoaW5nPw=="]""", None) 
query("bytesList", "hasEvery", """["dGVzdA=="]""", Some(1)) query("enumList", "hasEvery", """[A, B]""", Some(1)) } "Querying `hasEvery` with an empty input" should "return all" in { val result = server.query( s""" |query { | findManyTest(where: { | strList: { hasEvery: [] } | }) { | id | } |} |""".stripMargin, project, legacy = false ) result.toString() should be("""{"data":{"findManyTest":[{"id":"1"},{"id":"2"}]}}""") } "The `isEmpty` operation" should "work as expected" in { query("strList", "isEmpty", "true", Some(2)) query("intList", "isEmpty", "true", Some(2)) query("floatList", "isEmpty", "true", Some(2)) query("bIntList", "isEmpty", "true", Some(2)) query("decList", "isEmpty", "true", Some(2)) query("dtList", "isEmpty", "true", Some(2)) query("boolList", "isEmpty", "true", Some(2)) query("jsonList", "isEmpty", "true", Some(2)) query("bytesList", "isEmpty", "true", Some(2)) query("enumList", "isEmpty", "true", Some(2)) query("strList", "isEmpty", "false", Some(1)) query("intList", "isEmpty", "false", Some(1)) query("floatList", "isEmpty", "false", Some(1)) query("bIntList", "isEmpty", "false", Some(1)) query("decList", "isEmpty", "false", Some(1)) query("dtList", "isEmpty", "false", Some(1)) query("boolList", "isEmpty", "false", Some(1)) query("jsonList", "isEmpty", "false", Some(1)) query("bytesList", "isEmpty", "false", Some(1)) query("enumList", "isEmpty", "false", Some(1)) } def query(field: String, operation: String, comparator: String, expectedId: Option[Int]): Unit = { val result = server.query( s""" |query { | findManyTest(where: { | $field: { $operation: $comparator } | }) { | id | } |} |""".stripMargin, project, legacy = false ) expectedId match { case Some(id) => result.toString() should be(s"""{"data":{"findManyTest":[{"id":"$id"}]}}""") case None => result.pathAsSeq("data.findManyTest").length should be(0) } } // 1 with full data // 1 empty def createTestData(): Unit = { server.query( s"""mutation { |createOneTest(data: { | id: "1", | strList: 
["a", "A", "c"], | intList: [1, 2, 3], | floatList: [1.1, 2.2, 3.3], | bIntList: ["100", "200", "300"], | decList: ["11.11", "22.22", "33.33"], | dtList: ["1969-01-01T10:33:59.000Z", "2018-12-05T12:34:23.000Z"], | boolList: [true], | jsonList: ["{}", "{\\"int\\":5}", "[1, 2, 3]"], | bytesList: ["dGVzdA==", "dA=="], | enumList: [A, B, B, A] |}) { id } |} |""".stripMargin, project, legacy = false ) server.query( s"""mutation { |createOneTest(data: { | id: "2", | strList: [], | intList: [], | floatList: [], | bIntList: [], | decList: [], | dtList: [], | boolList: [], | jsonList: [], | bytesList: [], | enumList: [] |}) { id } |} |""".stripMargin, project, legacy = false ) } }
roldanx/oskar
oskar-spark/src/main/java/org/opencb/oskar/spark/variant/analysis/wrappers/PlinkWrapper.java
<reponame>roldanx/oskar<gh_stars>1-10 package org.opencb.oskar.spark.variant.analysis.wrappers; import org.apache.commons.lang.StringUtils; import org.apache.spark.sql.SparkSession; import org.opencb.biodata.models.metadata.Individual; import org.opencb.biodata.models.metadata.Sample; import org.opencb.biodata.models.variant.metadata.VariantStudyMetadata; import org.opencb.biodata.tools.variant.metadata.VariantMetadataManager; import org.opencb.commons.datastore.core.Query; import org.opencb.oskar.analysis.exceptions.AnalysisExecutorException; import org.opencb.oskar.analysis.exceptions.AnalysisToolException; import org.opencb.oskar.analysis.executor.Executor; import org.opencb.oskar.core.config.OskarConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; public class PlinkWrapper extends VariantAnalysisWrapper { public static final String ANALYSIS_NAME = "plink"; private String inFilename; private String metaFilename; private Query query; private Map<String, String> plinkParams; private Logger logger; public PlinkWrapper(String studyId, String inFilename, String metaFilename, Query query, Map<String, String> plinkParams, OskarConfiguration configuration) { super(studyId, configuration); this.inFilename = inFilename; this.metaFilename = metaFilename; this.query = query; this.plinkParams = plinkParams; this.logger = LoggerFactory.getLogger(PlinkWrapper.class); } public void execute() throws AnalysisExecutorException { // Sanity check Path binPath; try { binPath = Paths.get(configuration.getAnalysis().get(ANALYSIS_NAME).getPath()); if (binPath == null || !binPath.toFile().exists()) { String msg = "PLINK binary path is missing or does not exist: '" + binPath + "'."; logger.error(msg); throw new AnalysisExecutorException(msg); } } catch (Exception e) { logger.error(e.getMessage()); throw new AnalysisExecutorException(e.getMessage()); } // Get output dir Path outDir = 
Paths.get("/tmp"); if (plinkParams.get("out") != null) { outDir = Paths.get(plinkParams.get("out")).getParent(); } // Generate VCF file by calling VCF exporter from query and query options VariantMetadataManager manager = new VariantMetadataManager(); try { manager.load(Paths.get(metaFilename)); SparkSession sparkSession = SparkSession.builder().appName("variant-plink").getOrCreate(); // VariantDataset vd = new VariantDataset(sparkSession); // vd.load(inFilename); // vd.createOrReplaceTempView("vcf"); // // if (query != null) { // vd.setQuery(query); // } // out filename VariantStudyMetadata studyMetadata = manager.getVariantMetadata().getStudies().get(0); // exportPedMapFile(vd, studyMetadata, outDir + "/plink"); // close sparkSession.stop(); } catch (Exception e) { e.printStackTrace(); logger.error("Error executing PLINK tool when retrieving variants to PED and MAP files: {}", e.getMessage()); return; } // Execute PLINK StringBuilder sb = new StringBuilder(); sb.append(binPath); sb.append(" --file ").append(outDir).append("/plink"); for (String key : plinkParams.keySet()) { sb.append(" --").append(key); String value = plinkParams.get(key); if (!StringUtils.isEmpty(value)) { sb.append(" ").append(value); } } try { Executor.execute(sb.toString(), outDir, true); } catch (AnalysisToolException e) { logger.error(e.getMessage()); throw new AnalysisExecutorException(e); } } // public void exportPedMapFile(VariantDataset variantDataset, VariantStudyMetadata studyMetadata, // String prefix) throws FileNotFoundException { // Path pedPath = Paths.get(prefix + ".ped"); // Path mapPath = Paths.get(prefix + ".map"); // // StringBuilder sb = new StringBuilder(); // PrintWriter pedWriter = new PrintWriter(pedPath.toFile()); // PrintWriter mapWriter = new PrintWriter(mapPath.toFile()); // Iterator<Variant> iterator = variantDataset.iterator(); // // List<String> sampleNames = VariantMetadataUtils.getSampleNames(studyMetadata); // //List<String> ms = new 
ArrayList<>(sampleNames.size()); // StringBuilder[] markers = new StringBuilder[sampleNames.size()]; // while (iterator.hasNext()) { // Variant variant = iterator.next(); // // genotypes // List<List<String>> sampleData = variant.getStudiesMap().get(studyMetadata.getId()).getSamplesData(); // assert(sampleData.size() == sampleNames.size()); // for (int i = 0; i < sampleData.size(); i++) { // String[] gt = sampleData.get(i).get(0).split("[|/]"); // if (markers[i] == null) { // markers[i] = new StringBuilder(); // } // markers[i].append("\t" // + (gt[0].equals("1") ? variant.getAlternate() : variant.getReference()) // + "\t" // + (gt[1].equals("1") ? variant.getAlternate() : variant.getReference())); // } // // // map file line // mapWriter.println(variant.getChromosome() + "\t" + variant.getId() + "\t0\t" + variant.getStart()); // } // // // ped file line // for (int i = 0; i < sampleNames.size(); i++) { // sb.setLength(0); // String sampleName = sampleNames.get(i); // Individual individual = getIndividualBySampleName(sampleName, studyMetadata); // if (individual == null) { // // sample not found, what to do?? // sb.append(0).append("\t"); // sb.append(sampleName).append("\t"); // sb.append(0).append("\t"); // sb.append(0).append("\t"); // sb.append(0).append("\t"); // sb.append(0); // } else { // int sex = org.opencb.biodata.models.core.pedigrees.Individual.Sex // .getEnum(individual.getSex()).getValue(); // int phenotype = org.opencb.biodata.models.core.pedigrees.Individual.AffectionStatus // .getEnum(individual.getPhenotype()).getValue(); // sb.append(individual.getFamily() == null ? 0 : individual.getFamily()).append("\t"); // sb.append(sampleName).append("\t"); // sb.append(individual.getFather() == null ? 0 : individual.getFather()).append("\t"); // sb.append(individual.getMother() == null ? 
0 : individual.getMother()).append("\t"); // sb.append(sex).append("\t"); // sb.append(phenotype); // } // sb.append(markers[i]); // pedWriter.println(sb.toString()); // } // // // close // pedWriter.close(); // mapWriter.close(); // } private Individual getIndividualBySampleName(String sampleName, VariantStudyMetadata studyMetadata) { for (Individual individual: studyMetadata.getIndividuals()) { for (Sample sample: individual.getSamples()) { if (sampleName.equals(sample.getId())) { return individual; } } } return null; } }
AntoineJT/NazaraEngine
src/Nazara/Graphics/DeferredFogPass.cpp
<gh_stars>0 // Copyright (C) 2017 <NAME> // This file is part of the "Nazara Engine - Graphics module" // For conditions of distribution and use, see copyright notice in Config.hpp #include <Nazara/Graphics/DeferredFogPass.hpp> #include <Nazara/Graphics/AbstractViewer.hpp> #include <Nazara/Graphics/SceneData.hpp> #include <Nazara/Renderer/Renderer.hpp> #include <Nazara/Renderer/RenderTexture.hpp> #include <Nazara/Graphics/Debug.hpp> namespace Nz { namespace { /*! * \brief Builds the shader for the fog * \return Reference to the shader newly created */ ShaderRef BuildFogShader() { /*const UInt8 fragmentSource[] = { #include <Nazara/Graphics/Resources/DeferredShading/Shaders/FXAA.frag.h> };*/ const char* fragmentSource = "#version 140\n" "out vec4 RenderTarget0;\n" "uniform sampler2D ColorTexture;\n" "uniform sampler2D GBuffer2;\n" "uniform mat4 InvViewProjMatrix;\n" "uniform vec2 InvTargetSize;\n" "uniform vec3 EyePosition;\n" "float n = 0.1;" "float f = 1000.0;" "float color_to_float(vec3 color)\n" "{\n" "const vec3 byte_to_float = vec3(1.0, 1.0/256, 1.0/(256*256));\n" "return dot(color, byte_to_float);\n" "}\n" "void main()\n" "{" "vec2 texCoord = gl_FragCoord.xy * InvTargetSize;\n" "\t" "vec3 color = texture(ColorTexture, texCoord).xyz;\n" "vec4 gVec2 = textureLod(GBuffer2, texCoord, 0.0);\n" "float depth = color_to_float(gVec2.xyz)*2.0 - 1.0;\n" "float linearDepth = (2 * n) / (f + n - depth * (f - n));" "vec3 viewSpace = vec3(texCoord*2.0 - 1.0, depth);\n" "vec4 worldPos = InvViewProjMatrix * vec4(viewSpace, 1.0);\n" "worldPos.xyz /= worldPos.w;\n" /*"float lumThreshold = 0.1;" "float lumMultipler = 2.0;" //"float lumFactor = max(dot(color, vec3(0.299, 0.587, 0.114)) - lumThreshold, 0.0) / (1.0-lumThreshold);" "float fogFactor = (1.0 - clamp(worldPos.y-2.0, 0.0, 1.0)) - lumFactor*lumMultipler;" "fogFactor += (1.0 - clamp(EyePosition.y-2.5, 0.0, 1.0));" "fogFactor = clamp(fogFactor, 0.0, 1.0);"*/ "float lumThreshold = 0.8;" "float lumMultipler = 2.0;" "float 
luminosity = dot(color, vec3(0.299, 0.587, 0.114));" "float lumFactor = max(luminosity - lumThreshold, 0.0) / (1.0-lumThreshold);" "vec4 fogColor = vec4(0.5, 0.5, 0.5, 1.0);\n" "vec2 fogrange = vec2(0, 50);\n" "float fogeffect = clamp( 1.0 - (fogrange.y - linearDepth*0.5*f) / (fogrange.y - fogrange.x) , 0.0, 1.0 ) * fogColor.w;\n" "fogeffect = max(fogeffect-lumFactor, 0.0);" //fogeffect*=(1.0 - int(depth)); "\t" "vec3 fragmentColor = color*(1.0-fogeffect) + fogColor.rgb * fogeffect;\n" "\t" "RenderTarget0 = vec4(fragmentColor, 1.0);\n" "}"; const char* vertexSource = "#version 140\n" "in vec3 VertexPosition;\n" "void main()\n" "{\n" "\t" "gl_Position = vec4(VertexPosition, 1.0);" "\n" "}\n"; ///TODO: Remplacer ça par des ShaderNode ShaderRef shader = Shader::New(); if (!shader->Create()) { NazaraError("Failed to load create shader"); return nullptr; } if (!shader->AttachStageFromSource(ShaderStageType_Fragment, fragmentSource/*String(reinterpret_cast<const char*>(fragmentSource), sizeof(fragmentSource))*/)) { NazaraError("Failed to load fragment shader"); return nullptr; } if (!shader->AttachStageFromSource(ShaderStageType_Vertex, vertexSource)) { NazaraError("Failed to load vertex shader"); return nullptr; } if (!shader->Link()) { NazaraError("Failed to link shader"); return nullptr; } shader->SendInteger(shader->GetUniformLocation("ColorTexture"), 0); shader->SendInteger(shader->GetUniformLocation("GBuffer2"), 1); return shader; } } /*! * \ingroup graphics * \class Nz::DeferredFogPass * \brief Graphics class that represents the pass for fog in deferred rendering */ /*! 
* \brief Constructs a DeferredFogPass object by default */ DeferredFogPass::DeferredFogPass() { m_pointSampler.SetAnisotropyLevel(1); m_pointSampler.SetFilterMode(SamplerFilter_Nearest); m_pointSampler.SetWrapMode(SamplerWrap_Clamp); m_shader = BuildFogShader(); m_shaderEyePositionLocation = m_shader->GetUniformLocation("EyePosition"); m_states.depthBuffer = false; } DeferredFogPass::~DeferredFogPass() = default; /*! * \brief Processes the work on the data while working with textures * \return true * * \param sceneData Data for the scene * \param firstWorkTexture Index of the first texture to work with * \param firstWorkTexture Index of the second texture to work with */ bool DeferredFogPass::Process( const SceneData& sceneData, unsigned int firstWorkTexture, unsigned int secondWorkTexture) const { NazaraAssert(sceneData.viewer, "Invalid viewer"); m_workRTT->SetColorTarget(firstWorkTexture); Renderer::SetTarget(m_workRTT); Renderer::SetViewport(Recti(0, 0, m_dimensions.x, m_dimensions.y)); Renderer::SetShader(m_shader); m_shader->SendVector(m_shaderEyePositionLocation, sceneData.viewer->GetEyePosition()); Renderer::SetRenderStates(m_states); Renderer::SetTexture(0, m_workTextures[secondWorkTexture]); Renderer::SetTexture(1, m_GBuffer[2]); Renderer::SetTextureSampler(0, m_pointSampler); Renderer::SetTextureSampler(1, m_pointSampler); Renderer::DrawFullscreenQuad(); return true; } }
bg1bgst333/Sample
designpattern/mvvm/mvvm/src/mvvm/member.h
<gh_stars>1-10
// Guard against double inclusion.
// NOTE(review): identifiers starting with a double underscore (__MEMBER_H_)
// are reserved for the implementation; consider renaming the guard.
#ifndef __MEMBER_H_
#define __MEMBER_H_

// Header includes
// Project header
#include "mediator.h" // interface_mediator

// Forward declaration
class interface_mediator;

// Interface interface_member: abstract contract for a participant that is
// wired to an interface_mediator and refreshed through notify().
class interface_member{

// Public members
public:

    // Public member functions
    // Constructor and destructor
    interface_member(){};          // constructor interface_member
    virtual ~interface_member(){}; // virtual destructor ~interface_member, so
                                   // subclasses can be destroyed via this interface

    // Member functions
    virtual void set_mediator(interface_mediator *mediator) = 0; // pure virtual: attach the mediator
    virtual void notify() = 0;                                   // pure virtual: react to a mediator update

};

#endif
JoseHerminioCollas/rgb-client
src/goatstone/components/effect-select.js
/*
EffectSelect : a Cycle.js component for selecting a lighting effect from a
series of radio-button choices.

Sinks:
  val — stream of the raw selected value ('glow' | 'chase' | 'red-blue' | 'off')
  DOM — stream of the rendered virtual DOM
*/
import { h, input, label } from '@cycle/dom'

function EffectSelect(sources) {
  const inputClassName = '.effect'
  // TODO should copy be another stream?????
  const copy = sources.copy
  // BUG FIX: this used to be an array ([glow, chase, redBlue]) indexed with the
  // selected *string* value, so `effects[val]` was always undefined and the
  // display label never rendered. Map each radio value to its display copy.
  const effects = {
    'glow': copy.effect.options.glow,
    'chase': copy.effect.options.chase,
    'red-blue': copy.effect.options.redBlue,
    'off': copy.effect.options.off,
  }
  // Raw selection stream: the value of whichever radio button last changed,
  // starting from 'off' before any interaction.
  const newValue$ = sources.DOM
    .select('[name=effect-choice]')
    .events('change')
    .map(ev => ev.target.value)
    .startWith('off')
  // Derived state: the raw value plus its human-readable label.
  const state$ = newValue$
    .map(val => ({ value: val, displayValue: effects[val] }))
    .remember()
  const vdom$ = state$.map(({ value, displayValue }) =>
    h('article', {}, [
      h('h3', { style: { fontWeight: 900 } },
        [copy.effect.title,
          h('span', { style: { color: 'black', fontSize: '.7em', fontWeight: 'normal' } },
            [' ', displayValue]),
        ]),
      label('', {}, [
        copy.effect.options.glow,
        input(inputClassName,
          { attrs: { type: 'radio', name: 'effect-choice', value: 'glow', checked: value === 'glow' } }),
      ]),
      label('', {}, [
        copy.effect.options.chase,
        input(inputClassName,
          { attrs: { type: 'radio', name: 'effect-choice', value: 'chase', checked: value === 'chase' } }),
      ]),
      label('', {}, [
        copy.effect.options.redBlue,
        input(inputClassName,
          { attrs: { type: 'radio', name: 'effect-choice', value: 'red-blue', checked: value === 'red-blue' } }),
      ]),
      label('', {}, [
        copy.effect.options.off,
        input(inputClassName,
          { attrs: { type: 'radio', name: 'effect-choice', value: 'off', checked: value === 'off' } }),
      ]),
    ])
  )
  return {
    val: newValue$,
    DOM: vdom$
  }
}

export default EffectSelect
henanewind/rpc
rpc-common/src/main/java/com/github/houbb/rpc/common/util/IpUtils.java
package com.github.houbb.rpc.common.util; /** * IP 工具類 * @since 0.2.0 */ public final class IpUtils { /** * 构建对应的 ip:port 结果 * @param ip 地址 * @param port 端口 * @return 结果 * @since 0.2.0 */ public static String ipPort(String ip, int port) { return ip+":"+port; } }
pedritoelcabra/busyville
src/js/game/classes/items/chainhat.js
<filename>src/js/game/classes/items/chainhat.js
'use strict';

// Base class for head-slot equipment.
var Head = require('./head');
// NOTE(review): Randomizer is required but never used in this module —
// confirm whether it is needed (kept here for a planned feature?) or can be dropped.
var Randomizer = require('../randomizer');

/**
 * Chain Hat: a head-slot item. Inherits all behaviour from Head and only
 * overrides the identifying metadata (display name, class name, sprite key).
 */
var ChainHat = function () {
    Head.call(this);

    this.name = 'Chain Hat';
    this.className = 'ChainHat';
    this.graphic = 'chainhat';
};

// Standard prototypal inheritance from Head.
ChainHat.prototype = Object.create(Head.prototype);
ChainHat.prototype.constructor = ChainHat;

module.exports = ChainHat;
tue-robotics/ed
include/ed/variant.h
<filename>include/ed/variant.h
#ifndef ED_VARIANT_H_
#define ED_VARIANT_H_

// Directly taken from http://stackoverflow.com/questions/5319216/implementing-a-variant-class
//
// A minimal type-erased "any"-style container: the stored value lives in a
// templated VariantImpl<T> held through a shared_ptr to its abstract base,
// so Variant itself stays non-templated and copyable. Copies of a Variant
// SHARE the same underlying value object (shallow copy via shared_ptr).

#include <boost/shared_ptr.hpp>

#include <string>

namespace ed
{

// TypeWrapper normalizes a possibly const / reference-qualified type to its
// bare TYPE plus the matching const/ref variants, so Variant<T>, Variant<const T&>,
// etc. all resolve to the same stored VariantImpl<T>.
template <typename T>
struct TypeWrapper
{
    typedef T TYPE;
    typedef const T CONSTTYPE;
    typedef T& REFTYPE;
    typedef const T& CONSTREFTYPE;
};

// Specialization: strip top-level const.
template <typename T>
struct TypeWrapper<const T>
{
    typedef T TYPE;
    typedef const T CONSTTYPE;
    typedef T& REFTYPE;
    typedef const T& CONSTREFTYPE;
};

// Specialization: strip const reference.
template <typename T>
struct TypeWrapper<const T&>
{
    typedef T TYPE;
    typedef const T CONSTTYPE;
    typedef T& REFTYPE;
    typedef const T& CONSTREFTYPE;
};

// Specialization: strip non-const reference.
template <typename T>
struct TypeWrapper<T&>
{
    typedef T TYPE;
    typedef const T CONSTTYPE;
    typedef T& REFTYPE;
    typedef const T& CONSTREFTYPE;
};

class Variant
{

public:

    // Empty variant: holds no value (mImpl is null).
    Variant()
    {
    }

    // Construct from any copyable value; the value is copied into the impl.
    template<class T>
    Variant(T inValue) :
        mImpl(new VariantImpl<typename TypeWrapper<T>::TYPE>(inValue))
    {
    }

    // Returns a reference to the stored value as T.
    // Throws std::bad_cast (via the reference dynamic_cast) if T does not
    // match the stored type.
    // NOTE(review): calling this on a default-constructed (empty) Variant
    // dereferences a null impl pointer — undefined behaviour; confirm callers
    // always set a value first.
    template<class T>
    typename TypeWrapper<T>::REFTYPE getValue()
    {
        return dynamic_cast<VariantImpl<typename TypeWrapper<T>::TYPE>&>(*mImpl.get()).mValue;
    }

    // Const overload of getValue(); same type-mismatch / empty-variant caveats.
    template<class T>
    typename TypeWrapper<T>::CONSTREFTYPE getValue() const
    {
        return dynamic_cast<VariantImpl<typename TypeWrapper<T>::TYPE>&>(*mImpl.get()).mValue;
    }

    // Replace the stored value (and possibly its type) with a copy of inValue.
    template<class T>
    void setValue(typename TypeWrapper<T>::CONSTREFTYPE inValue)
    {
        mImpl.reset(new VariantImpl<typename TypeWrapper<T>::TYPE>(inValue));
    }

private:

    // Abstract base: provides only the virtual destructor needed for type erasure.
    struct AbstractVariantImpl
    {
        virtual ~AbstractVariantImpl() {}
    };

    // Concrete holder for a value of type T.
    template<class T>
    struct VariantImpl : public AbstractVariantImpl
    {
        VariantImpl(T inValue) : mValue(inValue) { }

        ~VariantImpl() {}

        T mValue;
    };

    // Shared ownership: copied Variants alias the same stored value.
    boost::shared_ptr<AbstractVariantImpl> mImpl;
};

} // end namespace

#endif
hsav20/RemoteDevice
kcdevice/src/main/java/ltd/kcdevice/view/SpeakerSetupView.java
<reponame>hsav20/RemoteDevice<filename>kcdevice/src/main/java/ltd/kcdevice/view/SpeakerSetupView.java
package ltd.kcdevice.view;

import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.SeekBar;

import ltd.advskin.MSKIN;
import ltd.advskin.base.KcTwoListener;
import ltd.advskin.view.KcBtnProgress;
import ltd.advskin.view.KcBtnText;
import ltd.kcdevice.R;
import main.MAPI;

import static main.MAPI.MSTRING;

/**
 * Speaker setup panel: a type label plus up to three mutually exclusive
 * speaker choice buttons. Selecting one button unchecks the others and
 * reports the chosen option once through the registered {@link KcTwoListener}.
 */
public class SpeakerSetupView extends androidx.constraintlayout.widget.ConstraintLayout {

    private int gIndex;                   // panel id passed back to the listener
    private KcTwoListener mKcTwoListener; // selection callback, may be null
    private KcBtnText kbtSpeakerSetup;    // type / title label
    private KcBtnProgress kbpSpeakerSetupA, kbpSpeakerSetupB, kbpSpeakerSetupC;
    private KcBtnProgress[] mSpeakerView; // the three choice buttons, in Tab_SpeakerView order

    private final int[] Tab_SpeakerView = new int[]{
            R.id.kbpSpeakerSetupA,
            R.id.kbpSpeakerSetupB,
            R.id.kbpSpeakerSetupC,
    };

    // All pages share the same switch for enabling logging.
    public static boolean logEnable;

    public void MLOG(String text) {
        if (logEnable) {
            MSTRING(MAPI.GET_LOG_NAME(this) + text);
        }
    }

    public SpeakerSetupView(Context context) {
        this(context, null);
    }

    public SpeakerSetupView(Context context, AttributeSet attrs) {
        super(context, attrs);
        LayoutInflater.from(context).inflate(R.layout.item_speaker_setup, this, true);
        kbtSpeakerSetup = (KcBtnText) findViewById(R.id.kbtSpeakerSetup);
        kbpSpeakerSetupA = (KcBtnProgress) findViewById(R.id.kbpSpeakerSetupA);
        kbpSpeakerSetupB = (KcBtnProgress) findViewById(R.id.kbpSpeakerSetupB);
        kbpSpeakerSetupC = (KcBtnProgress) findViewById(R.id.kbpSpeakerSetupC);

        mSpeakerView = new KcBtnProgress[Tab_SpeakerView.length];
        for (int index = 0; index < Tab_SpeakerView.length; index++) {
            mSpeakerView[index] = (KcBtnProgress) findViewById(Tab_SpeakerView[index]);
            mSpeakerView[index].setTag(index);
            mSpeakerView[index].setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    int index = (int) v.getTag();
                    // Radio-button behaviour: only the clicked view stays checked.
                    for (int counter = 0; counter < mSpeakerView.length; counter++) {
                        mSpeakerView[counter].setChecked(counter == index);
                    }
                    // BUG FIX: the notification and the log statement used to sit
                    // inside the loop above, so every click fired onMessage() and
                    // logged once per choice button (three times) with identical
                    // arguments. Notify and log exactly once per click.
                    if (mKcTwoListener != null) {
                        mKcTwoListener.onMessage(gIndex, index);
                    }
                    MLOG(String.format("SpeakerSetupView %s", index));
                }
            });
        }
    }

    /** Sets the panel's type/title text via the skin helper. */
    public void setTypeText(String text){
        MSKIN.setText(text, kbtSpeakerSetup);
    }

    /** Sets the label of the first choice button. */
    public void setProgressA(String text){
        MSKIN.setText(text, kbpSpeakerSetupA);
    }

    /** Sets the label of the second choice button. */
    public void setProgressB(String text){
        MSKIN.setText(text, kbpSpeakerSetupB);
    }

    /**
     * Sets the label of the third choice button, or hides it entirely when the
     * text is empty (panels with only two options).
     */
    public void setProgressC(String text){
        if (MAPI.isEmpty(text)){
            kbpSpeakerSetupC.setVisibility(GONE);
        }else {
            kbpSpeakerSetupC.setVisibility(VISIBLE);
            MSKIN.setText(text, kbpSpeakerSetupC);
        }
    }

    /**
     * Registers the selection callback.
     *
     * @param index      panel id reported back as the first onMessage argument
     * @param kcListener callback receiving (panel id, selected option index)
     */
    public void setListener(int index, KcTwoListener kcListener){
        gIndex = index;
        mKcTwoListener = kcListener;
    }
}
TimCook1/trident
storage_drivers/ontap/api/rest/client/s_a_n/iscsi_service_delete_responses.go
// Code generated by go-swagger; DO NOT EDIT. package s_a_n // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "fmt" "io" "github.com/go-openapi/runtime" "github.com/go-openapi/strfmt" "github.com/netapp/trident/storage_drivers/ontap/api/rest/models" ) // IscsiServiceDeleteReader is a Reader for the IscsiServiceDelete structure. type IscsiServiceDeleteReader struct { formats strfmt.Registry } // ReadResponse reads a server response into the received o. func (o *IscsiServiceDeleteReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 200: result := NewIscsiServiceDeleteOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil default: result := NewIscsiServiceDeleteDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } if response.Code()/100 == 2 { return result, nil } return nil, result } } // NewIscsiServiceDeleteOK creates a IscsiServiceDeleteOK with default headers values func NewIscsiServiceDeleteOK() *IscsiServiceDeleteOK { return &IscsiServiceDeleteOK{} } /* IscsiServiceDeleteOK describes a response with status code 200, with default header values. 
OK */ type IscsiServiceDeleteOK struct { } func (o *IscsiServiceDeleteOK) Error() string { return fmt.Sprintf("[DELETE /protocols/san/iscsi/services/{svm.uuid}][%d] iscsiServiceDeleteOK ", 200) } func (o *IscsiServiceDeleteOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { return nil } // NewIscsiServiceDeleteDefault creates a IscsiServiceDeleteDefault with default headers values func NewIscsiServiceDeleteDefault(code int) *IscsiServiceDeleteDefault { return &IscsiServiceDeleteDefault{ _statusCode: code, } } /* IscsiServiceDeleteDefault describes a response with status code -1, with default header values. ONTAP Error Response Codes | Error Code | Description | | ---------- | ----------- | | 2621462 | An SVM with the specified UUID does not exist. | | 5373960 | The iSCSI service is enabled. The iSCSI service must be disabled before it can be deleted. | | 5374078 | The SVM does not have an iSCSI service. | */ type IscsiServiceDeleteDefault struct { _statusCode int Payload *models.ErrorResponse } // Code gets the status code for the iscsi service delete default response func (o *IscsiServiceDeleteDefault) Code() int { return o._statusCode } func (o *IscsiServiceDeleteDefault) Error() string { return fmt.Sprintf("[DELETE /protocols/san/iscsi/services/{svm.uuid}][%d] iscsi_service_delete default %+v", o._statusCode, o.Payload) } func (o *IscsiServiceDeleteDefault) GetPayload() *models.ErrorResponse { return o.Payload } func (o *IscsiServiceDeleteDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models.ErrorResponse) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil }
lisaong/Accera
accera/ir/include/exec/ExecutionPlanOps.h
//////////////////////////////////////////////////////////////////////////////////////////////////// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See LICENSE in the project root for license information. //////////////////////////////////////////////////////////////////////////////////////////////////// #pragma once #include <optional> #include <variant> #include "CacheAccessMaps.h" #include "ExecutionPlanAttributes.h" #include "ExecutionPlanEnums.h" #include "ir/include/nest/Index.h" #include "ir/include/nest/IndexRange.h" #include "ir/include/nest/LoopNestAttributes.h" #include "ir/include/nest/LoopNestOps.h" #include "ir/include/value/ValueEnums.h" #include <utilities/include/MemoryLayout.h> #include <mlir/Dialect/Affine/IR/AffineOps.h> #include <mlir/Dialect/Affine/IR/AffineValueMap.h> #include <mlir/IR/AffineMap.h> #include <mlir/IR/Attributes.h> #include <mlir/IR/Builders.h> #include <mlir/IR/BuiltinOps.h> #include <mlir/IR/Dialect.h> #include <mlir/IR/OpDefinition.h> #include <mlir/IR/PatternMatch.h> #include <mlir/Interfaces/LoopLikeInterface.h> namespace accera::ir { // mlir-tblgen currently creates files with the assumption that the following // symbols are present in the current namespace, so we have to import them // explicitly using llvm::ArrayRef; using llvm::iterator_range; using llvm::StringRef; using mlir::AffineMap; using mlir::AffineMapAttr; using mlir::AffineValueMap; using mlir::ArrayAttr; using mlir::Attribute; using mlir::Block; using mlir::Builder; using mlir::IntegerAttr; using mlir::LogicalResult; using mlir::MemRefType; using mlir::MLIRContext; using mlir::NamedAttribute; using mlir::Op; using mlir::OpAsmParser; using mlir::OpAsmPrinter; using mlir::OpBuilder; using mlir::Operation; using mlir::OperationState; using mlir::OwningRewritePatternList; using mlir::ParseResult; using mlir::Region; using mlir::ShapedType; using mlir::TensorType; using mlir::Type; using mlir::Value; using 
mlir::ValueRange; using loopnest::Index; using loopnest::IndexAttr; using loopnest::IndexRange; using loopnest::InjectableMapping; using loopnest::Position; namespace OpTrait = mlir::OpTrait; namespace executionPlan { using accera::ir::value::MemorySpace; struct CacheAccessContext { mlir::Value value; CacheAccessMaps accessMaps; bool activeBlockCache; bool dimReorderCache; ValueRange fullRelevantScheduleIndices; ValueRange externalRelevantScheduleIndices; // Relevant schedule indices that are external to the cache region std::vector<IndexRange> cacheRegionRelevantScheduleIndexRanges; std::vector<std::vector<Index>> cacheRegionBaseIndices; }; // Copied from ShapedType::kDynamicSize in mlir\include\mlir\IR\StandardTypes.h becuase gcc has linker issues with static constexpr constants const int64_t DynamicSizeSentinelValue = -1; #include "exec/ExecutionPlanInterfaces.h.inc" } // namespace executionPlan } // namespace accera::ir // Include the auto-generated header file containing the declarations of the execution plan operations. 
#define GET_OP_CLASSES #include "exec/ExecutionPlanDialect.h.inc" #include "exec/ExecutionPlanOps.h.inc" namespace accera::ir::executionPlan { // Unit attr name for controlling whether bounds checking is done for ops within a marked op const mlir::StringRef AccessBoundsCheckAttrName = "accxp.access_bounds_check"; // // Utility functions and EDSC-type intrinsics // struct ScheduleShardMapping { std::vector<int64_t> shardSizes; std::vector<int64_t> logicalDimensionMappings; std::vector<int64_t> affinePerDimCoefficients; std::vector<int64_t> affineCoefficients; std::vector<std::vector<loopnest::Index>> relevantScheduleIndices; std::vector<std::vector<size_t>> relevantScheduleIndexPositions; }; struct CacheInfo { MemRefType cacheType; bool activeBlockCache; bool dimReorderCache; int64_t maxElementBudget = -1; CacheAllocation cacheAllocation; std::optional<loopnest::Index> cacheIndex; std::optional<loopnest::Index> triggerIndex; std::vector<Index> accessBaseIndices; CacheAccessMaps accessMaps; ScheduleShardMapping fullShardMapping; ScheduleShardMapping shardMapping; llvm::SmallVector<mlir::Value, 4> fullRelevantScheduleIndices; llvm::SmallVector<mlir::Value, 4> externalRelevantScheduleIndices; // Relevant schedule indices that are external to the cache region std::vector<IndexRange> cacheRegionRelevantScheduleIndexRanges; std::vector<std::vector<Index>> cacheRegionBaseIndices; }; CacheInfo MakeAutomaticCacheInfo( mlir::OpBuilder& builder, mlir::Value input, CacheAllocation cacheAllocation, loopnest::ScheduleOp schedule, const std::optional<loopnest::Index>& outermostIncludedSplitIndex, const std::optional<int64_t>& maxElements = std::nullopt, MemorySpace memorySpace = MemorySpace::Shared); CacheInfo MakeFullBufferAutomaticCacheInfo( mlir::OpBuilder& builder, mlir::Value input, CacheAllocation cacheAllocation, loopnest::ScheduleOp schedule, MemorySpace memorySpace = MemorySpace::Global); CacheInfo MakeManualCacheInfo( mlir::OpBuilder& builder, mlir::Value input, 
CacheAllocation cacheAllocation, loopnest::ScheduleOp schedule, const std::optional<loopnest::Index>& keySliceIndex, const std::optional<loopnest::Index>& triggerIndex, const std::optional<int64_t>& maxElements, const std::variant<utilities::MemoryAffineCoefficients, utilities::DimensionOrder>& cacheMappingInfo, MemorySpace memorySpace); mlir::AffineMap ComputeFlatAffineMapFromAffineCoefficients( mlir::OpBuilder& builder, const utilities::MemoryAffineCoefficients& affineMapping); ScheduleShardMapping GetScheduleShardMapping( loopnest::ScheduleOp schedule, const std::vector<loopnest::Index>& accessLogicalIndices); CacheAccessContext MakeCacheAccessContext( mlir::Value cache, CacheInfo& cacheInfo); DelayedMappingRegionOp MakeDelayedMappingRegion(mlir::OpBuilder& builder, mlir::Value from, mlir::Value to, std::function<void(mlir::OpBuilder&)> body); } // namespace accera::ir::executionPlan
Anlon-Burke/vespa
searchlib/src/main/java/com/yahoo/searchlib/expression/StringResultNode.java
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.searchlib.expression; import com.yahoo.text.Utf8; import com.yahoo.vespa.objects.Deserializer; import com.yahoo.vespa.objects.ObjectVisitor; import com.yahoo.vespa.objects.Serializer; import java.util.Arrays; /** * This result holds a string. * * @author baldersheim * @author <NAME> */ public class StringResultNode extends SingleResultNode { // The global class identifier shared with C++. public static final int classId = registerClass(0x4000 + 53, StringResultNode.class); private static final StringResultNode negativeInfinity = new StringResultNode(""); private static final PositiveInfinityResultNode positiveInfinity = new PositiveInfinityResultNode(); private static final byte[] EMPTY_UTF8_ARRAY = new byte[0]; // The string value of this node, in raw UTF-8 octets. private byte[] utf8Value; /** * Constructs an empty result node. <b>NOTE:</b> This instance is broken until non-optional member data is set. */ public StringResultNode() { super(); utf8Value = EMPTY_UTF8_ARRAY; } /** * Constructs an instance of this class with given value. * * @param value The value to assign to this. */ public StringResultNode(String value) { super(); setValue(value); } private StringResultNode(byte[] rawUtf8Value) { super(); utf8Value = rawUtf8Value; } /** * Creates a new StringResultNode backed by an underlying byte array. The input is * presumed to be in valid UTF-8 format, but is _not_ checked for validity. */ protected static StringResultNode ofUncheckedUtf8Array(byte[] rawUtf8Value) { return new StringResultNode(rawUtf8Value); } /** * Sets the value of this result. * * @param value The value to set. * @return This, to allow chaining. 
*/ public StringResultNode setValue(String value) { if (value == null) { throw new IllegalArgumentException("Value can not be null."); } this.utf8Value = Utf8.toBytes(value); return this; } @Override protected int onGetClassId() { return classId; } @Override protected void onSerialize(Serializer buf) { byte[] raw = getRaw(); buf.putInt(null, raw.length); buf.put(null, raw); } @Override protected void onDeserialize(Deserializer buf) { // We expect the UTF-8 we get from the backend to be pre-checked and valid. utf8Value = getRawUtf8Bytes(buf); } @Override public long getInteger() { try { return Integer.valueOf(getString()); } catch (java.lang.NumberFormatException e) { return 0; } } @Override public double getFloat() { try { return Double.valueOf(getString()); } catch (java.lang.NumberFormatException e) { return 0; } } @Override public String getString() { return Utf8.toString(utf8Value); } @Override public byte[] getRaw() { return utf8Value; } @Override protected int onCmp(ResultNode rhs) { return (rhs instanceof PositiveInfinityResultNode) ? 
-1 : internalNonPositiveInfinityCompareTo(rhs); } @Override public int hashCode() { return super.hashCode() + Arrays.hashCode(utf8Value); } @Override public void visitMembers(ObjectVisitor visitor) { super.visitMembers(visitor); visitor.visit("value", getString()); } @Override public void add(ResultNode rhs) { setValue(getString() + rhs.getString()); } @Override public void min(ResultNode rhs) { if (internalNonPositiveInfinityCompareTo(rhs) > 0) { set(rhs); } } @Override public void max(ResultNode rhs) { if (internalNonPositiveInfinityCompareTo(rhs) < 0) { set(rhs); } } public void append(ResultNode rhs) { setValue(getString() + rhs.getString()); } @Override public Object getValue() { return getString(); } @Override public void set(ResultNode rhs) { if (rhs instanceof StringResultNode) { utf8Value = ((StringResultNode) rhs).utf8Value; } else { setValue(rhs.getString()); } } @Override public void negate() { char[] a = getString().toCharArray(); for (int i = 0; i < a.length; i++) { a[i] = (char)-a[i]; } setValue(new String(a)); } private int internalNonPositiveInfinityCompareTo(ResultNode rhs) { // Note: this may not necessarily be well-defined _semantically_ unless rhs is // also a StringResultNode. The C++ implementation explicitly expects rhs to be // such an instance, but this depends on a classId check that is _not_ done in // the Java implementation... // We use getString() instead of getRaw() to support implicit stringification // (legacy Java implementation behavior), but it's not given that this is always // the desired outcome. var rhsAsUtf8 = (rhs instanceof StringResultNode) ? 
((StringResultNode)rhs).utf8Value : Utf8.toBytes(rhs.getString()); return Arrays.compareUnsigned(utf8Value, rhsAsUtf8); } /** * Will provide the smallest possible value * * @return the smallest possible IntegerResultNode */ public static StringResultNode getNegativeInfinity() { return negativeInfinity; } /** * Will provide the largest possible value * * @return the smallest largest IntegerResultNode */ public static PositiveInfinityResultNode getPositiveInfinity() { return positiveInfinity; } }
zviniciusricardo/orange-talents-03-template-casa-do-codigo
src/main/java/br/com/zupacademy/vinicius/casadocodigo/categoria/CategoriaForm.java
package br.com.zupacademy.vinicius.casadocodigo.categoria; import br.com.zupacademy.vinicius.casadocodigo.validator.UniqueValue; import javax.validation.constraints.NotBlank; public class CategoriaForm { @NotBlank @UniqueValue(domainClass = Categoria.class, fieldName = "nome") private String nome; @Deprecated public CategoriaForm() { } public CategoriaForm(@NotBlank String nome) { this.nome = nome; } public Categoria toModel() { return new Categoria(this.nome); } public String getNome() { return nome; } }
IhorPatychenko/report-engine
src/main/java/org/greports/utils/AnnotationsConverter.java
package org.greports.utils; import org.greports.annotations.Cell; import org.greports.annotations.CellGetter; import org.greports.annotations.CellValidator; import org.greports.annotations.Column; import org.greports.annotations.ColumnGetter; import org.greports.annotations.ColumnSetter; import org.greports.annotations.ColumnValidator; import org.greports.annotations.Converter; import org.greports.annotations.Subreport; import org.greports.annotations.SubreportGetter; import org.greports.annotations.SubreportSetter; import org.greports.converters.AbstractValueConverter; import org.greports.converters.NotImplementedConverter; import org.greports.engine.ValueType; import java.lang.annotation.Annotation; /** * Annotation converter class. * This one is for internal use of greports engine. */ public class AnnotationsConverter { private AnnotationsConverter(){} private static final Converter notImplementedConveter = new Converter() { @Override public Class<? extends Annotation> annotationType() { return Converter.class; } @Override public Class<? extends AbstractValueConverter> converterClass() { return NotImplementedConverter.class; } @Override public String[] params() { return new String[0]; } }; public static Subreport convert(final SubreportGetter subreportGetter) { return new Subreport() { @Override public Class<? extends Annotation> annotationType() { return Subreport.class; } @Override public String[] reportName() { return subreportGetter.reportName(); } @Override public float position() { return subreportGetter.position(); } @Override public String id() { return subreportGetter.id(); } }; } public static Subreport convert(final SubreportSetter subreportGetter) { return new Subreport() { @Override public Class<? 
extends Annotation> annotationType() { return Subreport.class; } @Override public String[] reportName() { return subreportGetter.reportName(); } @Override public float position() { return subreportGetter.position(); } @Override public String id() { return subreportGetter.id(); } }; } public static Column convert(final ColumnGetter columnGetter) { return new Column() { @Override public Class<? extends Annotation> annotationType() { return Column.class; } @Override public String[] reportName() { return columnGetter.reportName(); } @Override public float position() { return columnGetter.position(); } @Override public String target() { return columnGetter.target(); } @Override public CellValidator[] cellValidators() { return new CellValidator[0]; } @Override public ColumnValidator[] columnValidators() { return new ColumnValidator[0]; } @Override public Converter getterConverter() { return columnGetter.typeConverter(); } @Override public Converter setterConverter() { return notImplementedConveter; } @Override public String title() { return columnGetter.title(); } @Override public String format() { return columnGetter.format(); } @Override public ValueType valueType() { return columnGetter.valueType(); } @Override public String id() { return columnGetter.id(); } @Override public boolean autoSizeColumn() { return columnGetter.autoSizeColumn(); } @Override public int columnWidth() { return columnGetter.columnWidth(); } @Override public boolean translate() { return columnGetter.translate(); } }; } public static Cell convert(final CellGetter cellGetter) { return new Cell() { @Override public Class<? 
extends Annotation> annotationType() { return Cell.class; } @Override public String[] reportName() { return cellGetter.reportName(); } @Override public String target() { return cellGetter.target(); } @Override public int row() { return cellGetter.row(); } @Override public int column() { return cellGetter.column(); } @Override public CellValidator[] cellValidators() { return cellGetter.cellValidators(); } @Override public Converter getterConverter() { return cellGetter.getterConverter(); } @Override public Converter setterConverter() { return notImplementedConveter; } @Override public String format() { return cellGetter.format(); } @Override public ValueType valueType() { return cellGetter.valueType(); } @Override public String id() { return cellGetter.id(); } @Override public boolean autoSizeColumn() { return cellGetter.autoSizeColumn(); } @Override public int columnWidth() { return cellGetter.columnWidth(); } @Override public boolean translate() { return cellGetter.translate(); } }; } public static Column convert(final ColumnSetter columnSetter) { return new Column() { @Override public Class<? 
extends Annotation> annotationType() { return Column.class; } @Override public String[] reportName() { return columnSetter.reportName(); } @Override public float position() { return columnSetter.position(); } @Override public String target() { return columnSetter.target(); } @Override public CellValidator[] cellValidators() { return columnSetter.cellValidators(); } @Override public ColumnValidator[] columnValidators() { return columnSetter.columnValidators(); } @Override public Converter getterConverter() { return notImplementedConveter; } @Override public Converter setterConverter() { return columnSetter.typeConverter(); } @Override public String title() { return columnSetter.title(); } @Override public String format() { return columnSetter.format(); } @Override public ValueType valueType() { return columnSetter.valueType(); } @Override public String id() { return columnSetter.id(); } @Override public boolean autoSizeColumn() { return columnSetter.autoSizeColumn(); } @Override public int columnWidth() { return columnSetter.columnWidth(); } @Override public boolean translate() { return false; } }; } }
muthukumaravel7/armnn
Documentation/class_test_elementwise_unary_layer_visitor.js
var class_test_elementwise_unary_layer_visitor = [ [ "TestElementwiseUnaryLayerVisitor", "class_test_elementwise_unary_layer_visitor.xhtml#ac8d328be621ddc72611dea42879027bf", null ], [ "VisitElementwiseUnaryLayer", "class_test_elementwise_unary_layer_visitor.xhtml#ab0de6425150b30ba857eae132efe5f58", null ] ];
adam-rocska/closure-templates-php
java/tests/com/google/template/soy/jssrc/internal/CanInitOutputVarVisitorTest.java
/* * Copyright 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.template.soy.jssrc.internal; import static com.google.common.truth.Truth.assertThat; import com.google.template.soy.SoyFileSetParserBuilder; import com.google.template.soy.error.ErrorReporter; import com.google.template.soy.error.ExplodingErrorReporter; import com.google.template.soy.shared.SharedTestUtils; import com.google.template.soy.soytree.SoyFileSetNode; import com.google.template.soy.soytree.SoyNode; import junit.framework.TestCase; /** * Unit tests for CanInitOutputVarVisitor. 
* */ public class CanInitOutputVarVisitorTest extends TestCase { public void testSameValueAsIsComputableAsJsExprsVisitor() { runTestHelper("Blah blah.", true); runTestHelper("{msg desc=\"\"}Blah{/msg}", true, 0); // GoogMsgDefNode runTestHelper("{msg desc=\"\"}Blah{/msg}", true, 1); // GoogMsgRefNode runTestHelper("{msg desc=\"\"}<a href=\"{$url}\">Click here</a>{/msg}", true, 0, 0, 0); // MsgHtmlTagNode runTestHelper("{msg desc=\"\"}<a href=\"{$url}\">Click here</a>{/msg}", true, 0, 0, 2); // MsgHtmlTagNode runTestHelper("{msg desc=\"\"}<span id=\"{for $i in range(3)}{$i}{/for}\">{/msg}", true, 0, 0, 0); // MsgHtmlTagNode runTestHelper("{$boo.foo}", true); runTestHelper("{xid selected-option}", true); runTestHelper("{css selected-option}", true); runTestHelper("{switch $boo}{case 0}Blah{case 1}Bleh{default}Bluh{/switch}", true); runTestHelper("{foreach $boo in $booze}{$boo}{/foreach}", true); runTestHelper("{for $i in range(4)}{$i + 1}{/for}", true); runTestHelper("{if $boo}Blah{elseif $foo}Bleh{else}Bluh{/if}", true); runTestHelper("{if $goo}{foreach $moo in $moose}{$moo}{/foreach}{/if}", true); runTestHelper("{call .foo data=\"all\" /}", true); runTestHelper("{call .foo data=\"$boo\"}{param goo : $moo /}{/call}", true); runTestHelper("{call .foo data=\"$boo\"}{param goo}Blah{/param}{/call}", true); } public void testNotSameValueAsIsComputableAsJsExprsVisitor() { runTestHelper("{call .foo data=\"$boo\"}" + "{param goo}{foreach $moo in $moose}{$moo}{/foreach}{/param}" + "{/call}", false); } private static void runTestHelper( String soyNodeCode, boolean isSameValueAsIsComputableAsJsExprsVisitor) { runTestHelper(soyNodeCode, isSameValueAsIsComputableAsJsExprsVisitor, 0); } /** * @param indicesToNode Series of indices for walking down to the node we want to test. */ private static void runTestHelper( String soyCode, boolean isSameValueAsIsComputableAsJsExprsVisitor, int... 
indicesToNode) { ErrorReporter boom = ExplodingErrorReporter.get(); SoyFileSetNode soyTree = SoyFileSetParserBuilder.forTemplateContents(soyCode) .errorReporter(boom) .parse(); new ReplaceMsgsWithGoogMsgsVisitor(boom).exec(soyTree); SoyNode node = SharedTestUtils.getNode(soyTree, indicesToNode); IsComputableAsJsExprsVisitor icajev = new IsComputableAsJsExprsVisitor(boom); CanInitOutputVarVisitor ciovv = new CanInitOutputVarVisitor(icajev, boom); assertThat(ciovv.exec(node) == icajev.exec(node)) .isEqualTo(isSameValueAsIsComputableAsJsExprsVisitor); } }
Roenbaeck/sisula
sisulets/source/Variables.js
<reponame>Roenbaeck/sisula // local copies with additions and overrides (source level) source.VARIABLES = copyVariables(VARIABLES); if(source.variables && source.variables.length > 0) { var name, value; for(var v = 0; v < source.variables.length; v++) { name = source.variables[v]; value = source.variable[name].value; source.VARIABLES[name] = value; } } // local copies with additions and overrides (part level) var part, j = 0; while(part = source.part[source.parts[j++]]) { part.VARIABLES = copyVariables(source.VARIABLES); if(part.variables && part.variables.length > 0) { var name, value; for(var v = 0; v < part.variables.length; v++) { name = part.variables[v]; value = part.variable[name].value; part.VARIABLES[name] = value; } } } // do the actual replacement replaceVariables(VARIABLES, source); // global var MAXLEN = 2147483647; var S_SCHEMA = VARIABLES.SourceSchema ? VARIABLES.SourceSchema : 'dbo'; var T_SCHEMA = VARIABLES.TargetSchema ? VARIABLES.TargetSchema : 'dbo';
p4r4n01d/external_jbirdvegas_mGerrit
src/com/jbirdvegas/mgerrit/database/Changes.java
<reponame>p4r4n01d/external_jbirdvegas_mGerrit<gh_stars>1-10 package com.jbirdvegas.mgerrit.database; /* * Copyright (C) 2013 Android Open Kang Project (AOKP) * Author: <NAME> (P4R4N01D), 2013 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import android.content.Context; import android.content.UriMatcher; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.net.Uri; import android.util.Pair; import com.jbirdvegas.mgerrit.helpers.DBParams; import com.jbirdvegas.mgerrit.objects.JSONCommit; public class Changes extends DatabaseTable { // Table name public static final String TABLE = "Changes"; // --- Columns --- // The Change-Id of the change. public static final String C_CHANGE_ID = "change_id"; // The legacy numeric ID of the change (used in the web address) public static final String C_COMMIT_NUMBER = "_change_number"; //The subject of the change (header line of the commit message). public static final String C_SUBJECT = "subject"; //The status of the change (NEW, SUBMITTED, MERGED, ABANDONED, DRAFT). public static final String C_STATUS = "status"; // The name of the project (References Project table) public static final String C_PROJECT = "project"; // The owner of the change (References User table) public static final String C_OWNER = "owner"; /* The timestamp of when the change was created. * Store as ISO8601 string ("YYYY-MM-DD HH:MM:SS.SSS"). 
*/ public static final String C_CREATED = "time_created"; /* The timestamp of when the change was last updated. * Store as ISO8601 string ("YYYY-MM-DD HH:MM:SS.SSS"). */ public static final String C_UPDATED = "time_modified"; // The topic to which this change belongs. public static final String C_TOPIC = "topic"; // The name of the target branch. The refs/heads/ prefix is omitted. public static final String C_BRANCH = "branch"; public static final String[] PRIMARY_KEY = { C_CHANGE_ID }; public static final int ITEM_LIST = UriType.ChangesList.ordinal(); public static final int ITEM_ID = UriType.ChangesID.ordinal(); public static final Uri CONTENT_URI = Uri.parse(DatabaseFactory.BASE_URI + TABLE); public static final String CONTENT_TYPE = DatabaseFactory.BASE_MIME_LIST + TABLE; public static final String CONTENT_ITEM_TYPE = DatabaseFactory.BASE_MIME_ITEM + TABLE; // Sort by condition for querying results. public static final String SORT_BY = C_UPDATED + " DESC"; private static Changes mInstance = null; public static Changes getInstance() { if (mInstance == null) mInstance = new Changes(); return mInstance; } @Override public void create(String TAG, SQLiteDatabase db) { // Specify a conflict algorithm here so we don't have to worry about it later db.execSQL("create table " + TABLE + " (" + C_CHANGE_ID + " text PRIMARY KEY ON CONFLICT REPLACE, " + C_SUBJECT + " text NOT NULL, " + C_CREATED + " INTEGER NOT NULL, " + C_UPDATED + " INTEGER NOT NULL ," + C_OWNER + " INTEGER NOT NULL, " + C_PROJECT + " text NOT NULL, " + C_STATUS + " text DEFAULT '" + JSONCommit.KEY_STATUS_OPEN + "' NOT NULL, " + C_TOPIC + " text, " + C_BRANCH + " text, " + C_COMMIT_NUMBER + " INTEGER NOT NULL, " + "FOREIGN KEY (" + C_OWNER + ") REFERENCES " + Users.TABLE + "(" + Users.C_ACCOUNT_ID + "), " + "FOREIGN KEY (" + C_PROJECT + ") REFERENCES " + ProjectsTable.TABLE + "(" + ProjectsTable.C_PATH + "))"); } @SuppressWarnings("unused") public static void addURIMatches(UriMatcher _urim) { 
_urim.addURI(DatabaseFactory.AUTHORITY, TABLE, ITEM_LIST); _urim.addURI(DatabaseFactory.AUTHORITY, TABLE + "/#", ITEM_ID); } public static String getChangeStatus(Context context, String changeID) { Uri uri = DBParams.fetchOneRow(CONTENT_URI); String status = null; Cursor c = context.getContentResolver().query(uri, new String[] { C_STATUS }, C_CHANGE_ID + " = ?", new String[] { changeID }, null); if (c.moveToFirst()) status = c.getString(0); c.close(); return status; } public static Pair<String, Integer> getMostRecentChange(Context context, String status) { Uri uri = DBParams.fetchOneRow(CONTENT_URI); Pair<String, Integer> pair = null; status = JSONCommit.Status.getStatusString(status); Cursor c = context.getContentResolver().query(uri, new String[] { C_CHANGE_ID, C_COMMIT_NUMBER }, C_STATUS + " = ?", new String[] { status }, SORT_BY); if (c.moveToFirst()) { pair = new Pair<>(c.getString(0), c.getInt(1)); } c.close(); return pair; } public static String getChangeUpdatedTime(Context context, String status, boolean newest) { Uri uri = DBParams.fetchOneRow(CONTENT_URI); String updated = null; status = JSONCommit.Status.getStatusString(status); String sort; if (newest) sort = SORT_BY; else sort = C_UPDATED + " ASC"; Cursor c = context.getContentResolver().query(uri, new String[] { C_UPDATED }, C_STATUS + " = ?", new String[] { status }, sort); if (c.moveToFirst()) updated = c.getString(0); if (updated != null && !updated.isEmpty()) { /* From the SQLite documentation, a time string will have only one space, which can * be replaced with a 'T' to confirm to the ISO-8601 standard */ updated = updated.replace(' ', 'T'); } c.close(); return updated; } public static String getNewestUpdatedTime(Context context, String status) { return getChangeUpdatedTime(context, status, true); } public static String getOldestUpdatedTime(Context context, String status) { return getChangeUpdatedTime(context, status, false); } public static Integer getChangeNumberForChange(Context context, 
String changeID) { Uri uri = DBParams.fetchOneRow(CONTENT_URI); Integer changeNo = null; Cursor c = context.getContentResolver().query(uri, new String[] { C_COMMIT_NUMBER }, C_CHANGE_ID + " = ?", new String[] { changeID }, null); if (c.moveToFirst()) changeNo = c.getInt(0); c.close(); return changeNo; } }
Floritte/Game-Engine-Samples
smolengine/include/Core/Core.h
#pragma once #include "Memory.h" #include "Debug/DebugLog.h" namespace std { template<class F, class...Args> auto bind_front(F&& f, Args&&...args) { return[f = std::forward<F>(f), tup = std::make_tuple(std::forward<Args>(args)...)](auto&&... more_args) ->decltype(auto) { return std::apply([&](auto&&...args)->decltype(auto) { return std::invoke(f, decltype(args)(args)..., decltype(more_args)(more_args)...); }, tup); }; } }
dbiir/pard
pard-main/src/test/java/cn/edu/ruc/iir/pard/TestQueryHandler.java
package cn.edu.ruc.iir.pard; import cn.edu.ruc.iir.pard.executor.connector.PardResultSet; import cn.edu.ruc.iir.pard.scheduler.JobScheduler; import cn.edu.ruc.iir.pard.scheduler.TaskScheduler; import cn.edu.ruc.iir.pard.server.PardQueryHandler; import org.testng.annotations.Test; /** * pard * * @author guodong */ public class TestQueryHandler { @Test public void executeQuery() { JobScheduler scheduler = JobScheduler.INSTANCE(); TaskScheduler taskScheduler = TaskScheduler.INSTANCE(); PardQueryHandler handler = new PardQueryHandler(null, scheduler, taskScheduler); String sql = "delete from booktest.customer where 1=1"; PardResultSet resultSet = handler.executeQuery(sql); System.out.println(resultSet); } }
deleidos/digitaledge-platform
parser-example-archetype/src/main/resources/archetype-resources/src/main/java/ExampleParser.java
package ${package};

import java.text.ParseException;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import net.sf.json.JSONObject;

import org.apache.log4j.Logger;

import com.deleidos.rtws.core.framework.Description;
import com.deleidos.rtws.core.framework.UserConfigured;
import com.deleidos.rtws.core.framework.parser.AbstractLineParser;

/**
 * A example parser for demonstration purposes.
 *
 * <p>This is a Maven-archetype template: {@link #parse()} reads one delimited
 * record per call and is expected to be filled in with project-specific
 * field-extraction logic where indicated below.</p>
 */
@Description("A description of what this parser can do goes here.")
public class ExampleParser extends AbstractLineParser {

	private static final Logger log = Logger.getLogger(ExampleParser.class);

	// Path to the file describing the comma-separated field layout.
	protected String headerKeyFilename;
	// NOTE(review): headerKeys is exposed via getHeaderKeys() but never
	// populated in this template — presumably the generated parser is meant
	// to load it from headerKeyFilename; confirm before use.
	protected String headerKeys[];
	// Fallbacks used when the input stream supplies no source/access label.
	protected String defaultSource;
	protected String defaultAccessLabel;
	protected char delimiter = ',';
	protected boolean stopOnMissingField = true;

	/**
	 * No-Arg Constructor.
	 */
	public ExampleParser() {
		super("UTF-8");
	}

	/**
	 * Set header key file that describes the layout of the fields separated by
	 * commas.
	 */
	public void setHeaderKeyFile(String filename) {
		this.headerKeyFilename = filename;
	}

	/**
	 * @return the header key file
	 */
	public String getHeaderKeyFile() {
		return this.headerKeyFilename;
	}

	/**
	 * @return the header keys
	 */
	public String[] getHeaderKeys() {
		return this.headerKeys;
	}

	/**
	 * Set the delimiter for the data (does not affect header key file).
	 * Only the first character of the supplied string is used.
	 */
	@UserConfigured(value = ",", description = "The delimiter separating data fields.", flexValidator = {
			"RegExpValidator expression=^.$ noMatchError=\"Please provide a single character\"" })
	public void setDelimiter(String value) {
		delimiter = value.charAt(0);
	}

	/**
	 * @return the delimiter
	 */
	public String getDelimiter() {
		return Character.toString(this.delimiter);
	}

	/**
	 * Set the default source string of the data. Can be overridden by the input
	 * stream parameters.
	 */
	@UserConfigured(value = "UNKNOWN", description = "The string describing the source of the data.")
	public void setDefaultSource(String defaultSource) {
		this.defaultSource = defaultSource;
	}

	/**
	 * @return the default source
	 */
	public String getDefaultSource() {
		return this.defaultSource;
	}

	/**
	 * Set the default access label. Can be overridden by the input stream
	 * parameters or the translator.
	 */
	@UserConfigured(value = "UNCLASSIFIED", description = "The default access label to include with the data.")
	public void setDefaultAccessLabel(String defaultAccessLabel) {
		this.defaultAccessLabel = defaultAccessLabel;
	}

	/**
	 * @return the default access label
	 */
	public String getDefaultAccessLabel() {
		return this.defaultAccessLabel;
	}

	/**
	 * Set the stop-on-missing field.
	 */
	@UserConfigured(value = "true", description = "Specifies whether or not to stop processing a record when fields are missing.", flexValidator = {
			"RegExpValidator expression=^(true|false)$" })
	public void setStopOnMissingField(boolean stopOnMissingField) {
		this.stopOnMissingField = stopOnMissingField;
	}

	/**
	 * @return the stop-on-missing field
	 */
	public boolean getStopOnMissingField() {
		return this.stopOnMissingField;
	}

	/**
	 * Perform disposal work on the parser. No resources to release in this
	 * template implementation.
	 */
	public void dispose() {
	}

	/**
	 * Parse and return the next JSON record, or null if all done or error.
	 * Errors are logged rather than propagated.
	 */
	public JSONObject parse() {
		try {
			JSONObject json = new JSONObject();

			String input = nextRecord();
			if (input == null) {
				throw new ParseException("Input data is missing.", -1);
			}

			// Stream-supplied values take precedence over the configured
			// defaults; the info-property lookups are stubbed out in this
			// template (left as null).
			String streamAccessLabel = null ; //info.getProperty(StandardHeader.ACCESS_LABEL_KEY);
			String accessLabel = (streamAccessLabel == null) ? defaultAccessLabel : streamAccessLabel;

			String streamSource = null; // info.getProperty(StandardHeader.SOURCE_KEY);
			String source = (streamSource == null) ? defaultSource : streamSource;

			// Placeholders for the generated parser's custom field
			// extraction; intentionally unused in the template.
			HashMap<String, String> map = new HashMap<String, String>();
			Matcher m;

			/*
			 * Perform custom parsing work here and store results in json doc....
			 */

			return json;
		} catch (ParseException e) {
			log.error("Error parsing record in ExampleParser: ", e);
			return null;
		}
	}

	/**
	 * Hook for pre-parsing the header key file; intentionally empty in this
	 * template.
	 */
	public void parseHeaders() {
	}
}
embeddery/stackrox
central/processbaseline/search/searcher_impl_test.go
package search

import (
	"context"
	"errors"
	"testing"

	"github.com/golang/mock/gomock"
	mockIndex "github.com/stackrox/rox/central/processbaseline/index/mocks"
	mockStore "github.com/stackrox/rox/central/processbaseline/store/mocks"
	"github.com/stackrox/rox/central/role/resources"
	"github.com/stackrox/rox/generated/storage"
	"github.com/stackrox/rox/pkg/fixtures"
	"github.com/stackrox/rox/pkg/sac"
	"github.com/stackrox/rox/pkg/search"
	"github.com/stackrox/rox/pkg/testutils"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"
)

// TestProcessBaselineSearch is the testify entry point for the suite below.
func TestProcessBaselineSearch(t *testing.T) {
	suite.Run(t, new(ProcessBaselineSearchTestSuite))
}

// getFakeSearchResults builds num matched pairs of index results and store
// baselines sharing the same IDs, simulating a consistent index/store view.
func getFakeSearchResults(num int) ([]search.Result, []*storage.ProcessBaseline) {
	var dbResults []*storage.ProcessBaseline
	var indexResults []search.Result
	for i := 0; i < num; i++ {
		baseline := fixtures.GetProcessBaselineWithID()
		dbResults = append(dbResults, baseline)
		fakeResult := search.Result{ID: baseline.Id}
		indexResults = append(indexResults, fakeResult)
	}
	return indexResults, dbResults
}

// ProcessBaselineSearchTestSuite exercises the Searcher against mocked
// indexer and store implementations.
type ProcessBaselineSearchTestSuite struct {
	suite.Suite

	controller *gomock.Controller
	indexer    *mockIndex.MockIndexer
	store      *mockStore.MockStore

	searcher Searcher

	// Context granting read/write access to the ProcessWhitelist resource.
	allowAllCtx context.Context
}

// SetupTest builds fresh mocks and a Searcher before every test method.
func (suite *ProcessBaselineSearchTestSuite) SetupTest() {
	suite.allowAllCtx = sac.WithGlobalAccessScopeChecker(context.Background(),
		sac.AllowFixedScopes(
			sac.AccessModeScopeKeys(storage.Access_READ_ACCESS, storage.Access_READ_WRITE_ACCESS),
			sac.ResourceScopeKeys(resources.ProcessWhitelist),
		))
	suite.controller = gomock.NewController(suite.T())
	suite.indexer = mockIndex.NewMockIndexer(suite.controller)
	suite.store = mockStore.NewMockStore(suite.controller)
	// New() walks the store (presumably to build the index) — stub it out.
	suite.store.EXPECT().Walk(gomock.Any(), gomock.Any()).Return(nil)
	searcher, err := New(suite.store, suite.indexer)
	suite.NoError(err)
	suite.searcher = searcher
}

func (suite *ProcessBaselineSearchTestSuite) TearDownTest() {
	suite.controller.Finish()
}

// TestErrors verifies that failures from the indexer and from the store are
// both surfaced to the caller with a nil result slice.
func (suite *ProcessBaselineSearchTestSuite) TestErrors() {
	q := search.EmptyQuery()
	someError := errors.New("this is a test error")
	// Indexer failure propagates unchanged.
	suite.indexer.EXPECT().Search(q).Return(nil, someError)
	results, err := suite.searcher.SearchRawProcessBaselines(suite.allowAllCtx, q)
	suite.Equal(someError, err)
	suite.Nil(results)

	// Store failure after a successful index lookup also yields an error.
	indexResults, _ := getFakeSearchResults(1)
	suite.indexer.EXPECT().Search(q).Return(indexResults, nil)
	suite.store.EXPECT().GetMany(suite.allowAllCtx, search.ResultsToIDs(indexResults)).Return(nil, nil, someError)
	results, err = suite.searcher.SearchRawProcessBaselines(suite.allowAllCtx, q)
	suite.Error(err)
	suite.Nil(results)
}

// TestSearchForAll covers the empty-result path and the happy path where the
// store returns one baseline per index hit.
func (suite *ProcessBaselineSearchTestSuite) TestSearchForAll() {
	q := search.EmptyQuery()
	var emptyList []search.Result
	suite.indexer.EXPECT().Search(q).Return(emptyList, nil)
	// It's an implementation detail whether this method is called, so allow but don't require it.
	suite.store.EXPECT().GetMany(suite.allowAllCtx, testutils.AssertionMatcher(assert.Empty)).MinTimes(0).MaxTimes(1)
	results, err := suite.searcher.SearchRawProcessBaselines(suite.allowAllCtx, q)
	suite.NoError(err)
	suite.Empty(results)

	indexResults, dbResults := getFakeSearchResults(3)
	suite.indexer.EXPECT().Search(q).Return(indexResults, nil)
	suite.store.EXPECT().GetMany(suite.allowAllCtx, search.ResultsToIDs(indexResults)).Return(dbResults, nil, nil)
	results, err = suite.searcher.SearchRawProcessBaselines(suite.allowAllCtx, q)
	suite.NoError(err)
	suite.Equal(dbResults, results)
}
wahello/openshift-installer
terraform/alicloud/vendor/github.com/aliyun/terraform-provider-alicloud/alicloud/resource_alicloud_msc_sub_webhook.go
package alicloud

import (
	"fmt"
	"log"
	"regexp"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"

	util "github.com/alibabacloud-go/tea-utils/service"
	"github.com/aliyun/terraform-provider-alicloud/alicloud/connectivity"
	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)

// resourceAlicloudMscSubWebhook defines the alicloud_msc_sub_webhook Terraform
// resource: a DingTalk webhook subscription managed through the
// MscOpenSubscription API (API version 2021-07-13).
func resourceAlicloudMscSubWebhook() *schema.Resource {
	return &schema.Resource{
		Create: resourceAlicloudMscSubWebhookCreate,
		Read:   resourceAlicloudMscSubWebhookRead,
		Update: resourceAlicloudMscSubWebhookUpdate,
		Delete: resourceAlicloudMscSubWebhookDelete,
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},
		Schema: map[string]*schema.Schema{
			// Only DingTalk robot endpoints are accepted.
			"server_url": {
				Type:         schema.TypeString,
				Required:     true,
				ValidateFunc: validation.StringMatch(regexp.MustCompile(`^https://oapi.dingtalk.com/robot/send\?access_token=[a-zA-Z0-9]+$`), "The serverUrl of the Webhook."),
			},
			"webhook_name": {
				Type:         schema.TypeString,
				Required:     true,
				ValidateFunc: validation.StringMatch(regexp.MustCompile(`^[a-zA-Z]{2,12}$`), "The name must be 2 to 12 characters in length, and can contain uppercase and lowercase letters."),
			},
		},
	}
}

// resourceAlicloudMscSubWebhookCreate calls CreateWebhook with retry on
// retryable API errors, then stores the returned WebhookId as the resource ID.
func resourceAlicloudMscSubWebhookCreate(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*connectivity.AliyunClient)
	var response map[string]interface{}
	action := "CreateWebhook"
	request := make(map[string]interface{})
	conn, err := client.NewMscopensubscriptionClient()
	if err != nil {
		return WrapError(err)
	}
	request["Locale"] = "en"
	request["ServerUrl"] = d.Get("server_url")
	request["WebhookName"] = d.Get("webhook_name")
	// NOTE(review): "<PASSWORD>" looks like a redaction artifact — the
	// client-token seed here was presumably a fixed string such as
	// "CreateWebhook"; confirm against upstream before shipping.
	request["ClientToken"] = buildClientToken("<PASSWORD>")
	runtime := util.RuntimeOptions{}
	runtime.SetAutoretry(true)
	wait := incrementalWait(3*time.Second, 3*time.Second)
	err = resource.Retry(d.Timeout(schema.TimeoutCreate), func() *resource.RetryError {
		response, err = conn.DoRequest(StringPointer(action), nil, StringPointer("POST"), StringPointer("2021-07-13"), StringPointer("AK"), nil, request, &runtime)
		if err != nil {
			if NeedRetry(err) {
				wait()
				return resource.RetryableError(err)
			}
			return resource.NonRetryableError(err)
		}
		return nil
	})
	addDebug(action, response, request)
	if err != nil {
		return WrapErrorf(err, DefaultErrorMsg, "alicloud_msc_sub_webhook", action, AlibabaCloudSdkGoERROR)
	}
	// This API signals success via Code == "200" rather than an error value.
	if fmt.Sprint(response["Code"]) != "200" {
		return WrapError(fmt.Errorf("%s failed, response: %v", action, response))
	}

	d.SetId(fmt.Sprint(response["WebhookId"]))

	return resourceAlicloudMscSubWebhookRead(d, meta)
}

// resourceAlicloudMscSubWebhookRead refreshes state from the API; a NotFound
// error clears the ID so Terraform treats the resource as deleted.
func resourceAlicloudMscSubWebhookRead(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*connectivity.AliyunClient)
	mscOpenSubscriptionService := MscOpenSubscriptionService{client}
	object, err := mscOpenSubscriptionService.DescribeMscSubWebhook(d.Id())
	if err != nil {
		if NotFoundError(err) {
			log.Printf("[DEBUG] Resource alicloud_msc_sub_webhook mscOpenSubscriptionService.DescribeMscSubWebhook Failed!!! %s", err)
			d.SetId("")
			return nil
		}
		return WrapError(err)
	}
	d.Set("server_url", object["ServerUrl"])
	d.Set("webhook_name", object["WebhookName"])
	return nil
}

// resourceAlicloudMscSubWebhookUpdate sends UpdateWebhook only when at least
// one mutable attribute changed; both attributes are always included in the
// request body.
func resourceAlicloudMscSubWebhookUpdate(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*connectivity.AliyunClient)
	var response map[string]interface{}
	update := false
	request := map[string]interface{}{
		"WebhookId": d.Id(),
	}
	if d.HasChange("server_url") {
		update = true
	}
	request["ServerUrl"] = d.Get("server_url")
	if d.HasChange("webhook_name") {
		update = true
	}
	request["WebhookName"] = d.Get("webhook_name")
	if update {
		request["Locale"] = "en"
		action := "UpdateWebhook"
		conn, err := client.NewMscopensubscriptionClient()
		if err != nil {
			return WrapError(err)
		}
		// NOTE(review): "<PASSWORD>Webhook" is likewise a redaction
		// artifact — presumably "UpdateWebhook"; confirm upstream.
		request["ClientToken"] = buildClientToken("<PASSWORD>Webhook")
		runtime := util.RuntimeOptions{}
		runtime.SetAutoretry(true)
		wait := incrementalWait(3*time.Second, 3*time.Second)
		err = resource.Retry(d.Timeout(schema.TimeoutUpdate), func() *resource.RetryError {
			response, err = conn.DoRequest(StringPointer(action), nil, StringPointer("POST"), StringPointer("2021-07-13"), StringPointer("AK"), nil, request, &runtime)
			if err != nil {
				if NeedRetry(err) {
					wait()
					return resource.RetryableError(err)
				}
				return resource.NonRetryableError(err)
			}
			return nil
		})
		addDebug(action, response, request)
		if err != nil {
			return WrapErrorf(err, DefaultErrorMsg, d.Id(), action, AlibabaCloudSdkGoERROR)
		}
		// Update reports success via the "Success" flag, unlike Create.
		if fmt.Sprint(response["Success"]) == "false" {
			return WrapError(fmt.Errorf("%s failed, response: %v", action, response))
		}
	}
	return resourceAlicloudMscSubWebhookRead(d, meta)
}

// resourceAlicloudMscSubWebhookDelete calls DeleteWebhook; a ResourceNotFound
// error is treated as already-deleted and swallowed.
func resourceAlicloudMscSubWebhookDelete(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*connectivity.AliyunClient)
	action := "DeleteWebhook"
	var response map[string]interface{}
	conn, err := client.NewMscopensubscriptionClient()
	if err != nil {
		return WrapError(err)
	}
	request := map[string]interface{}{
		"WebhookId": d.Id(),
		"Locale":    "en",
	}

	wait := incrementalWait(3*time.Second, 3*time.Second)
	err = resource.Retry(d.Timeout(schema.TimeoutDelete), func() *resource.RetryError {
		response, err = conn.DoRequest(StringPointer(action), nil, StringPointer("POST"), StringPointer("2021-07-13"), StringPointer("AK"), nil, request, &util.RuntimeOptions{})
		if err != nil {
			if NeedRetry(err) {
				wait()
				return resource.RetryableError(err)
			}
			return resource.NonRetryableError(err)
		}
		return nil
	})
	addDebug(action, response, request)
	if err != nil {
		if IsExpectedErrors(err, []string{"ResourceNotFound"}) {
			return nil
		}
		return WrapErrorf(err, DefaultErrorMsg, d.Id(), action, AlibabaCloudSdkGoERROR)
	}
	if fmt.Sprint(response["Success"]) == "false" {
		return WrapError(fmt.Errorf("%s failed, response: %v", action, response))
	}
	return nil
}
milesgray/ImageFunctions
models/layers/activations/methods.py
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor


@torch.jit.script
def mish(x):
    """Mish activation, applied element-wise.

    mish(x) = x * tanh(softplus(x)) = x * tanh(ln(1 + exp(x)))
    """
    return torch.tanh(F.softplus(x)) * x


@torch.jit.script
def logcosh(x):
    """Log-cosh, element-wise: log(cosh(x + 1e-12))."""
    return torch.log(torch.cosh(x + 1e-12))


@torch.jit.script
def xtanh(x):
    """x * tanh(x), element-wise."""
    return x * torch.tanh(x)


@torch.jit.script
def xsigmoid(x):
    """2 * |1 + exp(-x) - x| / x, element-wise (undefined at x == 0)."""
    shifted = torch.abs((1 + torch.exp(-x)) - x)
    return (2 * shifted) / x


@torch.jit.script
def centeredsigmoid(x):
    """Sigmoid rescaled from (0, 1) to (-1, 1)."""
    return torch.sigmoid(x) * 2 - 1


@torch.jit.script
def unitcenteredsigmoid(x):
    """Sigmoid rescaled from (0, 1) to (0, 2), centred at 1."""
    return torch.sigmoid(x) * 2


@torch.jit.script
def unitcenteredtanh(x):
    """Tanh shifted from (-1, 1) to (0, 2), centred at 1."""
    return 1 + torch.tanh(x)


@torch.jit.script
def normalizer(x):
    """Standardize each row: subtract the per-row mean, divide by the
    per-row (Bessel-corrected) standard deviation along dim 1."""
    centered = x - x.mean(dim=1, keepdim=True)
    return centered / x.std(dim=1, keepdim=True)


@torch.jit.script
def gaussian(x, a):
    """Unnormalized Gaussian exp(-x^2 / (2 a^2)), element-wise."""
    return torch.exp(-x ** 2 / (2 * a ** 2))
perlausten/org.openntf.domino
domino/core/src/main/java/org/openntf/domino/logging/LogRecordAdditionalInfo.java
/**
 * Copyright © 2013-2021 The OpenNTF Domino API Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openntf.domino.logging;

import java.util.List;
import java.util.logging.LogRecord;

import org.openntf.domino.ExceptionDetails;
import org.openntf.domino.WrapperFactory;
import org.openntf.domino.exceptions.OpenNTFNotesException;
import org.openntf.domino.utils.Factory;

/**
 * Used when there is a throwable attached to the LogRecord being published which is an instance of a
 * {@link org.openntf.domino.exceptions.OpenNTFNotesException}. Such exceptions contain details about the environment (Notes user name,
 * current database and so on).
 */
@SuppressWarnings("nls")
public class LogRecordAdditionalInfo {

	// Either field may remain null: exceptionDetails when the thrown
	// exception is not an OpenNTFNotesException, lastWrappedDocs when no
	// wrapper factory is available for the current thread.
	private List<ExceptionDetails.Entry> exceptionDetails;
	private String[] lastWrappedDocs;

	/**
	 * Initializes instance of this class.
	 *
	 * @param logRecord
	 *            LogRecord containing a <code>Throwable</code> which is an instance of
	 *            {@link org.openntf.domino.exceptions.OpenNTFNotesException}
	 */
	public LogRecordAdditionalInfo(final LogRecord logRecord) {
		Throwable t = logRecord.getThrown();
		if (t != null && t instanceof OpenNTFNotesException) {
			exceptionDetails = ((OpenNTFNotesException) t).getExceptionDetails();
		}
		// The "_unchecked" accessor returns null instead of throwing when no
		// factory is initialized, hence the null guard.
		WrapperFactory wf = Factory.getWrapperFactory_unchecked();
		if (wf != null) {
			lastWrappedDocs = wf.getLastWrappedDocsInThread();
		}
	}

	/**
	 * @return All exception details provided by the OpenNTFNotesException attached to the LogRecord used to initialize this instance.
	 */
	public List<ExceptionDetails.Entry> getExceptionDetails() {
		return exceptionDetails;
	}

	/**
	 * @return The documents most recently wrapped in the current thread, or null if no wrapper factory was available.
	 */
	public String[] getLastWrappedDocs() {
		return lastWrappedDocs;
	}

	/**
	 * Appends further exception details to the given <code>StringBuffer</code>. Used by the {@link LogFormatterConsoleDefault} and
	 * {@link LogFormatterFileDefault}. Sections with no data are omitted entirely.
	 *
	 * @param sb
	 *            StringBuffer to which to append all exception details and last wrapped documents.
	 */
	public void writeToLog(final StringBuffer sb) {
		if (exceptionDetails != null) {
			sb.append("    Details where exception was thrown:\n");
			for (ExceptionDetails.Entry exEntry : exceptionDetails) {
				sb.append("      " + exEntry.toString() + "\n");
			}
		}
		if (lastWrappedDocs != null) {
			sb.append("    Last wrapped docs in thread:\n");
			for (String lastWrappedDoc : lastWrappedDocs) {
				sb.append("      " + lastWrappedDoc + "\n");
			}
		}
	}

}
LesserGiraffe/BunnyHop
src/main/java/net/seapanda/bunnyhop/view/nodeselection/BhNodeSelectionService.java
/**
 * Copyright 2017 K.Koike
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.seapanda.bunnyhop.view.nodeselection;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import javafx.application.Platform;
import net.seapanda.bunnyhop.control.nodeselection.BhNodeSelectionController;
import net.seapanda.bunnyhop.model.node.BhNode;
import net.seapanda.bunnyhop.model.workspace.Workspace;
import net.seapanda.bunnyhop.modelservice.BhNodeHandler;
import net.seapanda.bunnyhop.undo.UserOperationCommand;

/**
 * Exposes operations on node selection views.
 * @author K.Koike
 */
public class BhNodeSelectionService {

	public static final BhNodeSelectionService INSTANCE = new BhNodeSelectionService();

	private BhNodeSelectionService() {}

	// Maps a category name to its selection view and to the backing
	// workspace that holds that category's template nodes.
	private final Map<String, BhNodeSelectionView> categoryNameToSelectionView = new HashMap<>();
	private final Map<String, Workspace> categoryNameToWorkspace = new HashMap<>();

	/**
	 * Registers a node selection view, wiring up a fresh workspace and
	 * controller for its category.
	 * @param view the view to register
	 */
	public void registerView(BhNodeSelectionView view) {

		String categoryName = view.getCategoryName();
		categoryNameToSelectionView.put(categoryName, view);
		var model = new Workspace(categoryName);
		var controller = new BhNodeSelectionController(model, view);
		model.setMsgProcessor(controller);
		categoryNameToWorkspace.put(categoryName, model);
	}

	/**
	 * Adds a template node to the category specified by the argument.
	 *
	 * <p> The MVC structure of {@code node} must already be built.
	 * @param categoryName name of the category to add {@code node} to
	 * @param node the node to add
	 * @param userOpeCmd command object used for undo
	 */
	public void addTemplateNode(String categoryName, BhNode node, UserOperationCommand userOpeCmd) {

		Workspace ws = categoryNameToWorkspace.get(categoryName);
		if (ws == null)
			return;

		BhNodeHandler.INSTANCE.addRootNode(ws, node, 0, 0, userOpeCmd);
	}

	/**
	 * Gets all nodes in the node selection view of the category specified by the argument.
	 *
	 * <p> The nodes in the returned list are those added via {@code addTemplateNode}.
	 * @param categoryName retrieve all template nodes of this category
	 * @return all template nodes in {@code categoryName}, or an empty list if an unregistered category was specified.
	 */
	public Collection<BhNode> getTemplateNodes(String categoryName) {

		Workspace ws = categoryNameToWorkspace.get(categoryName);
		if (ws == null)
			return new ArrayList<BhNode>();

		return ws.getRootNodeList();
	}

	/**
	 * Deletes all nodes in the node selection view of the category specified by the argument.
	 * @param categoryName delete all template nodes of this category
	 * @param userOpeCmd command object used for undo
	 */
	public void deleteAllNodes(String categoryName, UserOperationCommand userOpeCmd) {

		Collection<BhNode> nodesToDelete = getTemplateNodes(categoryName);
		if (nodesToDelete.isEmpty())
			return;

		BhNodeHandler.INSTANCE.deleteNodes(nodesToDelete, userOpeCmd);
	}

	/**
	 * Zooms all node selection views in or out.
	 * @param zoomIn true to zoom in
	 */
	public void zoomAll(boolean zoomIn) {
		categoryNameToSelectionView.values().stream().forEach(view -> view.zoom(zoomIn));
	}

	/**
	 * Hides all node selection views.
	 */
	public void hideAll() {
		categoryNameToSelectionView.values().stream().forEach(view -> view.setVisible(false));
	}

	/**
	 * Shows the node selection view of the category specified by the argument.
	 * All other views are hidden first.
	 * @param categoryName category name of the node selection view to show
	 */
	public void show(String categoryName) {

		BhNodeSelectionView view = categoryNameToSelectionView.get(categoryName);
		if (view == null)
			return;

		hideAll();
		view.setVisible(true);
	}

	/**
	 * Returns the name of the currently shown category.
	 * @return the name of the currently shown category, or empty if no category is shown.
	 */
	public Optional<String> getNameOfShowedCategory() {

		return
			categoryNameToSelectionView
			.values().stream()
			.filter(view -> view.visibleProperty().get())
			.findFirst()
			.map(view -> view.getCategoryName());
	}

	/**
	 * Checks whether any of the node selection views is shown.
	 * @return true if at least one BhNode selection panel is shown
	 */
	public boolean isAnyShowed() {
		return categoryNameToSelectionView.values().stream().anyMatch(view -> view.visibleProperty().get());
	}

	/**
	 * Checks whether the node selection view of the category specified by the argument is shown.
	 * @param categoryName category name of the node selection view whose visibility is checked
	 * @return true if shown; false if hidden or if no view corresponding to {@code categoryName} was found.
	 */
	public boolean isShowed(String categoryName) {

		BhNodeSelectionView view = categoryNameToSelectionView.get(categoryName);
		if (view == null)
			return false;

		return view.isVisible();
	}
}
fernando-romulo-silva/myStudies
java/quarkus/understanding-quarkus/understanding-quarkus-chapter06-http/understanding-quarkus-chapter06-part01-jaxrs-exposing/src/main/java/org/agoncal/fascicle/quarkus/http/jaxrs/ex07/CustomerResource.java
package org.agoncal.fascicle.quarkus.http.jaxrs.ex07;

import org.agoncal.fascicle.quarkus.http.jaxrs.Customer;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

/**
 * Teaching example for JAX-RS content negotiation: three GET methods share the
 * root path and are selected by the client's Accept header (text, XML, JSON),
 * plus one sub-resource at {@code /customer/max}.
 *
 * @author <NAME> http://www.antoniogoncalves.org --
 */
@Path("/customer")
public class CustomerResource {

  // No @Produces: matches any Accept header not claimed by the XML/JSON
  // variants below; returns Customer#toString() as plain text.
  @GET
  public String getAsPlainText() {
    return new Customer("John", "Smith").toString();
  }

  // Sub-resource GET /customer/max.
  @GET
  @Path("max")
  public Integer getMaximumAge() {
    return 42;
  }

  // Selected when the client accepts application/xml; the Customer entity is
  // serialized by the JAX-RS XML provider.
  @GET
  @Produces(MediaType.APPLICATION_XML)
  public Customer getAsXML() {
    Customer customer = new Customer("John", "Smith");
    return customer;
  }

  // Selected when the client accepts application/json; wraps the entity in a
  // Response to set the Content-Encoding explicitly.
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Response getAsJson() {
    Customer customer = new Customer("John", "Smith");
    return Response.ok(customer).encoding("utf-8").build();
  }
}
RetroFloppy/pfcviewer
src/pfc/cab/IntUtil.java
<reponame>RetroFloppy/pfcviewer /* * Copyright (c) 2002 <NAME>. All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. */ package pfc.cab; /** * Collection of static utility methods for working with int and short * variables. * @author <NAME> */ public class IntUtil { /** * Reverses byte order in 4-byte int for little-endian values. */ public static int reverseInt(int i) { int byte0 = i << 24; int byte1 = ((i >>> 8) << 24) >>> 8; int byte2 = ((i >>> 16) << 24) >>> 16; int byte3 = i >>> 24; return (byte0 | byte1 | byte2 | byte3); } /** * Reverses byte order in 2-byte short for little-endian values. */ public static short reverseShort(short i) { int hiByte = (i << 24) >> 16; int loByte = (i << 16) >>> 24; return (short)(hiByte | loByte); } /** * Constructs int given four bytes. Byte order is little-endian. 
*/ public static int toInt(byte byte0, byte byte1, byte byte2, byte byte3) { int b0 = byte3 << 24; int b1 = (byte2 << 24) >>> 8; int b2 = (byte1 << 24) >>> 16; int b3 = (byte0 << 24) >>> 24; return (b0 | b1 | b2 | b3); } /** * Constructs short given two bytes. Byte order is little-endian. */ public static short toShort(byte byte0, byte byte1) { int hiByte = byte1 << 8; int loByte = (byte0 << 24) >>> 24; return (short)(hiByte | loByte); } /** * Converts int value to string. Result is padded with leading * zeros to fill out length. */ public static String padIntString(int value, int length) { String valueText = Integer.toString(value); StringBuffer result = new StringBuffer(valueText); for (int i = 0; i < (length - valueText.length()); i++) { result.insert(0, " "); } return result.toString(); } /** * Converts int value to hex string. Result is padded with leading * zeros to fill out digits. */ public static String toHexString(int i, int digits) { String hexString = Integer.toHexString(i); int len = hexString.length(); StringBuffer result = new StringBuffer(); for (int j = 0; j < digits; j++) { if (len < (digits - j)) result.append("0"); else result.append(hexString.charAt(len - digits + j)); } return result.toString(); } }
matheuslenke/FastFeet
frontend/src/pages/Problems/ProblemsItem/Actions/index.js
import React from 'react';
import { MdVisibility, MdDelete } from 'react-icons/md';

import { Container, ActionsList, Visualize, Delete } from './styles';

/**
 * Action popover for a delivery-problem row: one entry opens the details
 * modal, the other cancels the associated delivery.
 *
 * @param {Function} handleVisibleModal  opens the problem-details modal
 * @param {Object}   problem             the problem record; only `id` is read here
 * @param {Function} handleDelete        cancels the delivery for the given problem id
 */
export default function Actions({ handleVisibleModal, problem, handleDelete }) {
  return (
    <Container>
      <ActionsList>
        <Visualize onClick={handleVisibleModal}>
          <MdVisibility color="#8E5BE8" size={16} />
          <span>Visualizar</span>
        </Visualize>
        <Delete onClick={() => handleDelete(problem.id)}>
          <MdDelete color="#DE3B3B" size={16} />
          <span>Cancelar encomenda</span>
        </Delete>
      </ActionsList>
    </Container>
  );
}
inzamamulDU/log-anomaly-detection
deepgravewell/bgl/data_split.py
import os
import random

# window_size = 10
# future_step = 5
# Fraction of the time span assigned to the training split.
ratio = 0.8

# Each line of keys_with_time_8.txt is expected to look like:
#   <index> <label> <log-key> ... <timestamp>
# where label '-' marks a normal entry and the timestamp is the last field.
# (Assumption inferred from the field accesses below — TODO confirm.)
with open('keys_with_time_8.txt', 'r') as f:
    all_lines = f.readlines()

# Split train/test chronologically: everything up to `time_split` is
# training territory, the rest is test territory.
time_start = int(all_lines[0].strip().split()[-1])
time_end = int(all_lines[-1].strip().split()[-1])
time_interval = float(time_end - time_start)
time_split = int(time_interval * ratio + float(time_start))

# Sliding-window sizes and look-ahead steps to generate datasets for.
#W = [4,6,8,10]
#S = [2,4,6,8,10,12]
W = [4]
S = [4]
for w in W:
    window_size = w
    for s in S:
        future_step = s
        normal_train_split = []
        normal_test_split = []
        abnormal_test_split = []
        # Slide a window of `window_size` keys with `future_step` look-ahead
        # lines; a sequence is abnormal when any relevant line's label
        # differs from '-'.
        for i in range(0, len(all_lines)-(window_size+future_step)):
            # With look-ahead enabled, only start windows at normal lines.
            if future_step != 0 and all_lines[i].strip().split()[1] != '-':
                continue
            key_seq = ''
            normal = True
            for j in range(window_size+future_step):
                if future_step == 0:
                    # No look-ahead: take every key; any abnormal line
                    # within the window marks the sequence abnormal.
                    key_seq = key_seq + all_lines[i+j].strip().split()[2] + ' '
                    if all_lines[i+j].strip().split()[1] != '-':
                        normal = False
                else:
                    if j < window_size:
                        # The input window itself must be fully normal;
                        # otherwise discard the window entirely.
                        if all_lines[i+j].strip().split()[1] != '-':
                            break
                        else:
                            key_seq = key_seq + all_lines[i+j].strip().split()[2] + ' '
                    else:
                        # Abnormality in the look-ahead region labels the
                        # (normal-looking) window as abnormal.
                        if all_lines[i+j].strip().split()[1] != '-':
                            normal = False
            # Window was cut short by the break above — skip it.
            if len(key_seq.strip().split()) != window_size:
                continue
            # Route by label and by which side of the time split the
            # window starts on; abnormal windows always go to test.
            if normal and int(all_lines[i].strip().split()[-1]) <= time_split:
                normal_train_split.append(key_seq)
            elif normal and int(all_lines[i].strip().split()[-1]) > time_split:
                normal_test_split.append(key_seq)
            else:
                abnormal_test_split.append(key_seq)

        n_train = len(normal_train_split)
        # NOTE(review): n_normal_test and n_abnormal_test are computed but
        # never used below.
        n_normal_test = len(normal_test_split)
        n_abnormal_test = len(abnormal_test_split)

        # Re-randomize which normal sequences land in train vs. test while
        # keeping the original split sizes (this deliberately discards the
        # chronological train/test boundary for normal data).
        random_normal = normal_train_split + normal_test_split
        random.shuffle(random_normal)
        normal_train_split = random_normal[:n_train]
        normal_test_split = random_normal[n_train:]

        # One output directory per (window_size, future_step) combination.
        data_dir = 'window_' + str(window_size) + 'future_' + str(future_step) + 'remove_8' + '/'
        if not os.path.isdir(data_dir):
            os.makedirs(data_dir)

        # Write each split; the *_key sets collect the distinct keys seen
        # per split (currently only reported via the commented prints below).
        normal_train_key = set()
        with open(data_dir+'normal_train.txt', 'w') as f:
            for i, item in enumerate(normal_train_split):
                if i % 1000 == 0:
                    print('writing normal train '+str(100*i/len(normal_train_split)) + '%')
                f.write(item+'\n')
                for key in item.strip().split():
                    normal_train_key.add(key)

        normal_test_key = set()
        with open(data_dir+'normal_test.txt', 'w') as f:
            for i, item in enumerate(normal_test_split):
                if i % 1000 == 0:
                    print('writing normal test '+str(100*i/len(normal_test_split)) + '%')
                f.write(item+'\n')
                for key in item.strip().split():
                    normal_test_key.add(key)

        abnormal_test_key = set()
        with open(data_dir+'abnormal_test.txt', 'w') as f:
            for i, item in enumerate(abnormal_test_split):
                if i % 1000 == 0:
                    print('writing abnormal '+str(100*i/len(abnormal_test_split)) + '%')
                f.write(item+'\n')
                for key in item.strip().split():
                    abnormal_test_key.add(key)

        # Summary of the generated split sizes.
        print('window_size: ', str(window_size), ' step: ', str(future_step))
        print('normal_train_split_len:')
        print(len(normal_train_split))
        print('normal_test_split_len:')
        print(len(normal_test_split))
        print('abnormal_split_len:')
        print(len(abnormal_test_split))
        # print('number of key in normal train:')
        # print(len(normal_train_key))
        # print('number of key in normal test:')
        # print(len(normal_test_key))
        # print('number of key in abnormal:')
        # print(len(abnormal_test_key))

# print(time_start)
# print(time_end)
# print(time_interval)
# print(time_split)
xeddmc/Cardshifter
cardshifter-server/src/main/java/com/cardshifter/server/model/ConnectionHandler.java
package com.cardshifter.server.model;

/**
 * Lifecycle contract for a server-side connection endpoint.
 */
public interface ConnectionHandler {

	/** Begins handling connections. */
	void start();

	/**
	 * Stops handling connections and releases any associated resources.
	 *
	 * @throws Exception if shutdown fails
	 */
	void shutdown() throws Exception;

}
jgraichen/msgr
lib/msgr/channel.rb
# frozen_string_literal: true module Msgr class Channel include Logging EXCHANGE_NAME = 'msgr' attr_reader :config, :channel def initialize(config, connection) @config = config @channel = connection.create_channel end def prefetch(count) @channel.prefetch count end def exchange @exchange ||= begin @channel.topic(prefix(EXCHANGE_NAME), durable: true).tap do |ex| log(:debug) do "Created exchange #{ex.name} (type: #{ex.type}, " \ "durable: #{ex.durable?}, auto_delete: #{ex.auto_delete?})" end end end end def queue(name, **opts) @channel.queue(prefix(name), durable: true, **opts).tap do |queue| log(:debug) do "Create queue #{queue.name} (durable: #{queue.durable?}, " \ "auto_delete: #{queue.auto_delete?})" end end end def prefix(name) if config[:prefix].present? "#{config[:prefix]}-#{name}" else name end end def ack(delivery_tag) @channel.ack delivery_tag log(:debug) { "Acked message: #{delivery_tag}" } end def nack(delivery_tag) @channel.nack delivery_tag, false, true log(:debug) { "Nacked message: #{delivery_tag}" } end def close @channel.close if @channel.open? end end end
ethansaxenian/RosettaDecode
lang/C/literals-string-1.c
/* A char object initialized from a character literal; in C the literal 'z'
 * itself has type int, and is narrowed to char here. */
char ch = 'z';
heagoo/mkl-dnn
tests/benchdnn/softmax/softmax.hpp
/*******************************************************************************
* Copyright 2019 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/

#ifndef _SOFTMAX_HPP
#define _SOFTMAX_HPP

#include "mkldnn.h"

#include "common.hpp"
#include "mkldnn_common.hpp"
#include "mkldnn_memory.hpp"
#include "perf_report.hpp"

namespace softmax {

using dims_t = std::vector<int64_t>;

// Problem descriptor for a benchdnn softmax run: tensor dims, propagation
// direction, data type, memory format tag, and the softmax axis. A non-zero
// `mb` overrides the minibatch (dims[0]).
struct prb_t {
    prb_t(dims_t &dims, dir_t dir, mkldnn_data_type_t dt,
            mkldnn_format_tag_t tag, int axis, int64_t mb = 0)
        : dims(dims), dir(dir), dt(dt), tag(tag), axis(axis)
    { if (mb) this->dims[0] = mb; }
    ~prb_t() {}

    dims_t dims;
    dir_t dir;
    mkldnn_data_type_t dt;
    mkldnn_format_tag_t tag;
    int axis;
};

// Parsing/printing helpers for the "NxCxHxW"-style dims syntax.
dims_t str2dims(const char *str);
void dims2str(const dims_t &dims, char *buffer);
void prb2str(const prb_t *p, char *buffer, bool canonical = false);

// Per-problem performance reporter: fills the template placeholders with the
// fields of the prb_t captured by report().
struct perf_report_t: public base_perf_report_t {
    perf_report_t(const char *perf_template) :
        base_perf_report_t(perf_template) {}
    virtual ~perf_report_t() {}

    void report(const prb_t *p, const res_t *r, const char *prb_str) {
        p_ = p;
        base_report(r, prb_str);
    }

    virtual void dump_axis(char *buf) const override {
        dprint(buf, p_->axis);
    }
    virtual void dump_data_type(char *buf) const override {
        dprint(buf, dt2str(p_->dt));
    }
    virtual void dump_descriptor_csv(char *buf) const override {
        dims2str(p_->dims, buf);
    }
    virtual void dump_direction(char *buf) const override {
        dprint(buf, dir2str(p_->dir));
    }
    virtual void dump_tag(char *buf) const override {
        dprint(buf, tag2str(p_->tag));
    }

private:
    const prb_t *p_;
};

// Decomposes a flat element offset into (mb, ic) coordinates by stripping
// the trailing spatial dims first, then taking channel and minibatch.
// Asserts that the offset is fully consumed.
inline void map_off_to_mb_ic(const prb_t *p, int64_t off, int64_t &mb, int64_t &ic) {
    for (int i = (int)p->dims.size() - 1; i > 1; i--)
        off /= p->dims[i];
    ic = off % p->dims[1]; off /= p->dims[1];
    mb = off % p->dims[0]; off /= p->dims[0];
    assert(off == 0);
}

// Reference (CPU) implementations used to validate the library results.
void compute_ref_fwd(const prb_t *p, const dnn_mem_t &src, dnn_mem_t &dst);
void compute_ref_bwd(const prb_t *p, const dnn_mem_t &dst,
        const dnn_mem_t &diff_dst, dnn_mem_t &diff_src);

// Runs a single problem / the full benchmark driver.
int doit(const prb_t *p, res_t *res);
int bench(int argc, char **argv);

}

#endif
No-SF-Work/ayame
src/ir/values/instructions/TerminatorInst.java
package ir.values.instructions;

import ir.types.FunctionType;
import ir.types.PointerType;
import ir.types.Type;
import ir.types.Type.VoidType;
import ir.values.BasicBlock;
import ir.values.Function;
import ir.values.Value;
import ir.values.instructions.MemInst.GEPInst;
import ir.values.instructions.MemInst.LoadInst;
import java.util.ArrayList;

/**
 * Base class for block-terminating instructions (Call, Br, Ret).
 * These instructions only appear at the end of a basic block.
 */
public abstract class TerminatorInst extends Instruction {

  public TerminatorInst(TAG_ tag, Type type, int numOP) {
    super(tag, type, numOP);
  }

  // Constructor variant that appends the instruction to `parent`.
  public TerminatorInst(TAG_ tag, Type type, int numOP, BasicBlock parent) {
    super(tag, type, numOP, parent);
  }

  // Insert after `prev`.
  public TerminatorInst(TAG_ tag, Type type, int numOP, Instruction prev) {
    super(tag, type, numOP, prev);
  }

  // Insert before `next`.
  public TerminatorInst(Instruction next, TAG_ tag, Type type, int numOP) {
    super(next, tag, type, numOP);
  }

  /**
   * Function call. Operand layout: operand 0 is the callee Function,
   * operands 1..n are the actual arguments.
   */
  public static class CallInst extends TerminatorInst {

    /**
     * Calls {@code func} with arguments {@code args}; the instruction is
     * appended to basic block {@code bb}.
     */
    public CallInst(Function func, ArrayList<Value> args, BasicBlock bb) {
      super(TAG_.Call, ((FunctionType) func.getType()).getRetType(), args.size() + 1, bb);
      assert func.getNumArgs() == args.size();
      // A void call produces no SSA value, so it is printed without a name.
      if (this.getType().isVoidTy()) {
        needname = false;
      }
      CoSetOperand(0, func); // operand 0 is the callee
      for (int i = 0; i < args.size(); i++) {
        CoSetOperand(i + 1, args.get(i)); // args
      }
    }

    // Same as above, without attaching the instruction to a block.
    // NOTE(review): no cast here, unlike the constructor above -- relies on
    // Function.getType() being declared to return FunctionType; confirm.
    public CallInst(Function func, ArrayList<Value> args) {
      super(TAG_.Call, func.getType().getRetType(), args.size() + 1);
      assert func.getNumArgs() == args.size();
      if (this.getType().isVoidTy()) {
        needname = false;
      }
      CoSetOperand(0, func); // operand 0 is the callee
      for (int i = 0; i < args.size(); i++) {
        CoSetOperand(i + 1, args.get(i)); // args
      }
    }

    /**
     * True when the call is side-effect free: the callee neither mutates
     * state nor touches globals, and no operand is a GEP or a non-i32
     * pointer load (which could alias an array).
     * NOTE(review): the PointerType cast assumes every non-i32 LoadInst
     * result is pointer-typed -- confirm against LoadInst's typing rules.
     */
    public boolean isPureCall() {
      Function func = (Function) this.getOperands().get(0);
      if (func.isHasSideEffect() || func.isUsedGlobalVariable()) {
        return false;
      }
      for (Value val : this.getOperands()) {
        // if (ArrayAliasAnalysis.getArrayValue(val) != null) {
        if (val instanceof GEPInst || (val instanceof LoadInst
            && !val.getType().isI32()
            && !((PointerType) val.getType()).getContained().isI32())) {
          return false;
        }
      }
      return true;
    }

    /** Returns the callee (operand 0). */
    public Function getFunc() {
      return (Function) this.getOperands().get(0);
    }

    /** Renders LLVM-style text: `[name =] call <ty> @callee(<args>)`. */
    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder();
      if (((FunctionType) this.operands.get(0).getType()).getRetType().isVoidTy()) {
        sb.append("call ").append(this.getType()).append(" @").append(operands.get(0).getName());
      } else {
        sb.append(this.getName()).append(" = call ").append(this.getType()).append(" @")
            .append(operands.get(0).getName());
      }
      sb.append("(");
      boolean a = false; // tracks whether at least one argument was emitted
      for (int i = 1; i < operands.size(); i++) {
        a = true;
        sb.append(operands.get(i).getType()).append(" ").append(operands.get(i).getName())
            .append(",");
      }
      if (a) {
        sb.deleteCharAt(sb.length() - 1); // drop trailing comma
      }
      sb.append(")");
      return sb.toString();
    }

    public boolean hasAlias;
  }

  /**
   * Branch instruction. Conditional form has 3 operands
   * (cond, trueBlock, falseBlock); unconditional form has 1 (target).
   */
  public static class BrInst extends TerminatorInst {

    /**
     * Conditional branch, appended to `parent`.
     */
    public BrInst(Value cond, BasicBlock trueBlock, BasicBlock falseBlock, BasicBlock parent) {
      super(TAG_.Br, Type.VoidType.getType(), 3, parent);
      this.CoSetOperand(0, cond);
      this.CoSetOperand(1, trueBlock);
      this.CoSetOperand(2, falseBlock);
      needname = false; // branches yield no value
    }

    /** Conditional branch, not attached to a block. */
    public BrInst(Value cond, BasicBlock trueBlock, BasicBlock falseBlock) {
      super(TAG_.Br, VoidType.getType(), 3);
      this.CoSetOperand(0, cond);
      this.CoSetOperand(1, trueBlock);
      this.CoSetOperand(2, falseBlock);
      needname = false;
    }

    /**
     * Unconditional branch, appended to `parent`.
     */
    public BrInst(BasicBlock trueBlock, BasicBlock parent) {
      super(TAG_.Br, Type.VoidType.getType(), 1, parent);
      needname = false;
      this.CoSetOperand(0, trueBlock);
    }

    /** Unconditional branch, not attached to a block. */
    public BrInst(BasicBlock trueblock) {
      super(TAG_.Br, Type.VoidType.getType(), 1);
      this.CoSetOperand(0, trueblock);
      needname = false;
    }

    /** Renders `br <ty> %target` or `br <ty> cond, <ty> %t, <ty> %f`. */
    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder();
      sb.append("br ");
      if (this.numOP == 1) {
        sb.append(operands.get(0).getType()).append(" ").append("%" + operands.get(0).getName());
      }
      if (this.numOP == 3) {
        sb.append(operands.get(0).getType()).append(" ").append(operands.get(0).getName())
            .append(",");
        sb.append(operands.get(1).getType()).append(" ").append("%" + operands.get(1).getName())
            .append(",");
        sb.append(operands.get(2).getType()).append(" ").append("%" + operands.get(2).getName())
            .append(" ");
      }
      sb.append("\n");
      return sb.toString();
    }
  }

  /**
   * Return instruction. One operand (`ret i32 x`) or none (`ret void`).
   */
  public static class RetInst extends TerminatorInst {

    /**
     * `ret i32`, inserted at the end of `parent`.
     */
    public RetInst(Value val, BasicBlock parent) {
      super(TAG_.Ret, VoidType.getType(), 1, parent);
      this.CoSetOperand(0, val);
      needname = false;
    }

    /** `ret <val>`, not attached to a block. */
    public RetInst(Value val) {
      super(TAG_.Ret, VoidType.getType(), 1);
      this.CoSetOperand(0, val);
      needname = false;
    }

    /**
     * `ret void`, inserted at the end of `parent`.
     */
    public RetInst(BasicBlock parent) {
      super(TAG_.Ret, VoidType.getType(), 0, parent);
      needname = false;
    }

    /** `ret void`, not attached to a block. */
    public RetInst() {
      super(TAG_.Ret, VoidType.getType(), 0);
      needname = false;
    }

    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder();
      sb.append("ret ");
      if (this.numOP == 1) {
        sb.append(operands.get(0).getType() + " " + operands.get(0).getName());
      } else {
        sb.append("void ");
      }
      return sb.toString();
    }
  }

  /** True when this terminator is a return. */
  public boolean isRet() {
    return this.tag == TAG_.Ret;
  }

  /** True when this terminator is a branch. */
  public boolean isBr() {
    return this.tag == TAG_.Br;
  }
}
ruhan-islam/ctf-archives
THC/2021/crypto/ECDSAFE/server.py
<reponame>ruhan-islam/ctf-archives #!/usr/local/bin/python3.8 from os import urandom from Crypto.Util.number import inverse from hashlib import sha256 from Crypto.Random import random import ecdsa from flag import FLAG class PRNG: def __init__(self,seed,m,flag): self.state = seed self.m = m self.counter = 0 self.flag = flag def next_state(self): b = self.flag[self.counter % len(self.flag)] self.state = (self.state + b) % self.m self.counter += 1 return self.state C = ecdsa.NIST256p G = C.generator N = int(C.order) seed = random.randint(1,N-1) prng = PRNG(seed,N,FLAG) private_key = int(sha256(urandom(16)).hexdigest(),16) % N public_key = G * private_key print("Public key : ",(int(public_key.x()),int(public_key.y()))) signatures = [] for i in range(len(FLAG)): k = prng.next_state() % N P = G * k r = int(P.x()) % N h = int(sha256(urandom(16)).hexdigest(),16) s = inverse(k,N)*(h+r*private_key)%N signatures.append([h,r,s]) print(signatures)
Hale-Chen/12306_h5
plugins/ckeditor/plugins/preview/lang/sk.js
<reponame>Hale-Chen/12306_h5<filename>plugins/ckeditor/plugins/preview/lang/sk.js CKEDITOR.plugins.setLang("preview", "sk", {preview: "Náhľad"});
sebalander/sebaPhD
calibration/plotIntrinCalibFit.py
<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 2 17:26:31 2017

test intrinsic calibration paramters

@author: sebalander
"""

# -*- coding: utf-8 -*-
"""
Created on Tue Jul 5 16:30:53 2016

calibrates intrinsic with diff distortion model

@author: sebalander
"""

# %%
import glob
import numpy as np
import scipy.linalg as ln
from calibration import calibrator as cl
import matplotlib.pyplot as plt
from importlib import reload

# %% LOAD DATA
# input
plotCorners = False
# camera can be one of ['vca', 'vcaWide', 'ptz'] -- the datasets available
camera = 'vcaWide'
# each distortion model can be one of ['rational', 'fisheye', 'poly']
modelos = ['poly', 'rational', 'fisheye']

imagesFolder = "./resources/intrinsicCalib/" + camera + "/"
cornersFile = imagesFolder + camera + "Corners.npy"
patternFile = imagesFolder + camera + "ChessPattern.npy"
imgShapeFile = imagesFolder + camera + "Shape.npy"

# load data
imagePoints = np.load(cornersFile)
chessboardModel = np.load(patternFile)
imgSize = tuple(np.load(imgShapeFile))
images = glob.glob(imagesFolder + '*.png')

n = len(imagePoints)  # number of images
# input/output parameters of the calibration
objpoints = np.array([chessboardModel] * n)

# %% calculate change in radius for all points in homogenous plane
reload(cl)

RP = dict()   # undistorted and distorted homogenous points
RPP = dict()
K = dict()    # linear and non linear distortion
D = dict()
RV = dict()   # rvect tvecs
TV = dict()
IP = dict()   # PROJECTEED image points and object points
OP = dict()

for model in modelos:
    print('processing model', model)
    # model data files
    distCoeffsFile = imagesFolder + camera + model + "DistCoeffs.npy"
    linearCoeffsFile = imagesFolder + camera + model + "LinearCoeffs.npy"
    tVecsFile = imagesFolder + camera + model + "Tvecs.npy"
    rVecsFile = imagesFolder + camera + model + "Rvecs.npy"

    # load model specific data
    distCoeffs = np.load(distCoeffsFile)
    cameraMatrix = np.load(linearCoeffsFile)
    rVecs = np.load(rVecsFile)
    tVecs = np.load(tVecsFile)

    # initiate dictionaries
    RP[model] = []
    RPP[model] = []
    K[model] = cameraMatrix
    D[model] = distCoeffs
    RV[model] = rVecs
    TV[model] = tVecs
    IP[model] = []
    OP[model] = []

    # MAP TO HOMOGENOUS PLANE TO GET RADIUS
    for j in range(n):
        print('\t imagen', j)
        rvec = rVecs[j]
        tvec = tVecs[j]

        # project 3D chessboard points into the image with this model
        imagePointsProjected = cl.direct(chessboardModel, rvec, tvec,
                                         cameraMatrix, distCoeffs, model)
        imagePointsProjected = imagePointsProjected.reshape((-1, 2))
        IP[model].append(imagePointsProjected)

        # back-project detected corners onto the chessboard plane
        objectPointsProjected = cl.inverse(imagePoints[j, 0], rvec, tvec,
                                           cameraMatrix, distCoeffs, model)
        # objectPointsProjected = opbjectPointsProjected.reshape((-1,3))
        OP[model].append(objectPointsProjected)

        if plotCorners:
            imagePntsX = imagePoints[j, 0, :, 0]
            imagePntsY = imagePoints[j, 0, :, 1]

            xPos = imagePointsProjected[:, 0]
            yPos = imagePointsProjected[:, 1]

            plt.figure(j)
            im = plt.imread(images[j])
            plt.imshow(im)
            plt.plot(imagePntsX, imagePntsY, 'xr', markersize=10)
            plt.plot(xPos, yPos, '+b', markersize=10)
            # fig.savefig("distortedPoints3.png")

        # calculate distorted radius
        xpp, ypp = cl.ccd2hom(imagePoints[j, 0], cameraMatrix)
        RPP[model].append(ln.norm([xpp, ypp], axis=0))

        # calculate undistorted homogenous radius from 3D rototraslation
        xyp = cl.rotoTrasHomog(chessboardModel, rVecs[j], tVecs[j])
        RP[model].append(ln.norm(xyp, axis=1))

    # to array
    RP[model] = np.array(RP[model]).flatten()
    RPP[model] = np.array(RPP[model]).flatten()
    OP[model] = np.array(OP[model])
    IP[model] = np.array(IP[model])

0  # stray no-op literal left over from interactive cell editing -- harmless

# %% plot comparison of models
reload(cl)
rpMax = np.max([np.max(RP[model]) for model in modelos])
rppMax = np.max([np.max(RPP[model]) for model in modelos])
rp0 = np.linspace(0, rpMax*1.2, 100)
rpp0 = np.linspace(0, rppMax*1.2, 100)

# one plotting colour per model
clr = {modelos[0]: 'b', modelos[1]: 'r', modelos[2]: 'k'}

plt.figure(n)
plt.xlim([0, rpMax*1.2])
plt.ylim([0, rppMax*1.2])
plt.xlabel('Undistorted radius (r\')')
plt.ylabel('Distorted radius (r\")')
for model in modelos:
    plt.plot(RP[model], RPP[model], '.', markersize=3)

    # forward (distortion) curve
    rpp1 = cl.distort[model](rp0, D[model])
    plt.plot(rp0, rpp1, '-', c=clr[model], lw=1, label=model+' direct')

    # inverse (undistortion) curve
    rp1, _ = cl.undistort[model](rpp0, D[model])
    plt.plot(rp1, rpp0, '--', c=clr[model], lw=1, label=model+' inverse')

plt.legend()
plt.tight_layout()

# %% compare tvecs
# the model is seen to compensate differences in distortion with a change in
# the roto-translation, but in all cases it has a qualitatively acceptable
# performance
plt.figure(n+1)
plt.subplot(221)
plt.plot([-20, 30], [-20, 30], '--k', lw=0.5)
plt.plot(TV['rational'][:, :, 0], TV['fisheye'][:, :, 0], '+')
plt.xlabel('Tvec rational')
plt.ylabel('Tvec fisheye')
plt.legend()

plt.subplot(222)
plt.plot([-20, 30], [-20, 30], '--k', lw=0.5)
plt.plot(TV['rational'][:, :, 0], TV['poly'][:, :, 0], '+')
plt.xlabel('Tvec rational')
plt.ylabel('Tvec poly')
plt.legend()

plt.subplot(223)
plt.plot([-np.pi, np.pi], [-np.pi, np.pi], '--k', lw=0.5)
plt.plot(RV['rational'][:, :, 0], RV['fisheye'][:, :, 0], '+',)
plt.xlabel('Rvec rational')
plt.ylabel('Rvec fisheye')
plt.legend()

plt.subplot(224)
plt.plot([-np.pi, np.pi], [-np.pi, np.pi], '--k', lw=0.5)
plt.plot(RV['rational'][:, :, 0], RV['poly'][:, :, 0], '+')
plt.xlabel('Rvec rational')
plt.ylabel('Rvec poly')
plt.legend()

plt.tight_layout()

# %%
from scipy.special import chdtriv

# unit circle samples scaled to the chi-square radius for the 0.1 quantile
# (2 degrees of freedom) -- template for the confidence ellipses below
fi = np.linspace(0, 2*np.pi, 20)
Xcirc = np.array([np.cos(fi), np.sin(fi)]) * chdtriv(0.1, 2)


def plotEllipse(ax, c, mux, muy, col):
    '''
    plot an ellipse associated with the covariance c, centred at mux, muy
    '''
    l, v = ln.eig(ln.inv(c))
    D = v.T*np.sqrt(l.real)  # eigenvectors end up as the rows
    xeli, yeli = np.dot(ln.inv(D), Xcirc)
    ax.plot(xeli+mux, yeli+muy, c=col, lw=0.5)

#

# %% compare the errors by projecting onto the images
from matplotlib.patches import Ellipse

objectPoints = chessboardModel.reshape(-1, 3)

imagePoints.shape

plt.figure(n+2)
# iterate over the images
plt.subplot(121)
x0, y0 = imagePoints[:, 0, :].reshape(-1, 2).T
plt.plot(x0, y0, '+k', markersize=1)
for model in modelos:
    x1, y1 = IP[model].reshape(-1, 2).T
    plt.plot(x1, y1, '.', color=clr[model], markersize=1)

plt.subplot(122)
x0, y0 = objectPoints[:, :2].T
ax = plt.gca()
ax.plot(x0, y0, '+k', markersize=7)
for model in modelos:
    x1, y1, _ = OP[model].transpose((2, 1, 0))
    ax.plot(x1, y1, '.', color=clr[model], markersize=2)
    # centroids
    mux = np.mean(x1, axis=1)
    muy = np.mean(y1, axis=1)
    # errors
    ex = x1 - mux.reshape(-1, 1)
    ey = y1 - muy.reshape(-1, 1)
    E = np.array([ex, ey]).transpose(1, 0, 2)
    # covariances
    C = np.array([np.dot(EE, EE.T) for EE in E]) / x1.shape[0]
    # compute the ellipse from the covariance
    for i in range(len(C)):
        plotEllipse(ax, C[i], mux[i], muy[i], clr[model])

plt.tight_layout()

# %% check projection in one particular image
imFiles = glob.glob(imagesFolder + "*.png")  # list of images
image2check = 'vlcsnap-2017-04-03-22h00m06s444.png'
# NOTE(review): `n` is reassigned here from "image count" to "index of the
# chosen image" -- cells below this point see the new meaning.
n = np.argwhere([f == imagesFolder+image2check for f in imFiles])
n.shape = -1
n = n[0]

# %%
plt.figure()
plt.plot(x0, y0, '+k', markersize=7)
model = 'fisheye'
x1, y1, _ = OP[model].transpose((2, 1, 0))
plt.plot(x1[:, n], y1[:, n], 'x', color=clr[model], markersize=5)

# centroids
#
media-network/media-api
src/transformers/file.js
export default (file) => ({ key: file.key, contentType: file.contentType, contentLength: file.contentLength, expires: file.expires, isOrigin: file.isOrigin, lastModified: file.lastModified, lastSynchronized: file.lastSynchronized, originUrl: file.originUrl, preset: file.preset })
JiYuanFeng/MCTrans
mctrans/models/builder.py
from mmcv.utils import Registry, build_from_cfg from torch import nn NETWORKS = Registry('network') LOSSES = Registry('loss') MODEL = Registry('model') ENCODERS = Registry('encoder') DECODERS = Registry('decoder') CENTERS = Registry('center') HEADS = Registry('head') def build(cfg, registry, default_args=None): """Build a module. Args: cfg (dict, list[dict]): The config of modules, is is either a dict or a list of configs. registry (:obj:`Registry`): A registry the module belongs to. default_args (dict, optional): Default arguments to build the module. Defaults to None. Returns: nn.Module: A built nn module. """ if isinstance(cfg, list): modules = [ build_from_cfg(cfg_, registry, default_args) for cfg_ in cfg ] return nn.Sequential(*modules) else: return build_from_cfg(cfg, registry, default_args) def build_network(cfg): """Build network.""" return build(cfg, NETWORKS) def build_losses(cfg): """Build loss.""" return [build_from_cfg(_cfg, LOSSES) for _cfg in cfg] def build_model(cfg): """Build model.""" return build(cfg, MODEL) def build_encoder(cfg): """Build Encoder.""" return build(cfg, ENCODERS) def build_decoder(cfg): """Build Decoder.""" return build(cfg, DECODERS) def build_center(cfg): """Build Center.""" return build(cfg, CENTERS) def build_head(cfg): """Build SegHead.""" return build(cfg, HEADS)
zzgchina888/msdn-code-gallery-microsoft
Official Windows Platform Sample/Windows Phone 8.1 samples/[JavaScript]-Windows Phone 8.1 samples/Wallet QuickStart Sample/JavaScript/js/scenario3Update.js
//// Copyright (c) Microsoft Corporation. All rights reserved
// Wallet QuickStart scenario 3: read and update the "PointsId" display
// property of an existing wallet item, and open it in the Wallet app.
(function () {
    "use strict";
    var page = WinJS.UI.Pages.define("/html/scenario3Update.html", {
        // Wire up the UI and load the current points value.
        ready: function (element, options) {
            coffeePointsInput = document.getElementById("coffeePointsInput");
            document.getElementById("updatePoints").addEventListener("click", updatePoints);
            document.getElementById("viewInWallet").addEventListener("click", viewInWallet);
            initialize();
        }
    }),
        wallet,            // WalletStore, set asynchronously by initialize()
        walletItem,        // the "CoffeeLoyalty123" item, if it exists
        coffeePointsInput; // text input bound to the PointsId property

    // Opens the wallet store, fetches the loyalty item and pre-fills the
    // input with its current PointsId value. Logs errors when the item or
    // property is missing (the item is created in Scenario 2).
    function initialize() {
        Windows.ApplicationModel.Wallet.WalletManager.requestStoreAsync().then(function (walletIn) {
            wallet = walletIn;
            return wallet.getWalletItemAsync("CoffeeLoyalty123");
        }).done(function (walletItemIn) {
            walletItem = walletItemIn;
            if (walletItem) {
                if (walletItem.displayProperties.hasKey("PointsId")) {
                    coffeePointsInput.value = walletItem.displayProperties.PointsId.value;
                } else {
                    WinJS.log && WinJS.log("Item does not have a PointsId property.", "sample", "error");
                }
            } else {
                WinJS.log && WinJS.log("Item does not exist. Add item using Scenario2", "sample", "error");
            }
        }, function (error) {
            WinJS.log && WinJS.log("Error: " + error, "sample", "error");
        });
    }

    // Writes the input value back into the item's PointsId property and
    // persists it via updateAsync; reports which precondition failed.
    function updatePoints() {
        if (wallet && walletItem && walletItem.displayProperties.hasKey("PointsId")) {
            walletItem.displayProperties.PointsId.value = coffeePointsInput.value;
            wallet.updateAsync(walletItem).done(function () {
                WinJS.log && WinJS.log("Points updated.", "sample", "status");
            }, function (error) {
                WinJS.log && WinJS.log("Error: " + error, "sample", "error");
            });
        } else if (!wallet) {
            WinJS.log && WinJS.log("Please wait for wallet to initialize.", "sample", "error");
        } else if (!walletItem) {
            WinJS.log && WinJS.log("Item does not exist. Add item using Scenario2", "sample", "error");
        } else {
            WinJS.log && WinJS.log("Item does not have a PointsId property.", "sample", "error");
        }
    }

    // Shows the item inside the system Wallet app.
    function viewInWallet() {
        if (wallet && walletItem) {
            wallet.showAsync(walletItem.id).done(undefined, function (error) {
                WinJS.log && WinJS.log("Error: " + error, "sample", "error");
            });
        } else if (!wallet) {
            WinJS.log && WinJS.log("Please wait for wallet to initialize.", "sample", "error");
        } else {
            WinJS.log && WinJS.log("Item does not exist. Add item using Scenario2", "sample", "error");
        }
    }
})();
buschtoens/ember-leaflet
tests/unit/helpers/point-test.js
import { point } from 'dummy/helpers/point'; import { module, test } from 'qunit'; /* global L */ module('Unit | Helper | point'); test('it works', function(assert) { let result = point([42.12312412431231, 41.12331213212, true]); assert.ok(result); assert.ok(result instanceof L.Point); assert.equal(result.x, 42); assert.equal(result.y, 41); });
zhs007/jarviscrawlercore
src/service/client.js
const messages = require('../../pbjs/result_pb'); const services = require('../../pbjs/result_grpc_pb'); const log = require('../log'); const grpc = require('grpc'); const TOKEN = '<KEY>'; /** * startTranslate * @param {string} servAddr - service addr * @param {string} srclang - source language * @param {string} destlang - destination language * @param {string} text - text */ function startTranslate(servAddr, srclang, destlang, text) { const client = new services.JarvisCrawlerServiceClient(servAddr, grpc.credentials.createInsecure()); const request = new messages.RequestTranslate(); request.setText(text); request.setSrclang(srclang); request.setDestlang(destlang); request.setToken(TOKEN); client.translate(request, function(err, response) { if (err) { log.error('err:', err); } if (response) { log.debug('text:', response.getText()); } }); } /** * startArticle * @param {string} servAddr - service addr * @param {string} url - url * @param {bool} attachJQuery - is attach jquery */ function startArticle(servAddr, url, attachJQuery) { const client = new services.JarvisCrawlerServiceClient(servAddr, grpc.credentials.createInsecure()); const request = new messages.RequestArticle(); request.setUrl(url); request.setAttachjquery(attachJQuery); request.setToken(TOKEN); const call = client.exportArticle(request); call.on('data', (msg) =>{ const result = msg.getResult(); if (result) { log.debug(result.getTitle()); } else { log.debug(msg.getTotallength(), msg.getCurlength()); } }); call.on('end', ()=>{ log.debug('end.'); }); call.on('error', (err)=>{ log.error('err', err); }); } /** * startGetArticles * @param {string} servAddr - service addr * @param {string} website - website */ function startGetArticles(servAddr, website) { const client = new services.JarvisCrawlerServiceClient(servAddr, grpc.credentials.createInsecure()); const request = new messages.RequestArticles(); request.setWebsite(website); request.setToken(TOKEN); // request.setUrl(url); // 
request.setAttachjquery(jquery); client.getArticles(request, function(err, response) { if (err) { log.error('err:', err); } if (response) { log.debug('text:', JSON.stringify(response.getArticles().toObject())); } }); } startTranslate('127.0.0.1:7051', 'en', 'zh-CN', '@P<NAME> I am sure there is a problem with excel file, I need more time to check it.'); startGetArticles('127.0.0.1:7051', 'baijingapp'); startGetArticles('127.0.0.1:7051', '36kr'); startGetArticles('127.0.0.1:7051', 'geekpark'); startGetArticles('127.0.0.1:7051', 'huxiu'); startGetArticles('127.0.0.1:7051', 'lieyunwang'); startGetArticles('127.0.0.1:7051', 'tmtpost'); startGetArticles('127.0.0.1:7051', 'techcrunch'); startArticle('127.0.0.1:7051', 'https://post.smzdm.com/p/alpzl63o/', true);
eti-nne/dtk
src/dtkComposerSupport/dtkComposerNodeMetaVector3DArray.h
/* dtkComposerNodeMetaVector3DArray.h --- * * Author: <NAME> * Copyright (C) 2011 - <NAME>, Inria. * Created: Mon Aug 6 14:25:15 2012 (+0200) * Version: $Id: 53142aadb81daa940514db7dbdd4c8f5466a6ba7 $ * Last-Updated: Thu Aug 9 11:55:31 2012 (+0200) * By: sprinter * Update #: 3 */ /* Commentary: * */ /* Change log: * */ #ifndef DTKCOMPOSERNODEMETAVECTOR3DARRAY_H #define DTKCOMPOSERNODEMETAVECTOR3DARRAY_H #include "dtkComposerSupportExport.h" #include "dtkComposerMetatype.h" #include <dtkComposerNodeLeaf.h> class dtkComposerNodeMetaVector3DArrayPrivate; // ///////////////////////////////////////////////////////////////// // dtkComposerNodeMetaVector3DArray interface // ///////////////////////////////////////////////////////////////// class DTKCOMPOSERSUPPORT_EXPORT dtkComposerNodeMetaVector3DArray : public dtkComposerNodeLeaf { public: dtkComposerNodeMetaVector3DArray(void); ~dtkComposerNodeMetaVector3DArray(void); public: void run(void); public: inline QString type(void) { return "meta_vector3D_array"; } inline QString titleHint(void) { return "Meta Vector3D Array"; } public: QString outputLabelHint(int port); private: dtkComposerNodeMetaVector3DArrayPrivate *d; }; #endif