repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
Quanscendence/braynai
coreapp/migrations/0073_auto_20200728_1624.py
# Generated by Django 2.2 on 2020-07-28 10:54 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('coreapp', '0072_auto_20200611_2220'), ] operations = [ migrations.AlterField( model_name='apidataget', name='frequency', field=models.CharField(choices=[('Daily', 'Daily'), ('1Minute', '1 Minute'), ('5Minutes', '5 Minutes'), ('10Minutes', '10 Minutes'), ('15Minutes', '15 Minutes'), ('30Minutes', '30 Minutes'), ('1Hour', '1 Hour'), ('6Hours', '6 Hours'), ('12Hours', '12 Hours'), ('Weekly', 'Weekly'), ('15 Days', '15 Days'), ('1 Month', '1 Month'), ('6 Months', '6 Months'), ('Quarterly', 'Quarterly'), ('Yearly', 'Yearly'), ('Never', 'Never')], max_length=30), ), migrations.AlterField( model_name='projectdashboard', name='report_frequency', field=models.CharField(blank=True, choices=[('Daily', 'Daily'), ('1Minute', '1 Minute'), ('5Minutes', '5 Minutes'), ('10Minutes', '10 Minutes'), ('15Minutes', '15 Minutes'), ('30Minutes', '30 Minutes'), ('1Hour', '1 Hour'), ('6Hours', '6 Hours'), ('12Hours', '12 Hours'), ('Weekly', 'Weekly'), ('15 Days', '15 Days'), ('1 Month', '1 Month'), ('6 Months', '6 Months'), ('Quarterly', 'Quarterly'), ('Yearly', 'Yearly'), ('Never', 'Never')], max_length=500, null=True), ), migrations.AlterField( model_name='projectendpoint', name='sub_df_frequency', field=models.CharField(choices=[('Daily', 'Daily'), ('1Minute', '1 Minute'), ('5Minutes', '5 Minutes'), ('10Minutes', '10 Minutes'), ('15Minutes', '15 Minutes'), ('30Minutes', '30 Minutes'), ('1Hour', '1 Hour'), ('6Hours', '6 Hours'), ('12Hours', '12 Hours'), ('Weekly', 'Weekly'), ('15 Days', '15 Days'), ('1 Month', '1 Month'), ('6 Months', '6 Months'), ('Quarterly', 'Quarterly'), ('Yearly', 'Yearly'), ('Never', 'Never')], max_length=30, null=True), ), migrations.CreateModel( name='EndpointMlApi', fields=[ ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, default='Unknown...', max_length=200, null=True)), ('description', models.TextField(blank=True, default='some text...', null=True)), ('version', models.PositiveIntegerField(blank=True, null=True)), ('created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Created date')), ('updated', models.DateTimeField(auto_now=True, null=True, verbose_name='Updated date')), ('api', models.CharField(blank=True, max_length=500, null=True)), ('token', models.CharField(blank=True, max_length=100, null=True)), ('end_point', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='coreapp.ProjectEndPoint')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ('name',), 'abstract': False, }, ), ]
henry-hz/optimism
proxyd/integration_tests/batching_test.go
<filename>proxyd/integration_tests/batching_test.go<gh_stars>1-10 package integration_tests import ( "net/http" "os" "testing" "github.com/ethereum-optimism/optimism/proxyd" "github.com/stretchr/testify/require" ) func TestBatching(t *testing.T) { config := ReadConfig("batching") chainIDResponse1 := `{"jsonrpc": "2.0", "result": "hello1", "id": 1}` chainIDResponse2 := `{"jsonrpc": "2.0", "result": "hello2", "id": 2}` chainIDResponse3 := `{"jsonrpc": "2.0", "result": "hello3", "id": 3}` netVersionResponse1 := `{"jsonrpc": "2.0", "result": "1.0", "id": 1}` callResponse1 := `{"jsonrpc": "2.0", "result": "ekans1", "id": 1}` type mockResult struct { method string id string result interface{} } chainIDMock1 := mockResult{"eth_chainId", "1", "hello1"} chainIDMock2 := mockResult{"eth_chainId", "2", "hello2"} chainIDMock3 := mockResult{"eth_chainId", "3", "hello3"} netVersionMock1 := mockResult{"net_version", "1", "1.0"} callMock1 := mockResult{"eth_call", "1", "ekans1"} tests := []struct { name string handler http.Handler mocks []mockResult reqs []*proxyd.RPCReq expectedRes string maxBatchSize int numExpectedForwards int }{ { name: "backend returns batches out of order", mocks: []mockResult{chainIDMock1, chainIDMock2, chainIDMock3}, reqs: []*proxyd.RPCReq{ NewRPCReq("1", "eth_chainId", nil), NewRPCReq("2", "eth_chainId", nil), NewRPCReq("3", "eth_chainId", nil), }, expectedRes: asArray(chainIDResponse1, chainIDResponse2, chainIDResponse3), maxBatchSize: 2, numExpectedForwards: 2, }, { // infura behavior name: "backend returns single RPC response object as error", handler: SingleResponseHandler(500, `{"jsonrpc":"2.0","error":{"code":-32001,"message":"internal server error"},"id":1}`), reqs: []*proxyd.RPCReq{ NewRPCReq("1", "eth_chainId", nil), NewRPCReq("2", "eth_chainId", nil), }, expectedRes: asArray( `{"error":{"code":-32011,"message":"no backends available for method"},"id":1,"jsonrpc":"2.0"}`, `{"error":{"code":-32011,"message":"no backends available for 
method"},"id":2,"jsonrpc":"2.0"}`, ), maxBatchSize: 10, numExpectedForwards: 1, }, { name: "backend returns single RPC response object for minibatches", handler: SingleResponseHandler(500, `{"jsonrpc":"2.0","error":{"code":-32001,"message":"internal server error"},"id":1}`), reqs: []*proxyd.RPCReq{ NewRPCReq("1", "eth_chainId", nil), NewRPCReq("2", "eth_chainId", nil), }, expectedRes: asArray( `{"error":{"code":-32011,"message":"no backends available for method"},"id":1,"jsonrpc":"2.0"}`, `{"error":{"code":-32011,"message":"no backends available for method"},"id":2,"jsonrpc":"2.0"}`, ), maxBatchSize: 1, numExpectedForwards: 2, }, { name: "duplicate request ids are on distinct batches", mocks: []mockResult{ netVersionMock1, chainIDMock2, chainIDMock1, callMock1, }, reqs: []*proxyd.RPCReq{ NewRPCReq("1", "net_version", nil), NewRPCReq("2", "eth_chainId", nil), NewRPCReq("1", "eth_chainId", nil), NewRPCReq("1", "eth_call", nil), }, expectedRes: asArray(netVersionResponse1, chainIDResponse2, chainIDResponse1, callResponse1), maxBatchSize: 2, numExpectedForwards: 3, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { config.Server.MaxUpstreamBatchSize = tt.maxBatchSize handler := tt.handler if handler == nil { router := NewBatchRPCResponseRouter() for _, mock := range tt.mocks { router.SetRoute(mock.method, mock.id, mock.result) } handler = router } goodBackend := NewMockBackend(handler) defer goodBackend.Close() require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", goodBackend.URL())) client := NewProxydClient("http://127.0.0.1:8545") shutdown, err := proxyd.Start(config) require.NoError(t, err) defer shutdown() res, statusCode, err := client.SendBatchRPC(tt.reqs...) 
require.NoError(t, err) require.Equal(t, http.StatusOK, statusCode) RequireEqualJSON(t, []byte(tt.expectedRes), res) if tt.numExpectedForwards != 0 { require.Equal(t, tt.numExpectedForwards, len(goodBackend.Requests())) } if handler, ok := handler.(*BatchRPCResponseRouter); ok { for i, mock := range tt.mocks { require.Equal(t, 1, handler.GetNumCalls(mock.method, mock.id), i) } } }) } }
SurgeTechnologies/Electro
ElectroEditor/src/Panels/SceneHierarchyPanel.cpp
// ELECTRO ENGINE // Copyright(c) 2021 - Electro Team - All rights reserved #include "SceneHierarchyPanel.hpp" #include "Core/Input.hpp" #include "UIUtils/UIUtils.hpp" #include "EditorLayer.hpp" namespace Electro { SceneHierarchyPanel::SceneHierarchyPanel(const Ref<Scene>& context) { SetContext(context); } void SceneHierarchyPanel::OnInit(void* data) { mEditorLayer = static_cast<EditorLayer*>(data); } void SceneHierarchyPanel::SetContext(const Ref<Scene>& context) { mContext = context; mSelectionContext = {}; if (mSelectionContext && false) { // Try to find same entity in new scene auto& entityMap = mContext->GetEntityMap(); UUID selectedEntityID = mSelectionContext.GetUUID(); if (entityMap.find(selectedEntityID) != entityMap.end()) mSelectionContext = entityMap.at(selectedEntityID); } } void SceneHierarchyPanel::OnImGuiRender(bool* show) { // Hierarchy ImGui::Begin(HIERARCHY_TITLE, show); if (ImGui::Button("Add Entity", { ImGui::GetWindowWidth(), 0.0f })) ImGui::OpenPopup("Add Entity"); if (ImGui::BeginPopup("Add Entity") || ImGui::BeginPopupContextWindow(nullptr, 1, false)) { if (ImGui::MenuItem("Empty Entity")) { mSelectionContext = mContext->CreateEntity("Entity"); } if (ImGui::MenuItem("Camera")) { mSelectionContext = mContext->CreateEntity("Camera"); mSelectionContext.AddComponent<CameraComponent>(); } if (ImGui::MenuItem("Mesh")) { mSelectionContext = mContext->CreateEntity("Mesh"); mSelectionContext.AddComponent<MeshComponent>(); } if (ImGui::BeginMenu("Lights")) { if (ImGui::MenuItem("PointLight")) { mSelectionContext = mContext->CreateEntity("Point Light"); mSelectionContext.AddComponent<PointLightComponent>(); } if (ImGui::MenuItem("DirectionalLight")) { mSelectionContext = mContext->CreateEntity("Directional Light"); mSelectionContext.AddComponent<DirectionalLightComponent>(); } ImGui::EndMenu(); } ImGui::EndPopup(); } bool sceneNodeOpened = ImGui::TreeNodeEx(static_cast<void*>(&mContext->GetUUID()), ImGuiTreeNodeFlags_DefaultOpen, fmt::format("{0} - 
{1}", mContext->GetName().c_str(), mContext->GetUUID()).c_str()); { const ImGuiPayload* data = UI::DragAndDropTarget(ELECTRO_SCENE_FILE_DND_ID); if (data) { const String filepath = *static_cast<String*>(data->Data); mEditorLayer->InitSceneEssentials(FileSystem::GetNameWithoutExtension(filepath)); mEditorLayer->DeserializeScene(filepath); } } if(sceneNodeOpened) { // For each entity in the registry, draw it mContext->GetRegistry().each([&](auto entityID) { Entity entity{ entityID, mContext.Raw() }; if (entity.HasComponent<IDComponent>()) DrawEntityNode(entity); }); ImGui::TreePop(); } mIsHierarchyFocused = ImGui::IsWindowFocused(); mIsHierarchyHovered = ImGui::IsWindowHovered(); if (ImGui::IsMouseDown(0) && mIsHierarchyHovered) mSelectionContext = {}; ImGui::End(); } void SceneHierarchyPanel::DrawEntityNode(Entity entity) { String& tag = entity.GetComponent<TagComponent>().Tag; ImGuiTreeNodeFlags flags = ((mSelectionContext == entity) ? ImGuiTreeNodeFlags_Selected : 0) | ImGuiTreeNodeFlags_OpenOnArrow; flags |= ImGuiTreeNodeFlags_SpanAvailWidth; if (mContext->GetSelectedEntity() == entity) ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(1.0f, 0.5f, 0.1f, 1.0f)); const bool opened = ImGui::TreeNodeEx(reinterpret_cast<void*>(static_cast<uint64_t>(static_cast<uint32_t>(entity))), flags, tag.c_str()); if (mContext->GetSelectedEntity() == entity) ImGui::PopStyleColor(); if (ImGui::IsItemClicked()) mSelectionContext = entity; bool entityDeleted = false; if (ImGui::BeginPopupContextItem()) { if (ImGui::MenuItem("Delete Entity")) entityDeleted = true; ImGui::EndPopup(); } if (opened) ImGui::TreePop(); if (entityDeleted) { mContext->DestroyEntity(entity); if (mSelectionContext == entity) mSelectionContext = {}; } } void SceneHierarchyPanel::OnEvent(Event& e) { EventDispatcher dispatcher(e); dispatcher.Dispatch<KeyPressedEvent>(ELECTRO_BIND_EVENT_FN(SceneHierarchyPanel::OnKeyPressed)); } bool SceneHierarchyPanel::OnKeyPressed(KeyPressedEvent& e) { if 
(Input::IsKeyPressed(Key::Delete) && mIsHierarchyFocused) { if (mSelectionContext) mContext->DestroyEntity(mSelectionContext); mSelectionContext = {}; return false; } if ((Input::IsKeyPressed(Key::LeftControl) || Input::IsKeyPressed(Key::RightControl)) && Input::IsKeyPressed(Key::D)) mContext->DuplicateEntity(mSelectionContext); return false; } }
lanjue1/WMS-
src/components/SelectLang/index.js
<gh_stars>0 import React, { PureComponent } from 'react'; import { formatMessage, setLocale, getLocale } from 'umi-plugin-react/locale'; import { Menu, Icon } from 'antd'; import classNames from 'classnames'; import HeaderDropdown from '../HeaderDropdown'; import styles from './index.less'; import { connect } from 'dva'; import { TranslationOutlined , BankOutlined } from '@ant-design/icons'; @connect(({ login, i18n }) => ({ login, user: login.user, })) export default class SelectLang extends PureComponent { changeLang = ({ key }) => { // setLocale(key); this.props.dispatch({ type: 'i18n/fetch', payload: { type: key }, callback: (res) => { localStorage.setItem('language_type', key) localStorage.setItem('language', JSON.stringify(res)); window.location.reload(); } }); }; render() { const { className } = this.props; const selectedLang = localStorage.getItem('language_type') ? localStorage.getItem('language_type') : 'en-US'; const locales = ['zh-CN', 'en-US',]; let indexOf=selectedLang.lastIndexOf('-') const lang=selectedLang.slice( 0,indexOf).toLocaleUpperCase() const languageLabels = { 'zh-CN': '简体中文', 'en-US': 'English', 'pt-BR': 'Português', }; const languageIcons = { 'zh-CN': '🇨🇳', // 'zh-TW': '🇭🇰', 'en-US': 'e🇳', // 'pt-BR': '🇧🇷', }; const langMenu = ( <Menu className={styles.menu} selectedKeys={[selectedLang]} onClick={this.changeLang}> {locales.map(locale => ( <Menu.Item key={locale}> <span role="img" aria-label={languageLabels[locale]}> {languageIcons[locale]} </span>{' '} {languageLabels[locale]} </Menu.Item> ))} </Menu> ); return ( <HeaderDropdown overlay={langMenu} placement="bottomRight"> <span className={classNames(styles.dropDown, className)} style={{marginRight:10}}> {/* <Icon type="global" style={{marginLeft:'8px'}} title={'Languages'} /> */} <TranslationOutlined style={{marginLeft:'8px',marginRight:'3px'}} title={'Languages'} /> {lang} </span> </HeaderDropdown> ); } }
globalhunter727/quiz
src/firebase/firebase.js
<gh_stars>1-10 import firebase from 'firebase'; // var firebaseConfig = { // apiKey: "<KEY>", // authDomain: "quizdom-70da5.firebaseapp.com", // projectId: "quizdom-70da5", // storageBucket: "quizdom-70da5.appspot.com", // messagingSenderId: "706147557425", // appId: "1:706147557425:web:40f7062e5aee9a84743510", // measurementId: "G-RE88729T4B" // }; const firebaseConfig = { apiKey: "<KEY>", authDomain: "quiz-1db0c.firebaseapp.com", projectId: "quiz-1db0c", storageBucket: "quiz-1db0c.appspot.com", messagingSenderId: "78963595474", appId: "1:78963595474:web:8d9d4f5b4ed293f52b350b" }; firebase.initializeApp(firebaseConfig); firebase.analytics(); firebase.auth() export default firebase;
bhismalab/START
START_frontend/Sources/StartApp/app/src/main/java/com/reading/start/data/response/ResponseLanguages.java
package com.reading.start.data.response; import com.reading.start.data.entity.DataLanguages; public class ResponseLanguages extends BaseResponseData<DataLanguages> { }
aland1013/coding-interview-questions
chapter-4/4.5-validate-bst.js
class Node { constructor(value) { this.value = value; this.left = null; this.right = null; } } const validateBST = ( root, min = Number.NEGATIVE_INFINITY, max = Number.POSITIVE_INFINITY ) => { if (root) { if (root.value < min || root.value > max) return false; return ( validateBST(root.left, min, root.value) && validateBST(root.right, root.value, max) ); } return true; }; // tests let a = new Node(8); let b = new Node(4); let c = new Node(10); let d = new Node(2); let e = new Node(6); let f = new Node(20); a.left = b; a.right = c; b.left = d; b.right = e; c.right = f; console.log(validateBST(a)); // true e.value = 12; console.log(validateBST(a)); // false
ducminh-phan/final-project-template
tests/controllers/test_probe.py
<reponame>ducminh-phan/final-project-template def test_ping(client): response = client.get("/ping") assert response.status_code == 405 response = client.post("/ping") assert response.status_code == 200
trist725/mgsu
network/websocket/v1/session_manager.go
<filename>network/websocket/v1/session_manager.go package websock_v1 import ( "sync" ) type sessionManager struct { sessions map[SessionID]*Session open bool rwmutex *sync.RWMutex } func newSessionManager() *sessionManager { return &sessionManager{ sessions: make(map[SessionID]*Session), open: true, rwmutex: &sync.RWMutex{}, } } func (mgr *sessionManager) register(s *Session) { mgr.rwmutex.Lock() mgr.sessions[s.ID()] = s mgr.rwmutex.Unlock() } func (mgr *sessionManager) unregister(s *Session) { mgr.rwmutex.Lock() if _, ok := mgr.sessions[s.ID()]; ok { delete(mgr.sessions, s.ID()) } mgr.rwmutex.Unlock() } func (mgr *sessionManager) broadcast(m *envelope) { mgr.rwmutex.RLock() for _, s := range mgr.sessions { if m.filter != nil { if m.filter(s) { s.writeMessage(m) } } else { s.writeMessage(m) } } mgr.rwmutex.RUnlock() } func (mgr *sessionManager) close(m *envelope) { mgr.rwmutex.Lock() for id, s := range mgr.sessions { s.writeMessage(m) delete(mgr.sessions, id) s.Close() } mgr.open = false mgr.rwmutex.Unlock() } func (mgr *sessionManager) closed() bool { mgr.rwmutex.RLock() defer mgr.rwmutex.RUnlock() return !mgr.open } func (mgr *sessionManager) len() int { mgr.rwmutex.RLock() defer mgr.rwmutex.RUnlock() return len(mgr.sessions) }
caladri/glatt
mk/platform/testmips/platform/platform_clock.c
#include <core/types.h> #include <core/critical.h> #include <core/error.h> #include <cpu/cpu.h> #include <cpu/memory.h> #include <platform/clock.h> #define TEST_RTC_DEV_BASE (0x15000000) #define TEST_RTC_DEV_TRIGGER (0x0000) #define TEST_RTC_DEV_USECONDS (0x0020) #define TEST_RTC_DEV_FUNCTION(f) \ (volatile uint64_t *)XKPHYS_MAP(CCA_UC, TEST_RTC_DEV_BASE + (f)) #define TEST_RTC_DEV_READ(f) \ (volatile uint64_t)*TEST_RTC_DEV_FUNCTION(f) #define TEST_RTC_DEV_WRITE(f, v) \ *TEST_RTC_DEV_FUNCTION(f) = (v) #define CLOCK_CALIBRATION_RUNS (10) /* * Calibrate R4K clock subsystem -- return the number of cycles in 1/hz seconds. */ unsigned platform_clock_calibrate(unsigned hz) { return (100000000 / hz); }
gaoht/house
java/classes2/com/megvii/zhimasdk/b/a/e/f.java
<filename>java/classes2/com/megvii/zhimasdk/b/a/e/f.java package com.megvii.zhimasdk.b.a.e; import com.megvii.zhimasdk.b.a.n; import java.io.InterruptedIOException; public class f extends InterruptedIOException { private final n a = null; public f() {} public f(String paramString) { super(paramString); } } /* Location: /Users/gaoht/Downloads/zirom/classes2-dex2jar.jar!/com/megvii/zhimasdk/b/a/e/f.class * Java compiler version: 6 (50.0) * JD-Core Version: 0.7.1 */
carrossos/plib
src/main/java/net/carrossos/plib/io/velocity/Velocity.java
<reponame>carrossos/plib<filename>src/main/java/net/carrossos/plib/io/velocity/Velocity.java package net.carrossos.plib.io.velocity; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; import org.apache.velocity.Template; import org.apache.velocity.VelocityContext; import org.apache.velocity.app.VelocityEngine; import org.apache.velocity.app.event.EventCartridge; public class Velocity { private VelocityEngine engine; private boolean strict = true; private Path root; private String file; private String template; private final Map<String, Object> parameters = new HashMap<>(); private VelocityContext context; private void build() { this.engine = new VelocityEngine(); this.context = new VelocityContext(context); if (file == null) { engine.init(); } else { Properties properties = new Properties(); properties.put("file.resource.loader.path", root.toString()); engine.init(properties); } EventCartridge cartridge = new EventCartridge(); cartridge.addEventHandler(new VelocityEventHandler(strict)); this.context.attachEventCartridge(cartridge); this.parameters.forEach(context::put); } public Velocity fromFile(Path root, String file) { this.root = root.toAbsolutePath(); this.file = file; this.template = null; return this; } public Velocity fromFile(String root, String file) { return fromFile(Path.of(root), file); } public Velocity fromString(String template) { this.template = template; this.root = null; this.file = null; return this; } public String generate() throws IOException { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); try (PrintWriter writer = new PrintWriter(buffer, false, StandardCharsets.UTF_8)) { generate(writer); } return new String(buffer.toByteArray(), 
StandardCharsets.UTF_8); } public void generate(Path path) throws IOException { try (var writer = new PrintWriter(Files.newBufferedWriter(path))) { generate(writer); } } public void generate(Writer writer) throws IOException { build(); try { if (file == null) { engine.evaluate(context, writer, "memTpl", template); } else { Template veloTemplate = engine.getTemplate(file); veloTemplate.merge(context, writer); } } catch (Exception e) { throw new IOException(String.format("Failed to generate using context: %s", Arrays.stream(context.getKeys()) .map(k -> k + " => " + context.get(k)).collect(Collectors.joining(", "))), e); } } public Velocity lax() { this.strict = false; return this; } public Velocity put(String key, Object val) { parameters.put(key, val); return this; } public Velocity strict() { this.strict = true; return this; } public Velocity withContext(VelocityContext context) { this.context = context; return this; } public static Velocity create() { return new Velocity(); } }
hyanwong/eol
spec/helpers/hierarchies_helper_spec.rb
require "spec_helper" describe HierarchiesHelper do describe '#en_browsable_status' do before do @hierarchy = build_stubbed(Hierarchy) @hierarchy.stub(:browsable?) { false } @hierarchy.stub(:request_publish) { false } end it 'blank hierarchies are unknown' do expect(helper.en_browsable_status(nil)).to eq('unknown') end it 'browsable hierarchies are browsable' do expect(@hierarchy).to receive(:browsable?) { true } expect(helper.en_browsable_status(@hierarchy)).to eq('browsable') end it 'request_publish hierarchies are request_publish' do expect(@hierarchy).to receive(:request_publish) { true } expect(helper.en_browsable_status(@hierarchy)).to eq('request_publish') end it 'defaults to not_browsable' do expect(helper.en_browsable_status(@hierarchy)).to eq('not_browsable') end end end
Rouzip/qiniu-go-sdk
api.v8/kodo/token.go
package kodo import ( "encoding/base64" "encoding/json" "errors" "strconv" "strings" "time" "github.com/qiniupd/qiniu-go-sdk/api.v8/auth/qbox" "github.com/qiniupd/qiniu-go-sdk/x/url.v7" ) // ---------------------------------------------------------- // 根据空间(Bucket)的域名,以及文件的 key,获得 baseUrl。 // 如果空间是 public 的,那么通过 baseUrl 可以直接下载文件内容。 // 如果空间是 private 的,那么需要对 baseUrl 进行私有签名得到一个临时有效的 privateUrl 进行下载。 // func MakeBaseUrl(domain, key string) (baseUrl string) { return "http://" + domain + "/" + url.Escape(key) } // ---------------------------------------------------------- type GetPolicy struct { Expires uint32 } func (p *Client) MakePrivateUrl(baseUrl string, policy *GetPolicy) (privateUrl string) { var expires int64 if policy == nil || policy.Expires == 0 { expires = 3600 } else { expires = int64(policy.Expires) } deadline := time.Now().Unix() + expires if strings.Contains(baseUrl, "?") { baseUrl += "&e=" } else { baseUrl += "?e=" } baseUrl += strconv.FormatInt(deadline, 10) token := qbox.Sign(p.mac, []byte(baseUrl)) return baseUrl + "&token=" + token } // -------------------------------------------------------------------------------- type PutPolicy struct { Scope string `json:"scope"` Expires uint32 `json:"deadline"` // 截止时间(以秒为单位) InsertOnly uint16 `json:"insertOnly,omitempty"` // 若非0, 即使Scope为 Bucket:Key 的形式也是insert only DetectMime uint8 `json:"detectMime,omitempty"` // 若非0, 则服务端根据内容自动确定 MimeType CallbackFetchKey uint8 `json:"callbackFetchKey,omitempty"` FsizeLimit int64 `json:"fsizeLimit,omitempty"` MimeLimit string `json:"mimeLimit,omitempty"` SaveKey string `json:"saveKey,omitempty"` CallbackUrl string `json:"callbackUrl,omitempty"` CallbackHost string `json:"callbackHost,omitempty"` CallbackBody string `json:"callbackBody,omitempty"` CallbackBodyType string `json:"callbackBodyType,omitempty"` ReturnUrl string `json:"returnUrl,omitempty"` ReturnBody string `json:"returnBody,omitempty"` PersistentOps string `json:"persistentOps,omitempty"` PersistentNotifyUrl 
string `json:"persistentNotifyUrl,omitempty"` PersistentPipeline string `json:"persistentPipeline,omitempty"` EndUser string `json:"endUser,omitempty"` Checksum string `json:"checksum,omitempty"` // 格式:<HashName>:<HexHashValue>,目前支持 MD5/SHA1。 NotifyQueue string `json:"notifyQueue,omitempty"` NotifyMessage string `json:"notifyMessage,omitempty"` NotifyMessageType string `json:"notifyMessageType,omitempty"` DeleteAfterDays int `json:"deleteAfterDays,omitempty"` FileType FileType `json:"fileType,omitempty"` //内部参数 OldFh string `json:"oldFh,omitempty"` PutTime int64 `json:"putTime,omitempty"` Cond string `json:"cond,omitempty"` //格式:condKey1=condVal1&condKey2=condVal2,支持hash、mime、fsize、putTime条件,只有条件匹配才会执行覆盖操作 } func (p *Client) MakeUptoken(policy *PutPolicy) string { var rr = *policy if rr.Expires == 0 { rr.Expires = 3600 } rr.Expires += uint32(time.Now().Unix()) b, _ := json.Marshal(&rr) return qbox.SignWithData(p.mac, b) } func ParseUptoken(uptoken string) (policy PutPolicy, err error) { ps := strings.Split(uptoken, ":") if len(ps) != 3 { err = errors.New("invalid uptoken") return } pb, err := base64.URLEncoding.DecodeString(ps[2]) if err != nil { return } err = json.Unmarshal(pb, &policy) return } // ----------------------------------------------------------
kokosing/hue
apps/oozie/src/oozie/static/oozie/js/workflow.registry.js
<filename>apps/oozie/src/oozie/static/oozie/js/workflow.registry.js // Licensed to Cloudera, Inc. under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. Cloudera, Inc. licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * Registry of models * - Each model should have an ID attribute. */ var RegistryModule = function($) { var module = function() { var self = this; self.nodes = {}; module.prototype.initialize.apply(self, arguments); return self; }; $.extend(module.prototype, { // Normal stuff initialize: function() {}, toString: function() { var self = this; var s = $.map(self.nodes, function(node) { return node.id(); }).join(); return s; }, add: function(id, node) { var self = this; $(self).trigger('registry.add'); self.nodes[String(id)] = node; }, remove: function(id) { var self = this; $(self).trigger('registry.remove'); delete self.nodes[String(id)]; }, get: function(id) { var self = this; return self.nodes[id]; }, clear: function() { var self = this; delete self.nodes; self.nodes = {}; }, allNodes: function() { var self = this; var nodes = []; $.each(self.nodes, function(key, node) { nodes.push(node); }); return nodes; } }); return module; };
paranoid-software/elemental-cms
tests/management/globaldepscommands/test_pushcommandshould.py
import datetime
import os
import re

import pytest
from assertpy import assert_that
from bson import ObjectId, json_util
from click.testing import CliRunner

from elementalcms.core import MongoDbContext, FlaskContext
from elementalcms.management import cli
from tests import EphemeralMongoContext, EphemeralElementalFileSystem
from tests.ephemeralmongocontext import MongoDbState, MongoDbStateData


class TestPushCommandShould:
    """CLI tests for the ``global-deps push`` command.

    Each test runs the command in an isolated filesystem (and, where a
    database is required, an ephemeral MongoDB) and asserts on the
    command's console output.
    """

    @pytest.fixture
    def specs(self):
        """Three dependency specs covering the _id validation paths:
        'jquery' has no _id at all, 'jquery-ui' has a valid ObjectId,
        and 'calendar' has an invalid (plain string) _id."""
        return [{
            'order': 0,
            'name': 'jquery',
            'type': 'application/javascript',
            'url': '',
            'meta': {},
            'createdAt': datetime.datetime.utcnow(),
            'lastModifiedAt': datetime.datetime.utcnow()
        }, {
            '_id': ObjectId(),
            'order': 0,
            'name': 'jquery-ui',
            'type': 'text/css',
            'url': '',
            'meta': {},
            'createdAt': datetime.datetime.utcnow(),
            'lastModifiedAt': datetime.datetime.utcnow()
        }, {
            '_id': '1',
            'order': 0,
            'name': 'calendar',
            'type': 'module',
            'url': '',
            'meta': {},
            'createdAt': datetime.datetime.utcnow(),
            'lastModifiedAt': datetime.datetime.utcnow()
        }]

    @staticmethod
    def spec_files_setup(specs, debug_settings_fixture):
        """Write one JSON file per spec under GLOBAL_DEPS_FOLDER, plus three
        deliberately broken files under module/: invalid JSON, a spec with a
        missing name, and a spec whose name does not match its file name.

        Note: the module/ folder is guaranteed to exist only because one of
        the fixture specs has type 'module'.
        """
        root_folder_path = FlaskContext(debug_settings_fixture["cmsCoreContext"]).GLOBAL_DEPS_FOLDER
        for spec in specs:
            name = spec['name']
            _type = spec['type']
            # MIME-like types map to folder names with '/' replaced by '_'.
            type_folder_name = _type.replace('/', '_')
            folder_path = f'{root_folder_path}/{type_folder_name}'
            if not os.path.exists(folder_path):
                os.makedirs(folder_path)
            spec_filepath = f'{folder_path}/{name}.json'
            with open(spec_filepath, 'w') as s:
                s.write(json_util.dumps(spec))
        with open(f'{root_folder_path}/module/invalid.json', 'w') as s:
            s.write('...')
        with open(f'{root_folder_path}/module/missing-name.json', 'w') as s:
            s.write(json_util.dumps({
                '_id': ObjectId(),
                'order': 0,
                'type': 'module',
                'url': '',
                'meta': {},
                'createdAt': datetime.datetime.utcnow(),
                'lastModifiedAt': datetime.datetime.utcnow()
            }))
        with open(f'{root_folder_path}/module/unmatched-name.json', 'w') as s:
            s.write(json_util.dumps({
                '_id': ObjectId(),
                'order': 0,
                'name': 'name',
                'type': 'module',
                'url': '',
                'meta': {},
                'createdAt': datetime.datetime.utcnow(),
                'lastModifiedAt': datetime.datetime.utcnow()
            }))

    def test_fail_when_type_is_not_supported(self, default_elemental_fixture, default_settings_fixture):
        """Reports a clear error when the dependency type is unsupported."""
        runner = CliRunner()
        with runner.isolated_filesystem():
            with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                # noinspection PyTypeChecker
                result = runner.invoke(cli, ['global-deps', 'push', '-d', 'dep-one', 'unsupported-type'])
                assert_that(result.output).contains('type is not supported.')

    def test_fail_when_spec_file_is_missing(self, default_elemental_fixture, default_settings_fixture):
        """Reports a clear error when no spec file exists for the dependency."""
        runner = CliRunner()
        with runner.isolated_filesystem():
            with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                # noinspection PyTypeChecker
                result = runner.invoke(cli, ['global-deps', 'push', '-d', 'my-missing-css-dep', 'text/css'])
                assert_that(result.output).contains('There is no spec file for my-missing-css-dep (text/css).')

    def test_display_1_invalid_spec_feedback_message(self, default_elemental_fixture, default_settings_fixture, specs):
        """Pushing a file that is not valid JSON yields one 'Invalid spec' message."""
        with EphemeralMongoContext(MongoDbContext(default_settings_fixture['cmsDbContext']).get_connection_string(),
                                   initial_state=[
                                       MongoDbState(db_name='elemental', data=[])
                                   ]) as (db_name, reader):
            default_settings_fixture['cmsDbContext']['databaseName'] = db_name
            runner = CliRunner()
            with runner.isolated_filesystem():
                with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                    self.spec_files_setup(specs, default_settings_fixture)
                    # noinspection PyTypeChecker
                    result = runner.invoke(cli, ['global-deps', 'push', '--dep', 'invalid', 'module'])
                    assert_that(re.findall('Invalid spec', result.output)).is_length(1)

    def test_display_1_missing_id_feedback_message(self, default_elemental_fixture, default_settings_fixture, specs):
        """A spec without an _id yields one 'Missing spec _id' message."""
        with EphemeralMongoContext(MongoDbContext(default_settings_fixture['cmsDbContext']).get_connection_string(),
                                   initial_state=[
                                       MongoDbState(db_name='elemental', data=[])
                                   ]) as (db_name, reader):
            default_settings_fixture['cmsDbContext']['databaseName'] = db_name
            runner = CliRunner()
            with runner.isolated_filesystem():
                with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                    self.spec_files_setup(specs, default_settings_fixture)
                    # noinspection PyTypeChecker
                    result = runner.invoke(cli, ['global-deps', 'push', '--dep', 'jquery', 'application/javascript'])
                    assert_that(re.findall('Missing spec _id', result.output)).is_length(1)

    def test_display_1_invalid_id_feedback_message(self, default_elemental_fixture, default_settings_fixture, specs):
        """A spec whose _id is not an ObjectId yields one 'Invalid spec _id' message."""
        with EphemeralMongoContext(MongoDbContext(default_settings_fixture['cmsDbContext']).get_connection_string(),
                                   initial_state=[
                                       MongoDbState(db_name='elemental', data=[])
                                   ]) as (db_name, reader):
            default_settings_fixture['cmsDbContext']['databaseName'] = db_name
            runner = CliRunner()
            with runner.isolated_filesystem():
                with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                    self.spec_files_setup(specs, default_settings_fixture)
                    # noinspection PyTypeChecker
                    result = runner.invoke(cli, ['global-deps', 'push', '--dep', 'calendar', 'module'])
                    assert_that(re.findall('Invalid spec _id', result.output)).is_length(1)

    def test_display_1_missing_name_feedback_message(self, default_elemental_fixture, default_settings_fixture, specs):
        """A spec without a name field yields one 'Missing spec name' message."""
        with EphemeralMongoContext(MongoDbContext(default_settings_fixture['cmsDbContext']).get_connection_string(),
                                   initial_state=[
                                       MongoDbState(db_name='elemental', data=[])
                                   ]) as (db_name, reader):
            default_settings_fixture['cmsDbContext']['databaseName'] = db_name
            runner = CliRunner()
            with runner.isolated_filesystem():
                with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                    self.spec_files_setup(specs, default_settings_fixture)
                    # noinspection PyTypeChecker
                    result = runner.invoke(cli, ['global-deps', 'push', '--dep', 'missing-name', 'module'])
                    assert_that(re.findall('Missing spec name', result.output)).is_length(1)

    def test_display_1_invalid_name_feedback_message(self, default_elemental_fixture, default_settings_fixture, specs):
        """A spec whose name differs from its file name yields one 'Invalid spec name' message."""
        with EphemeralMongoContext(MongoDbContext(default_settings_fixture['cmsDbContext']).get_connection_string(),
                                   initial_state=[
                                       MongoDbState(db_name='elemental', data=[])
                                   ]) as (db_name, reader):
            default_settings_fixture['cmsDbContext']['databaseName'] = db_name
            runner = CliRunner()
            with runner.isolated_filesystem():
                with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                    self.spec_files_setup(specs, default_settings_fixture)
                    # noinspection PyTypeChecker
                    result = runner.invoke(cli, ['global-deps', 'push', '--dep', 'unmatched-name', 'module'])
                    assert_that(re.findall('Invalid spec name', result.output)).is_length(1)

    def test_display_1_success_feedback_message(self, default_elemental_fixture, default_settings_fixture, specs):
        """A valid spec is pushed and exactly one success message is shown."""
        with EphemeralMongoContext(MongoDbContext(default_settings_fixture['cmsDbContext']).get_connection_string(),
                                   initial_state=[
                                       MongoDbState(db_name='elemental', data=[])
                                   ]) as (db_name, reader):
            default_settings_fixture['cmsDbContext']['databaseName'] = db_name
            runner = CliRunner()
            with runner.isolated_filesystem():
                with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                    self.spec_files_setup(specs, default_settings_fixture)
                    # noinspection PyTypeChecker
                    result = runner.invoke(cli, ['global-deps', 'push', '--dep', 'jquery-ui', 'text/css'])
                    assert_that(re.findall('pushed successfully', result.output)).is_length(1)

    def test_create_backup_file_for_pushed_dependency(self, default_elemental_fixture, default_settings_fixture, specs):
        """Pushing over an existing dependency writes a backup file.

        Uses standalone_mode=False so the command's return value (which
        carries the backup file path) is available on the result.
        """
        with EphemeralMongoContext(MongoDbContext(default_settings_fixture['cmsDbContext']).get_connection_string(),
                                   initial_state=[
                                       MongoDbState(db_name='elemental', data=[
                                           MongoDbStateData(coll_name='global_deps',
                                                            items=[specs[1]])
                                       ])
                                   ]) as (db_name, reader):
            default_settings_fixture['cmsDbContext']['databaseName'] = db_name
            runner = CliRunner()
            with runner.isolated_filesystem():
                with EphemeralElementalFileSystem(default_elemental_fixture, default_settings_fixture):
                    self.spec_files_setup(specs, default_settings_fixture)
                    # noinspection PyTypeChecker
                    result = runner.invoke(cli,
                                           ['global-deps', 'push', '-d', 'jquery-ui', 'text/css'],
                                           standalone_mode=False)
                    assert_that(result.return_value[0]).exists()
InnovativeDigitalSolution/NASA_astrobee_gds
org.vlcj/uk/co/caprica/vlcj/test/drop/DropVideoPlayer.java
/*
 * This file is part of VLCJ.
 *
 * VLCJ is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * VLCJ is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with VLCJ.  If not, see <http://www.gnu.org/licenses/>.
 *
 * Copyright 2009, 2010, 2011, 2012, 2013, 2014, 2015 Caprica Software Limited.
 */

package uk.co.caprica.vlcj.test.drop;

import java.awt.BorderLayout;
import java.awt.Canvas;
import java.awt.Color;
import java.awt.Point;
import java.awt.datatransfer.DataFlavor;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.net.URL;
import java.util.List;

import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.TransferHandler;
import javax.swing.border.LineBorder;

import uk.co.caprica.vlcj.player.MediaPlayerFactory;
import uk.co.caprica.vlcj.player.embedded.EmbeddedMediaPlayer;
import uk.co.caprica.vlcj.player.embedded.videosurface.CanvasVideoSurface;
import uk.co.caprica.vlcj.test.VlcjTest;

/**
 * Simple demo application that opens up an undecorated window onto which MRLs can be dropped.
 * <p>
 * For example you can drop local video files or YouTube video links.
 * <p>
 * The window is always-on-top and can be moved by clicking and dragging in the window client area
 * (at least this is possible on Linux).
 */
public class DropVideoPlayer extends VlcjTest {

    /** Flavor for "text/uri-list" drops (e.g. from a browser address bar). */
    private final DataFlavor uriListFlavor;

    /** Flavor for java.net.URL drops. */
    private final DataFlavor javaUrlFlavor;

    /** Flavor for java.io.File list drops (e.g. from a file manager). */
    private final DataFlavor javaFileListFlavor;

    private final MediaPlayerFactory mediaPlayerFactory;

    private final EmbeddedMediaPlayer mediaPlayer;

    private final CanvasVideoSurface videoSurface;

    private final JFrame frame;

    private final JPanel contentPane;

    private final Canvas canvas;

    /**
     * Application entry point: builds and shows the player window on the EDT.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                try {
                    new DropVideoPlayer().start();
                }
                catch(Exception e) {
                    e.printStackTrace();
                    System.exit(1);
                }
            }
        });
    }

    /**
     * Creates the media player, the video surface and the drop-enabled window.
     *
     * @throws Exception if a required data flavor cannot be constructed
     */
    public DropVideoPlayer() throws Exception {
        uriListFlavor = new DataFlavor("text/uri-list;class=java.lang.String");
        javaUrlFlavor = new DataFlavor("application/x-java-url;class=java.net.URL");
        javaFileListFlavor = DataFlavor.javaFileListFlavor;

        mediaPlayerFactory = new MediaPlayerFactory();

        mediaPlayer = mediaPlayerFactory.newEmbeddedMediaPlayer();
        // Automatically play sub-items, needed e.g. for YouTube links.
        mediaPlayer.setPlaySubItems(true);

        canvas = new Canvas();
        canvas.setBackground(Color.black);
        videoSurface = mediaPlayerFactory.newVideoSurface(canvas);
        mediaPlayer.setVideoSurface(videoSurface);

        contentPane = new JPanel();
        contentPane.setBackground(Color.black);
        contentPane.setBorder(new LineBorder(new Color(190, 190, 190)));
        contentPane.setLayout(new BorderLayout());
        contentPane.add(canvas, BorderLayout.CENTER);
        contentPane.setTransferHandler(new MyTransferHandler());

        MyMouseAdapter mouseAdapter = new MyMouseAdapter();
        canvas.addMouseListener(mouseAdapter);
        canvas.addMouseMotionListener(mouseAdapter);

        frame = new JFrame("vlcj");
        frame.setIconImage(new ImageIcon(getClass().getResource("/icons/vlcj-logo.png")).getImage());
        frame.setContentPane(contentPane);
        frame.setAlwaysOnTop(true);
        frame.setSize(800, 450);
        frame.setUndecorated(true);
        frame.setResizable(false);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent e) {
                // Release native resources before the JVM exits.
                mediaPlayer.release();
                mediaPlayerFactory.release();
            }
        });
    }

    /**
     * Shows the window.
     */
    private void start() {
        frame.setVisible(true);
    }

    /**
     * Transfer handler implementation to handle the dropped MRL.
     */
    @SuppressWarnings("serial")
    private class MyTransferHandler extends TransferHandler {

        @Override
        public boolean canImport(TransferSupport support) {
            return getDataFlavor(support) != null;
        }

        @Override
        public boolean importData(TransferSupport support) {
            DataFlavor flavor = getDataFlavor(support);
            if(flavor != null) {
                try {
                    Object transferData = support.getTransferable().getTransferData(flavor);
                    if(transferData instanceof String) {
                        String value = (String)transferData;
                        String[] uris = value.split("\\r\\n");
                        if(uris.length > 0) {
                            // Play the first MRL that was dropped (the others are discarded)
                            String uri = uris[0];
                            mediaPlayer.playMedia(uri);
                        }
                        return true;
                    }
                    else if(transferData instanceof URL) {
                        URL value = (URL)transferData;
                        String uri = value.toExternalForm();
                        mediaPlayer.playMedia(uri);
                        // FIX: previously fell through to "return false" even though the
                        // drop was handled; importData must return true on success.
                        return true;
                    }
                    else if(transferData instanceof List) {
                        List<?> value = (List<?>)transferData;
                        if(value.size() > 0) {
                            // Play the first MRL that was dropped (the others are discarded)
                            File file = (File)value.get(0);
                            String uri = file.getAbsolutePath();
                            mediaPlayer.playMedia(uri);
                        }
                        // FIX: previously fell through to "return false" even though the
                        // drop was handled; importData must return true on success.
                        return true;
                    }
                }
                catch(Exception e) {
                    e.printStackTrace();
                }
            }
            return false;
        }

        /**
         * Picks the first supported flavor, preferring URI lists, then URLs, then file lists.
         *
         * @param support the transfer being inspected
         * @return the matching flavor, or null when the transfer is not supported
         */
        private DataFlavor getDataFlavor(TransferSupport support) {
            if(support.isDataFlavorSupported(uriListFlavor)) {
                return uriListFlavor;
            }
            if(support.isDataFlavorSupported(javaUrlFlavor)) {
                return javaUrlFlavor;
            }
            if(support.isDataFlavorSupported(javaFileListFlavor)) {
                return javaFileListFlavor;
            }
            return null;
        }
    }

    /**
     * Mouse adapter implementation to handle dragging the window by clicking and dragging in the
     * window client area.
     */
    private class MyMouseAdapter extends MouseAdapter {

        private Point mouseDownScreenPoint;

        private Point mouseDownPoint;

        @Override
        public void mousePressed(MouseEvent e) {
            mouseDownScreenPoint = e.getLocationOnScreen();
            mouseDownPoint = e.getPoint();
        }

        @Override
        public void mouseReleased(MouseEvent e) {
            mouseDownScreenPoint = mouseDownPoint = null;
        }

        @Override
        public void mouseDragged(MouseEvent e) {
            if(SwingUtilities.isLeftMouseButton(e)) {
                Point mouseDragPoint = e.getLocationOnScreen();
                int x = mouseDownScreenPoint.x + (mouseDragPoint.x - mouseDownScreenPoint.x) - mouseDownPoint.x;
                int y = mouseDownScreenPoint.y + (mouseDragPoint.y - mouseDownScreenPoint.y) - mouseDownPoint.y;
                frame.setLocation(x, y);
            }
        }
    }
}
cyraxred/cybcode
tools/bixtractor/src/main/java/org/cybcode/tools/bixtractor/pbuf/PbufMapContext.java
package org.cybcode.tools.bixtractor.pbuf;

import java.util.Map;

/**
 * PbufContext backed by a map from protobuf field id to its receiver.
 * Lookup is a plain map get; unknown field ids yield null.
 */
class PbufMapContext implements PbufContext
{
	// Highest field id this context claims to know about; reported via getMaxField().
	private final int maxFieldId;
	// Field id -> receiver; not copied, so the caller must not mutate it afterwards.
	private final Map<Integer, ? extends PbufFieldReceiver> receivers;

	/**
	 * @param maxFieldId highest field id handled by this context
	 * @param receivers  map of field id to receiver (held by reference, not copied)
	 */
	public PbufMapContext(int maxFieldId, Map<Integer, ? extends PbufFieldReceiver> receivers)
	{
		this.maxFieldId = maxFieldId;
		this.receivers = receivers;
	}

	/** Returns the receiver registered for the field id, or null when absent. */
	@Override public PbufFieldReceiver getFieldReceiver(int fieldId)
	{
		return receivers.get(fieldId);
	}

	/** Returns the highest field id supplied at construction time. */
	@Override public int getMaxField()
	{
		return maxFieldId;
	}
}
h2oai/daal
lang_interface/java/com/intel/daal/data_management/data/KeyValueDataCollection.java
/* file: KeyValueDataCollection.java */
/*******************************************************************************
* Copyright 2014-2017 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/

/**
 * @ingroup data_model
 * @{
 */
package com.intel.daal.data_management.data;

import com.intel.daal.services.ContextClient;
import com.intel.daal.services.DaalContext;

/**
 * <a name="DAAL-CLASS-DATA_MANAGEMENT__DATA__KEYVALUEDATACOLLECTION"></a>
 * @brief Class that provides functionality of the key-value container for Serializable objects
 *        with the key of integer type
 */
public class KeyValueDataCollection extends SerializableBase {

    /** @private */
    static {
        // Loads the native library backing all the cXxx methods below.
        System.loadLibrary("JavaAPI");
    }

    /**
     * Constructs the key-value container for Serializable objects
     * @param context   Context to manage the key-value container for Serializable objects
     */
    public KeyValueDataCollection(DaalContext context) {
        super(context);
        this.cObject = cNewDataCollection();
    }

    /**
     * Wraps an existing native collection handle.
     * @param context Context to manage the collection
     * @param cObject Address of an already-created native collection
     */
    public KeyValueDataCollection(DaalContext context, long cObject) {
        super(context);
        this.cObject = cObject;
        this.serializedCObject = null;
    }

    /** Returns the number of (key, value) pairs in the collection. */
    public long size() {
        return cSize(getCObject());
    }

    /**
     * Returns the value stored under the given key, wrapped by the Factory.
     * @param key Integer key to look up
     */
    public SerializableBase get(int key) {
        return Factory.instance().createObject(getContext(), cGetValue(getCObject(), key));
    }

    /**
     * Stores a value under the given key.
     * @param key   Integer key
     * @param value Value to store
     */
    public void set(int key, SerializableBase value) {
        cSetValue(getCObject(), key, value.getCObject());
    }

    /**
     * Returns the key at the given positional index.
     * @param index Position within the collection
     */
    public long getKeyByIndex(int index) {
        return cGetKeyByIndex(getCObject(), index);
    }

    /**
     * Returns the value at the given positional index, wrapped by the Factory.
     * @param index Position within the collection
     */
    public SerializableBase getValueByIndex(int index) {
        return Factory.instance().createObject(getContext(), cGetValueByIndex(getCObject(), index));
    }

    /* Native bridge: each method operates on the native collection handle. */
    private native long cSize(long cObject);

    private native long cGetKeyByIndex (long cObject, int index);
    private native long cGetValueByIndex(long cObject, int index);

    private native long cGetValue(long cObject, int key);
    private native void cSetValue(long cObject, int key, long cValue);

    private native long cNewDataCollection();
}
/** @} */
LajosPolya/Spotify-API-Wrapper
src/main/java/com/lajospolya/spotifyapiwrapper/response/PlayHistory.java
package com.lajospolya.spotifyapiwrapper.response;

/**
 * @author <NAME>
 * Represent the response of GetMePlayerRecentlyPlayed as described at
 * https://developer.spotify.com/documentation/web-api/reference-beta/
 */
public class PlayHistory
{
    // The context (e.g. playlist/album) the track was played from.
    private Context context;
    // Timestamp string of when the track was played (field name mirrors the API's snake_case).
    private String played_at;
    // The track that was played.
    private SimplifiedTrack track;

    /** Returns the playback context. */
    public Context getContext()
    {
        return context;
    }

    /** Sets the playback context. */
    public void setContext(Context context)
    {
        this.context = context;
    }

    /** Returns the played-at timestamp string. */
    public String getPlayed_at()
    {
        return played_at;
    }

    /** Sets the played-at timestamp string. */
    public void setPlayed_at(String played_at)
    {
        this.played_at = played_at;
    }

    /** Returns the played track. */
    public SimplifiedTrack getTrack()
    {
        return track;
    }

    /** Sets the played track. */
    public void setTrack(SimplifiedTrack track)
    {
        this.track = track;
    }
}
johnsblatter/deeplearning4j
deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/text/accumulators/WordFreqAccumulator.java
package org.deeplearning4j.spark.text.accumulators;

import org.apache.spark.AccumulatorParam;
import org.deeplearning4j.berkeley.Counter;

/**
 * Spark accumulator parameter that merges per-partition word-frequency
 * counters into a single {@link Counter}.
 *
 * @author jeffreytang
 */
public class WordFreqAccumulator implements AccumulatorParam<Counter<String>> {

    /** Merges c2 into c1 in place and returns c1. */
    @Override
    public Counter<String> addInPlace(Counter<String> c1, Counter<String> c2) {
        c1.incrementAll(c2);
        return c1;
    }

    /** Returns a fresh empty counter; the supplied initial value is ignored. */
    @Override
    public Counter<String> zero(Counter<String> initialCounter) {
        return new Counter<>();
    }

    /**
     * Merges c2 into c1 and returns c1.
     * NOTE(review): when c1 is null this returns a brand-new empty counter and
     * c2's counts are silently discarded — confirm Spark never passes a null
     * accumulator here, otherwise counts would be lost.
     */
    @Override
    public Counter<String> addAccumulator(Counter<String> c1, Counter<String> c2) {
        if (c1 == null) {
            return new Counter<>();
        }
        addInPlace(c1, c2);
        return c1;
    }
}
hangqiu/pixie
src/carnot/exec/udtf_source_node.cc
/*
 * Copyright 2018- The Pixie Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include "src/carnot/exec/udtf_source_node.h"

#include <arrow/array/builder_base.h>
#include <arrow/memory_pool.h>

#include <ostream>
#include <string>
#include <string_view>
#include <vector>

#include <magic_enum.hpp>

#include "src/carnot/plan/scalar_expression.h"
#include "src/carnot/udf/registry.h"
#include "src/common/base/base.h"
#include "src/common/memory/object_pool.h"
#include "src/shared/types/arrow_adapter.h"
#include "src/shared/types/types.h"
#include "src/shared/types/typespb/wrapper/types_pb_wrapper.h"
#include "src/table_store/table_store.h"

namespace px {
namespace carnot {
namespace exec {

// TODO(zasgar/philkuz): we should put these in the plan.
// The batch size to use for UDTFs by default.
constexpr int kUDTFBatchSize = 1024;

// Intentionally returns an empty debug string for this node type.
std::string UDTFSourceNode::DebugStringImpl() { return std::string(); }

// Copies the UDTF source operator out of the plan so this node owns its config.
Status UDTFSourceNode::InitImpl(const plan::Operator& plan_node) {
  const auto* source_plan_node = static_cast<const plan::UDTFSourceOperator*>(&plan_node);
  // copy the plan node to local object;
  plan_node_ = std::make_unique<plan::UDTFSourceOperator>(*source_plan_node);
  return Status::OK();
}

// Looks up the UDTF definition by name in the function registry.
Status UDTFSourceNode::PrepareImpl(ExecState* exec_state) {
  // Always has more batches to start with.
  has_more_batches_ = true;
  PL_ASSIGN_OR_RETURN(udtf_def_,
                      exec_state->func_registry()->GetUDTFDefinition(plan_node_->name()));
  return Status::OK();
}

// Instantiates the UDTF and runs its Init with the plan's init arguments,
// converting each scalar value into the matching BaseValueType.
Status UDTFSourceNode::OpenImpl(ExecState* exec_state) {
  function_ctx_ = exec_state->CreateFunctionContext();
  udtf_inst_ = udtf_def_->Make();

  // Pool owns the temporary value objects for the duration of this call.
  ObjectPool init_args_pool{"udtf_init_args_pool"};
  std::vector<const types::BaseValueType*> init_args;
  for (const auto& sv : plan_node_->init_arguments()) {
    switch (sv.DataType()) {
      case types::BOOLEAN:
        init_args.emplace_back(init_args_pool.Add(
            new types::DataTypeTraits<types::BOOLEAN>::value_type(sv.BoolValue())));
        break;
      case types::INT64:
        init_args.emplace_back(init_args_pool.Add(
            new types::DataTypeTraits<types::INT64>::value_type(sv.Int64Value())));
        break;
      case types::FLOAT64:
        init_args.emplace_back(init_args_pool.Add(
            new types::DataTypeTraits<types::FLOAT64>::value_type(sv.Float64Value())));
        break;
        // TODO(zasgar): Add in int128.
      case types::STRING:
        init_args.emplace_back(init_args_pool.Add(
            new types::DataTypeTraits<types::STRING>::value_type(sv.StringValue())));
        break;
      default:
        CHECK(0) << "Unknown datatype: " << magic_enum::enum_name(sv.DataType());
    }
  }

  return udtf_def_->ExecInit(udtf_inst_.get(), function_ctx_.get(), init_args);
}

Status UDTFSourceNode::CloseImpl(ExecState* /*exec_state*/) { return Status::OK(); }

// Runs one batch of the UDTF into Arrow builders and forwards the resulting
// row batch to the children of this node.
Status UDTFSourceNode::GenerateNextImpl(ExecState* exec_state) {
  std::vector<std::unique_ptr<arrow::ArrayBuilder>> outputs;
  for (const auto& r : udtf_def_->output_relation()) {
    outputs.emplace_back(types::MakeArrowBuilder(r.type(), arrow::default_memory_pool()));
  }
  // TODO(zasgar): Change Exec to take in unique_ptrs.
  std::vector<arrow::ArrayBuilder*> outputs_raw;
  for (auto& out : outputs) {
    outputs_raw.emplace_back(out.get());
  }

  // NOTE(review): this local shadows the member-style name used in
  // PrepareImpl (has_more_batches_); the member is not updated here —
  // confirm it is maintained elsewhere (e.g. in HasBatchesRemaining).
  auto has_more_batches = udtf_def_->ExecBatchUpdate(udtf_inst_.get(), function_ctx_.get(),
                                                     kUDTFBatchSize, &outputs_raw);

  DCHECK_GT(outputs.size(), 0);
  // NOTE(review): both labels below say /*eow*/, which looks like a
  // copy/paste — the second argument is presumably the end-of-stream flag;
  // confirm against RowBatch::FromColumnBuilders' signature.
  auto rb_or_s = table_store::schema::RowBatch::FromColumnBuilders(
      *output_descriptor_, /*eow*/ !has_more_batches, /*eow*/ !has_more_batches, &outputs);
  if (!rb_or_s.ok()) {
    return rb_or_s.status();
  }
  auto rb = rb_or_s.ConsumeValueOrDie();
  return SendRowBatchToChildren(exec_state, *rb);
}

bool UDTFSourceNode::NextBatchReady() { return HasBatchesRemaining(); }

}  // namespace exec
}  // namespace carnot
}  // namespace px
xiaofeifeifeis/c-tools
c-tablegen/src/test/java/com/bixuebihui/tablegen/dbinfo/ProcedureUtilsTest.java
package com.bixuebihui.tablegen.dbinfo;

import com.bixuebihui.BeanFactory;
import com.bixuebihui.jdbc.IDbHelper;
import org.junit.Before;
import org.junit.Test;

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Integration-style tests for ProcedureUtils; they require a live database
 * reachable through the "dbHelper" bean.
 */
public class ProcedureUtilsTest {

    IDbHelper dbHelper;
    DatabaseMetaData metaData;
    // Filters passed through to ProcedureUtils.getProcedure.
    String catalog = null;
    String schema = null;
    String tableOwner = "dbo";
    Map<String, String> includeList = null;
    Map<String, String> excludeList = new HashMap<>();

    /**
     * Resolves the db helper bean and caches the connection metadata.
     * NOTE(review): a SQLException is only printed, leaving metaData null so
     * every test would then NPE; also the connection obtained here is never
     * closed — confirm whether dbHelper manages its lifecycle.
     */
    @Before
    public void setUp() {
        try {
            dbHelper= (IDbHelper) BeanFactory.createObjectById("dbHelper");
            metaData = dbHelper.getConnection().getMetaData();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Prints the parameter info of the third discovered procedure. */
    @Test
    public void testGetProcedureColumns() throws SQLException {
        IProcedureInfo info;
        List<ProcedureInfo> res = ProcedureUtils.getProcedure(metaData, catalog, schema,
                tableOwner, includeList, excludeList);
        // NOTE(review): assumes at least 3 procedures exist in the target db.
        info = res.get(2);
        List<ProcedureParameterInfo> rs = ProcedureUtils.getProcedureColumns(metaData, info);
        for(ProcedureParameterInfo in:rs){
            System.out.println(in);
        }
    }

    /** Generates and prints code for every discovered procedure. */
    @Test
    public void testProcess() throws SQLException{
        List<ProcedureInfo> res = ProcedureUtils.getProcedure(metaData, catalog, schema,
                tableOwner, includeList, excludeList);
        for(ProcedureInfo info:res){
            List<ProcedureParameterInfo> rs = ProcedureUtils.getProcedureColumns(metaData, info);
            String str = ProcedureGen.process(info, rs);
            System.out.println(str);
        }
    }

    /** Prints the list of discovered procedures. */
    @Test
    public void testGetProcedure() throws SQLException {
        List<ProcedureInfo> res = ProcedureUtils.getProcedure(metaData, catalog, schema,
                tableOwner, includeList, excludeList);
        System.out.println(res);
    }

    /**
     * Demonstrates a raw CallableStatement round trip against sp_xyz.
     * NOTE(review): sqlType is hard-coded to 1 (presumably java.sql.Types.CHAR
     * — verify); statements/connection are not closed on exception paths.
     */
    @Test
    public void testCallableStatement() throws SQLException{
        String procedure ="{call sp_xyz(?,?)}";
        Connection con = dbHelper.getConnection();
        CallableStatement cstmt = con.prepareCall(procedure);
        int abc=0;
        cstmt.setInt(1, abc);
        int sqlType =1;
        cstmt.registerOutParameter(2, sqlType );
        cstmt.execute();
        int count = cstmt.getInt(2);
        cstmt.close();
        con.close();

        /**
         * down vote accepted
         You need to get a ResultSet via:

         ResultSet rs = stmt.executeQuery("SELECT * FROM setoffunc()");
         while (rs.next()) {
         // read results
         }
         rs.close();
         stmt.close();
         Or:

         // Procedure call.
         CallableStatement proc = conn.prepareCall("{ ? = call refcursorfunc() }");
         proc.registerOutParameter(1, Types.OTHER);
         proc.execute();
         ResultSet results = (ResultSet) proc.getObject(1);
         while (results.next()) {
         // read results
         }
         results.close();
         proc.close();
         */
    }
}
trattoria/java8
2nd/src/jp/co/trattoria/chapter3_12/LatentImage.java
package jp.co.trattoria.chapter3_12;

import java.util.ArrayList;
import java.util.List;
import java.util.function.UnaryOperator;

import javafx.scene.image.Image;
import javafx.scene.image.PixelReader;
import javafx.scene.image.PixelWriter;
import javafx.scene.image.WritableImage;
import javafx.scene.paint.Color;

/**
 * A color transformation that may depend on the pixel position.
 */
@FunctionalInterface
interface ColorTransformer {
	Color apply(int x, int y, Color colorAtXY);

	/**
	 * Returns a transformer that first applies {@code before}, then this one,
	 * to the same (x, y) position.
	 */
	default ColorTransformer compose(ColorTransformer before) {
		return (x, y, c) -> apply(x, y, before.apply(x, y, c));
	}

	/**
	 * Adapts a position-independent UnaryOperator&lt;Color&gt; into a
	 * ColorTransformer that ignores the x and y coordinates.
	 *
	 * @param op the color-only operator
	 * @return an equivalent ColorTransformer
	 */
	static ColorTransformer colorOnly(UnaryOperator<Color> op) {
		return (x, y, c) -> op.apply(c);
	}
}

/**
 * Lazily applies a pipeline of color transformations to an image: transforms
 * are queued by {@link #transform} and only executed when {@link #toImage()}
 * is called.
 */
public class LatentImage {
	// Source image; never modified.
	private final Image in;
	// Queued transformations, applied in insertion order.
	private List<ColorTransformer> pendingOperations;

	private LatentImage(Image in) {
		this.in = in;
		pendingOperations = new ArrayList<ColorTransformer>();
	}

	/** Creates a latent image over the given source image. */
	public static LatentImage from(Image in) {
		return new LatentImage(in);
	}

	/** Queues a position-independent color transformation. */
	public LatentImage transform(UnaryOperator<Color> f) {
		pendingOperations.add(ColorTransformer.colorOnly(f));
		return this;
	}

	/** Queues a position-aware color transformation. */
	public LatentImage transform(ColorTransformer f) {
		pendingOperations.add(f);
		return this;
	}

	/**
	 * Materializes the image with every queued transformation applied, in
	 * order, to each pixel.
	 *
	 * @return a new image with all transformations applied
	 */
	public Image toImage() {
		int width = (int) in.getWidth();
		int height = (int) in.getHeight();
		WritableImage out = new WritableImage(width, height);
		// PERF: hoisted out of the pixel loop — the original re-fetched
		// getPixelReader()/getPixelWriter() once per pixel.
		PixelReader reader = in.getPixelReader();
		PixelWriter writer = out.getPixelWriter();
		for (int x = 0; x < width; x++) {
			for (int y = 0; y < height; y++) {
				Color c = reader.getColor(x, y);
				for (ColorTransformer f : pendingOperations) {
					c = f.apply(x, y, c);
				}
				writer.setColor(x, y, c);
			}
		}
		return out;
	}
}
tengqingya/strategyPractice
src/main/java/App.java
import original.BlackDog;
import original.Dog;
import original.RedDog;
import original2.BlackDog2;
import original2.Dog2;
import original2.RedDog2;
import strategyPractice.actionImpl.ColorRedImpl;
import strategyPractice.actionImpl.FlyBlackImpl;
import strategyPractice.actionImpl.FlyRedImpl;
import strategyPractice.dog.BlackDog3;
import strategyPractice.dog.Dog3;
import strategyPractice.dog.RedDog3;

/**
 * Demo entry point comparing three iterations of the Dog design and showing
 * how the strategy-based version (Dog3) handles changing and new requirements
 * by swapping behavior implementations at runtime.
 */
public class App
{
    public static void main( String[] args )
    {
        // Version 1: plain inheritance.
        Dog red = new RedDog();
        Dog black = new BlackDog();
        red.color();
        red.eat();
        red.say();
        black.color();
        black.eat();
        black.say();
        System.out.println("-------------------------------------------");

        // Version 2: adds a dosth() behavior.
        Dog2 red2 = new RedDog2();
        Dog2 black2 = new BlackDog2();
        red2.color();
        red2.eat();
        red2.say();
        red2.dosth();
        black2.color();
        black2.eat();
        black2.say();
        black2.dosth();
        System.out.println("-------------------------------------------");

        // Version 3: strategy pattern — behaviors are pluggable objects.
        Dog3 red3 =new RedDog3();
        Dog3 black3 =new BlackDog3();
        red3.color();
        red3.say();
        red3.eat();
        black3.color();
        black3.say();
        black3.eat();

        // Changed requirement: swap the color strategy on an existing object.
        System.out.println("-------------------------------------------改需求拉------------------");
        black3.setColorInterface(new ColorRedImpl());
        black3.color();

        // New requirement: attach a fly strategy without touching Dog3 itself.
        System.out.println("-------------------------------------------新增需求拉------------------");
        black3.setFlyInterface(new FlyBlackImpl());
        red3.setFlyInterface(new FlyRedImpl());
        black3.fly();
        red3.fly();
    }
}
HundredBai/example.v1
apps/blog/router/middleware/auth.go
package middleware

import (
	"fmt"

	"github.com/shipengqi/example.v1/apps/blog/pkg/app"
	"github.com/shipengqi/example.v1/apps/blog/pkg/e"
	"github.com/shipengqi/example.v1/apps/blog/service"

	"github.com/gin-gonic/gin"
)

// Authenticate returns a gin middleware that extracts an auth token, then
// authenticates and authorizes the request via the auth service.
//
// Token lookup precedence: the X-AUTH-TOKEN header wins over the
// "Authorization: Bearer <token>" header, which wins over a "token" query
// parameter. Requests without any token, or failing either check, are
// aborted with an error response; otherwise the parsed claims are stored in
// the context under "auth_claims" for downstream handlers.
func Authenticate(s *service.Service) gin.HandlerFunc {
	return func(c *gin.Context) {
		// Skip for the login request.
		// path := c.Request.URL.Path
		// if path == "/api/v1/users" {
		// 	c.Next()
		// 	return
		// }
		var token string
		authorization := c.GetHeader("Authorization")
		xToken := c.GetHeader("X-AUTH-TOKEN")
		if len(xToken) > 0 {
			token = xToken
		} else if len(authorization) > 0 {
			// get the token part of "Bearer <token>"
			_, _ = fmt.Sscanf(authorization, "Bearer %s", &token)
		} else {
			if t, ok := c.GetQuery("token"); ok {
				token = t
			}
		}
		if len(token) == 0 {
			app.SendResponse(c, e.ErrUnauthorized, nil)
			c.Abort()
			return
		}

		// Authentication: validate the token and recover its claims.
		claims, err := s.AuthSvc.Authenticate(token)
		if err != nil {
			app.SendResponse(c, err, nil)
			c.Abort()
			return
		}
		// Make the claims available to downstream handlers.
		c.Set("auth_claims", claims)

		// Authorization: check the claims against the requested path/method.
		err = s.AuthSvc.Authorize(claims, c.Request.URL.Path, c.Request.Method)
		if err != nil {
			app.SendResponse(c, err, nil)
			c.Abort()
			return
		}
		c.Next()
	}
}
slipperyseal/B9
src/main/java/net/catchpole/B9/codec/transcoder/IntegerTranscoder.java
package net.catchpole.B9.codec.transcoder;

import net.catchpole.B9.codec.stream.BitInputStream;
import net.catchpole.B9.codec.stream.BitOutputStream;

import java.io.IOException;

/**
 * Transcodes Integer values with a compact variable-width encoding:
 * one presence bit (false means the value was zero), then for non-zero
 * values one width bit selecting a signed 16-bit or signed 32-bit field.
 */
class IntegerTranscoder implements TypeTranscoder<Integer> {
    public Integer read(BitInputStream in) throws IOException {
        // Presence bit: false means zero and nothing further was written.
        boolean nonZero = in.readBoolean();
        if (!nonZero) {
            return 0;
        }
        // Width bit: true selects the 16-bit form, false the 32-bit form.
        boolean compact = in.readBoolean();
        return compact ? in.readSigned(16) : in.readSigned(32);
    }

    public void write(BitOutputStream out, Integer value) throws IOException {
        boolean nonZero = value != 0;
        out.writeBoolean(nonZero);
        if (!nonZero) {
            // Zero is fully encoded by the presence bit alone.
            return;
        }
        // Use the short form whenever the value fits in a signed 16-bit range.
        boolean compact = value >= Short.MIN_VALUE && value <= Short.MAX_VALUE;
        out.writeBoolean(compact);
        out.write(value, compact ? 16 : 32);
    }
}
grendizerufo/libapril
src/platforms/AndroidJNI_Platform.cpp
/// @file
/// @version 3.5
/// 
/// @section LICENSE
/// 
/// This program is free software; you can redistribute it and/or modify it under
/// the terms of the BSD license: http://opensource.org/licenses/BSD-3-Clause

#ifdef _ANDROID
#include <jni.h>

#include <gtypes/Vector2.h>
#include <hltypes/hmap.h>
#include <hltypes/hstring.h>

#define __NATIVE_INTERFACE_CLASS "com/april/NativeInterface"
#include "androidUtilJNI.h"
#include "Platform.h"
#include "RenderSystem.h"

namespace april
{
	// Pointer to the process-wide JavaVM, set up elsewhere during init.
	void* javaVM = NULL;
	void (*dialogCallback)(MessageBoxButton) = NULL; // defined here to avoid making a bunch of _OPENKODE #ifdefs in Android_Platform.cpp
	// Optional custom class loader used by findJNIClass(); NULL means fall
	// back to JNIEnv::FindClass (which only sees the system class path).
	jobject classLoader = NULL;
	
	// Converts a Java string to an hstr; releases the UTF chars before returning.
	hstr _jstringToHstr(JNIEnv* env, jstring string)
	{
		const char* chars = env->GetStringUTFChars(string, NULL);
		hstr result(chars);
		env->ReleaseStringUTFChars(string, chars);
		return result;
	}
	
	// Attaches the current thread to the JVM and returns its JNIEnv,
	// or NULL when attaching fails.
	JNIEnv* getJNIEnv()
	{
		JNIEnv* env = NULL;
		return (((JavaVM*)april::javaVM)->AttachCurrentThread(&env, NULL) == JNI_OK ? env : NULL);
	}
	
	// Returns the static "Activity" field of com/april/NativeInterface.
	// NOTE(review): APRIL_GET_NATIVE_INTERFACE_CLASS presumably declares
	// both 'env' and the class variable — defined in androidUtilJNI.h.
	jobject getActivity()
	{
		APRIL_GET_NATIVE_INTERFACE_CLASS(classNativeInterface);
		jfieldID fieldActivity = env->GetStaticFieldID(classNativeInterface, "Activity", _JCLASS("android/app/Activity"));
		return env->GetStaticObjectField(classNativeInterface, fieldActivity);
	}
	
	// Returns the static "AprilActivity" field of com/april/NativeInterface.
	jobject getAprilActivity()
	{
		APRIL_GET_NATIVE_INTERFACE_CLASS(classNativeInterface);
		jfieldID fieldAprilActivity = env->GetStaticFieldID(classNativeInterface, "AprilActivity", _JCLASS("com/april/Activity"));
		return env->GetStaticObjectField(classNativeInterface, fieldAprilActivity);
	}
	
	// Resolves a class by path, preferring the registered app class loader
	// (required on Android to see application classes from native threads).
	jclass findJNIClass(JNIEnv* env, chstr classPath)
	{
		if (april::classLoader == NULL)
		{
			return env->FindClass(classPath.cStr());
		}
		jclass classClassLoader = env->GetObjectClass(april::classLoader);
		jmethodID methodLoadClass = env->GetMethodID(classClassLoader, "loadClass", _JARGS(_JCLASS("java/lang/Class"), _JSTR _JBOOL));
		jstring jClassPath = env->NewStringUTF(classPath.cStr());
		jboolean jInitialize = JNI_TRUE;
		return (jclass)env->CallObjectMethod(april::classLoader, methodLoadClass, jClassPath, jInitialize);
	}
	
}
#endif
kinetic-software/aws-sdk-go-v2
service/backup/api_op_ListRestoreJobs.go
// Code generated by smithy-go-codegen DO NOT EDIT.

package backup

import (
	"context"
	"fmt"
	awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
	"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
	"github.com/aws/aws-sdk-go-v2/service/backup/types"
	"github.com/aws/smithy-go/middleware"
	smithyhttp "github.com/aws/smithy-go/transport/http"
	"time"
)

// Returns a list of jobs that AWS Backup initiated to restore a saved resource,
// including metadata about the recovery process.
func (c *Client) ListRestoreJobs(ctx context.Context, params *ListRestoreJobsInput, optFns ...func(*Options)) (*ListRestoreJobsOutput, error) {
	if params == nil {
		params = &ListRestoreJobsInput{}
	}

	result, metadata, err := c.invokeOperation(ctx, "ListRestoreJobs", params, optFns, c.addOperationListRestoreJobsMiddlewares)
	if err != nil {
		return nil, err
	}

	out := result.(*ListRestoreJobsOutput)
	out.ResultMetadata = metadata
	return out, nil
}

type ListRestoreJobsInput struct {

	// The account ID to list the jobs from. Returns only restore jobs associated with
	// the specified account ID.
	ByAccountId *string

	// Returns only restore jobs that were created after the specified date.
	ByCreatedAfter *time.Time

	// Returns only restore jobs that were created before the specified date.
	ByCreatedBefore *time.Time

	// Returns only restore jobs associated with the specified job status.
	ByStatus types.RestoreJobStatus

	// The maximum number of items to be returned.
	MaxResults *int32

	// The next item following a partial list of returned items. For example, if a
	// request is made to return maxResults number of items, NextToken allows you to
	// return more items in your list starting at the location pointed to by the next
	// token.
	NextToken *string

	noSmithyDocumentSerde
}

type ListRestoreJobsOutput struct {

	// The next item following a partial list of returned items. For example, if a
	// request is made to return maxResults number of items, NextToken allows you to
	// return more items in your list starting at the location pointed to by the next
	// token.
	NextToken *string

	// An array of objects that contain detailed information about jobs to restore
	// saved resources.
	RestoreJobs []types.RestoreJobsListMember

	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata

	noSmithyDocumentSerde
}

// addOperationListRestoreJobsMiddlewares wires the serializer, deserializer
// and the standard AWS client middlewares onto the operation stack.
func (c *Client) addOperationListRestoreJobsMiddlewares(stack *middleware.Stack, options Options) (err error) {
	err = stack.Serialize.Add(&awsRestjson1_serializeOpListRestoreJobs{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsRestjson1_deserializeOpListRestoreJobs{}, middleware.After)
	if err != nil {
		return err
	}
	if err = addSetLoggerMiddleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
		return err
	}
	if err = addResolveEndpointMiddleware(stack, options); err != nil {
		return err
	}
	if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
		return err
	}
	if err = addRetryMiddlewares(stack, options); err != nil {
		return err
	}
	if err = addHTTPSignerV4Middleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
		return err
	}
	if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
		return err
	}
	if err = addClientUserAgent(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListRestoreJobs(options.Region), middleware.Before); err != nil {
		return err
	}
	if err = addRequestIDRetrieverMiddleware(stack); err != nil {
		return err
	}
	if err = addResponseErrorMiddleware(stack); err != nil {
		return err
	}
	if err = addRequestResponseLogging(stack, options); err != nil {
		return err
	}
	return nil
}

// ListRestoreJobsAPIClient is a client that implements the ListRestoreJobs
// operation.
type ListRestoreJobsAPIClient interface {
	ListRestoreJobs(context.Context, *ListRestoreJobsInput, ...func(*Options)) (*ListRestoreJobsOutput, error)
}

var _ ListRestoreJobsAPIClient = (*Client)(nil)

// ListRestoreJobsPaginatorOptions is the paginator options for ListRestoreJobs
type ListRestoreJobsPaginatorOptions struct {
	// The maximum number of items to be returned.
	Limit int32

	// Set to true if pagination should stop if the service returns a pagination token
	// that matches the most recent token provided to the service.
	StopOnDuplicateToken bool
}

// ListRestoreJobsPaginator is a paginator for ListRestoreJobs
type ListRestoreJobsPaginator struct {
	options   ListRestoreJobsPaginatorOptions
	client    ListRestoreJobsAPIClient
	params    *ListRestoreJobsInput
	nextToken *string
	firstPage bool
}

// NewListRestoreJobsPaginator returns a new ListRestoreJobsPaginator
func NewListRestoreJobsPaginator(client ListRestoreJobsAPIClient, params *ListRestoreJobsInput, optFns ...func(*ListRestoreJobsPaginatorOptions)) *ListRestoreJobsPaginator {
	if params == nil {
		params = &ListRestoreJobsInput{}
	}

	options := ListRestoreJobsPaginatorOptions{}
	if params.MaxResults != nil {
		options.Limit = *params.MaxResults
	}

	for _, fn := range optFns {
		fn(&options)
	}

	return &ListRestoreJobsPaginator{
		options:   options,
		client:    client,
		params:    params,
		firstPage: true,
	}
}

// HasMorePages returns a boolean indicating whether more pages are available
func (p *ListRestoreJobsPaginator) HasMorePages() bool {
	return p.firstPage || p.nextToken != nil
}

// NextPage retrieves the next ListRestoreJobs page.
func (p *ListRestoreJobsPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*ListRestoreJobsOutput, error) {
	if !p.HasMorePages() {
		return nil, fmt.Errorf("no more pages available")
	}

	// Work on a copy of the input so the caller's params are never mutated.
	params := *p.params
	params.NextToken = p.nextToken

	var limit *int32
	if p.options.Limit > 0 {
		limit = &p.options.Limit
	}
	params.MaxResults = limit

	result, err := p.client.ListRestoreJobs(ctx, &params, optFns...)
	if err != nil {
		return nil, err
	}
	p.firstPage = false

	// Stop when the service echoes back the token we just sent, if requested.
	prevToken := p.nextToken
	p.nextToken = result.NextToken

	if p.options.StopOnDuplicateToken && prevToken != nil && p.nextToken != nil && *prevToken == *p.nextToken {
		p.nextToken = nil
	}

	return result, nil
}

func newServiceMetadataMiddleware_opListRestoreJobs(region string) *awsmiddleware.RegisterServiceMetadata {
	return &awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "backup",
		OperationName: "ListRestoreJobs",
	}
}
ozavala/IfSimply
app/controllers/courses_controller.rb
# Manages a club's courses: CRUD, bulk in-place editing from the Mercury
# editor (update), listing, and drag-and-drop reordering (sort).
class CoursesController < ApplicationController
  before_filter :authenticate_user!, :except => [ :show, :show_all ]
  before_filter :get_club, :only => [ :create, :show_all, :sort ]
  before_filter :get_course, :only => [ :show, :edit, :update ]

  # Shows a course; readers only, others are bounced to the club sales page.
  # Canonicalizes the URL with a 301 when reached via a non-canonical path.
  def show
    redirect_to club_sales_page_path(@course.club) and return unless (user_signed_in? and can?(:read, @course))

    if request.path != course_path(@course)
      redirect_to course_path(@course), status: :moved_permanently and return
    end
  end

  def edit
    authorize! :update, @course

    @club = @course.club

    if request.path != course_path(@course)
      # Mercury editor needs its own layout and an empty body.
      render course_editor_path(@course), :text => "", :status => :moved_permanently, :layout => "mercury" and return
    end
  end

  # Creates a course with default content; the club's first course also gets
  # a default initial lesson.
  def create
    @course = @club.courses.new

    authorize! :create, @course

    @course.assign_defaults

    if @course.save and @club.courses.count == 1
      # first Course - build initial Lesson
      @lesson = @course.lessons.new
      @lesson.assign_defaults
      @lesson.save
    end

    redirect_to course_editor_path(@course)
  end

  # Applies a Mercury editor payload: course fields, the club's lessons
  # heading, and any "lesson_<id>_<attribute>" entries for this course's
  # lessons. Collects save failures and reports them back to Mercury.
  def update
    authorize! :update, @course

    course_hash = params[:content]

    @course.title       = course_hash[:course_title][:value]
    @course.description = course_hash[:course_description][:value]
    @course.logo        = course_hash[:course_logo][:attributes][:src]

    @course.club.lessons_heading = course_hash[:club_lessons_heading][:value]

    # update the corresponding lessons for the course
    lesson_list = []
    course_hash.each do |lesson_id, lesson_hash|
      if lesson_id =~ /lesson_.*/
        # Reuse an already-touched lesson so multiple attributes for the same
        # lesson accumulate on one object before saving.
        lesson = lesson_list.find { |l| l.id.to_s == lesson_id.split("_")[1] }
        lesson = @course.lessons.find lesson_id.split("_")[1] if lesson.blank?

        unless lesson.blank?
          attribute = lesson_id.split("_")[2]
          lesson.send "#{attribute}=", lesson_hash[:value]

          lesson_list.delete lesson if lesson_list.include?(lesson)
          lesson_list << lesson
        end
      end
    end

    # handle errors for the course and each lesson
    error_resources = []
    error_resources << @course unless @course.save

    lesson_list.each do |lesson|
      error_resources << lesson unless lesson.save
    end

    error_resources << @course.club unless @course.club.save

    if error_resources.blank?
      render :text => ""
    else
      respond_error_to_mercury error_resources
    end
  end

  def show_all
    redirect_to club_sales_page_path(@club) unless user_signed_in? and can?(:read, @club)

    @courses = @club.courses
  end

  # Reorders the club's courses according to params["courses"], an array of
  # DOM ids like "course_<id>".
  def sort
    authorize! :update, @club

    courses = @club.courses

    unless (course_list = params["courses"]).nil?
      course_list.map!{ |course| course.sub("course_", "") }

      courses.each do |course|
        # Array#index returns nil when the id is absent (stale or partial
        # list submitted); skip those courses instead of crashing on nil + 1.
        position = course_list.index(course.id.to_s)
        next if position.nil?

        course.position = position + 1
        course.save
      end
    end

    render :nothing => true
  end

  def destroy
    course = Course.find params[:id]
    @club = course.club

    authorize! :destroy, course

    flash[:error] = "An error occurred destroying the Course" unless course.destroy

    redirect_to show_all_club_courses_path(@club)
    flash.discard
  end

  private

  def get_club
    @club = Club.find params[:club_id]
  end

  def get_course
    @course = Course.find params[:id]
  end
end
liftchampion/nativejson-benchmark
thirdparty/facil.io/lib/facil/core/types/fiobj/fiobj_json.h
#ifndef H_FIOBJ_JSON_H
#define H_FIOBJ_JSON_H

/*
Copyright: <NAME>, 2017-2018
License: MIT
*/

#include "fiobj_ary.h"
#include "fiobj_hash.h"
#include "fiobj_numbers.h"
#include "fiobj_str.h"
#include "fiobject.h"

#ifdef __cplusplus
extern "C" {
#endif

/* *****************************************************************************
JSON API
***************************************************************************** */

/** Limit JSON nesting, 32 is the limit to accommodate a 32 bit type. */
#if !defined(JSON_MAX_DEPTH) || JSON_MAX_DEPTH > 32
#undef JSON_MAX_DEPTH
#define JSON_MAX_DEPTH 32
#endif

/**
 * Parses JSON, setting `pobj` to point to the new Object.
 *
 * Returns the number of bytes consumed. On Error, 0 is returned and no data is
 * consumed.
 */
size_t fiobj_json2obj(FIOBJ *pobj, const void *data, size_t len);

/**
 * Stringify an object into a JSON string. Remember to `fiobj_free`.
 *
 * Note that only the following basic fiobj types are supported: Primitives
 * (True / False / NULL), Numbers (Number / Float), Strings, Hashes and Arrays.
 *
 * Some objects (such as the POSIX specific IO type) are unsupported and may be
 * formatted incorrectly.
 */
FIOBJ fiobj_obj2json(FIOBJ, uint8_t pretty);

/**
 * Formats an object into a JSON string, appending the JSON string to an
 * existing String. Remember to `fiobj_free` as usual.
 *
 * Note that only the following basic fiobj types are supported: Primitives
 * (True / False / NULL), Numbers (Number / Float), Strings, Hashes and
 * Arrays.
 *
 * Some objects (such as the POSIX specific IO type) are unsupported and may be
 * formatted incorrectly.
 */
FIOBJ fiobj_obj2json2(FIOBJ dest, FIOBJ object, uint8_t pretty);

#if DEBUG
void fiobj_test_json(void);
#endif

#ifdef __cplusplus
} /* extern "C" */
#endif

#endif
onnoA/sample-collection
shop-demo-video/src/main/java/com/onnoa/shop/demo/video/config/FTPClientConfig.java
package com.onnoa.shop.demo.video.config; import com.onnoa.shop.demo.video.exception.VideoException; import lombok.extern.slf4j.Slf4j; import org.apache.commons.net.ftp.FTPClient; import org.apache.commons.net.ftp.FTPReply; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Scope; import org.springframework.context.annotation.ScopedProxyMode; import org.springframework.web.context.WebApplicationContext; @Slf4j @Configuration public class FTPClientConfig { @Value("${ftp.ftpHost}") private String ftpHost; @Value("${ftp.ftpPort}") private Integer ftpPort; @Value("${ftp.ftpUserName}") private String ftpUserName; @Value("${ftp.ftpPassword}") private String ftpPassword; @Bean @Scope(value = WebApplicationContext.SCOPE_REQUEST, proxyMode = ScopedProxyMode.TARGET_CLASS) public FTPClient ftpClient() { FTPClient ftpClient = new FTPClient(); try { //设置缓存区大小 ftpClient.setBufferSize(1024 * 1024); // 连接FTP服务器 ftpClient.connect(ftpHost, ftpPort); // 登陆FTP服务器 ftpClient.login(ftpUserName, ftpPassword); if (!FTPReply.isPositiveCompletion(ftpClient.getReplyCode())) { log.error("未连接到FTP,用户名或密码错误。"); ftpClient.disconnect(); throw VideoException.FTP_CONNECT_VERIFY; } } catch (Exception e) { log.error("FTP的IP地址可能错误,请正确配置。", e); throw VideoException.FTP_CONNECT; } log.info("FTP建连接成功" + ftpClient.hashCode()); return ftpClient; } public FTPClient getFTPClient() { FTPClient ftpClient = new FTPClient(); try { //设置缓存区大小 ftpClient.setBufferSize(1024 * 1024); // 连接FTP服务器 ftpClient.connect(ftpHost, ftpPort); // 登陆FTP服务器 ftpClient.login(ftpUserName, ftpPassword); if (!FTPReply.isPositiveCompletion(ftpClient.getReplyCode())) { log.error("未连接到FTP,用户名或密码错误。"); ftpClient.disconnect(); throw VideoException.FTP_CONNECT_VERIFY; } } catch (Exception e) { log.error("FTP的IP地址可能错误,请正确配置。", e); throw VideoException.FTP_CONNECT; } 
log.info("FTP建连接成功" + ftpClient.hashCode()); return ftpClient; } }
wix/petri
wix-petri-core/src/main/java/com/wixpress/petri/experiments/domain/DynamicFilterLoader.java
package com.wixpress.petri.experiments.domain;

import com.google.common.reflect.ClassPath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by talyas on 2/2/15.
 *
 * Loads additional filter classes at runtime from "extended-filters" jars
 * dropped into the local "petri-plugins" folder.
 */
public class DynamicFilterLoader {

    private static final Logger logger = LoggerFactory.getLogger(DynamicFilterLoader.class);

    private final String petriPluginsPath = "petri-plugins";
    private final String petriPluginsPathJarPrefix = "extended-filters";
    private final String filtersPackage;

    public DynamicFilterLoader(String filtersPackage) {
        this.filtersPackage = filtersPackage;
    }

    /**
     * Scans the plugins folder for extended-filter jars and loads every
     * top-level class found in {@code filtersPackage}.
     *
     * @return the loaded classes; empty when the folder is missing or scanning fails
     */
    public List<Class> loadFilterTypesFromJars() {
        List<Class> filterTypes = new ArrayList<>();
        try {
            File[] extendedFiltersJars = getExtendedFiltersJars(new File(petriPluginsPath));
            // listFiles() returns null when the plugins folder does not exist.
            if (extendedFiltersJars != null) {
                // NOTE(review): the loader is deliberately left open; closing it
                // here could break later lazy loading of plugin classes.
                URLClassLoader ucl = new URLClassLoader(getUrls(extendedFiltersJars),
                        DynamicFilterLoader.class.getClassLoader());
                for (ClassPath.ClassInfo classInfo : ClassPath.from(ucl).getTopLevelClasses(filtersPackage)) {
                    filterTypes.add(classInfo.load());
                }
            }
        } catch (IOException e) {
            // Consistency fix: use the class logger instead of printStackTrace
            // so the failure ends up in the application log.
            logger.error("failed to scan filter jars from " + petriPluginsPath, e);
        }
        return filterTypes;
    }

    /** Lists files in the folder whose path contains the extended-filters marker. */
    private File[] getExtendedFiltersJars(File pluginsFolder) {
        logger.info("scanning filters to load from folder - " + pluginsFolder.getAbsolutePath());
        return pluginsFolder.listFiles(new FileFilter() {
            public boolean accept(File file) {
                return file.getPath().toLowerCase().contains(petriPluginsPathJarPrefix);
            }
        });
    }

    /**
     * Converts the jar files to URLs. Robustness fix: files whose path cannot
     * be represented as a URL are skipped (and logged) instead of leaving a
     * null slot in the array, which would make URLClassLoader throw an NPE.
     */
    private URL[] getUrls(File[] extendedFiltersJars) {
        List<URL> urls = new ArrayList<>();
        for (File jar : extendedFiltersJars) {
            try {
                URL url = jar.toURI().toURL();
                urls.add(url);
                logger.info("scanning filters from jar - " + url);
            } catch (MalformedURLException e) {
                logger.error("skipping jar with malformed URL - " + jar.getAbsolutePath(), e);
            }
        }
        return urls.toArray(new URL[urls.size()]);
    }
}
harshp8l/deep-learning-lang-detection
data/test/scala/394b3fcfa309b251157a5b1db76fda3039bcb0caWebSocketConfig.scala
package com.smile.love.websocket

import org.springframework.context.annotation.Configuration
import org.springframework.messaging.simp.config.MessageBrokerRegistry
import org.springframework.web.socket.config.annotation.{AbstractWebSocketMessageBrokerConfigurer, EnableWebSocketMessageBroker, StompEndpointRegistry}

/**
 * Spring WebSocket/STOMP configuration: clients connect through the "/wso"
 * SockJS endpoint and subscribe to destinations under the "/wso" prefix,
 * served by the in-memory simple broker.
 */
@Configuration
@EnableWebSocketMessageBroker
class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {

  /** Registers the STOMP handshake endpoint with SockJS fallback enabled. */
  override def registerStompEndpoints(stompEndpointRegistry: StompEndpointRegistry): Unit =
    stompEndpointRegistry.addEndpoint("/wso").withSockJS

  /** Routes messages whose destination starts with "/wso" to the simple broker. */
  override def configureMessageBroker(registry: MessageBrokerRegistry): Unit =
    registry.enableSimpleBroker("/wso")
}
injecti0n/Dakplusplus
src/main/java/dakplusbackend/service/stubs/WorkingDayServiceStub.java
package dakplusbackend.service.stubs;

import dakplusbackend.model.Employee;
import dakplusbackend.model.WorkingDay;
import dakplusbackend.service.WorkingDayService;

import java.sql.SQLException;

// TODO
/**
 * Placeholder implementation of {@link WorkingDayService}: no operation is
 * implemented yet and every method simply returns null (see the TODO above).
 */
public class WorkingDayServiceStub implements WorkingDayService {

    /** Not implemented yet - always returns null. */
    @Override
    public WorkingDay addWorkingdayToEmployee(Employee employee, WorkingDay workingDay) throws SQLException, IllegalArgumentException {
        return null;
    }

    /** Not implemented yet - always returns null. */
    @Override
    public WorkingDay saveWorkday(WorkingDay day) {
        return null;
    }
}
inms-adarsh/perfi-app
app/screens/Customers/index.js
// Barrel file: re-export the Customers screen container as this module's
// default export, so screens can be imported from the folder path.
export { default } from './CustomersContainer';
ingeningridr/TelefoniaAutheV2
initial/src/main/java/com/example/springboot/telefonia/wsdl/request/ItemReferencia.java
package com.example.springboot.telefonia.wsdl.request;

import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;

/**
 * XML data-transfer object for an "itemReferencia" payload (a catalog/line
 * item in the SOAP request). All values are carried as Strings; Jackson's
 * XML annotations map each Java field to its XML element name.
 *
 * NOTE(review): field name casing intentionally differs from some element
 * names (e.g. ICPpresentacion -> "ICPPresentacion"); the annotation, not the
 * field name, decides the serialized form. "RREFPuntos" looks like a possible
 * typo for "REFPuntos" — confirm against the WSDL before changing.
 */
@JacksonXmlRootElement(localName = "itemReferencia")
public class ItemReferencia {

    @JacksonXmlProperty(localName = "REFCodClasificacion")
    private String REFCodClasificacion;
    @JacksonXmlProperty(localName = "REFCodigo1")
    private String REFCodigo1;
    @JacksonXmlProperty(localName = "REFCodigo2")
    private String REFCodigo2;
    @JacksonXmlProperty(localName = "REFNombreCorto")
    private String REFNombreCorto;
    @JacksonXmlProperty(localName = "REFNombreLargo")
    private String REFNombreLargo;
    @JacksonXmlProperty(localName = "CARCodigo1")
    private String CARCodigo1;
    @JacksonXmlProperty(localName = "REFPrecioLista")
    private String REFPrecioLista;
    @JacksonXmlProperty(localName = "IRFBruto")
    private String IRFBruto;
    @JacksonXmlProperty(localName = "IRFDescuento")
    private String IRFDescuento;
    @JacksonXmlProperty(localName = "IRFPago")
    private String IRFPago;
    @JacksonXmlProperty(localName = "IRFCantidad")
    private String IRFCantidad;
    @JacksonXmlProperty(localName = "IRFValorImpuesto")
    private String IRFValorImpuesto;
    @JacksonXmlProperty(localName = "IRFImpuesto")
    private String IRFImpuesto;
    @JacksonXmlProperty(localName = "REFEsCombo")
    private String REFEsCombo;
    @JacksonXmlProperty(localName = "REFUltimoCosto")
    private String REFUltimoCosto;
    @JacksonXmlProperty(localName = "PRVCodigo")
    private String PRVCodigo;
    @JacksonXmlProperty(localName = "REFManejaLotes")
    private String REFManejaLotes;
    @JacksonXmlProperty(localName = "REFFactorConversion")
    private String REFFactorConversion;
    @JacksonXmlProperty(localName = "REFInventario")
    private String REFInventario;
    @JacksonXmlProperty(localName = "REFEsParaVenta")
    private String REFEsParaVenta;
    @JacksonXmlProperty(localName = "estado")
    private String estado;
    @JacksonXmlProperty(localName = "IRFPagNoVenta")
    private String IRFPagNoVenta;
    @JacksonXmlProperty(localName = "IRFVenta")
    private String IRFVenta;
    @JacksonXmlProperty(localName = "IRFValorImpuestoNeto")
    private String IRFValorImpuestoNeto;
    @JacksonXmlProperty(localName = "IRFComision")
    private String IRFComision;
    @JacksonXmlProperty(localName = "IRFNeto")
    private String IRFNeto;
    @JacksonXmlProperty(localName = "REFCodigoIngresado")
    private String REFCodigoIngresado;
    @JacksonXmlProperty(localName = "RREFPuntos")
    private String RREFPuntos;
    @JacksonXmlProperty(localName = "ICPPresentacion")
    private String ICPpresentacion;
    @JacksonXmlProperty(localName = "ICPDescripcion")
    private String ICPdescripcion;
    @JacksonXmlProperty(localName = "ICPCadena")
    private String ICPcadena;
    @JacksonXmlProperty(localName = "ICPLetra")
    private String ICPletra;

    // Plain accessors below; no logic beyond get/set.

    public String getREFCodClasificacion() { return REFCodClasificacion; }
    public void setREFCodClasificacion(String REFCodClasificacion) { this.REFCodClasificacion = REFCodClasificacion; }
    public String getREFCodigo1() { return REFCodigo1; }
    public void setREFCodigo1(String REFCodigo1) { this.REFCodigo1 = REFCodigo1; }
    public String getREFCodigo2() { return REFCodigo2; }
    public void setREFCodigo2(String REFCodigo2) { this.REFCodigo2 = REFCodigo2; }
    public String getREFNombreCorto() { return REFNombreCorto; }
    public void setREFNombreCorto(String REFNombreCorto) { this.REFNombreCorto = REFNombreCorto; }
    public String getREFNombreLargo() { return REFNombreLargo; }
    public void setREFNombreLargo(String REFNombreLargo) { this.REFNombreLargo = REFNombreLargo; }
    public String getCARCodigo1() { return CARCodigo1; }
    public void setCARCodigo1(String CARCodigo1) { this.CARCodigo1 = CARCodigo1; }
    public String getREFPrecioLista() { return REFPrecioLista; }
    public void setREFPrecioLista(String REFPrecioLista) { this.REFPrecioLista = REFPrecioLista; }
    public String getIRFBruto() { return IRFBruto; }
    public void setIRFBruto(String IRFBruto) { this.IRFBruto = IRFBruto; }
    public String getIRFDescuento() { return IRFDescuento; }
    public void setIRFDescuento(String IRFDescuento) { this.IRFDescuento = IRFDescuento; }
    public String getIRFPago() { return IRFPago; }
    public void setIRFPago(String IRFPago) { this.IRFPago = IRFPago; }
    public String getIRFCantidad() { return IRFCantidad; }
    public void setIRFCantidad(String IRFCantidad) { this.IRFCantidad = IRFCantidad; }
    public String getIRFValorImpuesto() { return IRFValorImpuesto; }
    public void setIRFValorImpuesto(String IRFValorImpuesto) { this.IRFValorImpuesto = IRFValorImpuesto; }
    public String getIRFImpuesto() { return IRFImpuesto; }
    public void setIRFImpuesto(String IRFImpuesto) { this.IRFImpuesto = IRFImpuesto; }
    public String getREFEsCombo() { return REFEsCombo; }
    public void setREFEsCombo(String REFEsCombo) { this.REFEsCombo = REFEsCombo; }
    public String getREFUltimoCosto() { return REFUltimoCosto; }
    public void setREFUltimoCosto(String REFUltimoCosto) { this.REFUltimoCosto = REFUltimoCosto; }
    public String getPRVCodigo() { return PRVCodigo; }
    public void setPRVCodigo(String PRVCodigo) { this.PRVCodigo = PRVCodigo; }
    public String getREFManejaLotes() { return REFManejaLotes; }
    public void setREFManejaLotes(String REFManejaLotes) { this.REFManejaLotes = REFManejaLotes; }
    public String getREFFactorConversion() { return REFFactorConversion; }
    public void setREFFactorConversion(String REFFactorConversion) { this.REFFactorConversion = REFFactorConversion; }
    public String getREFInventario() { return REFInventario; }
    public void setREFInventario(String REFInventario) { this.REFInventario = REFInventario; }
    public String getREFEsParaVenta() { return REFEsParaVenta; }
    public void setREFEsParaVenta(String REFEsParaVenta) { this.REFEsParaVenta = REFEsParaVenta; }
    public String getEstado() { return estado; }
    public void setEstado(String estado) { this.estado = estado; }
    public String getIRFPagNoVenta() { return IRFPagNoVenta; }
    public void setIRFPagNoVenta(String IRFPagNoVenta) { this.IRFPagNoVenta = IRFPagNoVenta; }
    public String getIRFVenta() { return IRFVenta; }
    public void setIRFVenta(String IRFVenta) { this.IRFVenta = IRFVenta; }
    public String getIRFValorImpuestoNeto() { return IRFValorImpuestoNeto; }
    public void setIRFValorImpuestoNeto(String IRFValorImpuestoNeto) { this.IRFValorImpuestoNeto = IRFValorImpuestoNeto; }
    public String getIRFComision() { return IRFComision; }
    public void setIRFComision(String IRFComision) { this.IRFComision = IRFComision; }
    public String getIRFNeto() { return IRFNeto; }
    public void setIRFNeto(String IRFNeto) { this.IRFNeto = IRFNeto; }
    public String getREFCodigoIngresado() { return REFCodigoIngresado; }
    public void setREFCodigoIngresado(String REFCodigoIngresado) { this.REFCodigoIngresado = REFCodigoIngresado; }
    public String getRREFPuntos() { return RREFPuntos; }
    public void setRREFPuntos(String RREFPuntos) { this.RREFPuntos = RREFPuntos; }
    public String getICPpresentacion() { return ICPpresentacion; }
    public void setICPpresentacion(String ICPpresentacion) { this.ICPpresentacion = ICPpresentacion; }
    public String getICPdescripcion() { return ICPdescripcion; }
    public void setICPdescripcion(String ICPdescripcion) { this.ICPdescripcion = ICPdescripcion; }
    public String getICPcadena() { return ICPcadena; }
    public void setICPcadena(String ICPcadena) { this.ICPcadena = ICPcadena; }
    public String getICPletra() { return ICPletra; }
    public void setICPletra(String ICPletra) { this.ICPletra = ICPletra; }
}
PollyWeng/NUC972
rootfs/usr/local/sbin/www/cgi-bin/xxx.c
/* Idiom fix: stdio.h is a system header, so include it with angle brackets
 * rather than quotes (quotes search the current directory first). */
#include <stdio.h>

/*
 * Minimal CGI smoke-test program: emits a plain-text HTTP response so the
 * web server's CGI wiring can be verified.
 */
int main(void)
{
    /* A CGI response starts with a header block terminated by a blank line. */
    printf("Content-Type: text/plain\n\n");
    printf("Hello world !\n");
    printf("CGI test Ccccc..... !\n");
    return 0;
}
parti-xyz/curry-web
app/models/deprecated_event.rb
class DeprecatedEvent < ApplicationRecord include Statementable TEMPLATES = %w( default default_with_photo press ) TEMPLATES_SPECIAL = %w( map_with_assembly any_speech map speech sns ) belongs_to :user, optional: true belongs_to :project has_many :comments, as: :commentable has_many :speeches, dependent: :destroy has_one :deprecated_sns_event mount_uploader :image, ImageUploader mount_uploader :social_image, ImageUploader scope :recent, -> { order('id DESC') } scope :by_organization, ->(organization) { where(project: organization.projects) } SLUG_CANDLE_SPEECH = 'candle-speech' LARGE_AREA = %w(서울특별시 부산광역시 대구광역시 인천광역시 광주광역시 대전광역시 울산광역시 세종특별자치시 경기도 경기남부 경기북부 강원도 충청북도 충청남도 전라북도 전라남도 경상북도 경상남도 제주특별자치도) CONGRESSMEN = [ ['강길부', '울산', '울주군', '울산광역시 울주군 범서읍 굴화리 32-2번지 인재빌딩3층'], ['강석진', '경남', '산청군함양군거창군합천군', '경상남도 거창군 거창읍 시장2길 9 애지안 2층'], ['강석호', '경북', '영양군영덕군봉화군울진군', '경상북도 영덕군 영덕읍 중앙길 99번지'], ['강효상', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['경대수', '충북', '증평군진천군음성군', '충청북도 음성군 음성읍 음성천동길 160 2층'], ['곽대훈', '대구', '달서구갑', '대구광역시 달서구 달구벌대로 1521 K타워 8층'], ['곽상도', '대구', '중구남구', '대구광역시 남구 중앙대로 242 명종빌딩 4층'], ['권석창', '충북', '제천시단양군', '충청북도 제천시 의병대로78 (명동로터리 이동우컬렉션2층)'], ['권성동', '강원', '강릉시', '강원도 강릉시 교동광장로 90 FCC빌딩 4층'], ['김광림', '경북', '안동시', '경상북도 안동시 당북동 48-3 선메디컬빌딩'], ['김규환', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['김기선', '강원', '원주시갑', '강원도 원주시 서원대로 113 6층'], ['김도읍', '부산', '북구강서구을', '부산광역시 강서구 대저1동 2377-4번지'], ['김명연', '경기', '안산시단원구갑', '경기도 안산시 단원구 삼일로 310 서울프라자 4층'], ['김무성', '부산', '중구영도구', '부산광역시 영도구 태종로 40'], ['김상훈', '대구', '서구', '대구광역시 서구 평리4동 1371-8 경총회관 3층'], ['김석기', '경북', '경주시', '경상북도 경주시 금성로 308 2층 (서부동)'], ['김선동', '서울', '도봉구을', '서울시 도봉구 방학1동 680-1 이정빌딩 3F'], ['김성원', '경기', '동두천시연천군', '경기도 동두천시 평화로 2248 봉익빌딩 2층'], ['김성찬', '경남', '창원시진해구', '경상남도 창원시 진해구 충장로347 NH농협2층'], ['김성태', '서울', '강서구을', '서울시 강서구 양천로 112 선샤인빌딩 3층'], ['김성태', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['김세연', '부산', '금정구', '부산 금정구 중앙대로 1711 금정빌딩 2층'], ['김순례', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['김승희', '비례대표', '비례', '서울시 영등포구 
의사당대로 1'], ['김영우', '경기', '포천시가평군', '경기도 포천시 구절초로 12 3층'], ['김재경', '경남', '진주시을', '경상남도 진주시 동진로 146 대광빌딩 4층'], ['김정재', '경북', '포항시북구', '경상북도 포항시 북구 새천년대로 1210 이레빌딩 3층'], ['김정훈', '부산', '남구갑', '부산광역시 남구 못골로 104 3층'], ['김종석', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['김종태', '경북', '상주시군위군의성군청송군', '경상북도 상주시 서성동 56-6 동영빌딩 3층'], ['김진태', '강원', '춘천시', '강원도 춘천시 후석로 10 청인빌딩 4층'], ['김태흠', '충남', '보령시서천군', '충남 보령시 동대동 1833번지 대응빌딩 4층'], ['김학용', '경기', '안성시', '경기도 안성시 중앙로 473'], ['김한표', '경남', '거제시', '경상남도 거제시 서문로 56 명성빌딩 4층'], ['김현아', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['나경원', '서울', '동작구을', '서울시 동작구 사당로 219 메트로빌딩 4층'], ['문진국', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['민경욱', '인천', '연수구을', '인천광역시 연수구 컨벤시아대로 55 송도이안 2층 202호'], ['박대출', '경남', '진주시갑', '경상남도 진주시 진주성로 20, 2층 '], ['박덕흠', '충북', '보은군옥천군영동군괴산군', '충청북도 옥천군 옥천읍 중앙로 10'], ['박맹우', '울산', '남구을', '울산광역시 남구 번영로 90-1 항사랑병원 4층'], ['박명재', '경북', '포항시남구울릉군', '경상북도 남구 포스코대로 393 홍제빌딩 5층'], ['박성중', '서울', '서초구을', '서울특별시 강남대로 221 양재주차빌딩 515호'], ['박순자', '경기', '안산시단원구을', '경기도 안산시 단원구 고잔동 706-3 장은타워 511호'], ['박완수', '경남', '창원시의창구', '경상남도 창원시 의창구 태복산로317 오션파이브빌딩 201호'], ['박인숙', '서울', '송파구갑', '충청남도 천안시 동남구 만남로 6 캐럿21 7층'], ['박찬우', '충남', '천안시갑', '충남 천안시 동남구 충절로 174 (원성동 제일빌딩 5층)'], ['배덕광', '부산', '해운대구을', '부산광역시 해운대구 반여로 41번길 54 대우빌딩 4층'], ['백승주', '경북', '구미시갑', '경북 구미시 구미대로 342'], ['서청원', '경기', '화성시갑', '경기도 화성시 향남읍 삼천병마로 194 2층 '], ['성일종', '충남', '서산시태안군', '충남 서산시 고운로 147, 3층'], ['송석준', '경기', '이천시', '경기도 이천시 중리천로72번길 2층(산림조합건물)'], ['송희경', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['신보라', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['신상진', '경기', '성남시중원구', '경기도 성남시 중원구 산성대로 326-2(중앙동 352) 수경빌딩 5층'], ['심재철', '경기', '안양시동안구을', '경기도 안양시 동안구 경수대로 540 성옥빌딩 1층(호계1동)'], ['안상수', '인천', '중구동구강화군옹진군', '인천시 강화군강화읍 강화대로 401-1 향군회관 2층'], ['엄용수', '경남', '밀양시의령군함안군창녕군', '경남 밀양시 밀양대로 1856'], ['여상규', '경남', '사천시남해군하동군', '경상남도 사천시 용현면 덕곡리 514-3 부성빌딩 202호 (664-952)'], ['염동열', '강원', '태백시횡성군영월군평창군정선군', '강원도 영월군 영월읍 중앙로 56'], ['오신환', '서울', '관악구을', '서울시 관악구 남부순환로 1469, 3층'], ['원유철', '경기', '평택시갑', '경기도 평택시 지산로60 
3층 (지산동)'], ['유기준', '부산', '서구동구', '부산광역시 서구 자갈치로1 신진빌딩 4층'], ['유민봉', '비례대표', '비례', '서울특별시 영등포구 의사당대로1 국회의원회관 1015호'], ['유승민', '대구', '동구을', '대구시 동구 화랑로 459 테바빌딩 3층 (41166)'], ['유의동', '경기', '평택시을', '경기도 평택시 합정동 965-13 명성빌딩 5층'], ['유재중', '부산', '수영구', '부산광역시 수영구 수영로 672 동광빌딩 4층(48267)'], ['윤상직', '부산', '기장군', '부산광역시 기장군 기장읍 기장대로 516 자이안트빌딩 305호'], ['윤상현', '인천', '남구을', '인천광역시 남구 소성로 171'], ['윤영석', '경남', '양산시갑', '경상남도 양산시 삼일로 70(중부동 402) 구,터미널 건물 1층'], ['윤재옥', '대구', '달서구을', '대구광역시 달서구 월배로202(상인동,남정빌딩) 6층'], ['윤종필', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['윤한홍', '경남', '창원시마산회원구', '경상남도 창원시 마산회원구 율림교로 13 메트로M빌딩 504호'], ['이군현', '경남', '통영시고성군', '경상남도 통영시 북신동 1-2 농협축협건물 3층(통영)'], ['이만희', '경북', '영천시청도군', '경상북도 영천시 완산로 6길(완산동 1000-70) 3층'], ['이명수', '충남', '아산시갑', '충청남도 아산시 온천대로 1541 명지빌딩 5층'], ['이양수', '강원', '속초시고성군양양군', '속초시 동해대로 4213 복오빌딩 3층'], ['이완영', '경북', '고령군성주군칠곡군', '경상북도 칠곡군 왜관읍 중앙로 147 삼양빌딩 3층'], ['이우현', '경기', '용인시갑', '경기도 용인시 처인구 중부대로 1403 태중빌딩 501호'], ['이은권', '대전', '중구', '대전광역시 중구 계백로 1528 (유천동) 남강빌딩 3층'], ['이은재', '서울', '강남구병', '서울특별시 강남구 삼성로 349 우진빌딩 403호'], ['이장우', '대전', '동구', '대전광역시 동구 대전로 887 화성빌딩 5층'], ['이정현', '전남', '순천시', '전라남도 순천시 비봉2길 4-38 3층(조례동) 이정현의원사랑방'], ['이종구', '서울', '강남구갑', '서울특별시 강남구 논현로 667 진우빌딩 401호'], ['이종명', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['이종배', '충북', '충주시', '충청북도 충주시 중앙로 3(문화동 569-1) '], ['이주영', '경남', '창원시마산합포구', '경상남도 창원시 마산합포구 해안대로 297 (신포동 1가)준빌딩 3층'], ['이진복', '부산', '동래구', '부산광역시 동래구 동래로 136번길 32, 3층'], ['이채익', '울산', '남구갑', '울산광역시 남구 수암로 4 템포빌딩 10층 (680-831)'], ['이철규', '강원', '동해시삼척시', '강원도 동해시 천곡동 흥국생명빌딩 3층'], ['이철우', '경북', '김천시', '경상북도 김천시 신음동 424-1 5층'], ['이학재', '인천', '서구갑', '인천광역시 서구 가정로 369 (신현동, 서경백화점 3층)'], ['이헌승', '부산', '부산진구을', '부산광역시 부산진구 가야대로 607 새마을회관 8층'], ['이현재', '경기', '하남시', '경기도 하남시 하남대로 800 해림빌딩 2층'], ['이혜훈', '서울', '서초구갑', '서울시 서초구 신반포로 219 반포쇼핑타운 8동 410호'], ['임이자', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['장석춘', '경북', '구미시을', '경상북도 구미시 인동가산로 33 쌍둥이빌딩 9층'], ['장제원', '부산', '사상구', '부산광역시 사상구 사상로 181'], ['전희경', '비례대표', '비례', '서울시 영등포구 
의사당대로 1'], ['정갑윤', '울산', '중구', '울산광역시 중구번영로 355 (학산동) 중원빌딩 4층'], ['정병국', '경기', '여주시양평군', '경기도 여주시 여흥로 111 2층/ 양평군 양평읍 시민로 117 2층'], ['정양석', '서울', '강북구갑', '서울특별시 강북구 노해로 88'], ['정용기', '대전', '대덕구', '대전광역시 대덕구 한밭대로 990 3층 (오정동 69-3)'], ['정우택', '충북', '청주시상당구', '충북 청주시 상당구 청남로 2200'], ['정운천', '전북', '전주시을', '전라북도 전주시 완산구 홍산로 275 충광빌딩 503호'], ['정유섭', '인천', '부평구갑', '인천광역시 부평구 주부토로 3'], ['정종섭', '대구', '동구갑', '대구광역시 동구 동대구로 523'], ['정진석', '충남', '공주시부여군청양군', '충남 공주시 번영1로 70 범아빌딩 3층'], ['정태옥', '대구', '북구갑', '대구 북구 동북로 156 스카이빌딩 7층'], ['조경태', '부산', '사하구을', '부산광역시 사하구 장림2동 416-1번지'], ['조원진', '대구', '달서구병', '대구광역시 달수구 와룡로 124'], ['조훈현', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['주광덕', '경기', '남양주시병', '경기도 남양주시 경춘로 382번지'], ['주호영', '대구', '수성구을', '대구광역시 수성구 동대구로 6'], ['지상욱', '서울', '중구성동구을', '서울특별시 중구 다산로 124 (신당동) 2층'], ['최경환', '경북', '경산시', '경상북도 경산시 중앙로 38 2층'], ['최교일', '경북', '영주시문경시예천군', '경상북도 영주시 대동로144 2층'], ['최연혜', '비례대표', '비례', '서울시 영등포구 의사당대로 1'], ['추경호', '대구', '달성군', '대구광역시 달성군 화원읍 성화로9 (화원빌딩 5층)'], ['하태경', '부산', '해운대구갑', '부산시 해운대구 좌동 1479-1 웅신시네아트 209호'], ['한선교', '경기', '용인시병', '경기도 용인시 수지구 포은대로 410'], ['함진규', '경기', '시흥시갑', '경기도 시흥시 복지로 3 (유암타워), 501호'], ['홍문종', '경기', '의정부시을', '경기도 의정부시 신곡동 762-2 엘리트타운 601호'], ['홍문표', '충남', '홍성군예산군', '충남 홍성군 홍성읍 홍성천길 150 3층 / 충남 홍성군 홍성읍 오관리 464-13'], ['홍일표', '인천', '남구갑', '인천시 남구 주안1동 190-5번지 인혜빌딩 502호'], ['홍철호', '경기', '김포시을', '경기도 김포시 김포한강1로 247 해리움타운 508호'], ['황영철', '강원', '홍천군철원군화천군양구군인제군', '강원도 홍천군 홍천읍 진삼거리길 13 한샘빌딩 3층 후원회사무실'] ] def fallback_social_image_url if self.read_attribute(:social_image).present? self.social_image.lg.url elsif self.project.try(:read_attribute, :social_image).present? self.project.social_image_url else self.image_url end end def formatted_title_to_agent(user_nickname = nil) "\"#{self.title_to_agent}\"에 대해 #{"#{user_nickname}님 등이 " if user_nickname.present?}행동을 촉구합니다" end def closed? self.closed_at.present? end def opened? !closed? end def comment_closed? closed? end def comment_opened? opened? end end
MrAwesomeRocks/caelus-cml
src/libraries/core/meshTools/sets/topoSets/topoSet.hpp
/*---------------------------------------------------------------------------*\ Copyright (C) 2011 OpenFOAM Foundation ------------------------------------------------------------------------------- License This file is part of CAELUS. CAELUS is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. CAELUS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CAELUS. If not, see <http://www.gnu.org/licenses/>. Class CML::topoSet Description General set of labels of mesh quantity (points, cells, faces). Contains various 'NotImplemented' functions, but I do not want to make this class abstract since it is quite handy to work on topoSets. SourceFiles topoSet.cpp \*---------------------------------------------------------------------------*/ #ifndef topoSet_H #define topoSet_H #include "HashSet.hpp" #include "regIOobject.hpp" #include "labelList.hpp" #include "typeInfo.hpp" #include "autoPtr.hpp" #include "pointField.hpp" // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // namespace CML { class mapPolyMesh; class polyMesh; class primitiveMesh; /*---------------------------------------------------------------------------*\ Class topoSet Declaration \*---------------------------------------------------------------------------*/ class topoSet : public regIOobject, public labelHashSet { protected: // Protected Member Functions //- Update map from map. Used to update cell/face labels // after morphing void updateLabels(const labelList& map); //- Check validity of contents. 
void check(const label maxLabel); //- Write part of contents nicely formatted. Prints labels only. void writeDebug ( Ostream& os, const label maxElem, topoSet::const_iterator& iter, label& elemI ) const; //- Write part of contents nicely formatted. Prints label // and corresponding coordinate. void writeDebug ( Ostream& os, const pointField& coords, const label maxElem, topoSet::const_iterator& iter, label& elemI ) const; //- Write labels and coordinates columnwise to os. Truncate to maxLen. void writeDebug ( Ostream& os, const pointField& coords, const label maxLen ) const; //- Disallow default bitwise copy construct topoSet(const topoSet&); public: //- Runtime type information TypeName("topoSet"); // Static //- Name of file set will use. static fileName localPath(const polyMesh& mesh, const word& name); // Declare run-time constructor selection table // For the direct constructor declareRunTimeSelectionTable ( autoPtr, topoSet, word, ( const polyMesh& mesh, const word& name, readOption r, writeOption w ), (mesh, name, r, w) ); // For the constructor from size declareRunTimeSelectionTable ( autoPtr, topoSet, size, ( const polyMesh& mesh, const word& name, const label size, writeOption w ), (mesh, name, size, w) ); // For the constructor as copy declareRunTimeSelectionTable ( autoPtr, topoSet, set, ( const polyMesh& mesh, const word& name, const topoSet& set, writeOption w ), (mesh, name, set, w) ); // Constructors //- Construct from IOobject as explicitly passed type. // Can't use typeName info here since subclasses not yet instantiated topoSet(const IOobject&, const word& wantedType); //- Construct from polyMesh and name. Searches for a polyMesh/sets // directory but not beyond mesh.facesInstance. topoSet ( const polyMesh& mesh, const word& wantedType, const word& name, readOption r=MUST_READ, writeOption w=NO_WRITE ); //- Construct empty from additional size of labelHashSet. // Searches for a polyMesh/sets // directory but not beyond mesh.facesInstance. 
topoSet ( const polyMesh& mesh, const word& name, const label, writeOption w=NO_WRITE ); //- Construct empty from additional labelHashSet // Searches for a polyMesh/sets // directory but not beyond mesh.facesInstance. topoSet ( const polyMesh& mesh, const word& name, const labelHashSet&, writeOption w=NO_WRITE ); //- Construct empty from IOobject and size. topoSet(const IOobject&, const label size); //- Construct from IOobject and labelHashSet. topoSet(const IOobject&, const labelHashSet&); //- Clone autoPtr<topoSet> clone() const { NotImplemented; return autoPtr<topoSet>(nullptr); } // Selectors //- Return a pointer to a toposet read from file static autoPtr<topoSet> New ( const word& setType, const polyMesh& mesh, const word& name, readOption r=MUST_READ, writeOption w=NO_WRITE ); //- Return a pointer to a new toposet of given size static autoPtr<topoSet> New ( const word& setType, const polyMesh& mesh, const word& name, const label size, writeOption w=NO_WRITE ); //- Return a pointer to a new toposet as copy of another toposet static autoPtr<topoSet> New ( const word& setType, const polyMesh& mesh, const word& name, const topoSet& set, writeOption w=NO_WRITE ); //- Destructor virtual ~topoSet(); // Member functions //- Invert contents. (insert all members 0..maxLen-1 which were not in // set) virtual void invert(const label maxLen); //- Subset contents. Only elements present in both sets remain. virtual void subset(const topoSet& set); //- Add elements present in set. virtual void addSet(const topoSet& set); //- Delete elements present in set. virtual void deleteSet(const topoSet& set); //- Sync set across coupled patches. virtual void sync(const polyMesh& mesh); //- Write labels columnwise to os. Truncate to maxLen. virtual void writeDebug(Ostream& os, const label maxLen) const; //- Like above but also writes mesh related quantity // (usually coordinate). 
virtual void writeDebug ( Ostream& os, const primitiveMesh&, const label maxLen ) const = 0; //- Write contents. virtual bool writeData(Ostream&) const; //- Update any stored data for new labels. Not implemented. virtual void updateMesh(const mapPolyMesh& morphMap); //- Return max allowable index (+1). Not implemented. virtual label maxSize(const polyMesh& mesh) const = 0; // Member operators //- Copy labelHashSet part only void operator=(const topoSet&); }; // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // } // End namespace CML // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // #endif // ************************************************************************* //
phatblat/macOSPrivateFrameworks
PrivateFrameworks/CoreFollowUp/FLTopLevelViewModel.h
// // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>. // #import "NSObject.h" #import "FLViewModel.h" @class FLFollowUpController, FLItemChangeObserver, NSString; @interface FLTopLevelViewModel : NSObject <FLViewModel> { FLFollowUpController *_controller; FLItemChangeObserver *_observer; NSString *_bundleIdentifier; NSString *_localizedDeviceRowTitle; } + (id)_prefixFromBundleIdentifier:(id)arg1; + (id)redirectURLForItem:(id)arg1 withAction:(id)arg2; @property(copy, nonatomic) NSString *localizedDeviceRowTitle; // @synthesize localizedDeviceRowTitle=_localizedDeviceRowTitle; - (void).cxx_destruct; - (void)setItemChangeHandler:(CDUnknownBlockType)arg1; - (void)mapItemsToGroups:(id)arg1; - (id)groups; - (BOOL)allPendingItemsContains:(id)arg1; - (void)_refreshItemsWithExtensionToItemMap:(id)arg1 completion:(CDUnknownBlockType)arg2; - (id)extensionToItemMapFromItems:(id)arg1; - (void)refreshItems:(id)arg1 withCompletionHandler:(CDUnknownBlockType)arg2; - (void)refreshItemsForItem:(id)arg1 withCompletionHandler:(CDUnknownBlockType)arg2; - (id)allPendingItems; - (id)initWithBundleIdentifier:(id)arg1 controller:(id)arg2; - (id)initWithBundleIdentifier:(id)arg1 clientIdentifier:(id)arg2; - (id)initWithIdentifier:(id)arg1; // Remaining properties @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly) unsigned long long hash; @property(readonly) Class superclass; @end
ZenMX/elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/stats/common/FoldValues.java
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.core.ml.dataframe.stats.common; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Objects; public class FoldValues implements Writeable, ToXContentObject { public static final ParseField FOLD = new ParseField("fold"); public static final ParseField VALUES = new ParseField("values"); public static FoldValues fromXContent(XContentParser parser, boolean ignoreUnknownFields) { return createParser(ignoreUnknownFields).apply(parser, null); } private static ConstructingObjectParser<FoldValues, Void> createParser(boolean ignoreUnknownFields) { ConstructingObjectParser<FoldValues, Void> parser = new ConstructingObjectParser<>("fold_values", ignoreUnknownFields, a -> new FoldValues((int) a[0], (List<Double>) a[1])); parser.declareInt(ConstructingObjectParser.constructorArg(), FOLD); parser.declareDoubleArray(ConstructingObjectParser.constructorArg(), VALUES); return parser; } private final int fold; private final double[] values; private FoldValues(int fold, List<Double> values) { this(fold, values.stream().mapToDouble(Double::doubleValue).toArray()); } public FoldValues(int fold, double[] values) { this.fold = fold; this.values = values; } public FoldValues(StreamInput in) throws IOException { fold = in.readVInt(); values = 
in.readDoubleArray(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(fold); out.writeDoubleArray(values); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FOLD.getPreferredName(), fold); builder.array(VALUES.getPreferredName(), values); builder.endObject(); return builder; } @Override public boolean equals(Object o) { if (o == this) return true; if (o == null || getClass() != o.getClass()) return false; FoldValues other = (FoldValues) o; return fold == other.fold && Arrays.equals(values, other.values); } @Override public int hashCode() { return Objects.hash(fold, Arrays.hashCode(values)); } }
Repast/repast.simphony
repast.simphony.core/test_watcher/repast/simphony/engine/watcher/PerfTest.java
package repast.simphony.engine.watcher;

/**
 * Micro-benchmark for {@code Generator.run()}: performs an untimed warm-up
 * pass, then times a second pass of the same number of invocations.
 *
 * @author <NAME>
 * @version $Revision: 1.1 $ $Date: 2005/12/21 22:26:02 $
 */
public class PerfTest {

  // Invocations per phase; large enough to trigger JIT compilation during warm-up.
  int iterations = 20000000;

  /** Runs the warm-up pass followed by the timed pass and prints the elapsed time. */
  public void run() {
    Generator gen = new Generator();
    // Warm-up: let the JIT compile the hot path before measuring.
    for (int i = 0; i < iterations; i++) {
      gen.run();
    }
    // nanoTime() is monotonic and therefore correct for elapsed-interval
    // measurement; currentTimeMillis() can jump with wall-clock adjustments.
    long start = System.nanoTime();
    for (int i = 0; i < iterations; i++) {
      gen.run();
    }
    long duration = (System.nanoTime() - start) / 1000000L;
    System.out.println("Duration = " + duration + " ms");
  }

  public static void main(String[] args) {
    PerfTest test = new PerfTest();
    test.run();
  }
}
vprilepskiy/java-a-to-z
chapter_003.2/src/main/java/ru/job4j/manager/client/Client.java
<reponame>vprilepskiy/java-a-to-z<filename>chapter_003.2/src/main/java/ru/job4j/manager/client/Client.java package ru.job4j.manager.client; import ru.job4j.manager.Actions; import ru.job4j.manager.Menu; import ru.job4j.manager.Settings; import java.io.IOException; import java.net.InetAddress; import java.net.Socket; /** * Created by VLADIMIR on 18.03.2017. */ public class Client implements IClient { @Override public void start() { final String propertiesFileName = "app.properties"; final Settings settings = new Settings(propertiesFileName); final String ip = settings.getValue("ip"); final int port = Integer.valueOf(settings.getValue("port")); try (Socket socket = new Socket(InetAddress.getByName(ip), port)) { System.out.println("client connect"); Menu menu = new Menu(); menu.menuNavigator(new Actions(socket)); } catch (IOException e) { e.printStackTrace(); } } /** * Start client. Only after start Server. * @param args - noop. */ public static void main(String[] args) { new Client().start(); } }
changhiskhan/rikai
python/rikai/types/rle.py
# Copyright 2021 Rikai Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import annotations from typing import Tuple import numpy as np def encode(arr: np.ndarray) -> np.array: """Run-length encoding a matrix. Parameters ---------- arr : a data array or n-D metrix/tensor. """ if len(arr.shape) > 1: return encode(arr.reshape(-1)) if len(arr) == 0: return [] total = len(arr) conti_idx = np.r_[0, np.flatnonzero(~np.equal(arr[1:], [arr[:-1]])) + 1] counts = np.diff(np.r_[conti_idx, total]) if arr[0]: counts = np.insert(counts, 0, 0) return counts def decode( rle: np.array, shape: Tuple[int] | Tuple[int, int], order: str = "C" ) -> np.ndarray: """Decode (COCO) RLE encoding into a numpy mask. Parameters ---------- rle : np.array A 1-D array of RLE encoded data. shape: tuple of ints (height, width) order: str Numpy array order. If uses Coco-style RLE, order should set to F. """ val = 0 start_idx = 0 n = np.sum(rle) arr = np.full(n, 0, dtype=np.uint8) for length in rle: arr[start_idx: start_idx + length] = val # fmt:skip start_idx += length val = 1 - val return arr.reshape(shape, order=order)
touxiong88/92_mediatek
custom/common/kernel/accelerometer/lis3dh/lis3dh.h
/* linux/drivers/hwmon/LIS3DH.c * * (C) Copyright 2008 * MediaTek <www.mediatek.com> * * LIS3DH driver for MT6516 * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef LIS3DH_H #define LIS3DH_H #include <linux/ioctl.h> #define LIS3DH_I2C_SLAVE_ADDR 0x30//0x30<-> SD0=GND;0x32<-> SD0=High /* LIS3DH Register Map (Please refer to LIS3DH Specifications) */ #define LIS3DH_REG_CTL_REG1 0x20 #define LIS3DH_REG_CTL_REG2 0x21 #define LIS3DH_REG_CTL_REG3 0x22 #define LIS3DH_REG_CTL_REG4 0x23 #define LIS3DH_REG_DATAX0 0x28 #define LIS3DH_REG_OUT_X 0x28 #define LIS3DH_REG_OUT_Y 0x2A #define LIS3DH_REG_OUT_Z 0x2C /* #define LIS3DH_REG_DEVID 0x00 #define LIS3DH_REG_THRESH_TAP 0x1D #define LIS3DH_REG_OFSX 0x1E #define LIS3DH_REG_OFSY 0x1F #define LIS3DH_REG_OFSZ 0x20 #define LIS3DH_REG_DUR 0x21 #define LIS3DH_REG_THRESH_ACT 0x24 #define LIS3DH_REG_THRESH_INACT 0x25 #define LIS3DH_REG_TIME_INACT 0x26 #define LIS3DH_REG_ACT_INACT_CTL 0x27 #define LIS3DH_REG_THRESH_FF 0x28 #define LIS3DH_REG_TIME_FF 0x29 #define LIS3DH_REG_TAP_AXES 0x2A #define LIS3DH_REG_ACT_TAP_STATUS 0x2B #define LIS3DH_REG_BW_RATE 0x2C #define LIS3DH_REG_POWER_CTL 0x2D #define LIS3DH_REG_INT_ENABLE 0x2E #define LIS3DH_REG_INT_MAP 0x2F #define LIS3DH_REG_INT_SOURCE 0x30 #define LIS3DH_REG_DATA_FORMAT 0x31 #define LIS3DH_REG_DATAX0 0x32 #define LIS3DH_REG_FIFO_CTL 0x38 
#define LIS3DH_REG_FIFO_STATUS 0x39 */ #define LIS3DH_FIXED_DEVID 0xE5 #define LIS3DH_BW_200HZ 0x60 #define LIS3DH_BW_100HZ 0x50 //400 or 100 on other choise //changed #define LIS3DH_BW_50HZ 0x40 #define LIS3DH_FULLRANG_LSB 0XFF #define LIS3DH_MEASURE_MODE 0x08 //changed #define LIS3DH_DATA_READY 0x07 //changed //#define LIS3DH_FULL_RES 0x08 #define LIS3DH_RANGE_2G 0x00 #define LIS3DH_RANGE_4G 0x10 #define LIS3DH_RANGE_8G 0x20 //8g or 2g no ohter choise//changed //#define LIS3DH_RANGE_16G 0x30 //8g or 2g no ohter choise//changed #define LIS3DH_SELF_TEST 0x10 //changed #define LIS3DH_STREAM_MODE 0x80 #define LIS3DH_SAMPLES_15 0x0F #define LIS3DH_FS_8G_LSB_G 0x20 #define LIS3DH_FS_4G_LSB_G 0x10 #define LIS3DH_FS_2G_LSB_G 0x00 #define LIS3DH_LEFT_JUSTIFY 0x04 #define LIS3DH_RIGHT_JUSTIFY 0x00 #define LIS3DH_SUCCESS 0 #define LIS3DH_ERR_I2C -1 #define LIS3DH_ERR_STATUS -3 #define LIS3DH_ERR_SETUP_FAILURE -4 #define LIS3DH_ERR_GETGSENSORDATA -5 #define LIS3DH_ERR_IDENTIFICATION -6 #define LIS3DH_BUFSIZE 256 #endif
zhouwentong1993/qmq
qmq-remoting/src/main/java/qunar/tc/qmq/metainfoclient/MetaInfoClientHandler.java
/*
 * Copyright 2018 Qunar, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package qunar.tc.qmq.metainfoclient;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.internal.ConcurrentSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import qunar.tc.qmq.meta.BrokerCluster;
import qunar.tc.qmq.meta.BrokerGroup;
import qunar.tc.qmq.base.OnOfflineState;
import qunar.tc.qmq.meta.BrokerState;
import qunar.tc.qmq.protocol.CommandCode;
import qunar.tc.qmq.protocol.Datagram;
import qunar.tc.qmq.protocol.consumer.MetaInfoResponse;
import qunar.tc.qmq.utils.PayloadHolderUtils;

import java.util.ArrayList;
import java.util.List;

/**
 * Inbound Netty handler that decodes meta-info response datagrams and fans
 * them out to registered subscribers.
 *
 * @author yiqun.fan create on 17-8-31.
 */
@ChannelHandler.Sharable
class MetaInfoClientHandler extends SimpleChannelInboundHandler<Datagram> {
    private static final Logger LOG = LoggerFactory.getLogger(MetaInfoClientHandler.class);

    // Subscribers notified on every successfully decoded response.
    private final ConcurrentSet<MetaInfoClient.ResponseSubscriber> responseSubscribers = new ConcurrentSet<>();

    // Adds a subscriber; set semantics mean duplicate registrations are no-ops.
    void registerResponseSubscriber(MetaInfoClient.ResponseSubscriber subscriber) {
        responseSubscribers.add(subscriber);
    }

    /**
     * Decodes the datagram body when the header reports SUCCESS and notifies
     * subscribers; any other code (or a decode failure) is logged and dropped.
     */
    @Override
    protected void channelRead0(ChannelHandlerContext ctx, Datagram msg) {
        MetaInfoResponse response = null;
        if (msg.getHeader().getCode() == CommandCode.SUCCESS) {
            response = deserializeMetaInfoResponse(msg.getBody());
        }
        if (response != null) {
            notifySubscriber(response);
        } else {
            LOG.warn("request meta info UNKNOWN. code={}", msg.getHeader().getCode());
        }
    }

    // Delivers the response to every subscriber; one subscriber throwing
    // must not prevent delivery to the others, hence the per-call catch.
    private void notifySubscriber(MetaInfoResponse response) {
        for (MetaInfoClient.ResponseSubscriber subscriber : responseSubscribers) {
            try {
                subscriber.onResponse(response);
            } catch (Exception e) {
                LOG.error("", e);
            }
        }
    }

    /**
     * Decodes a MetaInfoResponse from the wire buffer. The read order
     * (timestamp, subject, consumerGroup, on/offline state, client type,
     * broker cluster) must exactly match the server's serialization order.
     * Returns null when decoding fails.
     */
    private static MetaInfoResponse deserializeMetaInfoResponse(ByteBuf buf) {
        try {
            final MetaInfoResponse metaInfoResponse = new MetaInfoResponse();
            metaInfoResponse.setTimestamp(buf.readLong());
            metaInfoResponse.setSubject(PayloadHolderUtils.readString(buf));
            metaInfoResponse.setConsumerGroup(PayloadHolderUtils.readString(buf));
            metaInfoResponse.setOnOfflineState(OnOfflineState.fromCode(buf.readByte()));
            metaInfoResponse.setClientTypeCode(buf.readByte());
            metaInfoResponse.setBrokerCluster(deserializeBrokerCluster(buf));
            return metaInfoResponse;
        } catch (Exception e) {
            LOG.error("deserializeMetaInfoResponse exception", e);
        }
        return null;
    }

    // Decodes the broker-group list: a short count followed by, per group,
    // name, master address, update time and a one-byte broker state.
    private static BrokerCluster deserializeBrokerCluster(ByteBuf buf) {
        final int brokerGroupSize = buf.readShort();
        final List<BrokerGroup> brokerGroups = new ArrayList<>(brokerGroupSize);
        for (int i = 0; i < brokerGroupSize; i++) {
            final BrokerGroup brokerGroup = new BrokerGroup();
            brokerGroup.setGroupName(PayloadHolderUtils.readString(buf));
            brokerGroup.setMaster(PayloadHolderUtils.readString(buf));
            brokerGroup.setUpdateTime(buf.readLong());
            final int brokerStateCode = buf.readByte();
            final BrokerState brokerState = BrokerState.codeOf(brokerStateCode);
            brokerGroup.setBrokerState(brokerState);
            brokerGroups.add(brokerGroup);
        }
        return new BrokerCluster(brokerGroups);
    }
}
LeiQiao/Parasite-Plugins
simple_html/simple_html_plugin.py
from pa.plugin import Plugin
from flask import Blueprint, request, send_file
import glob
import os
import sys
import pa
import mimetypes
import io
from .frame import FrameDecorator
from .inherit import InheritRoute


class SimpleHTMLPlugin(Plugin):
    """Serves static HTML assets declared in a plugin manifest's 'html'
    section by registering one Flask URL rule per discovered file."""

    __pluginname__ = 'simple_html'

    @Plugin.before_load
    def regist_html(self):
        """Expand the manifest's route -> file(s) mapping into concrete file
        routes and register them all on a dedicated blueprint."""
        if self.manifest is None or 'html' not in self.manifest:
            return

        # Register a blueprint dedicated to this plugin's static routes.
        blueprint = Blueprint(
            name='{0}_{1}_blueprint'.format(self.manifest['name'], SimpleHTMLPlugin.__pluginname__),
            import_name=__name__,
            url_prefix=''
        )
        setattr(self, 'blueprint', blueprint)

        # Maps each registered URL rule to the file it serves; read back
        # at request time by static_file_route().
        spc_files = {}
        setattr(self, 'blueprint_route', spc_files)

        html = self.manifest['html']
        for route, files in html.items():
            if isinstance(files, list):
                for file in files:
                    # Manifest paths are relative to the plugin's module file.
                    file_path = os.path.join(os.path.dirname(sys.modules[self.__module__].__file__), file)
                    spc_files.update(SimpleHTMLPlugin.find_all_files(route, file_path))
            elif isinstance(files, str):
                file_path = os.path.join(os.path.dirname(sys.modules[self.__module__].__file__), files)
                spc_files.update(SimpleHTMLPlugin.find_all_files(route, file_path))
            else:
                raise TypeError('{0}: route \'{1}\' has unknown type, either str or list'
                                .format(self.__pluginname__, route))

        for file_route in spc_files.keys():
            pa.log.info('{0}: add static file rule: {1}'.format(self.__pluginname__, file_route))
            blueprint.add_url_rule(file_route, view_func=SimpleHTMLPlugin.static_file_route, methods=['GET'])

        pa.web_app.register_blueprint(blueprint)

    @staticmethod
    def find_all_files(route, file):
        """Recursively expand a glob pattern into {url_route: file_path}.

        Directories recurse with the directory name appended to the route;
        plain files map to the route itself (or route + basename when the
        glob matched several files or the route ends with '/').
        """
        files = glob.glob(file)
        route_files = {}
        for file_path in files:
            if os.path.isdir(file_path):
                sub_route = os.path.join(route, os.path.basename(file_path)) + '/'
                route_files.update(SimpleHTMLPlugin.find_all_files(sub_route, os.path.join(file_path, '*')))
            else:
                if len(files) > 1 or route[-1] == '/':
                    # Multiple matches (or a directory-style route): each
                    # file gets its own basename appended to the route.
                    file_route = os.path.join(route, os.path.basename(file_path))
                elif route[-8:] == '/<index>':
                    # '/<index>' suffix collapses to a trailing '/' route.
                    file_route = route[:-7]
                else:
                    file_route = route
                route_files[file_route] = file_path
        return route_files

    @staticmethod
    def static_file_route():
        """Shared Flask view: resolves the requested URL rule to a file via
        each installed plugin's blueprint_route table and serves it."""
        for plugin in pa.plugin_manager.all_installed_plugins:
            if not hasattr(plugin, 'blueprint_route'):
                continue
            routes = getattr(plugin, 'blueprint_route')
            file_route = request.url_rule.rule
            if file_route not in routes.keys():
                continue
            file_path = routes[file_route]
            # 'application/octet-stream' is the standard generic fallback;
            # the previous 'application/stream' is not a registered MIME type.
            mime_type = mimetypes.guess_type(file_path)[0] or 'application/octet-stream'
            # Context manager guarantees the handle is closed even if read() fails.
            with open(file_path, 'rb') as file_pointer:
                fp = io.BytesIO(file_pointer.read())
            fp = FrameDecorator.decorator_route(file_route, fp)
            fp = InheritRoute.inherit_route(file_route, fp)
            return send_file(fp, mimetype=mime_type)
        return None
tfpractice/tfp_metaprogramming
lib/tfp_metaprogramming/callable/procs.rb
# Namespace container for proc-related metaprogramming exercises.
module TfpMetaprogramming
  module Callable
    # Intentionally empty here: implementations are loaded into this
    # namespace by the require_relative below.
    module Procs
    end
  end
end

# Load the Procster implementation after the namespace is defined.
require_relative 'procs/procster'
lenloe1/v2.7
protocol/zigbee/documentation/120-3023-000_AF_API/navtreeindex25.js
<filename>protocol/zigbee/documentation/120-3023-000_AF_API/navtreeindex25.js var NAVTREEINDEX25 = { "group__enums.html#gadf57a71380d591c6dfa4544b9ef46aa7":[2,0,4,714], "group__enums.html#gadf5fcf624e01dd0e9f44dae5db1a12ce":[2,0,4,343], "group__enums.html#gadf6ebb4ebbac27d7e0807a249b91e18c":[2,0,4,813], "group__enums.html#gadf72894ea060d25a88b748c79023ddb3":[2,0,4,353], "group__enums.html#gadf7ad8300f74cd1b285f095981ba8a77":[2,0,4,709], "group__enums.html#gadf9b53496f9760ceb0d6ddba7ac91beb":[2,0,4,162], "group__enums.html#gadfa44cd2588220f43f67b1fd5864f9dd":[2,0,4,898], "group__enums.html#gadfa45f03feb5e7130f3c5b88e49c518d":[2,0,4,477], "group__enums.html#gadffc876dd37cf41de9dd1d2c859c6400":[2,0,4,319], "group__enums.html#gae03e9e40336967cc81207aa94ab5572f":[2,0,4,96], "group__enums.html#gae065a4b812220231112ed6e5cedff317":[2,0,4,986], "group__enums.html#gae0a3c57be1fb28beeedc937c74b07908":[2,0,4,721], "group__enums.html#gae13621e60b78973cc182de5c8f2caddf":[2,0,4,1260], "group__enums.html#gae143a06f7488002917564d54fa94b712":[2,0,4,818], "group__enums.html#gae197ccf5aaf43dc7075ea0ac52d87696":[2,0,4,683], "group__enums.html#gae1a89f306d019f332ffa6df49714f032":[2,0,4,383], "group__enums.html#gae1b95e10892f1b940387b4040b9322da":[2,0,4,903], "group__enums.html#gae1da418c1e8e0acbf4c24bf5671423a3":[2,0,4,924], "group__enums.html#gae22151a08dec27a3a1cc4217a6ae0ce3":[2,0,4,776], "group__enums.html#gae293aa9aee2782031e88dd0c82cc4e87":[2,0,4,230], "group__enums.html#gae2ca272076186068d9d4e2b3f4e56fe0":[2,0,4,1091], "group__enums.html#gae31729046076e5e54ca26bd7fda02618":[2,0,4,1017], "group__enums.html#gae325a21072bd636d2b0a94f63633c6a8":[2,0,4,763], "group__enums.html#gae34ecc07028946c6a8369c603254bf21":[2,0,4,915], "group__enums.html#gae38ef8f9304e88dea0f6914c55338973":[2,0,4,1218], "group__enums.html#gae38f130aab1fed62558217215dc8bec9":[2,0,4,1222], "group__enums.html#gae3c6e47e7cfe75ddf5f4d0fc31ebc32d":[2,0,4,425], 
"group__enums.html#gae3caddfbe2c872d40c777751e1f28b5d":[2,0,4,1135], "group__enums.html#gae3d66c398c6632688821e7e06d086081":[2,0,4,1206], "group__enums.html#gae40306abc415b808fbdf2c6198481fb9":[2,0,4,1064], "group__enums.html#gae457bbb38bcdb9e9fe93e7b6d89f3308":[2,0,4,652], "group__enums.html#gae45e828eabf36c2f07cb4fc872543b47":[2,0,4,345], "group__enums.html#gae460724440b1a5622a7525ce88dd6318":[2,0,4,706], "group__enums.html#gae48e4ec4ff10969be17fbd7f40624d81":[2,0,4,1340], "group__enums.html#gae49336ceed32915cad79d2cf06b74aa5":[2,0,4,438], "group__enums.html#gae4d4950164b1e34025c8442613f3652f":[2,0,4,680], "group__enums.html#gae5096ede1d2088fc81a71922831743c6":[2,0,4,1175], "group__enums.html#gae519b15580ea8986b50a90527b78aca9":[2,0,4,984], "group__enums.html#gae54865a11741ac0db21c12e2b08e6b8d":[2,0,4,707], "group__enums.html#gae58ffa462d3b8bcb92f998405eea90b8":[2,0,4,1326], "group__enums.html#gae5aef6eb2ecbc32b95cf69b9fad50b7e":[2,0,4,699], "group__enums.html#gae5b01709bc0fe4b4659d2702682e5227":[2,0,4,1044], "group__enums.html#gae60b189df20d97cd9af7d995682dd499":[2,0,4,310], "group__enums.html#gae60f1663b0f246c3d6eb4c5b6af6130b":[2,0,4,307], "group__enums.html#gae615f5df099ea58d01a76bfc1b473889":[2,0,4,543], "group__enums.html#gae65718fc2f2e55278738df66507396a0":[2,0,4,610], "group__enums.html#gae65718fc2f2e55278738df66507396a0":[2,0,4,611], "group__enums.html#gae6785e8aa2184973982f13bea1ff87ba":[2,0,4,936], "group__enums.html#gae6834e7120a1cecde7fe6eecb9e2a45c":[2,0,4,604], "group__enums.html#gae6a953abd31afa113103230169e282ea":[2,0,4,794], "group__enums.html#gae6bdc6452e4a06aa6c2282d4c9645149":[2,0,4,1000], "group__enums.html#gae6dea6f3b651a2249120b3fb9d36c1df":[2,0,4,129], "group__enums.html#gae7138fa9c23197a19736ab64451023fc":[2,0,4,623], "group__enums.html#gae7543105b53980ce881a021a8c02403c":[2,0,4,1149], "group__enums.html#gae7bffbe1892dfb04db42072a370f5023":[2,0,4,237], "group__enums.html#gae7e7e5463bb8bab76d65abdd0db00ef0":[2,0,4,185], 
"group__enums.html#gae7f81aca3661fc67ffe4b2e80e96cc95":[2,0,4,502], "group__enums.html#gae808485ac91ec791bdc4fd12abf7aa87":[2,0,4,1194], "group__enums.html#gae8092f52be0e9d65780d32e9ccc53cec":[2,0,4,405], "group__enums.html#gae810113f1d2268eec1a0aa18a8164fc3":[2,0,4,656], "group__enums.html#gae847a51bd22b5ae00bb7c6182701d3a4":[2,0,4,872], "group__enums.html#gae84ecffa99e91c60ae1810b8d0c77125":[2,0,4,1057], "group__enums.html#gae86152e0364ada44097ec9764457505d":[2,0,4,381], "group__enums.html#gae8697a35c0181941818d42a52f8b2580":[2,0,4,501], "group__enums.html#gae86b732cfbf193c44a07471440b30d74":[2,0,4,1320], "group__enums.html#gae8871a01f55f2acfd84da20aaf341062":[2,0,4,727], "group__enums.html#gae895d880fbba8e6ec9a9080bf8277dc1":[2,0,4,1236], "group__enums.html#gae8e734091b184c5605136073251a43c2":[2,0,4,748], "group__enums.html#gae8ec0c50a73504dadf1ea9f166194824":[2,0,4,211], "group__enums.html#gae9000f9d205c46f30cd13f720227fb0f":[2,0,4,366], "group__enums.html#gae91c36d64809ed250046e6a318527329":[2,0,4,333], "group__enums.html#gae93e0e9cd4d79f3d4b67385a7fe692a2":[2,0,4,720], "group__enums.html#gae966f5109eadc1d0200121e64d6f5ecc":[2,0,4,744], "group__enums.html#gae999405d7d78fa2f9709c9ae610c28ae":[2,0,4,641], "group__enums.html#gae9c2c29f9e31b6d67b361ef80ef19308":[2,0,4,22], "group__enums.html#gae9edbd5db82e1eb98d1c004673f74e33":[2,0,4,953], "group__enums.html#gaea06fb8fa5a10ab3a229caebfa7c45b4":[2,0,4,33], "group__enums.html#gaea76fe9fab5f5f1b15e6a5f99b8713ed":[2,0,4,99], "group__enums.html#gaeac69d0ab689d67cbc9237eab00a650a":[2,0,4,957], "group__enums.html#gaeadc176ddcc7a0ec74005dd727b3c9fc":[2,0,4,481], "group__enums.html#gaeadd06bfcc874c91decede2483336c95":[2,0,4,453], "group__enums.html#gaeaefc10823665dbd78e75caaef3e4abe":[2,0,4,81], "group__enums.html#gaeb12c6131aeaa444f7164babfd893aa8":[2,0,4,27], "group__enums.html#gaeb2c0059360d9bf2cbfe082b1651d5d4":[2,0,4,1293], "group__enums.html#gaeb44148774ec30639ad65e4ce1f0268c":[2,0,4,332], 
"group__enums.html#gaeb4b1bbc285c7d1b6d6aea4c8e66b78f":[2,0,4,782], "group__enums.html#gaeb8c62a04e355cae3c95058548ee95c1":[2,0,4,110], "group__enums.html#gaebefdc02d2a9c0406597652740380cf1":[2,0,4,484], "group__enums.html#gaec19940c53990180053b5935ac97ca7f":[2,0,4,1159], "group__enums.html#gaec689682573bcd89e2a7260b15e5c28e":[2,0,4,743], "group__enums.html#gaec7260458cca564f0517c7d6f5f23e60":[2,0,4,989], "group__enums.html#gaecd181da3ae56145276db0225b661146":[2,0,4,565], "group__enums.html#gaed4802f00201a4b90d4adeace1806484":[2,0,4,29], "group__enums.html#gaed84b46aee38a1d3229c4979878232c6":[2,0,4,380], "group__enums.html#gaed9e4905385bc9ea0527b36fd0179bf8":[2,0,4,417], "group__enums.html#gaee07487f21aa14406ee9c4df238f0981":[2,0,4,536], "group__enums.html#gaee136ea692bbb0414f07b1c792150d31":[2,0,4,43], "group__enums.html#gaee6510dc68e59316c2e0e77ed70c039b":[2,0,4,155], "group__enums.html#gaee6d793116c41ddbbbe71d6d8c81463c":[2,0,4,640], "group__enums.html#gaee8f0906af2166304eee4f831ebb17e4":[2,0,4,601], "group__enums.html#gaeeba4018b968793b1a736e39d947e96d":[2,0,4,1251], "group__enums.html#gaeecd5ad1a2e3714a2060a25c8fd4ccba":[2,0,4,922], "group__enums.html#gaeee42f652844fe4b7f59bce22f3bb0c0":[2,0,4,1310], "group__enums.html#gaef305bbd226bb73392874c8c29260f86":[2,0,4,1028], "group__enums.html#gaefb1b39eb7f54939d4d8fe62abd25106":[2,0,4,1029], "group__enums.html#gaeff75f5c167b989f8939bbbe648f1076":[2,0,4,883], "group__enums.html#gaf02be7375e4fcc5d7edb9976d977ddc6":[2,0,4,1070], "group__enums.html#gaf0bdf31f11590668a33007df9eb5116a":[2,0,4,841], "group__enums.html#gaf0bfce7894d84318f7345183cb1431fe":[2,0,4,219], "group__enums.html#gaf0c22a6565acf86fe4f01873ed86164a":[2,0,4,594], "group__enums.html#gaf0d0af7550a7ef8d11d442284ac7ee21":[2,0,4,379], "group__enums.html#gaf10b1c22192964558659f6a75308d22b":[2,0,4,988], "group__enums.html#gaf15170c852958d975febfe1c462e8094":[2,0,4,430], "group__enums.html#gaf1706a1765b7ba63dd6c630d7f24f6e9":[2,0,4,355], 
"group__enums.html#gaf1f01a7351c0b36927875c71969a6f50":[2,0,4,154], "group__enums.html#gaf1f924294e31b7ca8f1cd3293bd2d0f4":[2,0,4,494], "group__enums.html#gaf227d4be34d47789a83d689b3bd45846":[2,0,4,865], "group__enums.html#gaf2395a6d9a8b0c8ae8b4918f46df5aef":[2,0,4,960], "group__enums.html#gaf255d4dc4c3bf70bbc711c5692bf33ce":[2,0,4,602], "group__enums.html#gaf2c90842c8b4f451beaa40010c28ae69":[2,0,4,441], "group__enums.html#gaf2db35574e4153706cfcba9c649beb57":[2,0,4,834], "group__enums.html#gaf2f335b294061ecd3ffba10cfde9de35":[2,0,4,823], "group__enums.html#gaf2fe5185e78a66f1dd38284dbcc3bc00":[2,0,4,474], "group__enums.html#gaf31201ba69869d512d5284e6b4957028":[2,0,4,175], "group__enums.html#gaf32e9a138026a892782deb16a7647b09":[2,0,4,635], "group__enums.html#gaf347ae810ff964a5e4753e4e65741477":[2,0,4,682], "group__enums.html#gaf373967ab53b8911287379d5a65735c0":[2,0,4,803], "group__enums.html#gaf3a6dfa5758743123988a4b071e7ff24":[2,0,4,694], "group__enums.html#gaf3c0c238bfc7a029f67da91b158851ed":[2,0,4,1182], "group__enums.html#gaf3e6404ae2045d571656e0a3adff962b":[2,0,4,1068], "group__enums.html#gaf453a6d01e4832a250bf368955a70457":[2,0,4,1338], "group__enums.html#gaf473d0eacf49a0dd8a279bc8550a975f":[2,0,4,1124], "group__enums.html#gaf47ead69cbc4537457d638753b480c57":[2,0,4,395], "group__enums.html#gaf4c1b6682229a5b7dd43ab6b82e69d60":[2,0,4,413], "group__enums.html#gaf4e09a66dcbcc5187a34f7e2c8e46275":[2,0,4,1082], "group__enums.html#gaf5140a8bf14d576ceae935918d91b9b8":[2,0,4,263], "group__enums.html#gaf51977ee9cc640e0618a41df0e543b4d":[2,0,4,1022], "group__enums.html#gaf537404e6d9e009aa2a3756e13a2c07c":[2,0,4,287], "group__enums.html#gaf539b336b2a2ac7105dd3ef4b9621405":[2,0,4,427], "group__enums.html#gaf5671be90f31e810e46f120f0db931de":[2,0,4,808], "group__enums.html#gaf58fc627ce1f015bd066dfd8f999c8fa":[2,0,4,761], "group__enums.html#gaf595d5d619e15bc32a0c9b1ec9860305":[2,0,4,676], "group__enums.html#gaf620f1477ee607a6fd832549eb29e433":[2,0,4,84], 
"group__enums.html#gaf63597879e4533279250ed5b8c6a665f":[2,0,4,729], "group__enums.html#gaf63ac839afdc3436fc9308da824ed238":[2,0,4,53], "group__enums.html#gaf63cb68e7435962f3319bde80d35aa7f":[2,0,4,689], "group__enums.html#gaf66a80ef73caae791fa34f3344bd3980":[2,0,4,1140], "group__enums.html#gaf6d84e3ba0bf01c8e1802b61fdf3df0d":[2,0,4,847], "group__enums.html#gaf7403de16d92d0c548cc9e59d393b3ea":[2,0,4,458], "group__enums.html#gaf79cf6ff363ca73299cf2a4b2aa2cafb":[2,0,4,377], "group__enums.html#gaf7a319b27b820d69c7f653d033332164":[2,0,4,787], "group__enums.html#gaf7be80dbad450415f44b75e978f48e1e":[2,0,4,685], "group__enums.html#gaf7c5eb57510a278d7054d93ad015f83f":[2,0,4,867], "group__enums.html#gaf7e384afc00e7a72ee037836cbfd6710":[2,0,4,717], "group__enums.html#gaf7e934d9b980e81b4fda9a73bdcc02c3":[2,0,4,67], "group__enums.html#gaf7f35cd893e22c90ed9fef9f048cd89e":[2,0,4,401], "group__enums.html#gaf8ac70a7c4e91b9c09e9a91f5d41840b":[2,0,4,290], "group__enums.html#gaf8cd357c7d5a22a6b3c19d05ffd750af":[2,0,4,233], "group__enums.html#gaf8d8255840102f61112e937e49c10ef1":[2,0,4,361], "group__enums.html#gaf905fbc6ef10e79f98b8abff70d7defd":[2,0,4,1239], "group__enums.html#gaf94f6c2b06201b978bf8c52af4709176":[2,0,4,2], "group__enums.html#gaf97c1e3f97cc1de9c9215bf555db475e":[2,0,4,1269], "group__enums.html#gaf9a178f373206720b5851e01836ff811":[2,0,4,1120], "group__enums.html#gaf9add175549fcbcb588817f160ba8fa9":[2,0,4,1038], "group__enums.html#gafab45b00f48f55afe1e45db93adaf7db":[2,0,4,1020], "group__enums.html#gafaff333aee92ad4f854d9e104ba88123":[2,0,4,279], "group__enums.html#gafb08e1553bf296a02c8cca0d63368390":[2,0,4,503], "group__enums.html#gafb2b443df7d84f52b4af3001f8f4374f":[2,0,4,434], "group__enums.html#gafb6c586250e913b2f761b18372570715":[2,0,4,201], "group__enums.html#gafb7087b77f2227ae910eca140cc1a7b4":[2,0,4,77], "group__enums.html#gafba85ef0837cf7aae953e8c60d44177b":[2,0,4,584], "group__enums.html#gafbe5c93a195d1205ad48be00bdcd2a44":[2,0,4,15], 
"group__enums.html#gafbfe7701317fc8fdf47c97aeb5bbbca2":[2,0,4,1201], "group__enums.html#gafc117e90f8d9ebcbcbed52f7968ba65c":[2,0,4,928], "group__enums.html#gafc16d5502fa30f7a8bae260416ccd8d3":[2,0,4,450], "group__enums.html#gafc5a7c3934cdce6cdafadbb4f117b7a9":[2,0,4,303], "group__enums.html#gafc80b8c578d68b39045b56847b85d3d7":[2,0,4,558], "group__enums.html#gafcdd97fe40a428fe7178e57611df62be":[2,0,4,1098], "group__enums.html#gafce38045c1ff6d0037a2c58b35b2a820":[2,0,4,1300], "group__enums.html#gafcfca0c5341fa5080bad5b72e8cded1e":[2,0,4,259], "group__enums.html#gafd093ead71ebde1c42a499dd6118b984":[2,0,4,612], "group__enums.html#gafd093ead71ebde1c42a499dd6118b984":[2,0,4,613], "group__enums.html#gafd17a40e4d8cdad26954c49d9b219b68":[2,0,4,815], "group__enums.html#gafd288e14729dbcb2069c8115419f575f":[2,0,4,216], "group__enums.html#gafd3ed7014629c3b3bac66b8e4c92c1f6":[2,0,4,454], "group__enums.html#gafdc7e80c6614df3e7b9e0cc6bdd49d9b":[2,0,4,411], "group__enums.html#gafdebd021d09edbbe0fbcb2b4d53086bb":[2,0,4,68], "group__enums.html#gafe38a322a5ef6f6e0608183b3d4b2a79":[2,0,4,1342], "group__enums.html#gafe4a82c515646ae814acdd66560dc858":[2,0,4,810], "group__enums.html#gafe6f9c3dd31636e66ad0eb39c80e6ddb":[2,0,4,59], "group__enums.html#gaff07b4bc6b773fbd930711be82f80f25":[2,0,4,338], "group__enums.html#gaff2c7fda25745126b9558462568ea826":[2,0,4,665], "group__enums.html#gaff5e3eff63c1496c7723be2f5a0c11b0":[2,0,4,564], "group__enums.html#gaff8c648e51b26a67a647b1be34b3f6c3":[2,0,4,378], "group__enums.html#gaff97e6af18520887a53b9fffd88f37b1":[2,0,4,240], "group__enums.html#gaffbc885b74a5651c7cbfd4b891f8c083":[2,0,4,1033], "group__enums.html#gaffe3eb05a0ccb1819ee19c8591ae8f45":[2,0,4,587], "group__enums.html#gaffe8f958aebbaa5b85f2dcbc6fed310b":[2,0,4,470], "group__enums.html#gga013b854e8b411972d74eb91f47067dbaa487bb94c3a587eddee30b6a4d794eecb":[2,0,4,1291,0], "group__enums.html#gga013b854e8b411972d74eb91f47067dbaa6fe65bf7d954f9545c3810d7fed280af":[2,0,4,1291,1], 
"group__enums.html#gga013b854e8b411972d74eb91f47067dbaa7a8e28d7fdffbcc4578b0a7e00ff2ce4":[2,0,4,1291,3], "group__enums.html#gga013b854e8b411972d74eb91f47067dbaab3f151ed18d11987e7bc96f42e73187a":[2,0,4,1291,2], "group__enums.html#gga026d3c63ab0f7809a5faec48a5294ae9a69a888a2a053a08b981f281c9fb23afb":[2,0,4,1193,0], "group__enums.html#gga026d3c63ab0f7809a5faec48a5294ae9ac0f6792d263b8116be7efca1d83e2814":[2,0,4,1193,1], "group__enums.html#gga04fd5a98ea526861a5b96d8278c69fe6a07447e548b0b2018e29276dbf8e2aff5":[2,0,4,1286,0], "group__enums.html#gga04fd5a98ea526861a5b96d8278c69fe6abdb1c83bf4b8dd7776cbdf27841b26f4":[2,0,4,1286,1], "group__enums.html#gga05c4daf4e9be065ccf42e11e42f887afa1fbec6db0dd1d0f993a7d21ee3b59db5":[2,0,4,1247,1], "group__enums.html#gga05c4daf4e9be065ccf42e11e42f887afa6dfb92309ef1c74c7bb375ca86518a33":[2,0,4,1247,0], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dca053dc763fe075a5bfe711a5153fe117b":[2,0,4,1322,1], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dca1b3413795ff5e8f682fa2e792211d29f":[2,0,4,1322,6], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dca23157d1270dc04eecdb1c8602eeeca50":[2,0,4,1322,8], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dca41ecb205b6ba28b2110fc38c1cb6c8c8":[2,0,4,1322,0], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dca6b74cf7be18a569ae8678526050fcaaf":[2,0,4,1322,7], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dca97fd2bb8c8dc9712b6bec78ac0d18dac":[2,0,4,1322,4], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dcaa844d3cbc68e5979f4b51810a29f3525":[2,0,4,1322,3], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dcadeb310b7d0058f21fe429db30a0884e5":[2,0,4,1322,2], "group__enums.html#gga0675cb10341a127b5f0797c52a10b4dcae3eb76c90138f3cde925bca8d606b767":[2,0,4,1322,5], "group__enums.html#gga071551897e9a1ff6f8645390f49de5eaa7f141f82b950a2905c58feef42a795b9":[2,0,4,1202,2], "group__enums.html#gga071551897e9a1ff6f8645390f49de5eaa8e7d9e09aa5f72d3952ea5ecde4b2423":[2,0,4,1202,3], 
"group__enums.html#gga071551897e9a1ff6f8645390f49de5eaabb87c4e1b0ab23f78d0b6c01f4399026":[2,0,4,1202,0], "group__enums.html#gga071551897e9a1ff6f8645390f49de5eaaf04f0bf5017f678eccb8c818e07ca171":[2,0,4,1202,1], "group__enums.html#gga08347733b255ab32eb0205091081e99aa132b98b791c1dc5218fe1b1463aa6dc1":[2,0,4,1179,0], "group__enums.html#gga08347733b255ab32eb0205091081e99aa1c18c395f7b2fff862bfdd840132c3f0":[2,0,4,1179,1], "group__enums.html#gga08347733b255ab32eb0205091081e99aa7d0c72de880bff6c1b4c2b76d48e0a78":[2,0,4,1179,2], "group__enums.html#gga0a2e83202c161f6ee13e5274a9bc8f48a26f462db61a7242b71197c73b930c7cd":[2,0,4,1228,0], "group__enums.html#gga0a2e83202c161f6ee13e5274a9bc8f48a762231ab22724cc6f54780e1951ced9d":[2,0,4,1228,1], "group__enums.html#gga0ad41be408a53284c7cf2cc811da9477a3354944839f34f66f67a91cfe26a20b5":[2,0,4,1200,4], "group__enums.html#gga0ad41be408a53284c7cf2cc811da9477a58719b8946e0cf0cc6005a7fba9d2936":[2,0,4,1200,0], "group__enums.html#gga0ad41be408a53284c7cf2cc811da9477a6f39a447f84f146532ff4ef5ffdbd77e":[2,0,4,1200,3], "group__enums.html#gga0ad41be408a53284c7cf2cc811da9477a90ed77a2a31b5decb7c42bc9c586a657":[2,0,4,1200,1], "group__enums.html#gga0ad41be408a53284c7cf2cc811da9477a9b47cbf842b6ac85cd656126eea1ba1e":[2,0,4,1200,5], "group__enums.html#gga0ad41be408a53284c7cf2cc811da9477aae01922dbda121686e0f2d969522d149":[2,0,4,1200,2], "group__enums.html#gga0ad41be408a53284c7cf2cc811da9477ac1b12573127a840c10d839f9b3939f4c":[2,0,4,1200,6], "group__enums.html#gga0d8012bab06b0032b271f15d9316ba04a2748d5863bc1d5f38b44e2ac54e1a92e":[2,0,4,1152,1], "group__enums.html#gga0d8012bab06b0032b271f15d9316ba04a66571a513d0fae87e5b241ab7ef3b107":[2,0,4,1152,0], "group__enums.html#gga0f699ea05e14afaedb5c637de83cf369a25d8133a240e668ac11e12890f671f37":[2,0,4,1234,5], "group__enums.html#gga0f699ea05e14afaedb5c637de83cf369a7a942fd1d6c99ece95eb44dfa7282bfa":[2,0,4,1234,0], "group__enums.html#gga0f699ea05e14afaedb5c637de83cf369a80504e36aad33123871b1cd3ce133cab":[2,0,4,1234,2], 
"group__enums.html#gga0f699ea05e14afaedb5c637de83cf369a9ac6499f5b05a0a85c7a4d2965c964f5":[2,0,4,1234,1], "group__enums.html#gga0f699ea05e14afaedb5c637de83cf369ab8a7178097cf97dc03b255077679290b":[2,0,4,1234,7], "group__enums.html#gga0f699ea05e14afaedb5c637de83cf369ac50c6ef06e82e3cd96c8073dde473f6f":[2,0,4,1234,6], "group__enums.html#gga0f699ea05e14afaedb5c637de83cf369ad9f873e91c71c9fb2210fd55550bb22c":[2,0,4,1234,3], "group__enums.html#gga0f699ea05e14afaedb5c637de83cf369adea6d7ec17a5447e3628860d7b7e0e41":[2,0,4,1234,4], "group__enums.html#gga10f725a0faf48d67400e361297998784a23bafd77f0f33076fcb9dc6ee572b3cb":[2,0,4,1339,4], "group__enums.html#gga10f725a0faf48d67400e361297998784a30b8418245657688183847e84199a443":[2,0,4,1339,3], "group__enums.html#gga10f725a0faf48d67400e361297998784ab079d6dbe6772f9f269c3c05e9bec78b":[2,0,4,1339,5], "group__enums.html#gga10f725a0faf48d67400e361297998784ac3e83494a25102769c4216a0a0fd1af1":[2,0,4,1339,2], "group__enums.html#gga10f725a0faf48d67400e361297998784ae155ab8b05d88ac62f29b2937a56ec29":[2,0,4,1339,1], "group__enums.html#gga10f725a0faf48d67400e361297998784aee2a10046be895b8328607945dada2f1":[2,0,4,1339,0], "group__enums.html#gga14202ddb3f5a89e19b7827f84dbd12e0a71255c86d9f6d6ce9fc3b8c5d1238a47":[2,0,4,1173,1] };
King0987654/windows2000
private/windows/media/avi/mciavi.16/window.c
/******************************************************************************

   Copyright (C) Microsoft Corporation 1985-1991. All rights reserved.

   Title:   window.c - Multimedia Systems Media Control Interface
            driver for AVI.

*****************************************************************************/
#include "graphic.h"
#include "avitask.h"    // for TASKIDLE

//#define IDM_CONFIG     0x100
//#define IDM_SKIPFRAMES 0x110
#define IDM_MUTE       0x120
#define IDM_STRETCH    0x130

#ifdef WIN32
// Use a different class name on 32 bit systems to ease the 16/32
// coexistence problem.  (We might want both classes defined at once.)
TCHAR szClassName[] = TEXT("AVIWnd32");
#else
char szClassName[] = "AVIWnd";
#endif

DWORD NEAR PASCAL GraphicStop (NPMCIGRAPHIC npMCI, DWORD dwFlags);
DWORD NEAR PASCAL GraphicPause (NPMCIGRAPHIC npMCI, DWORD dwFlags);
DWORD NEAR PASCAL GraphicPlay (NPMCIGRAPHIC npMCI, DWORD dwFlags, LPMCI_ANIM_PLAY_PARMS lpPlay );
DWORD NEAR PASCAL GraphicSeek (NPMCIGRAPHIC npMCI, DWORD dwFlags, LPMCI_SEEK_PARMS lpSeek);

/* Register the window class used for the driver's default playback window.
 * cbWndExtra reserves space for the per-window NPMCIGRAPHIC pointer that
 * GraphicWndProc reads back with GetWindowWord/GetWindowLong. */
BOOL NEAR PASCAL GraphicWindowInit (void)
{
    WNDCLASS cls;

    // define the class of window we want to register
    cls.lpszClassName  = szClassName;
    cls.style          = CS_GLOBALCLASS | CS_OWNDC;
    cls.hCursor        = LoadCursor (NULL, IDC_ARROW);
    cls.hIcon          = NULL;
    cls.lpszMenuName   = NULL;
////cls.hbrBackground  = (HBRUSH)(COLOR_WINDOW + 1);
    cls.hbrBackground  = GetStockObject(BLACK_BRUSH);
    cls.hInstance      = ghModule;
    cls.lpfnWndProc    = GraphicWndProc;
    cls.cbClsExtra     = 0;
    cls.cbWndExtra     = sizeof (NPMCIGRAPHIC);
    return RegisterClass (&cls);
}

#ifdef WIN32
/*
 * de-register the class on unloading the dll so that we can
 * successfully re-register the class next time we are loaded.
 * note that nt only unregisters a class when the app exits.
 */
BOOL NEAR PASCAL GraphicWindowFree(void)
{
    return(UnregisterClass(szClassName, ghModule));
}
#endif

DWORD FAR PASCAL GraphicConfig(NPMCIGRAPHIC npMCI, DWORD dwFlags);

#if 0
static void NEAR PASCAL Credits(HWND hwnd);
#endif

/* Window procedure for the playback window.
 *
 * The NPMCIGRAPHIC instance pointer is stashed in the window's extra
 * bytes at WM_CREATE and fetched on every later message.  The whole
 * body runs inside the instance critical section (EnterCrit/LeaveCrit);
 * note the early-return paths (WM_CLOSE, WM_ERASEBKGND, WM_PAINT,
 * WM_QUERYNEWPALETTE, WM_SYSCOMMAND menu cases) which must release the
 * critical section themselves before returning. */
long FAR PASCAL _LOADDS GraphicWndProc (HWND hwnd, UINT wMsg, WPARAM wParam, LPARAM lParam)
{
    PAINTSTRUCT ps;
    NPMCIGRAPHIC npMCI;
    HMENU hmenu;
    HDC hdc;
    RECT rc;
    MINMAXINFO FAR * lpmmi;
    TCHAR ach[80];
#ifndef WIN32
    WORD ww;
    ww = GetWindowWord (hwnd, 0);
#else
    DWORD ww;
    ww = GetWindowLong (hwnd, 0);
#endif

    if ((ww == 0) && (wMsg != WM_CREATE)) {
        DPF(("null npMCI in windowproc!"));
        return DefWindowProc(hwnd, wMsg, wParam, lParam);
    }

    npMCI = (NPMCIGRAPHIC)ww;
    if (npMCI) {
        EnterCrit(npMCI);
    }

    switch (wMsg) {
        case WM_CREATE:
            /* Stash the instance pointer passed via CreateWindow's lpCreateParams. */
            npMCI = (NPMCIGRAPHIC)(UINT)(DWORD) ((LPCREATESTRUCT)lParam)->lpCreateParams;
            EnterCrit(npMCI);
#ifndef WIN32
            SetWindowWord (hwnd, 0, (WORD)npMCI);
#else
            SetWindowLong (hwnd, 0, (UINT)npMCI);
#endif
            hmenu = GetSystemMenu(hwnd, 0);
            if (hmenu) {
                /* Our system menu is too long--get rid of extra stuff. */
//              DeleteMenu(hmenu, SC_RESTORE, MF_BYCOMMAND);
//              DeleteMenu(hmenu, SC_MINIMIZE, MF_BYCOMMAND);
                DeleteMenu(hmenu, SC_MAXIMIZE, MF_BYCOMMAND);
                DeleteMenu(hmenu, SC_TASKLIST, MF_BYCOMMAND);

                /* Add additional menu items to the end of the system menu */
//              AppendMenu(hmenu, MF_SEPARATOR, 0, 0L);
#ifdef IDM_CONFIG
                LoadString(ghModule, MCIAVI_MENU_CONFIG, ach, sizeof(ach)/sizeof(TCHAR));
                AppendMenu(hmenu, MF_STRING, IDM_CONFIG, ach);
#endif
                LoadString(ghModule, MCIAVI_MENU_STRETCH, ach, sizeof(ach)/sizeof(TCHAR));
                AppendMenu(hmenu, MF_STRING, IDM_STRETCH, ach);
                LoadString(ghModule, MCIAVI_MENU_MUTE, ach, sizeof(ach)/sizeof(TCHAR));
                AppendMenu(hmenu, MF_STRING, IDM_MUTE, ach);
            }
            break;

        case WM_INITMENU:
            /* Refresh check/enable state of our custom system-menu items. */
            hmenu = GetSystemMenu(hwnd, 0);
            if (hmenu) {
#ifdef IDM_SKIPFRAMES
                CheckMenuItem(hmenu, IDM_SKIPFRAMES, MF_BYCOMMAND |
                        ((npMCI->dwOptionFlags & MCIAVIO_SKIPFRAMES) ?
                                MF_CHECKED : MF_UNCHECKED));
#endif
                CheckMenuItem(hmenu, IDM_STRETCH, MF_BYCOMMAND |
                        ((npMCI->dwOptionFlags & MCIAVIO_STRETCHTOWINDOW) ?
                                MF_CHECKED : MF_UNCHECKED));
#ifdef IDM_CONFIG
                /* If in configure box, disable menu item. */
                EnableMenuItem(hmenu, IDM_CONFIG, MF_BYCOMMAND |
                        (npMCI->wMessageCurrent == 0 ? MF_ENABLED : MF_GRAYED));
#endif
                /* If in stupid mode, disable stretch menu item. */
                EnableMenuItem(hmenu, IDM_STRETCH, MF_BYCOMMAND |
                        ((!(npMCI->dwOptionFlags & MCIAVIO_STUPIDMODE)) ?
                                MF_ENABLED : MF_GRAYED));
                EnableMenuItem(hmenu, IDM_MUTE, MF_BYCOMMAND |
                        (npMCI->nAudioStreams ? MF_ENABLED : MF_GRAYED));
                CheckMenuItem(hmenu, IDM_MUTE, MF_BYCOMMAND |
                        (!(npMCI->dwFlags & MCIAVI_PLAYAUDIO) ?
                                MF_CHECKED : MF_UNCHECKED));
            }
            break;

        case WM_SYSCOMMAND:
            switch (wParam & 0xfff0) {
                case SC_KEYMENU:
                case SC_MOUSEMENU:
                    gfEvilSysMenu++;
                    LeaveCrit(npMCI);   // Must not hold while in DefWindowProc
                    lParam = DefWindowProc(hwnd, wMsg, wParam, lParam);
                    gfEvilSysMenu--;
                    return lParam;
#ifdef IDM_SKIPFRAMES
                case IDM_SKIPFRAMES:
                    npMCI->dwOptionFlags ^= MCIAVIO_SKIPFRAMES;
                    break;
#endif
                case IDM_STRETCH:
                    /* Toggle stretch-to-window; snap back to default size when turned off. */
                    npMCI->dwOptionFlags ^= MCIAVIO_STRETCHTOWINDOW;
                    if (!(npMCI->dwOptionFlags & MCIAVIO_STRETCHTOWINDOW)) {
                        SetWindowToDefaultSize(npMCI);
                    }
                    ResetDestRect(npMCI);
                    break;

                case IDM_MUTE:
                    DeviceMute(npMCI, (npMCI->dwFlags & MCIAVI_PLAYAUDIO) != 0);
                    break;
#ifdef IDM_CONFIG
                case IDM_CONFIG:
                    npMCI->wMessageCurrent = MCI_CONFIGURE;
                    gfEvil++;
                    GraphicConfig(npMCI, 0L);
                    gfEvil--;
                    npMCI->wMessageCurrent = 0;
                    break;
#endif
            }
            break;

        case WM_CLOSE:
            // Hide default window
            DeviceStop(npMCI, MCI_WAIT);
            ShowWindow(hwnd, SW_HIDE);
            LeaveCrit(npMCI);
            return 0L;

        case WM_DESTROY:
            // The window may be destroyed 2 ways.
            // a. the device is closed.  In this case the animation is
            //    freed in DeviceClose which is called from GraphicClose
            //    and the animation ID is NULL by the time this window is
            //    destroyed.
            // b. the window is closed.  In this case, the animation is
            //    not closed and we should set the stage to NULL.  A new
            //    default window will be created if needed.
            if (IsTask(npMCI->hTask)) {
                DeviceStop(npMCI, MCI_WAIT);
            }
            if (npMCI->hwnd == npMCI->hwndDefault)
                npMCI->hwnd = NULL;
            npMCI->hwndDefault = NULL;
            break;

        case WM_ERASEBKGND:
            /* Paint gray over the movie rect when video is hidden, black
             * elsewhere in the client area (movie rect excluded from the clip). */
            hdc = (HDC) wParam;
            if (!(npMCI->dwFlags & MCIAVI_SHOWVIDEO)) {
                FillRect(hdc, &npMCI->rcDest, GetStockObject(GRAY_BRUSH));
            }
            SaveDC(hdc);
            ExcludeClipRect(hdc, npMCI->rcDest.left, npMCI->rcDest.top,
                            npMCI->rcDest.right, npMCI->rcDest.bottom);
            GetClientRect(hwnd, &rc);
            FillRect(hdc, &rc, GetStockObject(BLACK_BRUSH));
            RestoreDC(hdc, -1);

            /* Hack: if we're in a WAIT state, we won't get
            ** a WM_PAINT, so we need to invalidate the streams here */
            GetClipBox(hdc, &rc);
            StreamInvalidate(npMCI, &rc);
            LeaveCrit(npMCI);
            return 0L;

        case WM_PAINT:
#ifdef WIN32
            /*
             * on NT we have to poll more often to avoid deadlock between
             * threads (a SetWindowPos call on one thread will cause
             * the window-creating thread to issue the WM_SIZE message -
             * synchronously). The side effect of this is that we poll
             * for messages at times when it is not safe to process all
             * messages.
             *
             * So unless we know it is safe to paint, we punt...
             */
            //if (npMCI->wTaskState != TASKIDLE)
            if ((npMCI->wTaskState != TASKIDLE) &&
                (npMCI->wTaskState != TASKPAUSED)) {
                npMCI->dwFlags |= MCIAVI_NEEDUPDATE;
                DPF0(("Punting on painting, wTaskState = %x", npMCI->wTaskState));
                break;
            }
#endif
            hdc = BeginPaint(hwnd, &ps);
            GetClientRect(hwnd, &rc);

            /* If updating fails, paint gray. */
            if (DeviceUpdate(npMCI, MCI_DGV_UPDATE_PAINT, hdc, &ps.rcPaint) ==
                        MCIERR_DEVICE_NOT_READY) {
                GetClientRect(hwnd, &rc);
                FillRect(hdc, &rc, GetStockObject(DKGRAY_BRUSH));
            }
            EndPaint(hwnd, &ps);
            return 0L;

        case WM_PALETTECHANGED:
            // We're not using the default window.  We have no business here.
            if (npMCI->hwnd != hwnd)
                break;
            //
            // someone has realized a palette - so we need to re-realize our
            // palette (note that this will also cause drawdib to
            // check for PAL_INDICES vs PAL_COLOURS.
            //
            if ((HWND) wParam != hwnd) {
                DeviceRealize(npMCI);
                InvalidateRect(hwnd, NULL, FALSE);
            }
            break;

        case WM_QUERYNEWPALETTE:
            // We're not using the default window.  We have no business here.
            if (npMCI->hwnd != hwnd)
                break;
            LeaveCrit(npMCI); // tomor -- maybe this should be after?
            return DeviceRealize(npMCI);

        case WM_WINDOWPOSCHANGED:
            CheckWindowMove(npMCI, TRUE);
            break;

#ifdef WM_AVISWP
        case WM_AVISWP:
        {
            long res;
            res = SetWindowPos(hwnd, HWND_TOP, 0, 0, 0, 0, lParam);
            LeaveCrit(npMCI);
            return(res);
        }
#endif

        case WM_SIZE:
            ResetDestRect(npMCI);
            break;

        case WM_QUERYENDSESSION:
            DeviceStop(npMCI, MCI_WAIT);
            break;

        case WM_ENDSESSION:
            if (wParam) {
                DestroyWindow(hwnd);    // we may not be able to destroy window?
            }
            break;

        case WM_GETMINMAXINFO:
            lpmmi = (MINMAXINFO FAR *)(lParam);
            lpmmi->ptMinTrackSize.x = GetSystemMetrics(SM_CXSIZE) * 2;
            break;

        case WM_NCACTIVATE:
        case WM_ACTIVATE:
            DeviceSetActive(npMCI, (BOOL)wParam);
            break;

        case WM_AUDIO_ON:
            /* Wave device became available again: clear PLAYAUDIO so the
             * DeviceMute(FALSE) call re-enables audio from a known state. */
            Assert(npMCI->dwFlags & MCIAVI_PLAYAUDIO);
            Assert(npMCI->dwFlags & MCIAVI_LOSTAUDIO);
            Assert(npMCI->hWave == NULL);
            npMCI->dwFlags &= ~MCIAVI_PLAYAUDIO;
            DeviceMute(npMCI, FALSE);
            break;

        case WM_AUDIO_OFF:
            /* Wave device lost: mute, then remember that we still want audio. */
            Assert(npMCI->dwFlags & MCIAVI_PLAYAUDIO);
            Assert(!(npMCI->dwFlags & MCIAVI_LOSTAUDIO));
            Assert(npMCI->hWave != NULL);
            DeviceMute(npMCI, TRUE);
            npMCI->dwFlags |= MCIAVI_LOSTAUDIO;
            npMCI->dwFlags |= MCIAVI_PLAYAUDIO;
            break;

#if 0
        case WM_LBUTTONDOWN:
        {
            DWORD dw;
            static DWORD dwLastClick;
            static DWORD dwClicks = 0;
            #define MAX_CLICKS 7
            /* . = (0,300) - = (300,1000) word = (500,1500) */
            /* AVI:  .- ...- .. */
            static DWORD adwClickHigh[MAX_CLICKS] = { 300, 1500, 300, 300, 300, 1500, 300 };
            static DWORD adwClickLow[MAX_CLICKS]  = { 0, 500, 0, 0, 0, 500, 0 };

            dw = timeGetTime();

            if (((dw - dwLastClick) > adwClickLow[dwClicks]) &&
                ((dw - dwLastClick) <= adwClickHigh[dwClicks]))
                dwClicks++;
            else
                dwClicks = 0;

            dwLastClick = dw;

            if (dwClicks == MAX_CLICKS) {
                DeviceStop(npMCI, MCI_WAIT);
                Credits(hwnd);
                dwClicks = 0;
            }
        }
#endif
    }

    if (npMCI) {
        LeaveCrit(npMCI);
    }
    return DefWindowProc(hwnd, wMsg, wParam, lParam);
}

#if 0
/* Disabled easter egg: scrolls an encrypted credits resource up the window
 * one pixel at a time until a key press or mouse click. */
static void NEAR PASCAL Credits(HWND hwnd)
{
    /* Credits... */
    RECT        rc;
    RECT        rcUpdate;
    HDC         hdc;
    MSG         msg;
    int         dyLine;
    int         yLine;
    TEXTMETRIC  tm;
    DWORD       dwNextTime;
    long        lScroll;
    DWORD       rgb;
    HANDLE      hResInfo;
    HANDLE      hResData;
    LPSTR       pchSrc, pchDst;
    char        achLine[100];
    int         iEncrypt;

#define EOFCHAR '@'     // end of credits file

    /* load the credits */
    if ((hResInfo = FindResource(ghModule, TEXT("MMS"), TEXT("MMSCR"))) == NULL)
        return;
    if ((hResData = LoadResource(ghModule, hResInfo)) == NULL)
        return;
    if ((pchSrc = LockResource(hResData)) == NULL)
        return;

    /* we want to get all mouse and keyboard events, to make
     * sure we stop the animation when the user clicks or
     * hits a key
     */
    SetFocus(hwnd);
    SetCapture(hwnd);

    /* Scroll the credits up, one pixel at a time.  pchSrc
     * points to the encrypted data; achLine contains a decrypted
     * line (null-terminated).  dyLine is the height of each
     * line (constant), and yLine is between 0 and dyLine,
     * indicating how many pixels of the line have been scrolled
     * in vertically from the bottom
     */
    hdc = GetDC(hwnd);
    SelectObject(hdc, GetStockObject(ANSI_VAR_FONT));
    GetClientRect(hwnd, &rc);
    SetTextAlign(hdc, TA_CENTER);
    SetBkColor(hdc, RGB(0, 0, 0));
    SetRect(&rcUpdate, 0, rc.bottom - 1, rc.right, rc.bottom);
    GetTextMetrics(hdc, &tm);
    if ((dyLine = tm.tmHeight + tm.tmExternalLeading) == 0)
        dyLine = 1;
    yLine = dyLine;
    dwNextTime = GetCurrentTime();      // time to do the next scroll
    lScroll = 0;
    iEncrypt = 0;

    while (TRUE) {
        /* If the user clicks the mouse or hits a key, exit.
         * However, ignore WM_LBUTTONUP because they will have
         * to let go of the mouse after clicking the icon.
         * Also, ignore mouse move messages.
         */
        if (PeekMessage(&msg, hwnd, WM_KEYFIRST, WM_KEYLAST,
                        PM_NOREMOVE | PM_NOYIELD))
            break;              // exit on key hit
        if (PeekMessage(&msg, hwnd, WM_MOUSEFIRST, WM_MOUSELAST,
                        PM_NOREMOVE | PM_NOYIELD)) {
            if ((msg.message == WM_MOUSEMOVE) ||
                (msg.message == WM_LBUTTONUP)) {
                /* remove and ignore message */
                PeekMessage(&msg, hwnd, msg.message, msg.message,
                            PM_REMOVE | PM_NOYIELD);
            }
            else
                break;          // exit on click
        }

        /* scroll at a fixed no. of vertical pixels per sec. */
        if (dwNextTime > GetCurrentTime())
            continue;
        dwNextTime += 50L;      // millseconds per scroll

        if (yLine == dyLine) {
            /* decrypt a line and copy to achLine */
            pchDst = achLine;
            while (TRUE) {
                *pchDst = (char) (*pchSrc++ ^ (128 | (iEncrypt++ & 127)));
                if ((*pchDst == '\r') || (*pchDst == EOFCHAR))
                    break;
                pchDst++;
            }
            if (*pchDst == EOFCHAR)
                break;          // no more lines
            *pchDst = 0;        // null-terminate
            pchSrc++, iEncrypt++;       // skip '\n'
            yLine = 0;
        }

        /* scroll screen up one pixel */
        BitBlt(hdc, 0, 0, rcUpdate.right, rcUpdate.top, hdc, 0, 1, SRCCOPY);

        /* vary the text colors through a "rainbow" */
        switch ((int) (lScroll++ / 4) % 5/*num-of-cases*/) {
        case 0: rgb = RGB(255,   0,   0); break;
        case 1: rgb = RGB(255, 255,   0); break;
        case 2: rgb = RGB(  0, 255,   0); break;
        case 3: rgb = RGB(  0, 255, 255); break;
        case 4: rgb = RGB(255,   0, 255); break;
        }
        SetTextColor(hdc, rgb);

        /* fill in the bottom pixel */
        SaveDC(hdc);
        yLine++;
        IntersectClipRect(hdc, rcUpdate.left, rcUpdate.top,
                          rcUpdate.right, rcUpdate.bottom);
#ifdef WIN32
        ExtTextOutA(hdc, rc.right / 2, rc.bottom - yLine, ETO_OPAQUE,
                    &rcUpdate, achLine, lstrlenA(achLine), NULL);
#else
        ExtTextOut(hdc, rc.right / 2, rc.bottom - yLine, ETO_OPAQUE,
                   &rcUpdate, achLine, lstrlen(achLine), NULL);
#endif
        RestoreDC(hdc, -1);
    }

    ReleaseDC(hwnd, hdc);
    ReleaseCapture();
    UnlockResource(hResData);
    FreeResource(hResData);
    InvalidateRect(hwnd, NULL, TRUE);
}
#endif

/* Resize the default window to the movie's natural size (doubled under
 * ZOOMBY2), adjusting for the window frame.  An iconic window has its
 * restored-position rectangle updated instead of being resized directly. */
void FAR PASCAL SetWindowToDefaultSize(NPMCIGRAPHIC npMCI)
{
    RECT    rc;

    if (npMCI->hwnd && npMCI->hwnd == npMCI->hwndDefault) {
        rc = npMCI->rcMovie;

        if (npMCI->dwOptionFlags & MCIAVIO_ZOOMBY2)
            SetRect(&rc, 0, 0, rc.right*2, rc.bottom*2);

        AdjustWindowRect(&rc, GetWindowLong(npMCI->hwnd, GWL_STYLE), FALSE);

        if (IsIconic(npMCI->hwnd)) {
            WINDOWPLACEMENT wp;
            wp.length = sizeof(wp);
            GetWindowPlacement(npMCI->hwnd, &wp);
            wp.rcNormalPosition.right = wp.rcNormalPosition.left +
                        (rc.right - rc.left);
            wp.rcNormalPosition.bottom = wp.rcNormalPosition.top +
                        (rc.bottom - rc.top);
            SetWindowPlacement(npMCI->hwnd, &wp);
        } else {
            SetWindowPos(npMCI->hwnd, NULL, 0, 0, rc.right - rc.left,
                         rc.bottom - rc.top,
                         SWP_NOMOVE | SWP_NOZORDER | SWP_NOACTIVATE);
        }
    }
}

/* Recompute the destination rectangle: the full client area when
 * stretch-to-window is on for the default window, otherwise the movie
 * rect (doubled under ZOOMBY2).  Pushes the result via DevicePut. */
void FAR PASCAL ResetDestRect(NPMCIGRAPHIC npMCI)
{
    RECT rc;

    /* WM_SIZE messages (on NT at least) are sometimes sent
     * during CreateWindow processing (eg if the initial window size
     * is not CW_DEFAULT). Some fields in npMCI are only filled in
     * after CreateWindow has returned. So there is a danger that at this
     * point some fields are not valid.
     */

    if (npMCI->hwnd && npMCI->hwnd == npMCI->hwndDefault &&
            (npMCI->dwOptionFlags & MCIAVIO_STRETCHTOWINDOW)) {
        GetClientRect(npMCI->hwnd, &rc);
    } else if (npMCI->streams > 0) {
        rc = npMCI->rcMovie;
        if (npMCI->dwOptionFlags & MCIAVIO_ZOOMBY2) {
            rc.right *= 2;
            rc.bottom *= 2;
        }
    } else {
        return;
    }

    if (!IsRectEmpty(&rc))
        DevicePut(npMCI, &rc, MCI_DGV_PUT_DESTINATION);
}

/* Notify the draw device (via ICM_DRAW_WINDOW) when the window's clip
 * region, DC origin, or clip box has changed since the last call.
 * fForce invalidates the cached region type so a notification is sent
 * even if nothing appears to have moved. */
void CheckWindowMove(NPMCIGRAPHIC npMCI, BOOL fForce)
{
#ifdef WIN32
    POINT dwOrg;
#else
    DWORD dwOrg;
#endif
    UINT wRgn;
    HDC hdc;
    RECT rc;
    BOOL f;
    BOOL fGetDC;

    if (!(npMCI->dwFlags & MCIAVI_WANTMOVE))
        return;

    if (!npMCI->hicDraw || !npMCI->hwnd || npMCI->nVideoStreams == 0)
        return;

    Assert(IsWindow(npMCI->hwnd));
    Assert(npMCI->paStreamInfo);
    Assert(npMCI->nVideoStreams > 0);

    //
    // when the screen is locked for update by a window move operation
    // we dont want to turn off the video.
    //
    // we can tell if the screen is locked by checking a DC to the screen.
    //
    hdc = GetDC(NULL);
    f = GetClipBox(hdc, &rc) == NULLREGION;
    ReleaseDC(NULL, hdc);

    if (f) {
        npMCI->wRgnType = (UINT) -1;
        return;
    }

    if (fForce)
        npMCI->wRgnType = (UINT) -1;

    if (fGetDC = (npMCI->hdc == NULL))
        hdc = GetDC (npMCI->hwnd);
    else
        hdc = npMCI->hdc;

    wRgn = GetClipBox(hdc, &rc);
#ifdef WIN32
    GetDCOrgEx(hdc, &dwOrg);
#else
    dwOrg = GetDCOrg(hdc);
#endif

    if (fGetDC)
        ReleaseDC(npMCI->hwnd, hdc);

    /* Nothing changed since last time: skip the notification. */
    if (wRgn == npMCI->wRgnType &&
#ifdef WIN32
        dwOrg.x == npMCI->dwOrg.x && dwOrg.y == npMCI->dwOrg.y &&
#else
        dwOrg == npMCI->dwOrg &&
#endif
        EqualRect(&rc, &npMCI->rcClip))
        return;

    npMCI->wRgnType = wRgn;
    npMCI->dwOrg = dwOrg;
    npMCI->rcClip = rc;

    rc = npMCI->psiVideo->rcDest;

    ClientToScreen(npMCI->hwnd, (LPPOINT)&rc);
    ClientToScreen(npMCI->hwnd, (LPPOINT)&rc+1);

    if (wRgn == NULLREGION)
        SetRectEmpty(&rc);

    DPF2(("Sending ICM_DRAW_WINDOW message Rgn=%d, Org=(%d,%d) [%d, %d, %d, %d]\n", wRgn, dwOrg, rc));

    if (ICDrawWindow(npMCI->hicDraw, &rc) != ICERR_OK) {
        DPF2(("Draw device does not want ICM_DRAW_WINDOW messages!\n"));
        npMCI->dwFlags &= ~MCIAVI_WANTMOVE;
    }
}
dvbu-test/PDTool
CISAdminApi7.0.0/src/com/compositesw/services/system/admin/user/DestroyUserRequest.java
package com.compositesw.services.system.admin.user;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for destroyUserRequest complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="destroyUserRequest">
 *   &lt;complexContent>
 *     &lt;extension base="{http://www.compositesw.com/services/system/admin/user}domainUserRequest">
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "destroyUserRequest")
public class DestroyUserRequest
    extends DomainUserRequest
{
    // Intentionally empty: per the schema fragment above, destroyUserRequest
    // extends domainUserRequest without adding any content of its own, so the
    // target user is identified entirely by the inherited fields.
}
adamanikiej/mettle-components
services.js
// Barrel module for the services package: re-exports the default export of
// every module under ./src/services under a stable public name, so consumers
// can pull any service from this single entry point.
export { default as Debounce } from './src/services/debounce.js'
export { default as HtmlMarker } from './src/services/html-marker.js'
export { default as HttpFetch } from './src/services/http-fetch.js'
export { default as I18n } from './src/services/i18n.js'
export { default as Observable } from './src/services/observable.js'
export { default as ReadyState } from './src/services/ready-state.js'
export { default as Roles } from './src/services/roles.js'
export { default as Router } from './src/services/router.js'
viant/mly
service/domain/transformer.go
package domain import ( "context" "fmt" tf "github.com/tensorflow/tensorflow/tensorflow/go" "github.com/viant/gtly" "github.com/viant/mly/shared/common" "github.com/viant/mly/shared/common/storable" ) //Transformer represents output transformer type Transformer func(ctx context.Context, signature *Signature, input *gtly.Object, output interface{}) (common.Storable, error) //Transform transform default model output func Transform(ctx context.Context, signature *Signature, input *gtly.Object, output interface{}) (common.Storable, error) { fields := []*storable.Field{} for _, output := range signature.Outputs { fields = append(fields, &storable.Field{Name: output.Name, DataType: output.DataType}) } var pairs = []*kvPair{} result := storable.New(fields) var outputValue interface{} switch val := output.(type) { case [][]float32: outputValue = val[0][0] pairs = append(pairs, &kvPair{ k: signature.Outputs[0].Name, v: outputValue, }) case [][]float64: outputValue = val[0][0] pairs = append(pairs, &kvPair{ k: signature.Outputs[0].Name, v: outputValue, }) case [][]string: outputValue = val[0][0] pairs = append(pairs, &kvPair{ k: signature.Outputs[0].Name, v: outputValue, }) case [][]int64: outputValue = val[0][0] pairs = append(pairs, &kvPair{ k: signature.Outputs[0].Name, v: outputValue, }) case []*tf.Tensor: for i := range val { tensor := val[i].Value() switch t := tensor.(type) { case []float32: outputValue = t[0] case []float64: outputValue = t[0] case []string: outputValue = t[0] case []int64: outputValue = t[0] default: return nil, fmt.Errorf("unsupported type: %T", t) } pairs = append(pairs, &kvPair{ k: signature.Outputs[i].Name, v: outputValue, }) } } err := result.Set(func(pair common.Pair) error { for _, kvPair := range pairs { if err := pair(kvPair.k, kvPair.v); err != nil { return err } } return nil }) return result, err } type kvPair struct { k string v interface{} }
lenxin/spring-security
rsocket/src/main/java/org/springframework/security/rsocket/api/PayloadExchange.java
package org.springframework.security.rsocket.api;

import io.rsocket.Payload;
import org.springframework.util.MimeType;

/**
 * Contract for a Payload interaction.
 *
 * @author <NAME>
 * @since 5.2
 */
public interface PayloadExchange {

	/**
	 * The kind of interaction this exchange represents.
	 * @return the {@link PayloadExchangeType} of this exchange
	 */
	PayloadExchangeType getType();

	/**
	 * The payload being exchanged.
	 * @return the {@link Payload}; presumably carries both data and metadata
	 * frames — confirm against the RSocket contract
	 */
	Payload getPayload();

	/**
	 * @return the {@link MimeType} of the payload's data portion
	 */
	MimeType getDataMimeType();

	/**
	 * @return the {@link MimeType} of the payload's metadata portion
	 */
	MimeType getMetadataMimeType();
}
xwt-benchmarks/mongodb-rdbms-sync
src/main/java/com/cisco/app/dbmigrator/migratorapp/core/event/OracleToMongoEvent.java
<filename>src/main/java/com/cisco/app/dbmigrator/migratorapp/core/event/OracleToMongoEvent.java package com.cisco.app.dbmigrator.migratorapp.core.event; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import org.apache.log4j.Logger; import org.bson.BsonDocument; import org.bson.BsonDocumentWrapper; import org.bson.Document; import org.bson.codecs.configuration.CodecRegistry; import org.bson.conversions.Bson; import org.bson.types.ObjectId; import com.cisco.app.dbmigrator.migratorapp.core.job.NodeBalancer; import com.cisco.app.dbmigrator.migratorapp.core.job.SyncStatus; import com.cisco.app.dbmigrator.migratorapp.core.map.OracleToMongoMap; import com.cisco.app.dbmigrator.migratorapp.core.meta.mongo.MongoAttribute; import com.cisco.app.dbmigrator.migratorapp.core.meta.mongo.MongoEntity; import com.cisco.app.dbmigrator.migratorapp.core.meta.mongo.MongoObject; import com.cisco.app.dbmigrator.migratorapp.core.thread.OrclToMngReader; import com.cisco.app.dbmigrator.migratorapp.core.thread.OrclToMngWriter; import com.cisco.app.dbmigrator.migratorapp.logging.dao.SyncEventDao; import com.cisco.app.dbmigrator.migratorapp.logging.dao.SyncMapDao; import com.cisco.app.dbmigrator.migratorapp.mail.Mailer; import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.Literal; import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.MatchAble; import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.entities.MatchOperator; import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.queries.SelectQueryBuilder; import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.sqlcomponents.LogicalOperation; import com.cisco.app.dbmigrator.migratorapp.sqlbuilder.sqlcomponents.MatchOperation; import 
com.cisco.app.dbmigrator.migratorapp.sqlbuilder.sqlcomponents.SQLFilters; import com.cisco.app.dbmigrator.migratorapp.utilities.cache.DBCacheManager; import com.cisco.app.dbmigrator.migratorapp.utilities.mongo.MongoDbUtilities; import com.cisco.app.dbmigrator.migratorapp.utilities.oracle.DbResourceUtils; import com.mongodb.client.MongoCollection; import com.mongodb.client.model.Filters; import com.mongodb.client.result.DeleteResult; /** * Runnable Class to Process OracleDB to MongoDB data migration(one time event). * * @author pnilayam * */ @SuppressWarnings("rawtypes") public class OracleToMongoEvent extends SyncEvent<List<Document>>implements Cloneable { private static Logger logger = Logger.getLogger(OracleToMongoEvent.class); private String collectionName; private boolean saveNulls; private OracleParallelReadInfo parallelReadInfo; private SQLFilters rangeFilter; //private boolean process; private final CountDownLatch latch= new CountDownLatch(2); public SQLFilters getRangeFilter() { return rangeFilter; } public void setRangeFilter(SQLFilters rangeFilter) { this.rangeFilter = rangeFilter; } /* *//** * @return the process *//* public boolean isProcess() { return process; } *//** * @param process the process to set *//* public void setProcess(boolean process) { this.process = process; }*/ public void run() { try { //Mailer.sendmail(this, null, null, Mailer.STARTED); if (marker == null) { marker = new SyncMarker(); } if (parentEventId == null) { parentEventId = eventId; } marker.setStartTime(new Date()); dataBuffer = new LinkedBlockingQueue<List<Document>>(batchSize); eventDao = new SyncEventDao(); logger.info("OracleToMongoBasicEvent Thread Started at " + System.currentTimeMillis()); OracleToMongoMap map = (OracleToMongoMap) new SyncMapDao().getMapping(mapId); if (isRetry) { clearOldData(map); } MongoObject mongoObject = map.getMapObject(); if (rangeFilter != null) { if (mongoObject.getFilters() == null) { mongoObject.setFilters(rangeFilter); } else { 
mongoObject.getFilters().AND(rangeFilter); } } getStats(mongoObject, map.getSourceDbName(), map.getSourceUserName()); if (marker.getTotalRows() != 0) { eventDao.updateMarker(eventId, marker); Thread reader; reader = new Thread(new OrclToMngReader(map.getMapObject(), batchSize, dataBuffer, marker, saveNulls, map.getSourceDbName(), map.getSourceUserName(), eventId, latch)); reader.setName(eventName + "-Reader"); reader.start(); Thread writer; writer = new Thread(new OrclToMngWriter(dataBuffer, map.getTargetDbName(), map.getTargetUserName(), marker, eventId, collectionName , latch)); writer.setName(eventName + "-Writer"); writer.start(); } else { marker.setEndTime(new Date()); eventDao.updateMarker(eventId, marker); NodeBalancer.INSTANCE.markEventAsCompleted(eventId); } latch.await(); } catch (SyncError e) { e.printStackTrace(); e.setThreadName(eventName); eventDao.pushError(eventId, e); NodeBalancer.INSTANCE.markEventAsFailed(eventId); Mailer.sendmail(this, null, e, Mailer.FAILURE); } catch (InterruptedException e) { e.printStackTrace(); SyncError error = new SyncError(e); error.setThreadName(eventName); eventDao.pushError(eventId, error); NodeBalancer.INSTANCE.markEventAsFailed(eventId); Mailer.sendmail(this, null, e, Mailer.FAILURE); } finally { dataBuffer = null; marker = null; logger.info("OracleToMongoBasicEvent Thread Completed at " + System.currentTimeMillis()); //Mailer.sendmail(this, null, null, Mailer.COMPLETED); } } @Override public Object clone() throws CloneNotSupportedException { OracleToMongoEvent clonedEvent = (OracleToMongoEvent) super.clone(); clonedEvent.setEventId(new ObjectId()); clonedEvent.setParentEventId(this.getEventId()); clonedEvent.setCreatedBy("SYSTEM"); clonedEvent.setCreatedOn(new Date()); clonedEvent.setParallelReadInfo(null); clonedEvent.setStatus(SyncStatus.PENDING); clonedEvent.setDataBuffer(null); clonedEvent.setMarker(null); clonedEvent.setRangeFilter(null); return clonedEvent; } private void getStats(MongoObject mongoObject, 
String sourceDbName, String sourceSchemaName) throws SyncError { SelectQueryBuilder queryBuilder = new SelectQueryBuilder(); List<MatchAble> bindvalues = new ArrayList<MatchAble>(); PreparedStatement stmt = null; ResultSet rs = null; String countQuery = queryBuilder.select().from(mongoObject.getSourceTables().get(0)) .where(mongoObject.getFilters()).getCountQuery(bindvalues); Connection connection = null; try { connection = DBCacheManager.INSTANCE.getCachedOracleConnection(sourceDbName, sourceSchemaName); stmt = connection.prepareStatement(countQuery); if (bindvalues != null) { for (int index = 0; index < bindvalues.size(); index++) { stmt.setObject(index + 1, bindvalues.get(index).getSqlExpressionForMatchable()); } } rs = stmt.executeQuery(); logger.debug("Query Executed to get RowCount"); rs.next(); int totalRows = rs.getInt(1); logger.info("Rowcount Fecthed : " + totalRows); marker.setTotalRows(totalRows); } catch (SQLException e) { logger.error("Error while getting total count of rows to be processed", e); throw new SyncError(e); } finally { DbResourceUtils.closeResources(rs, stmt, connection); } } public OracleParallelReadInfo getParallelReadInfo() { return parallelReadInfo; } public void setParallelReadInfo(OracleParallelReadInfo parallelReadInfo) { this.parallelReadInfo = parallelReadInfo; } public boolean isSaveNulls() { return saveNulls; } public void setSaveNulls(boolean saveNulls) { this.saveNulls = saveNulls; } public String getCollectionName() { return collectionName; } public void setCollectionName(String collectionName) { this.collectionName = collectionName; } @Override public <TDocument> BsonDocument toBsonDocument(Class<TDocument> arg0, CodecRegistry codecRegistry) { return new BsonDocumentWrapper<OracleToMongoEvent>(this, codecRegistry.get(OracleToMongoEvent.class)); } private void clearOldData(OracleToMongoMap map) throws SyncError { try{ MongoCollection collection =DBCacheManager.INSTANCE.getCachedMongoPool(map.getTargetDbName(), 
map.getTargetUserName()) .getDatabase(map.getTargetDbName()).getCollection(collectionName); if(parentEventId!=null || parentEventId!=eventId){ OracleToMongoEvent parentEvent = (OracleToMongoEvent) eventDao.getEvent(parentEventId); String mongoAttributeName=null; List<MongoEntity> identifiers = map.getMapObject().getIdentifierList(); //TODO : modify to take care of complex identifiers if(identifiers!=null){ for(MongoEntity entity : identifiers){ MongoAttribute attribute= (MongoAttribute)entity; if(attribute.getMappedOracleColumn().equals(parentEvent.getParallelReadInfo().getRangeColumn())){ mongoAttributeName=attribute.getAttributeName(); break; } } } if(mongoAttributeName==null){ List<MongoEntity> attributeList = map.getMapObject().getAttributes(); for(MongoEntity entity : attributeList){ if(entity instanceof MongoAttribute){ MongoAttribute attribute= (MongoAttribute)entity; if(attribute.getMappedOracleColumn().equals(parentEvent.getParallelReadInfo())){ mongoAttributeName=attribute.getAttributeName(); break; } } } } Bson filter=null; if(rangeFilter!=null){ Bson firstRange = getRangeBson(rangeFilter.getMatchOperation(), mongoAttributeName); Bson secondRange = null; Set<LogicalOperation> logicalOperations = rangeFilter.getLogicaloperations(); for(LogicalOperation operation : logicalOperations){ secondRange = getRangeBson(operation.getMatchOperation(), mongoAttributeName); break;//It will have just one operation } filter = Filters.and(firstRange,secondRange); } DeleteResult result = collection.deleteMany(filter); logger.info("Number or old rows deleted for Thread : "+Thread.currentThread().getName()+ " : " + result.getDeletedCount()); }else{ collection.drop(); } } catch(Exception e) { throw new SyncError(e); } } private Bson getRangeBson(MatchOperation matchOperation , String attributeName){ Object literalValue=null; Bson filter =null; if(matchOperation!=null){ MatchAble rightExpression =matchOperation.getRightExpression(); if(rightExpression instanceof Literal){ 
literalValue = ((Literal)rightExpression).getLiteralValue(); }else{ Literal leftExpression =(Literal) matchOperation.getLeftExpression(); literalValue = leftExpression.getLiteralValue(); } MatchOperator matchOperator = matchOperation.getOperator(); filter = MongoDbUtilities.getFilterBson(matchOperator, attributeName, literalValue); } return filter; } }
okket/homebrew-cask
Casks/spacelauncher.rb
cask "spacelauncher" do
  # Sparkle-style version: "<marketing version>,<build number>".
  version "1.4.13,85"
  # NOTE(review): the download URL is unversioned, so the archive is replaced
  # in place upstream — presumably why the checksum cannot be pinned.
  sha256 :no_check

  url "https://spacelauncherapp.com/download/SpaceLauncher.zip"
  name "SpaceLauncher"
  homepage "https://spacelauncherapp.com/"

  # New versions are discovered from the app's Sparkle appcast feed.
  livecheck do
    url "https://spacelauncherapp.com/download/appcast.xml"
    strategy :sparkle
  end

  app "SpaceLauncher.app"
end
lkun/enode
tests/src/test/java/org/enodeframework/tests/TestClasses/EventPublisherFailedTest.java
<reponame>lkun/enode package org.enodeframework.tests.TestClasses; import org.enodeframework.commanding.CommandResult; import org.enodeframework.commanding.CommandStatus; import org.enodeframework.common.io.AsyncTaskResult; import org.enodeframework.common.io.AsyncTaskStatus; import org.enodeframework.common.io.Task; import org.enodeframework.common.utilities.ObjectId; import org.enodeframework.tests.Commands.CreateTestAggregateCommand; import org.enodeframework.tests.Mocks.FailedType; import org.enodeframework.tests.Mocks.MockDomainEventPublisher; import org.junit.Assert; import org.junit.Test; public class EventPublisherFailedTest extends AbstractTest { @Test public void event_publisher_failed_test() { CreateTestAggregateCommand command = new CreateTestAggregateCommand(); command.aggregateRootId = ObjectId.generateNewStringId(); command.setTitle("Sample Note"); ((MockDomainEventPublisher) _domainEventPublisher).setExpectFailedCount(FailedType.UnKnownException, 5); AsyncTaskResult<CommandResult> asyncResult = Task.await(_commandService.executeAsync(command)); Assert.assertNotNull(asyncResult); Assert.assertEquals(AsyncTaskStatus.Success, asyncResult.getStatus()); CommandResult commandResult = asyncResult.getData(); Assert.assertNotNull(commandResult); Assert.assertEquals(CommandStatus.Success, commandResult.getStatus()); ((MockDomainEventPublisher) _domainEventPublisher).Reset(); command = new CreateTestAggregateCommand(); command.aggregateRootId = ObjectId.generateNewStringId(); command.setTitle("Sample Note"); ((MockDomainEventPublisher) _domainEventPublisher).setExpectFailedCount(FailedType.IOException, 5); asyncResult = Task.await(_commandService.executeAsync(command)); Assert.assertNotNull(asyncResult); Assert.assertEquals(AsyncTaskStatus.Success, asyncResult.getStatus()); commandResult = asyncResult.getData(); Assert.assertNotNull(commandResult); Assert.assertEquals(CommandStatus.Success, commandResult.getStatus()); ((MockDomainEventPublisher) 
_domainEventPublisher).Reset(); command = new CreateTestAggregateCommand(); command.aggregateRootId = ObjectId.generateNewStringId(); command.setTitle("Sample Note"); ((MockDomainEventPublisher) _domainEventPublisher).setExpectFailedCount(FailedType.TaskIOException, 5); asyncResult = Task.await(_commandService.executeAsync(command)); Assert.assertNotNull(asyncResult); Assert.assertEquals(AsyncTaskStatus.Success, asyncResult.getStatus()); commandResult = asyncResult.getData(); Assert.assertNotNull(commandResult); Assert.assertEquals(CommandStatus.Success, commandResult.getStatus()); ((MockDomainEventPublisher) _domainEventPublisher).Reset(); } }
pablobacho/XBee-PSoC
Tx example.cydsn/main.c
/*
The MIT License (MIT)

Copyright (c) 2014 CAIMANICS (<NAME>)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

/*
 * Transmit example for the XBee PSoC component: every 3 seconds, broadcast
 * the 4-byte payload "HOLA" as a ZigBee Transmit Request.
 */
#include <project.h>

// Packet buffer reused for every transmit request.
XBEE_packet_t packet;
// 4 data bytes; the trailing NUL is not transmitted (length 4 below).
uint8_t payload[] = "HOLA";

int main()
{
    // API frame id; incremented per frame and wraps at 255 (uint8_t).
    // NOTE(review): frame id 0 presumably suppresses the transmit-status
    // response in the XBee API protocol — confirm wrapping through 0 is
    // intended.
    uint8_t frameId = 0;

    /* Place your initialization/startup code here (e.g. MyInst_Start()) */
    XBEE_Start();

    CyGlobalIntEnable; /* Uncomment this line to enable global interrupts. */

    for(;;)
    {
        /* Place your application code here. */
        CyDelay(3000);  // pause 3 s between broadcasts

        // Broadcast: 64-bit and 16-bit broadcast addresses, radius 0
        // (maximum hops), options 0, payload "HOLA" (4 bytes).
        XBEE_ZigBeeTransmitRequest(&packet, frameId++,
            XBEE_BROADCAST_HW_ADDRESS, XBEE_BROADCAST_NWK_ADDRESS,
            0, 0, payload, 4);
    }
}

/* [] END OF FILE */
ciscoruiz/wepa
source/include/coffee/http/HttpRequest.hpp
<gh_stars>1-10 // MIT License // // Copyright (c) 2018 <NAME> (<EMAIL>) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. // #ifndef _coffee_http_HttpRequest_hpp_ #define _coffee_http_HttpRequest_hpp_ #include <coffee/http/HttpMessage.hpp> #include <coffee/http/url/defines.hpp> #include <coffee/http/url/URL.hpp> namespace coffee { namespace http { /** * General definition for HTTP requests following RFC 2616. 
* * The first line of any HTTP-request will be: * /code * <METHOD> <URI> HTTP/<MAYOR VERSION>.<MINOR VERSION> * /endcode * * \author <EMAIL> */ class HttpRequest : public HttpMessage { public: struct Method { enum _v { Port, Options, Get, Head, Put, Delete, Trace, Connect }; static const char* asString(const Method::_v value) noexcept; }; static std::shared_ptr<HttpRequest> instantiate(const Method::_v method, const std::string& url, const uint32_t majorVersion = 1, const uint32_t minorVersion = 1) throw(basis::RuntimeException); const Method::_v getMethod() const noexcept { return m_method; } std::shared_ptr<url::URL> getURL() const noexcept { m_url; } const std::string& getPath() const throw(basis::RuntimeException) { return m_url->getComponent(url::ComponentName::Path); } protected: /** * Constructor. */ HttpRequest(const Method::_v method, const std::shared_ptr<url::URL>& url, const uint32_t majorVersion, const uint32_t minorVersion) : HttpMessage(majorVersion, minorVersion), m_method(method), m_url(url) {} /** * @return the first line in the HTTP message. */ std::string encodeFirstLine() const throw(basis::RuntimeException); private: const Method::_v m_method; std::shared_ptr<url::URL> m_url; }; } } #endif // _coffee_http_HttpRequest_hpp_
YunLemon/orientdb-gremlin
driver/src/test/java/org/apache/tinkerpop/gremlin/orientdb/OPartitionedReCreatableDatabasePoolTest.java
<reponame>YunLemon/orientdb-gremlin package org.apache.tinkerpop.gremlin.orientdb; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import com.orientechnologies.orient.core.db.OrientDB; import com.orientechnologies.orient.core.db.OrientDBConfig; import java.util.concurrent.ThreadLocalRandom; import org.junit.Test; public class OPartitionedReCreatableDatabasePoolTest { protected OPartitionedReCreatableDatabasePool pool() { OrientDB orientDB = new OrientDB("embedded:", OrientDBConfig.defaultConfig()); String dbName = "memorydb" + ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE); orientDB.execute( "create database " + dbName + " memory users(admin identified by 'admin' role admin)"); return new OPartitionedReCreatableDatabasePool(orientDB, dbName, "admin", "admin", 5); } @Test public void testDatabaseAcquiredByOPartitionedReCreatableDatabasePool() throws Exception { OPartitionedReCreatableDatabasePool pool = pool(); assertFalse(pool.acquire().isClosed()); pool.close(); assertNull(pool.acquire()); pool.reCreatePool(); assertFalse(pool.acquire().isClosed()); } }
ashesh-0/MultiZoomGaze
code/run.py
""" This script can be used to train/evaluate different models described in MultiZoomGaze paper. """ import argparse import os import socket import sys from datetime import datetime import torch import torch.backends.cudnn as cudnn import torch.nn.parallel import torch.optim import torch.utils.data import torchvision import torchvision.transforms as transforms from torch.utils.cpp_extension import CUDA_HOME from backbones.backbone_type import BackboneType from core.loss import PinBallLoss from core.model_type import ModelType from core.train_utils import (checkpoint_fname, compute_angular_error, compute_angular_error_xyz, save_checkpoint) from data_loader import ImagerLoader from data_loader_multicrop import ImageLoaderMultiSizedCrops from data_loader_static_model import ImagerLoaderStaticModel from model import GazeLSTM from non_lstm_based_model import GazeMultiCropModel from run_utils import evaluate, train, validate from sinecosine_model.data_loader_sinecosine import (ImageLoaderSineCosine, ImageLoaderSineCosineMultiScale, ImageLoaderSineCosineMultiSizedCrops, ImageLoaderSineCosineMultiSizedRandomCrops) from sinecosine_model.data_loader_static_sinecosine import (ImageLoaderStaticMultiSineCosine, ImageLoaderStaticSineCosine, ImageLoaderStaticSineCosineMultiCenterCrops) from sinecosine_model.lazy_aggregation_model import LazyAggregationModel from sinecosine_model.mixed_loss import (RegularizedMultiSinAndCosLoss, RegularizedSinAndCosLoss, WeightedMseLoss, WeightedRegularizedSinAndCosLoss) from sinecosine_model.model import GazeSinCosLSTM from sinecosine_model.model_with_lstm_scaling import GazeSinCosLSTMLstmScalingV2 from sinecosine_model.non_lstm_based_model import AggregationType, GazeSinCosMultiCropModel from sinecosine_model.static_sinecosine_model import GazeStaticMultiSineAndCosineModel, GazeStaticSineAndCosineModel from sinecosine_model.train_utils import compute_angular_error_sine_and_cosine from static_model import GazeStaticModel from 
static_xyz.data_loader_static_xyz import ImagerLoaderStaticXyzModel from static_xyz.static_xyz_model import GazeStaticXyzModel WORKERS = 4 EPOCHS = 100 BATCH_SIZE = 64 BEST_ERROR = 100 LEARNING_RATE = 1e-4 def main(model_type, train_file, val_file, test_file, source_path, checkpoints_path, img_size, kfold_train, **params): global EPOCHS sum_writer = None # deprecated feature print('Train:', train_file) print('Val:', val_file) global args, BEST_ERROR checkpoint_fpath = os.path.join(checkpoints_path, f'gaze360_model_{model_type}.pth.tar') image_normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) train_img_transforms = transforms.Compose( [transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize]) val_img_transforms = transforms.Compose( [transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize]) unfreeze_epoch = None train_img_loader_kwargs = {} val_img_loader_kwargs = {} if model_type == ModelType.LSTM: print('Using LSTM model') model_v = GazeLSTM() ImageLoaderClass = ImagerLoader compute_angular_error_fn = compute_angular_error criterion = PinBallLoss().cuda() elif model_type == ModelType.LSTMBackward: raise Exception('Tri model is removed') elif model_type == ModelType.StaticModel: print('Using Static model') backbone_type = params['backbone_type'] checkpoint_fpath = checkpoint_fname( True, kfold_train, [ ('TYPE', model_type), ('bkb', backbone_type), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ], dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeStaticModel(backbone_type=backbone_type) ImageLoaderClass = ImagerLoaderStaticModel compute_angular_error_fn = compute_angular_error criterion = PinBallLoss().cuda() elif model_type == ModelType.StaticBackwardModel: raise Exception('Tri model is removed') elif model_type == ModelType.StaticXyzModel: model_v = GazeStaticXyzModel() checkpoint_fpath = checkpoint_fname( True, kfold_train, [ 
('TYPE', model_type), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ], dirname=checkpoints_path, ) print(checkpoint_fpath) ImageLoaderClass = ImagerLoaderStaticXyzModel compute_angular_error_fn = compute_angular_error_xyz criterion = PinBallLoss().cuda() elif model_type == ModelType.StaticSinModel: raise Exception('StaticSinModel is removed') elif model_type == ModelType.StaticCosModel: raise Exception('StaticCosModel is removed') elif model_type == ModelType.StaticSinCosModel: print('Using static sin cos model') backbone_type = params['backbone_type'] fc2 = 256 centercrop = None checkpoint_fpath = checkpoint_fname( True, kfold_train, [ ('TYPE', model_type), ('fc2', fc2), ('bkb', backbone_type), # ('centercrop', centercrop), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ], dirname=checkpoints_path, ) print(checkpoint_fpath) train_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), # transforms.CenterCrop(centercrop), # transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) val_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), # transforms.CenterCrop(centercrop), # transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) model_v = GazeStaticSineAndCosineModel(fc2=fc2, backbone_type=backbone_type) ImageLoaderClass = ImageLoaderStaticSineCosine compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = PinBallLoss().cuda() elif model_type == ModelType.StaticMultiSinCosRegModel: print('Using regularized multi sin cos static model') fc2 = 256 centercrop = 175 checkpoint_fpath = checkpoint_fname( True, kfold_train, [ ('TYPE', model_type), ('fc2', fc2), ('centercrop', centercrop), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ], dirname=checkpoints_path, ) print(checkpoint_fpath) ImageLoaderClass = ImageLoaderStaticMultiSineCosine 
train_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), transforms.CenterCrop(centercrop), transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) val_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), transforms.CenterCrop(centercrop), transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) model_v = GazeStaticMultiSineAndCosineModel(fc2=fc2) compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = RegularizedMultiSinAndCosLoss([0, 1, 3, 4], [2]).cuda() elif model_type == ModelType.StaticSinCosRegModel: print('Using regularized sin cos static model') backbone_type = params['backbone_type'] fc2 = 256 fc1 = None freeze_layer_idx = 0 unfreeze_epoch = 0 # backbone_loader_checkpoint_fpath = "/home/ashesh/model_best_gaze360_TYPE:10_fc2:256_time:False_diff_crop:224-150_tar_idx:3_seq_len:7_bsz:64_lr:0.0001_v:master_3.pth.tar" backbone_loader_checkpoint_fpath = None centercrop = 200 use_extended_head = False checkpoint_args = [ ('TYPE', model_type), ('fc1', fc1), ('fc2', fc2), ('bkb', backbone_type), ] if centercrop is not None: checkpoint_args += [('centercrop', centercrop)] if freeze_layer_idx > 0: checkpoint_args += [('freeze', freeze_layer_idx)] if backbone_loader_checkpoint_fpath is not None: checkpoint_args += [('bkb_load', 1)] checkpoint_args += [ ('imsz', img_size), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ] checkpoint_fpath = checkpoint_fname( True, kfold_train, checkpoint_args, dirname=checkpoints_path, ) print(checkpoint_fpath) if use_extended_head: ImageLoaderClass = ImageLoaderStaticSineCosineMultiCenterCrops train_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) val_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) else: ImageLoaderClass = 
ImageLoaderStaticSineCosine train_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), transforms.CenterCrop(centercrop), transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) val_img_transforms = transforms.Compose([ transforms.Resize((img_size, img_size)), transforms.CenterCrop(centercrop), transforms.Resize((img_size, img_size)), transforms.ToTensor(), image_normalize, ]) model_v = GazeStaticSineAndCosineModel( fc2=fc2, fc1=fc1, freeze_layer_idx=freeze_layer_idx, backbone_type=backbone_type, backbone_loader_checkpoint_fpath=backbone_loader_checkpoint_fpath, ) compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = RegularizedSinAndCosLoss().cuda() elif model_type == ModelType.SinCosRegModel: print('Using regularized sin cos LSTM model') fc2 = 256 backbone_type = params['backbone_type'] freeze_layer_idx = 0 unfreeze_epoch = 0 assert params['bidirectional_lstm'] in [0, 1] bidirectional_lstm = params['bidirectional_lstm'] == 1 assert isinstance(params['cropsize_list'], list) or params['cropsize_list'] is None cropsize_list = params['cropsize_list'] #[224, 200, 175, 150, 175, 200, 224] enable_time = params['enable_time'] if params['symmetric'] == 1 and cropsize_list is not None: cropsize_list = cropsize_list + cropsize_list[:-1][::-1] seq_len = len(cropsize_list) if cropsize_list is not None else 7 if bidirectional_lstm and params['symmetric']: target_seq_index = seq_len // 2 else: target_seq_index = seq_len - 1 ckp_tples = [ ('TYPE', model_type), ('bkb', backbone_type), ('fc2', fc2), ('time', enable_time), ] if cropsize_list is not None: ckp_tples += [('diff_crop', f'{cropsize_list[0]}-{cropsize_list[seq_len//2]}')] ckp_tples += [ ('tar_idx', target_seq_index), ('seq_len', seq_len), ('imsz', img_size), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ] if bidirectional_lstm is False: ckp_tples += [('bidir', int(bidirectional_lstm))] ckp_tples += [ ('v', f'master_{params["version"]}'), ] 
checkpoint_fpath = checkpoint_fname( False, kfold_train, ckp_tples, dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeSinCosLSTM( fc2=fc2, freeze_layer_idx=freeze_layer_idx, target_seq_index=target_seq_index, seq_len=seq_len, backbone_type=backbone_type, bidirectional_lstm=bidirectional_lstm, ) train_img_loader_kwargs = {'enable_time': enable_time} val_img_loader_kwargs = {'enable_time': enable_time} if cropsize_list is None: ImageLoaderClass = ImageLoaderSineCosine else: ImageLoaderClass = ImageLoaderSineCosineMultiSizedCrops train_img_loader_kwargs['cropsize_list'] = cropsize_list train_img_loader_kwargs['seq_len'] = seq_len train_img_loader_kwargs['img_size'] = img_size val_img_loader_kwargs['cropsize_list'] = cropsize_list val_img_loader_kwargs['seq_len'] = seq_len val_img_loader_kwargs['img_size'] = img_size compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = RegularizedSinAndCosLoss().cuda() elif model_type == ModelType.SinCosModel: print('Using sin cos LSTM model') fc2 = 256 enable_time = params['enable_time'] cropsize_list = params['cropsize_list'] seq_len = len(cropsize_list) if params['symmetric'] == 1: cropsize_list = cropsize_list + cropsize_list[:-1][::-1] target_seq_index = seq_len // 2 else: target_seq_index = seq_len - 1 checkpoint_fpath = checkpoint_fname( enable_time, kfold_train, [ ('TYPE', model_type), ('fc2', fc2), ('time', enable_time), ('diff_crop', f'{cropsize_list[0]}-{cropsize_list[seq_len//2]}'), ('tar_idx', target_seq_index), ('seq_len', seq_len), ('imsz', img_size), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ], dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeSinCosLSTM( fc2=fc2, target_seq_index=target_seq_index, seq_len=seq_len, ) train_img_loader_kwargs = {'enable_time': enable_time, 'seq_len': seq_len} val_img_loader_kwargs = {'enable_time': enable_time, 'seq_len': seq_len} if cropsize_list is None: ImageLoaderClass = 
ImageLoaderSineCosine else: ImageLoaderClass = ImageLoaderSineCosineMultiSizedCrops train_img_loader_kwargs['cropsize_list'] = cropsize_list val_img_loader_kwargs['cropsize_list'] = cropsize_list compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = PinBallLoss().cuda() elif model_type == ModelType.SinCosRegLstmScaleModel: img_size = 150 fc2 = 256 seq_len = 5 cropsize_list = [224, 160, 100] checkpoint_fpath = checkpoint_fname( False, kfold_train, [ ('TYPE', model_type), ('fc2', fc2), ('scale', f'{cropsize_list[0]}-{cropsize_list[-1]}-{len(cropsize_list)}'), ('T', seq_len), ('img_sz', img_size), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ], dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeSinCosLSTMLstmScalingV2(seq_len, len(cropsize_list), fc2=fc2) ImageLoaderClass = ImageLoaderSineCosineMultiScale train_img_loader_kwargs = {'seq_len': seq_len, 'cropsize_list': cropsize_list, 'img_size': img_size} val_img_loader_kwargs = {'seq_len': seq_len, 'cropsize_list': cropsize_list, 'img_size': img_size} compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = RegularizedSinAndCosLoss().cuda() elif model_type == ModelType.StaticSinCosAllRegModel: raise Exception('StaticSineAndCosineAllModel is removed') elif model_type == ModelType.StaticWeightedSinCosRegModel: print('Using weighted regularized sin cos static model') w_lambda = 0.005 checkpoint_fpath = checkpoint_fname( True, kfold_train, [ ('TYPE', model_type), ('w_loss', w_lambda), ], dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeStaticSineAndCosineModel() ImageLoaderClass = ImageLoaderStaticSineCosine compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = WeightedRegularizedSinAndCosLoss(w_lambda=w_lambda).cuda() elif model_type == ModelType.StaticWeightedMseSinCosModel: print('Using weighted mse sin cos static model') w_lambda = 0.1 checkpoint_fpath = checkpoint_fname( True, kfold_train, [ ('TYPE', model_type), 
('w_loss', w_lambda), ], dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeStaticSineAndCosineModel() ImageLoaderClass = ImageLoaderStaticSineCosine compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = WeightedMseLoss(w_lambda=w_lambda).cuda() elif model_type == ModelType.Gaze360MultiCropModel: print("Using MultiCrops with Gaze360 target and loss") backbone_type = BackboneType.Resnet18 enable_time = params['enable_time'] # cropsize_list = None # target_seq_index = None cropsize_list = [224, 200, 175, 150, 175, 200, 224] seq_len = len(cropsize_list) target_seq_index = 3 checkpoint_fpath = checkpoint_fname( False, kfold_train, [ ('TYPE', model_type), ('bkb', backbone_type), ('time', enable_time), ('diff_crop', f'{cropsize_list[0]}-{cropsize_list[seq_len//2]}'), ('tar_idx', target_seq_index), ('seq_len', seq_len), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ], dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeLSTM( output_dim=3, target_seq_index=target_seq_index, seq_len=seq_len, backbone_type=backbone_type, ) train_img_loader_kwargs = {'enable_time': enable_time} val_img_loader_kwargs = {'enable_time': enable_time} ImageLoaderClass = ImageLoaderMultiSizedCrops train_img_loader_kwargs['cropsize_list'] = cropsize_list val_img_loader_kwargs['cropsize_list'] = cropsize_list compute_angular_error_fn = compute_angular_error criterion = PinBallLoss().cuda() elif model_type in [ ModelType.NonLstmSinCosRegModel, ModelType.NonLstmSinCosModel, ModelType.NonLstmSinCosRandomModel ]: if model_type == ModelType.NonLstmSinCosModel: print('Using Non-LSTM based Sine Cosine model') else: print('Using Non-LSTM based Sine Cosine Regularized model') backbone_type = params['backbone_type'] enable_time = params['enable_time'] assert isinstance(params['cropsize_list'], list) cropsize_list = params['cropsize_list'] assert params['symmetric'] == 0, 'symmetric=1 is not handled in this case. 
Pass full cropsize instead' seq_len = len(cropsize_list) atype = params['atype'] if isinstance(params['dataloader_params'], dict): future_prediction = bool(params['dataloader_params'].get('future_prediction', 0)) else: future_prediction = False if len(set(cropsize_list)) > 1: diff_crop_str = f'{cropsize_list[0]}-{cropsize_list[-1]}' else: diff_crop_str = f'One-{cropsize_list[0]}' ckp_tples = [ ('TYPE', model_type), ('bkb', backbone_type), ('time', enable_time), ('diff_crop', diff_crop_str), ('seq_len', seq_len), ('atype', atype), ] if future_prediction: ckp_tples += [('fp', 1)] ckp_tples += [ ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ] checkpoint_fpath = checkpoint_fname( False, kfold_train, ckp_tples, dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeSinCosMultiCropModel( seq_len=seq_len, backbone_type=backbone_type, atype=atype, cropsize_list=cropsize_list, ) train_img_loader_kwargs = {'enable_time': enable_time, 'future_prediction': future_prediction} val_img_loader_kwargs = {'enable_time': enable_time, 'future_prediction': future_prediction} if model_type == ModelType.NonLstmSinCosRandomModel: ImageLoaderClass = ImageLoaderSineCosineMultiSizedRandomCrops else: ImageLoaderClass = ImageLoaderSineCosineMultiSizedCrops train_img_loader_kwargs['cropsize_list'] = cropsize_list train_img_loader_kwargs['seq_len'] = seq_len train_img_loader_kwargs['img_size'] = img_size val_img_loader_kwargs['cropsize_list'] = cropsize_list val_img_loader_kwargs['seq_len'] = seq_len val_img_loader_kwargs['img_size'] = img_size compute_angular_error_fn = compute_angular_error_sine_and_cosine if model_type in [ModelType.NonLstmSinCosModel, ModelType.NonLstmSinCosRandomModel]: criterion = PinBallLoss().cuda() elif model_type == ModelType.NonLstmSinCosRegModel: criterion = RegularizedSinAndCosLoss().cuda() else: raise Exception(f'Unexpected model_type: {ModelType.name(model_type)}') elif model_type == ModelType.NonLstmMultiCropModel: 
print("Using Non lstm based MultiCrops with Gaze360 target and loss") backbone_type = params['backbone_type'] enable_time = params['enable_time'] cropsize_list = params['cropsize_list'] seq_len = len(cropsize_list) assert params['symmetric'] == 0, 'symmetric=1 is not handled in this case. Pass full cropsize instead' atype = params['atype'] checkpoint_fpath = checkpoint_fname( False, kfold_train, [ ('TYPE', model_type), ('bkb', backbone_type), ('time', enable_time), ('diff_crop', f'{cropsize_list[0]}-{cropsize_list[seq_len//2]}'), ('seq_len', seq_len), ('atype', atype), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ], dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = GazeMultiCropModel( output_dim=3, backbone_type=backbone_type, cropsize_list=cropsize_list, atype=atype, ) train_img_loader_kwargs = {'enable_time': enable_time} val_img_loader_kwargs = {'enable_time': enable_time} ImageLoaderClass = ImageLoaderMultiSizedCrops train_img_loader_kwargs['cropsize_list'] = cropsize_list val_img_loader_kwargs['cropsize_list'] = cropsize_list compute_angular_error_fn = compute_angular_error criterion = PinBallLoss().cuda() elif model_type == ModelType.Gaze360LazyAggregationModel: print('Using Non-LSTM based lazy aggregation Sine Cosine model') backbone_type = params['backbone_type'] assert isinstance(params['cropsize_list'], list) cropsize_list = params['cropsize_list'] agg_layer_idx = params['agg_layer_idx'] assert params['symmetric'] == 0, 'symmetric=1 is not handled in this case. 
Pass full cropsize instead' seq_len = len(cropsize_list) if len(set(cropsize_list)) > 1: diff_crop_str = f'{cropsize_list[0]}-{cropsize_list[-1]}' else: diff_crop_str = f'One-{cropsize_list[0]}' ckp_tples = [ ('TYPE', model_type), ('bkb', backbone_type), ('diff_crop', diff_crop_str), ('seq_len', seq_len), ('agg_idx', agg_layer_idx), ('bsz', BATCH_SIZE), ('lr', LEARNING_RATE), ('v', f'master_{params["version"]}'), ] checkpoint_fpath = checkpoint_fname( False, kfold_train, ckp_tples, dirname=checkpoints_path, ) print(checkpoint_fpath) model_v = LazyAggregationModel(agg_layer_idx=agg_layer_idx, backbone_type=backbone_type) ImageLoaderClass = ImageLoaderStaticSineCosine compute_angular_error_fn = compute_angular_error_sine_and_cosine criterion = PinBallLoss().cuda() else: raise Exception(f'Invalid model_type: {model_type}') model = torch.nn.DataParallel(model_v).cuda() model.cuda() cudnn.benchmark = True image_loader = ImageLoaderClass(source_path, train_file, train_img_transforms, **train_img_loader_kwargs) train_loader = torch.utils.data.DataLoader(image_loader, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS, pin_memory=True) print('Train transforms', train_img_transforms) print('Val transforms', val_img_transforms) val_loader = torch.utils.data.DataLoader(ImageLoaderClass(source_path, val_file, val_img_transforms, **val_img_loader_kwargs), batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS, pin_memory=True) optimizer = torch.optim.Adam(model.parameters(), LEARNING_RATE) print('Overall Adam Optimizer') if params['evaluate']: print('Skipping Training') checkpoint_fpath = os.path.join(os.path.dirname(checkpoint_fpath), 'model_best_' + os.path.basename(checkpoint_fpath)) assert os.path.exists(checkpoint_fpath), checkpoint_fpath checkpoint = torch.load(checkpoint_fpath) model.load_state_dict(checkpoint['state_dict']) print(f"Loaded from {checkpoint_fpath}\n Epoch:{checkpoint['epoch']}") img_loader = ImageLoaderClass(source_path, test_file, 
val_img_transforms, **val_img_loader_kwargs) if evaluate(img_loader, model, criterion, compute_angular_error_fn, BATCH_SIZE, WORKERS): return test_loader = torch.utils.data.DataLoader(img_loader, batch_size=BATCH_SIZE, shuffle=True, num_workers=WORKERS, pin_memory=True) angular_error = validate(test_loader, model, criterion, 100, compute_angular_error_fn, sum_writer) print('Angular Error on Test set:', angular_error) return assert not os.path.exists(checkpoint_fpath) for epoch in range(0, EPOCHS): if epoch == unfreeze_epoch: model_v.unfreeze_all() # train for one epoch train(train_loader, model, criterion, optimizer, epoch, compute_angular_error_fn, sum_writer) # evaluate on validation set angular_error, _ = validate(val_loader, model, criterion, epoch, compute_angular_error_fn, sum_writer) # remember best angular error in validation and save checkpoint is_best = angular_error < BEST_ERROR BEST_ERROR = min(angular_error, BEST_ERROR) save_checkpoint({ 'epoch': epoch + 1, 'state_dict': model.state_dict(), 'best_prec1': BEST_ERROR, }, is_best, checkpoint_fpath) def parse_cropsize(inp_str): if inp_str == '': return None return [int(x) for x in inp_str.split(',')] def parse_dict(dict_str): """ , => delimiter : => key value separator. 
""" tokens = dict_str.split(',') output = {} for token in tokens: k, v = token.split(':') output[k] = v return output if __name__ == '__main__': print(socket.gethostname(), datetime.now().strftime("%y-%m-%d-%H:%M:%S")) print('Python version', sys.version) print('CUDA_HOME', CUDA_HOME) print('CudaToolKit Version', torch.version.cuda) print('torch Version', torch.__version__) print('torchvision Version', torchvision.__version__) parser = argparse.ArgumentParser() parser.add_argument('--model_type', type=ModelType.from_name) parser.add_argument('--backbone_type', type=BackboneType.from_name, default=BackboneType.Resnet18) parser.add_argument('--source_path', type=str, default='/tmp2/ashesh/gaze360_data/imgs/') parser.add_argument('--checkpoints_path', type=str, default='/home/ashesh/') parser.add_argument('--img_size', type=int, default=224) parser.add_argument('--evaluate', action='store_true') parser.add_argument('--evaluate_on', type=str, default='test.txt') parser.add_argument('--kfold', type=int, default=-1) parser.add_argument('--version', type=int, default=1) parser.add_argument('--atype', type=AggregationType.from_name, default=AggregationType.SPATIAL_MAX) parser.add_argument('--cropsize_list', type=parse_cropsize, default=None) parser.add_argument('--symmetric', type=int, default=0) parser.add_argument('--bidirectional', type=int, default=0) parser.add_argument('--enable_time', action='store_true') parser.add_argument('--dataloader_params', type=parse_dict, default=None) parser.add_argument('--sample_nbr_cnt', type=int, default=180) parser.add_argument('--agg_layer_idx', type=int) parser.add_argument('--magnify_ER_factor', type=float, default=1.5) args = parser.parse_args() assert args.model_type is not None cropsize_list = args.cropsize_list if cropsize_list is None: if args.enable_time: cropsize_list = [224, 200, 175, 150, 175, 200, 224] else: cropsize_list = [224, 200, 175, 150] params = { 'evaluate': args.evaluate, 'version': args.version, 'atype': 
args.atype, 'backbone_type': args.backbone_type, 'cropsize_list': cropsize_list, 'symmetric': args.symmetric, 'bidirectional_lstm': args.bidirectional, 'enable_time': args.enable_time, 'dataloader_params': args.dataloader_params, } model_type = args.model_type if model_type == ModelType.Gaze360LazyAggregationModel: params['agg_layer_idx'] = args.agg_layer_idx train_file = 'train.txt' val_file = 'validation.txt' test_file = args.evaluate_on main(model_type, train_file, val_file, test_file, args.source_path, args.checkpoints_path, args.img_size, args.kfold, **params)
drienyov/treadmill
tests/sproc/export_reports_test.py
"""Unit tests for treadmill.sproc.export_reports. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import io import unittest import mock # Disable W0611: Unused import import tests.treadmill_test_skip_windows # pylint: disable=W0611 from treadmill.sproc.export_reports import export_reports class ExportReportsTest(unittest.TestCase): """Test treadmill.sproc.export_reports.""" # 1483228800 is 2017-01-01T00:00:00Z @mock.patch('time.time', mock.Mock(return_value=1483228800)) @mock.patch('io.open', mock.mock_open(), create=True) def test_export_reports(self): """Test saving of state reports to file.""" zkclient = mock.Mock() zkclient.get_children.return_value = ['foo'] zkclient.get.return_value = ('save this', 'meta') cell_dir = '/foo/bar' export_reports(cell_dir, zkclient) # Ensure filenames are in UTC timezone io.open.assert_called_with( '/foo/bar/2017-01-01T00:00:00_foo.csv.bz2', 'wb' ) zkclient.get.assert_called_with('/reports/foo') io.open().write.assert_called_with('save this') if __name__ == '__main__': unittest.main()
AspireWorld-Project/AspireCore
src/main/java/org/ultramine/server/internal/UMEventHandler.java
package org.ultramine.server.internal; import cpw.mods.fml.common.eventhandler.EventPriority; import cpw.mods.fml.common.eventhandler.SubscribeEvent; import cpw.mods.fml.common.functions.GenericIterableFactory; import cpw.mods.fml.common.gameevent.PlayerEvent.PlayerChangedDimensionEvent; import cpw.mods.fml.common.gameevent.PlayerEvent.PlayerLoggedInEvent; import cpw.mods.fml.common.gameevent.TickEvent; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.server.MinecraftServer; import net.minecraft.util.*; import net.minecraft.world.ChunkPosition; import net.minecraftforge.event.CommandEvent; import net.minecraftforge.event.ServerChatEvent; import net.minecraftforge.event.entity.living.LivingDeathEvent; import net.minecraftforge.event.entity.player.PlayerEvent; import org.ultramine.commands.basic.GenWorldCommand; import org.ultramine.core.permissions.Permissions; import org.ultramine.core.service.InjectService; import org.ultramine.server.ConfigurationHandler; import org.ultramine.server.Teleporter; import org.ultramine.server.UltramineServerConfig.ToolsConf.AutoBroacastConf; import org.ultramine.server.UltramineServerConfig.ToolsConf.AutoDebugInfoConf; import org.ultramine.server.chunk.ChunkProfiler; import org.ultramine.server.data.player.PlayerCoreData; import org.ultramine.server.event.ForgeModIdMappingEvent; import org.ultramine.server.event.PlayerDeathEvent; import org.ultramine.server.util.BasicTypeFormatter; import org.ultramine.server.util.WarpLocation; import static net.minecraft.util.EnumChatFormatting.*; public class UMEventHandler { @InjectService private static Permissions perms; @SideOnly(Side.SERVER) @SubscribeEvent(priority = EventPriority.HIGH) public void checkChatPermission(ServerChatEvent e) { if (e.player.playerNetServerHandler == null || e.player.getData() == null) return; PlayerCoreData data = e.player.getData().core(); if (data.isMuted()) { 
e.setCanceled(true); if (data.getUnmuteTime() != Long.MAX_VALUE) { e.player.addChatMessage(new ChatComponentTranslation("ultramine.ability.chat.muted", BasicTypeFormatter.formatTime(data.getUnmuteTime() - System.currentTimeMillis(), true)) .setChatStyle(new ChatStyle().setColor(EnumChatFormatting.RED))); } else { e.player.addChatMessage(new ChatComponentTranslation("ultramine.ability.chat.muted.forever") .setChatStyle(new ChatStyle().setColor(EnumChatFormatting.RED))); } } else if (e.player.isHidden()) { e.setCanceled(true); e.player.addChatMessage(new ChatComponentTranslation("ultramine.hidden.chat") .setChatStyle(new ChatStyle().setColor(EnumChatFormatting.RED))); } } @SubscribeEvent(priority = EventPriority.HIGH) public void onServerCommand(CommandEvent e) { if (e.sender instanceof EntityPlayerMP) { EntityPlayerMP player = (EntityPlayerMP) e.sender; if (player.playerNetServerHandler != null && player.getData() != null && player.getData().core().isCommandsMuted()) { e.setCanceled(true); if (player.getData().core().getUnmuteTime() != Long.MAX_VALUE) { player.addChatMessage(new ChatComponentTranslation("ultramine.ability.command.muted", BasicTypeFormatter.formatTime( player.getData().core().getUnmuteTime() - System.currentTimeMillis(), true)) .setChatStyle(new ChatStyle().setColor(EnumChatFormatting.RED))); } else { player.addChatMessage(new ChatComponentTranslation("ultramine.ability.command.muted.forever") .setChatStyle(new ChatStyle().setColor(EnumChatFormatting.RED))); } } } } @SubscribeEvent public void onServerTickCommon(TickEvent.ServerTickEvent e) { if (e.phase == TickEvent.Phase.START) { MinecraftServer server = MinecraftServer.getServer(); Teleporter.tick(); GenWorldCommand.tick(); ChunkProfiler.instance().tick(server.getTickCounter()); } } @SubscribeEvent @SideOnly(Side.SERVER) public void onServerTickServer(TickEvent.ServerTickEvent e) { if (e.phase == TickEvent.Phase.START) { MinecraftServer server = MinecraftServer.getServer(); 
server.getBackupManager().tick(); AutoDebugInfoConf cfg = ConfigurationHandler.getServerConfig().tools.autoDebugInfo; if (cfg.enabled && server.getTickCounter() % (cfg.intervalSeconds * 20) == 0) { double tps = Math.round(server.currentTPS * 10) / 10d; double downtime = server.currentWait / 1000 / 1000d; double peakdowntime = server.peakWait / 1000 / 1000d; int load = (int) Math.round((50 - downtime) / 50 * 100); int peakload = (int) Math.round((50 - peakdowntime) / 50 * 100); ChatComponentText loadcomp = new ChatComponentText(Integer.toString(load).concat("%")); ChatComponentText peakloadcomp = new ChatComponentText(Integer.toString(peakload).concat("%")); ChatComponentText tpscomp = new ChatComponentText(Double.toString(tps)); loadcomp.getChatStyle().setColor(load > 100 ? RED : DARK_GREEN); peakloadcomp.getChatStyle().setColor(peakload >= 200 ? RED : DARK_GREEN); tpscomp.getChatStyle().setColor(tps < 15 ? RED : DARK_GREEN); ChatComponentTranslation full = new ChatComponentTranslation("ultramine.autobroadcast.debugmsg", loadcomp, peakloadcomp, tpscomp, server.getConfigurationManager().playerEntityList.size()); full.getChatStyle().setColor(YELLOW); server.addChatMessage(full); for (EntityPlayerMP player : GenericIterableFactory .newCastingIterable(server.getConfigurationManager().playerEntityList, EntityPlayerMP.class)) if (perms.has(player, "show.debuginfo")) { player.addChatMessage(full); } } AutoBroacastConf msgcfg = ConfigurationHandler.getServerConfig().tools.autobroadcast; if (msgcfg.enabled && server.getTickCounter() % (msgcfg.intervalSeconds * 20) == 0) { if (msgcfg.messages.length != 0) { if (msgcfg.showAllMessages) { for (String msg : msgcfg.messages) { broadcastMessage(msg); } } else { broadcastMessage(msgcfg.messages[server.getTickCounter() % (msgcfg.intervalSeconds * 20 * msgcfg.messages.length) / (msgcfg.intervalSeconds * 20)]); } } } } } @SubscribeEvent @SideOnly(Side.SERVER) public void onPlayerTickServer(TickEvent.PlayerTickEvent e) { if (e.phase 
== TickEvent.Phase.END && e.side.isServer()) { EntityPlayerMP player = (EntityPlayerMP) e.player; int x = MathHelper.floor_double(player.posX); int z = MathHelper.floor_double(player.posZ); if (!player.getServerForPlayer().getBorder().isInsideBorder(x, z)) { ChunkPosition pos = player.getServerForPlayer().getBorder().correctPosition(x, z); player.playerNetServerHandler.setPlayerLocation(pos.chunkPosX, player.lastTickPosY, pos.chunkPosZ, player.rotationYaw, player.rotationPitch); } } } private static void broadcastMessage(String msg) { ChatComponentText msgcomp = new ChatComponentText(msg); msgcomp.getChatStyle().setColor(DARK_GREEN); MinecraftServer.getServer().getConfigurationManager().sendChatMsg(msgcomp); } @SubscribeEvent public void onPlayerClone(PlayerEvent.Clone e) { if (e.entityPlayer.isEntityPlayerMP()) { ((EntityPlayerMP) e.entityPlayer).setData(((EntityPlayerMP) e.original).getData()); ((EntityPlayerMP) e.entityPlayer).setStatisticsFile( MinecraftServer.getServer().getConfigurationManager().func_152602_a(e.entityPlayer)); } } @SubscribeEvent public void onLivingDeath(LivingDeathEvent e) { if (e.entityLiving.isEntityPlayerMP()) { EntityPlayerMP player = (EntityPlayerMP) e.entityLiving; Teleporter tp = player.getData().core().getTeleporter(); if (tp != null) { tp.cancel(); } player.getData().core().setLastLocation(WarpLocation.getFromPlayer(player)); } } @SubscribeEvent(priority = EventPriority.HIGH) public void onPlayerChangedDimension(PlayerChangedDimensionEvent e) { MinecraftServer.getServer().getConfigurationManager().getDataLoader() .handlePlayerDimensionChange((EntityPlayerMP) e.player, e.fromDim, e.toDim); } @SideOnly(Side.SERVER) @SubscribeEvent(priority = EventPriority.LOWEST) public void onPlayerLoggedIn(PlayerLoggedInEvent e) { ((EntityPlayerMP) e.player).getData().core().onLogin(); } @SubscribeEvent public void onForgeModIdMapping(ForgeModIdMappingEvent e) { UMInternalRegistry.onRemap(); } @SubscribeEvent public void 
onPlayerKeepInvCheckEvent(PlayerDeathEvent e) { if (e.damageSource == DamageSource.command) { e.setDeathMessage(null); } if (perms.has(e.entityPlayer, "ability.admin.keepinventory")) { e.setKeepInventory(true); } } }
defstream/nl3
lib/text/singularize.js
/*jslint node: true */ /*global module, require*/ 'use strict'; var singular = require('pluralize').singular; /** * Given text, returns the singularized form. ie: cats => cat. * @param {String} text [description] * @return {String} [description] */ module.exports = function singularize(text) { //@info if text exists, return the singularized form, otherwise return the input. return (text && singular(text)) || text; };
nbeaver/mx-trunk
libMx/d_bkprecision_912x_aio.h
/* * Name: d_bkprecision_912x_aio.h * * Purpose: Header file for MX analog I/O drivers for the BK Precision 912x * series of power supplies. * * Author: <NAME> * *------------------------------------------------------------------------- * * Copyright 2008 Illinois Institute of Technology * * See the file "LICENSE" for information on usage and redistribution * of this file, and for a DISCLAIMER OF ALL WARRANTIES. * */ #ifndef __D_BKPRECISION_912X_AIO_H__ #define __D_BKPRECISION_912X_AIO_H__ /*--- Input and output types ---*/ #define MXT_BKPRECISION_912X_VOLTAGE 1 #define MXT_BKPRECISION_912X_CURRENT 2 /*--- Input types only ---*/ #define MXT_BKPRECISION_912X_POWER 3 #define MXT_BKPRECISION_912X_DVM 4 #define MXT_BKPRECISION_912X_RESISTANCE 5 /* ===== Data structures ===== */ typedef struct { MX_RECORD *record; MX_RECORD *bkprecision_912x_record; char input_type_name[MXU_BKPRECISION_NAME_LENGTH+1]; unsigned long input_type; } MX_BKPRECISION_912X_AINPUT; typedef struct { MX_RECORD *record; MX_RECORD *bkprecision_912x_record; char output_type_name[MXU_BKPRECISION_NAME_LENGTH+1]; unsigned long output_type; } MX_BKPRECISION_912X_AOUTPUT; #define MXD_BKPRECISION_912X_AINPUT_STANDARD_FIELDS \ {-1, -1, "bkprecision_912x_record", MXFT_RECORD, NULL, 0, {0}, \ MXF_REC_TYPE_STRUCT, \ offsetof(MX_BKPRECISION_912X_AINPUT, bkprecision_912x_record), \ {0}, NULL, (MXFF_IN_DESCRIPTION | MXFF_IN_SUMMARY)}, \ \ {-1, -1, "input_type_name", MXFT_STRING, NULL, \ 1, {MXU_BKPRECISION_NAME_LENGTH}, \ MXF_REC_TYPE_STRUCT, \ offsetof(MX_BKPRECISION_912X_AINPUT, input_type_name),\ {sizeof(char)}, NULL, (MXFF_IN_DESCRIPTION | MXFF_IN_SUMMARY)} #define MXD_BKPRECISION_912X_AOUTPUT_STANDARD_FIELDS \ {-1, -1, "bkprecision_912x_record", MXFT_RECORD, NULL, 0, {0}, \ MXF_REC_TYPE_STRUCT, \ offsetof(MX_BKPRECISION_912X_AOUTPUT, bkprecision_912x_record),\ {0}, NULL, (MXFF_IN_DESCRIPTION | MXFF_IN_SUMMARY)}, \ \ {-1, -1, "output_type_name", MXFT_STRING, NULL, \ 1, {MXU_BKPRECISION_NAME_LENGTH}, \ 
MXF_REC_TYPE_STRUCT, \ offsetof(MX_BKPRECISION_912X_AOUTPUT, output_type_name),\ {sizeof(char)}, NULL, (MXFF_IN_DESCRIPTION | MXFF_IN_SUMMARY)} /* Define all of the interface functions. */ /* First the input functions. */ MX_API mx_status_type mxd_bkprecision_912x_ain_create_record_structures( MX_RECORD *record ); MX_API mx_status_type mxd_bkprecision_912x_ain_open( MX_RECORD *record ); MX_API mx_status_type mxd_bkprecision_912x_ain_read( MX_ANALOG_INPUT *ainput ); extern MX_RECORD_FUNCTION_LIST mxd_bkprecision_912x_ain_record_function_list; extern MX_ANALOG_INPUT_FUNCTION_LIST mxd_bkprecision_912x_ain_analog_input_function_list; extern long mxd_bkprecision_912x_ain_num_record_fields; extern MX_RECORD_FIELD_DEFAULTS *mxd_bkprecision_912x_ain_rfield_def_ptr; /* Second the output functions. */ MX_API mx_status_type mxd_bkprecision_912x_aout_create_record_structures( MX_RECORD *record ); MX_API mx_status_type mxd_bkprecision_912x_aout_open( MX_RECORD *record ); MX_API mx_status_type mxd_bkprecision_912x_aout_read( MX_ANALOG_OUTPUT *aoutput ); MX_API mx_status_type mxd_bkprecision_912x_aout_write( MX_ANALOG_OUTPUT *aoutput ); extern MX_RECORD_FUNCTION_LIST mxd_bkprecision_912x_aout_record_function_list; extern MX_ANALOG_OUTPUT_FUNCTION_LIST mxd_bkprecision_912x_aout_analog_output_function_list; extern long mxd_bkprecision_912x_aout_num_record_fields; extern MX_RECORD_FIELD_DEFAULTS *mxd_bkprecision_912x_aout_rfield_def_ptr; #endif /* __D_BKPRECISION_912X_AIO_H__ */
chotto-martini/stocks_analyzer_core
app/com/stocks_analyzer/core/components/db/secure/dao/UsersDao.java
<reponame>chotto-martini/stocks_analyzer_core<gh_stars>0 package com.stocks_analyzer.core.components.db.secure.dao; import java.util.Date; import org.apache.commons.lang3.RandomStringUtils; import com.avaje.ebean.Ebean; import com.avaje.ebean.SqlUpdate; import com.stocks_analyzer.core.components.db.secure.UsersComponent; import com.stocks_analyzer.core.models.secure.AppSecureModel; import com.stocks_analyzer.core.models.secure.Users; /** * ユーザー(users)モデルのネイティブクエリによる操作クラス。 * <p>アプリケーションとは祖結合としたい為、直接の参照は行わない。 * <br>利用する場合、{@link UsersComponent}を経由しアクセスする。 * * @author chotto-martini * @since 1.0.0 */ public class UsersDao { /** * ユーザー(userss)作成する。 * @param accountId アカウントID * @param password パスワード * @param nickname ニックネーム * @return 作成したユーザーモデルを返す。 * * @author chotto-martini * @since 1.0.0 */ protected static Users create(String accountId, String password, String nickname) { String sql = "INSERT INTO users (account_id, password, nickname, access_token, use_datetime, created, modified) " + "VALUES (:account_id, PASSWORD(:password), :nickname, :access_token, :use_datetime, :created, :modified)"; Date now = new Date(); String accessToken = RandomStringUtils.randomAlphanumeric(128); SqlUpdate sqlUpdate = Ebean.getServer(AppSecureModel.EBEAN_SERVER_DB_SECURE).createSqlUpdate(sql); sqlUpdate.setParameter("account_id", accountId); sqlUpdate.setParameter("password", password); sqlUpdate.setParameter("nickname", nickname); sqlUpdate.setParameter("access_token", accessToken); sqlUpdate.setParameter("use_datetime", now); sqlUpdate.setParameter("created", now); sqlUpdate.setParameter("modified", now); sqlUpdate.execute(); return Users.find.where().eq("access_token", accessToken).findUnique(); } }
zcjtom/cm_api
java/src/main/java/com/cloudera/api/v10/ServicesResourceV10.java
// Licensed to Cloudera, Inc. under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.cloudera.api.v10;

import static com.cloudera.api.Parameters.SERVICE_NAME;

import com.cloudera.api.model.ApiCommand;
import com.cloudera.api.v8.ServicesResourceV8;

import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

// API v10 service-level command resource; extends v8 with Hue DB maintenance
// commands and embedded-database creation commands for Oozie and Sqoop2.
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
public interface ServicesResourceV10 extends ServicesResourceV8 {

  /**
   * @return The role command resource handler.
   */
  @Override
  @Path("/{serviceName}/roleCommands")
  public RoleCommandsResourceV10 getRoleCommandsResource(
      @PathParam(SERVICE_NAME) String serviceName);

  /**
   * Runs Hue's dumpdata command.
   *
   * Available since API v10.
   *
   * @param serviceName The name of the service
   * @return Information about the submitted command.
   */
  @POST
  @Path("/{serviceName}/commands/hueDumpDb")
  public ApiCommand hueDumpDbCommand(
      @PathParam(SERVICE_NAME) String serviceName);

  /**
   * Runs Hue's loaddata command.
   *
   * Available since API v10.
   *
   * @param serviceName The name of the service
   * @return Information about the submitted command.
   */
  @POST
  @Path("/{serviceName}/commands/hueLoadDb")
  public ApiCommand hueLoadDbCommand(
      @PathParam(SERVICE_NAME) String serviceName);

  /**
   * Runs Hue's syncdb command.
   *
   * Available since API v10.
   *
   * @param serviceName The name of the service
   * @return Information about the submitted command.
   */
  @POST
  @Path("/{serviceName}/commands/hueSyncDb")
  public ApiCommand hueSyncDbCommand(
      @PathParam(SERVICE_NAME) String serviceName);

  /**
   * Create the Oozie Server Database. Only works with embedded postgresql
   * database.
   * <p>
   * This command is to be run whenever a new user and database need to be
   * created in the embedded postgresql database for an Oozie service. This
   * command should usually be followed by a call to createOozieDb.
   * <p>
   * Available since API v10.
   *
   * @param serviceName
   *          Name of the Oozie service on which to run the command.
   * @return Information about the submitted command
   */
  @POST
  // NOTE(review): the empty @Consumes() appears intended to relax the
  // interface-level JSON-only restriction for this body-less POST — confirm.
  @Consumes()
  @Path("/{serviceName}/commands/oozieCreateEmbeddedDatabase")
  public ApiCommand oozieCreateEmbeddedDatabaseCommand(
      @PathParam(SERVICE_NAME) String serviceName);

  /**
   * Create the Sqoop2 Server Database tables.
   * <p>
   * This command is to be run whenever a new database has been specified. Will
   * do nothing if tables already exist. Will not perform an upgrade. Only
   * available when Sqoop2 Server is stopped.
   * <p>
   * Available since API v10.
   *
   * @param serviceName Name of the Sqoop2 service on which to run the command.
   * @return Information about the submitted command
   */
  @POST
  // NOTE(review): empty @Consumes() — same body-less POST pattern as above.
  @Consumes()
  @Path("/{serviceName}/commands/sqoopCreateDatabaseTables")
  public ApiCommand sqoopCreateDatabaseTablesCommand(
      @PathParam(SERVICE_NAME) String serviceName);
}
apexxs/dldt
inference-engine/tests/unit/builders/network_builder_test.cpp
// Copyright (C) 2018 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // #include <gtest/gtest.h> #include <string.h> #include <ie_builders.hpp> #include "builder_test.hpp" using namespace testing; using namespace InferenceEngine; class NetworkBuilderTest : public BuilderTestCommon { protected: std::vector<std::string> alexNetNames = { "in1", "mean", "conv1", "relu1", "norm1", "pool1", "conv2", "relu2", "norm2", "pool2", "conv3", "relu3", "conv4", "relu4", "conv5", "relu5", "pool5", "fc6", "relu6", "fc7", "relu7", "fc8", "prob", "sf_out" }; public: Builder::Network prepateAlexnetBuilder() { Context ctx; Builder::Network builder(ctx, "AlexNet"); idx_t layerId = builder.addLayer(Builder::InputLayer(alexNetNames[0]).setPort(Port({1,3, 227, 227}))); layerId = builder.addLayer({{layerId}}, Builder::ScaleShiftLayer(alexNetNames[1]).setBiases(generateBlob(Precision::FP32, {3}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ConvolutionLayer(alexNetNames[2]).setKernel({11, 11}).setStrides({4, 4}).setOutDepth(96) .setWeights(generateBlob(Precision::FP32, {96, 3, 11, 11}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {96}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ReLULayer(alexNetNames[3])); layerId = builder.addLayer({{layerId}}, Builder::NormLayer(alexNetNames[4]).setAlpha(9.999999747378752e-05f).setBeta(0.75f).setSize(5).setAcrossMaps(true)); layerId = builder.addLayer({{layerId}}, Builder::PoolingLayer(alexNetNames[5]).setExcludePad(false).setKernel({3, 3}).setPaddingsBegin({0, 0}) .setPaddingsEnd({0, 0}).setPoolingType(Builder::PoolingLayer::PoolingType::MAX).setStrides({2, 2})); layerId = builder.addLayer({{layerId}}, Builder::ConvolutionLayer(alexNetNames[6]).setKernel({5, 5}).setStrides({1, 1}).setOutDepth(256) .setPaddingsBegin({2, 2}).setPaddingsEnd({2, 2}).setGroup(2).setDilation({1, 1}) .setWeights(generateBlob(Precision::FP32, {96, 256, 5, 5}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {256}, 
Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ReLULayer(alexNetNames[7])); layerId = builder.addLayer({{layerId}}, Builder::NormLayer(alexNetNames[8]).setAlpha(9.999999747378752e-05f).setBeta(0.75f).setSize(5).setAcrossMaps(true)); layerId = builder.addLayer({{layerId}}, Builder::PoolingLayer(alexNetNames[9]).setExcludePad(false).setKernel({3, 3}).setPaddingsBegin({0, 0}) .setPaddingsEnd({0, 0}).setPoolingType(Builder::PoolingLayer::PoolingType::MAX).setStrides({2, 2})); layerId = builder.addLayer({{layerId}}, Builder::ConvolutionLayer(alexNetNames[10]).setKernel({3, 3}).setStrides({1, 1}).setOutDepth(384) .setPaddingsBegin({1, 1}).setPaddingsEnd({1, 1}).setGroup(1).setDilation({1, 1}) .setWeights(generateBlob(Precision::FP32, {256, 384, 3, 3}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {384}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ReLULayer(alexNetNames[11])); layerId = builder.addLayer({{layerId}}, Builder::ConvolutionLayer(alexNetNames[12]).setKernel({3, 3}).setStrides({1, 1}).setOutDepth(384) .setPaddingsBegin({1, 1}).setPaddingsEnd({1, 1}).setGroup(2).setDilation({1, 1}) .setWeights(generateBlob(Precision::FP32, {384, 384, 3, 3}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {384}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ReLULayer(alexNetNames[13])); layerId = builder.addLayer({{layerId}}, Builder::ConvolutionLayer(alexNetNames[14]).setKernel({3, 3}).setStrides({1, 1}).setOutDepth(256) .setPaddingsBegin({1, 1}).setPaddingsEnd({1, 1}).setGroup(2).setDilation({1, 1}) .setWeights(generateBlob(Precision::FP32, {256, 384, 3, 3}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {384}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ReLULayer(alexNetNames[15])); layerId = builder.addLayer({{layerId}}, Builder::PoolingLayer(alexNetNames[16]).setExcludePad(false).setKernel({3, 3}).setPaddingsBegin({0, 0}) .setPaddingsEnd({0, 
0}).setPoolingType(Builder::PoolingLayer::PoolingType::MAX).setStrides({2, 2})); layerId = builder.addLayer({{layerId}}, Builder::FullyConnectedLayer(alexNetNames[17]).setOutputNum(4096) .setWeights(generateBlob(Precision::FP32, {4096, 256, 6, 6}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {4096}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ReLULayer(alexNetNames[18])); layerId = builder.addLayer({{layerId}}, Builder::FullyConnectedLayer(alexNetNames[19]).setOutputNum(4096) .setWeights(generateBlob(Precision::FP32, {4096, 4096}, Layout::NC)) .setBiases(generateBlob(Precision::FP32, {4096}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::ReLULayer(alexNetNames[20])); layerId = builder.addLayer({{layerId}}, Builder::FullyConnectedLayer(alexNetNames[21]).setOutputNum(1000) .setWeights(generateBlob(Precision::FP32, {1000, 4096}, Layout::NC)) .setBiases(generateBlob(Precision::FP32, {1000}, Layout::C))); layerId = builder.addLayer({{layerId}}, Builder::SoftMaxLayer(alexNetNames[22]).setAxis(1)); idx_t outputId = builder.addLayer({PortInfo(layerId)}, Builder::OutputLayer(alexNetNames[23])); return builder; } const INetwork::Ptr createAlexnet() { return prepateAlexnetBuilder().build(); } void compareWithICNNNetwork(const INetwork& network, const ICNNNetwork& cnnNetwork) { for (const auto& layer : network) { auto connections = network.getLayerConnections(layer->getId()); CNNLayerPtr cnnLayer; StatusCode sts = cnnNetwork.getLayerByName(layer->getName().c_str(), cnnLayer, nullptr); if (sts != OK && layer->getType() == "Output") continue; else if (sts != OK) THROW_IE_EXCEPTION << "Cannot find CNNLayer by name: " << layer->getName(); // Output connections for (size_t i = 0; i < cnnLayer->outData.size(); i++) { for (const auto& it : cnnLayer->outData[i]->inputTo) { size_t j = 0; for (; j < it.second->insData.size(); j++) { auto lockedData = it.second->insData[j].lock(); if (lockedData && lockedData.get() == 
cnnLayer->outData[i].get()) { break; } } for (auto conIt = connections.begin(); conIt != connections.end(); conIt++) { if (conIt->from().layerId() == layer->getId() && conIt->from().portId() == i && network.getLayer(conIt->to().layerId())->getName() == it.second->name && conIt->to().portId() == j) { connections.erase(conIt); break; } } } } // Input connections for (size_t i = 0; i < cnnLayer->insData.size(); i++) { auto inData = cnnLayer->insData[i].lock(); if (!inData) continue; auto creatorLayer = inData->creatorLayer.lock(); if (!creatorLayer) continue; size_t j = 0; for (; j < creatorLayer->outData.size(); j++) { if (creatorLayer->outData[j] && creatorLayer->outData[j].get() == inData.get()) { break; } } for (auto conIt = connections.begin(); conIt != connections.end(); conIt++) { if (conIt->to().layerId() == layer->getId() && conIt->from().portId() == j && network.getLayer(conIt->from().layerId())->getName() == creatorLayer->name && conIt->to().portId() == i) { connections.erase(conIt); break; } } } if (connections.size() == 1 && network.getLayer(connections[0].to().layerId())->getType() == "Output") connections.erase(connections.begin()); if (!connections.empty()) THROW_IE_EXCEPTION << "Not all connections were connected."; } } void compareICNNNetworks(const ICNNNetwork& newNetwork, const ICNNNetwork& oldNetwork) { CNNNetwork network((ICNNNetwork*)&newNetwork); if (newNetwork.layerCount() != oldNetwork.layerCount()) THROW_IE_EXCEPTION << "ICNNNetworks have different numbers of layers!"; for (const auto& layer : network) { CNNLayerPtr oldLayer; StatusCode sts = oldNetwork.getLayerByName(layer->name.c_str(), oldLayer, nullptr); bool success = sts == OK && layer->name == oldLayer->name && layer->type == oldLayer->type && layer->insData.size() == oldLayer->insData.size() && layer->outData.size() == oldLayer->outData.size() && layer->precision == oldLayer->precision; for (size_t i = 0; i < layer->insData.size() && success; i++) { auto lockedOldData = 
oldLayer->insData[i].lock(); auto lockedData = layer->insData[i].lock(); success = success && lockedOldData->name == lockedData->name && lockedOldData->getTensorDesc() == lockedData->getTensorDesc(); } for (size_t i = 0; i < layer->outData.size() && success; i++) { success = success && oldLayer->outData[i]->name == layer->outData[i]->name && oldLayer->outData[i]->getTensorDesc() == layer->outData[i]->getTensorDesc(); } if (!success) THROW_IE_EXCEPTION << "ICNNNetworks have different layers!"; } InputsDataMap newInput; OutputsDataMap newOutput; newNetwork.getInputsInfo(newInput); newNetwork.getOutputsInfo(newOutput); InputsDataMap oldInput; OutputsDataMap oldOutput; oldNetwork.getInputsInfo(oldInput); oldNetwork.getOutputsInfo(oldOutput); bool success = newInput.size() == oldInput.size(); for (const auto& it : newInput) { if (!success) break; success = success && oldInput.find(it.first) != oldInput.end(); } if (!success) THROW_IE_EXCEPTION << "ICNNNetworks have different inputs!"; success = newOutput.size() == oldOutput.size(); for (const auto& it : newOutput) { if (!success) break; success = success && oldOutput.find(it.first) != oldOutput.end(); } if (!success) THROW_IE_EXCEPTION << "ICNNNetworks have different outputs!"; } }; TEST_F(NetworkBuilderTest, checkReshapeAlexNet) { std::map<std::string, std::vector<SizeVector>> inPorts = { {alexNetNames[0], {}}, {alexNetNames[1], {{1, 3, 227, 227}}}, {alexNetNames[2], {{1, 3, 227, 227}}}, {alexNetNames[3], {{1, 96, 55, 55}}}, {alexNetNames[4], {{1, 96, 55, 55}}}, {alexNetNames[5], {{1, 96, 55, 55}}}, {alexNetNames[6], {{1, 96, 27, 27}}}, {alexNetNames[7], {{1, 256, 27, 27}}}, {alexNetNames[8], {{1, 256, 27, 27}}}, {alexNetNames[9], {{1, 256, 27, 27}}}, {alexNetNames[10], {{1, 256, 13, 13}}}, {alexNetNames[11], {{1, 384, 13, 13}}}, {alexNetNames[12], {{1, 384, 13, 13}}}, {alexNetNames[13], {{1, 384, 13, 13}}}, {alexNetNames[14], {{1, 384, 13, 13}}}, {alexNetNames[15], {{1, 256, 13, 13}}}, {alexNetNames[16], {{1, 256, 13, 
13}}}, {alexNetNames[17], {{1, 256, 6, 6}}}, {alexNetNames[18], {{1, 4096}}}, {alexNetNames[19], {{1, 4096}}}, {alexNetNames[20], {{1, 4096}}}, {alexNetNames[21], {{1, 4096}}}, {alexNetNames[22], {{1, 1000}}}, {alexNetNames[23], {{1, 1000}}} }; std::map<std::string, std::vector<SizeVector>> outPorts = { {alexNetNames[0], {{1, 3, 227, 227}}}, {alexNetNames[1], {{1, 3, 227, 227}}}, {alexNetNames[2], {{1, 96, 55, 55}}}, {alexNetNames[3], {{1, 96, 55, 55}}}, {alexNetNames[4], {{1, 96, 55, 55}}}, {alexNetNames[5], {{1, 96, 27, 27}}}, {alexNetNames[6], {{1, 256, 27, 27}}}, {alexNetNames[7], {{1, 256, 27, 27}}}, {alexNetNames[8], {{1, 256, 27, 27}}}, {alexNetNames[9], {{1, 256, 13, 13}}}, {alexNetNames[10], {{1, 384, 13, 13}}}, {alexNetNames[11], {{1, 384, 13, 13}}}, {alexNetNames[12], {{1, 384, 13, 13}}}, {alexNetNames[13], {{1, 384, 13, 13}}}, {alexNetNames[14], {{1, 256, 13, 13}}}, {alexNetNames[15], {{1, 256, 13, 13}}}, {alexNetNames[16], {{1, 256, 6, 6}}}, {alexNetNames[17], {{1, 4096}}}, {alexNetNames[18], {{1, 4096}}}, {alexNetNames[19], {{1, 4096}}}, {alexNetNames[20], {{1, 4096}}}, {alexNetNames[21], {{1, 1000}}}, {alexNetNames[22], {{1, 1000}}}, {alexNetNames[23], {}} }; Builder::Network builder = prepateAlexnetBuilder(); for (const auto &layer : builder.getLayers()) { if (layer.getType() == "Input") { ASSERT_EQ(outPorts[layer.getName()][0], layer.getOutputPorts()[0].shape()); } else { for (size_t j = 0; j < layer.getOutputPorts().size(); j++) { ASSERT_TRUE(layer.getOutputPorts()[j].shape().empty()); } } } INetwork::Ptr graph; ASSERT_NO_THROW(graph = builder.build()); for (const auto &layer : *graph) { for (size_t i = 0; i < layer->getInputPorts().size(); i++) { ASSERT_EQ(inPorts[layer->getName()][i], layer->getInputPorts()[i].shape()); } for (size_t i = 0; i < layer->getOutputPorts().size(); i++) { ASSERT_EQ(outPorts[layer->getName()][i], layer->getOutputPorts()[i].shape()); } } } TEST_F(NetworkBuilderTest, checkNoImplWithCorrectPorts) { Context ctx; 
Builder::Network builder(ctx, "TestAlexNet"); idx_t inId = builder.addLayer(Builder::InputLayer(alexNetNames[0]).setPort(Port({1,3, 227, 227}))); idx_t convId = builder.addLayer({{inId}}, Builder::ConvolutionLayer(alexNetNames[2]).setKernel({11, 11}).setStrides({4, 4}).setOutDepth(96) .setInputPort(Port({1,3, 227, 227})).setOutputPort(Port({1, 96, 55, 55})) .setWeights(generateBlob(Precision::FP32, {96, 3, 11, 11}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {96}, Layout::C))); idx_t testLayerId = builder.addLayer({PortInfo(convId)}, Builder::Layer("TestLayer", "testPort") .setInputPorts({Port({1, 96, 55, 55})}).setOutputPorts({Port({1, 96, 55, 55})})); idx_t outputId = builder.addLayer({PortInfo(testLayerId)}, Builder::OutputLayer("out").setPort({Port({1, 96, 55, 55})})); ASSERT_NO_THROW(builder.build()); } TEST_F(NetworkBuilderTest, checkNoImplWithIncorrectPorts) { Context ctx; Builder::Network builder(ctx, "TestAlexNet"); idx_t inId = builder.addLayer(Builder::InputLayer(alexNetNames[0]).setPort(Port({1,3, 227, 227}))); idx_t convId = builder.addLayer({{inId}}, Builder::ConvolutionLayer(alexNetNames[2]).setKernel({11, 11}).setStrides({4, 4}).setOutDepth(96) .setInputPort(Port({1,3, 227, 227})).setOutputPort(Port({1, 96, 55, 55})) .setWeights(generateBlob(Precision::FP32, {96, 3, 11, 11}, Layout::OIHW)) .setBiases(generateBlob(Precision::FP32, {96}, Layout::C))); idx_t testLayerId = builder.addLayer({PortInfo(convId)}, Builder::Layer("TestLayer", "testPort") .setInputPorts({Port({1, 3, 55, 55})}).setOutputPorts({Port({1, 96, 55, 55})})); ASSERT_THROW(builder.build(), InferenceEngine::details::InferenceEngineException); } TEST_F(NetworkBuilderTest, createNetworkIterator) { const INetwork::Ptr graph = createAlexnet(); ASSERT_NO_THROW(graph->begin()); } TEST_F(NetworkBuilderTest, checkNetworkSize) { const INetwork::Ptr graph = createAlexnet(); ASSERT_EQ(24, graph->size()); } TEST_F(NetworkBuilderTest, iterateNetworkForeach) { const INetwork::Ptr graph = 
createAlexnet(); size_t idx = 0; for (const auto& layer : *graph) { ASSERT_NE(idx, alexNetNames.size()); ASSERT_EQ(alexNetNames[idx], layer->getName()); idx++; } } TEST_F(NetworkBuilderTest, iterateNetworkFor) { const INetwork::Ptr graph = createAlexnet(); size_t idx = 0; for (auto it = graph->begin(); it != graph->end(); it++) { ASSERT_EQ(alexNetNames[idx], (*it)->getName()); idx++; } } TEST_F(NetworkBuilderTest, convertFromICNNNetwork) { std::string model = R"V0G0N( <net name="PVANET" version="2" batch="1"> <layers> <layer name="data" type="Input" precision="FP32" id="0"> <output> <port id="0"> <dim>1</dim> <dim>3</dim> <dim>544</dim> <dim>992</dim> </port> </output> </layer> <layer name="conv1_1_conv" type="Convolution" precision="FP32" id="2"> <convolution_data stride-x="2" stride-y="2" pad-x="3" pad-y="3" kernel-x="7" kernel-y="7" output="16" group="1"/> <input> <port id="2"> <dim>1</dim> <dim>3</dim> <dim>544</dim> <dim>992</dim> </port> </input> <output> <port id="3"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </output> <weights offset="0" size="9408"/> <biases offset="9408" size="64"/> </layer> <layer name="conv1_1_neg" type="Power" precision="FP32" id="3"> <power_data power="1" scale="-1" shift="0"/> <input> <port id="4"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="5"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> <layer name="conv1_1_concat" type="Concat" precision="FP32" id="4"> <concat_data axis="1"/> <input> <port id="6"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> <port id="7"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="8"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> <layer name="conv1_1_scale" type="ScaleShift" precision="FP32" id="5"> <input> <port id="9"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </input> 
<output> <port id="10"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </output> <weights offset="9472" size="128"/> <biases offset="9600" size="128"/> </layer> <layer name="conv1_1_relu" type="ReLU" precision="FP32" id="6"> <data negative_slope="0" engine="caffe.ReLUParameter.DEFAULT"/> <input> <port id="11"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="12"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> <layer name="pool1" type="Pooling" precision="FP32" id="7"> <pooling_data kernel-x="3" kernel-y="3" pad-x="0" pad-y="0" stride-x="2" stride-y="2" rounding-type="ceil" pool-method="max"/> <input> <port id="13"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="14"> <dim>1</dim> <dim>32</dim> <dim>136</dim> <dim>248</dim> </port> </output> </layer> </layers> <edges> <edge from-layer="0" from-port="0" to-layer="2" to-port="2"/> <edge from-layer="2" from-port="3" to-layer="3" to-port="4"/> <edge from-layer="2" from-port="3" to-layer="4" to-port="6"/> <edge from-layer="3" from-port="5" to-layer="4" to-port="7"/> <edge from-layer="4" from-port="8" to-layer="5" to-port="9"/> <edge from-layer="5" from-port="10" to-layer="6" to-port="11"/> <edge from-layer="6" from-port="12" to-layer="7" to-port="13"/> </edges> </net>)V0G0N"; InferenceEngine::CNNNetReader net_reader; ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length())); InferenceEngine::TBlob<uint8_t> *weights = new InferenceEngine::TBlob<uint8_t>(InferenceEngine::Precision::U8, InferenceEngine::C, {9728}); weights->allocate(); fill_data((float *) weights->buffer(), weights->size() / sizeof(float)); InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights); net_reader.SetWeights(weights_ptr); INetwork::Ptr network = Builder::Network(net_reader.getNetwork()).build(); try { compareWithICNNNetwork(*network, 
net_reader.getNetwork()); } catch (InferenceEngine::details::InferenceEngineException &ex) { FAIL() << ex.what(); } } TEST_F(NetworkBuilderTest, convertFromICNNNetworkToICNNNetwork) { std::string model = R"V0G0N( <net name="PVANET" version="2" batch="1"> <layers> <layer name="data" type="Input" precision="FP32" id="0"> <output> <port id="0"> <dim>1</dim> <dim>3</dim> <dim>544</dim> <dim>992</dim> </port> </output> </layer> <layer name="conv1_1_conv" type="Convolution" precision="FP32" id="2"> <convolution_data stride-x="2" stride-y="2" pad-x="3" pad-y="3" kernel-x="7" kernel-y="7" output="16" group="1"/> <input> <port id="2"> <dim>1</dim> <dim>3</dim> <dim>544</dim> <dim>992</dim> </port> </input> <output> <port id="3"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </output> <weights offset="0" size="9408"/> <biases offset="9408" size="64"/> </layer> <layer name="conv1_1_neg" type="Power" precision="FP32" id="3"> <power_data power="1" scale="-1" shift="0"/> <input> <port id="4"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="5"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> <layer name="conv1_1_concat" type="Concat" precision="FP32" id="4"> <concat_data axis="1"/> <input> <port id="6"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> <port id="7"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="8"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> <layer name="conv1_1_scale" type="ScaleShift" precision="FP32" id="5"> <input> <port id="9"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="10"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </output> <weights offset="9472" size="128"/> <biases offset="9600" size="128"/> </layer> <layer name="conv1_1_relu" type="ReLU" precision="FP32" id="6"> <data 
negative_slope="0" engine="caffe.ReLUParameter.DEFAULT"/> <input> <port id="11"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="12"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> <layer name="pool1" type="Pooling" precision="FP32" id="7"> <pooling_data kernel-x="3" kernel-y="3" pad-x="0" pad-y="0" stride-x="2" stride-y="2" rounding-type="ceil" pool-method="max"/> <input> <port id="13"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="14"> <dim>1</dim> <dim>32</dim> <dim>136</dim> <dim>248</dim> </port> </output> </layer> </layers> <edges> <edge from-layer="0" from-port="0" to-layer="2" to-port="2"/> <edge from-layer="2" from-port="3" to-layer="3" to-port="4"/> <edge from-layer="2" from-port="3" to-layer="4" to-port="6"/> <edge from-layer="3" from-port="5" to-layer="4" to-port="7"/> <edge from-layer="4" from-port="8" to-layer="5" to-port="9"/> <edge from-layer="5" from-port="10" to-layer="6" to-port="11"/> <edge from-layer="6" from-port="12" to-layer="7" to-port="13"/> </edges> </net>)V0G0N"; InferenceEngine::CNNNetReader net_reader; ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length())); InferenceEngine::TBlob<uint8_t> *weights = new InferenceEngine::TBlob<uint8_t>(InferenceEngine::Precision::U8, InferenceEngine::C, {9728}); weights->allocate(); fill_data((float *) weights->buffer(), weights->size() / sizeof(float)); InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights); net_reader.SetWeights(weights_ptr); std::shared_ptr<ICNNNetwork> network = Builder::convertToICNNNetwork(Builder::Network(net_reader.getNetwork()).build()); try { compareICNNNetworks(*network, net_reader.getNetwork()); } catch (InferenceEngine::details::InferenceEngineException &ex) { FAIL() << ex.what(); } } TEST_F(NetworkBuilderTest, connectTwoNetworks) { std::string model = R"V0G0N( <net name="PVANET" version="2" 
batch="1"> <layers> <layer name="data" type="Input" precision="FP32" id="0"> <output> <port id="0"> <dim>1</dim> <dim>3</dim> <dim>544</dim> <dim>992</dim> </port> </output> </layer> <layer name="conv1_1_conv" type="Convolution" precision="FP32" id="2"> <convolution_data stride-x="2" stride-y="2" pad-x="3" pad-y="3" pad-r="3" pad-b="3" kernel-x="7" kernel-y="7" output="16" group="1"/> <input> <port id="2"> <dim>1</dim> <dim>3</dim> <dim>544</dim> <dim>992</dim> </port> </input> <output> <port id="3"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </output> <weights offset="0" size="9408"/> <biases offset="9408" size="64"/> </layer> <layer name="conv1_1_neg" type="Power" precision="FP32" id="3"> <power_data power="1" scale="-1" shift="0"/> <input> <port id="4"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="5"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> <layer name="conv1_1_concat" type="Concat" precision="FP32" id="4"> <concat_data axis="1"/> <input> <port id="6"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> <port id="7"> <dim>1</dim> <dim>16</dim> <dim>272</dim> <dim>496</dim> </port> </input> <output> <port id="8"> <dim>1</dim> <dim>32</dim> <dim>272</dim> <dim>496</dim> </port> </output> </layer> </layers> <edges> <edge from-layer="0" from-port="0" to-layer="2" to-port="2"/> <edge from-layer="2" from-port="3" to-layer="3" to-port="4"/> <edge from-layer="2" from-port="3" to-layer="4" to-port="6"/> <edge from-layer="3" from-port="5" to-layer="4" to-port="7"/> </edges> </net>)V0G0N"; InferenceEngine::CNNNetReader net_reader; ASSERT_NO_THROW(net_reader.ReadNetwork(model.data(), model.length())); InferenceEngine::TBlob<uint8_t> *weights = new InferenceEngine::TBlob<uint8_t>(InferenceEngine::Precision::U8, InferenceEngine::C, {9472}); weights->allocate(); fill_data((float *) weights->buffer(), weights->size() / sizeof(float)); 
InferenceEngine::TBlob<uint8_t>::Ptr weights_ptr = InferenceEngine::TBlob<uint8_t>::Ptr(weights); net_reader.SetWeights(weights_ptr); Builder::Network originalNetwork(net_reader.getNetwork()); Builder::Network addNetwork(net_reader.getNetwork()); // Find output idx_t lastLayerId(0); for (const auto& layer : originalNetwork.getLayers()) { if (layer.getType() != "Output") continue; const auto connections = originalNetwork.getLayerConnections(layer.getId()); ASSERT_EQ(1, connections.size()); ASSERT_EQ(layer.getId(), connections[0].to().layerId()); ASSERT_EQ(0, connections[0].from().portId()); lastLayerId = connections[0].from().layerId(); originalNetwork.disconnect(connections[0]); originalNetwork.removeLayer(layer.getId()); break; } std::map<idx_t, idx_t> oldNewId; for (const auto& layer : addNetwork.getLayers()) { if (layer.getType() == "Input") { oldNewId[layer.getId()] = lastLayerId; continue; } oldNewId[layer.getId()] = originalNetwork.addLayer(layer); const auto connections = addNetwork.getLayerConnections(layer.getId()); for (const auto& connection : connections) { if (oldNewId.find(connection.from().layerId()) == oldNewId.end() || oldNewId.find(connection.to().layerId()) == oldNewId.end()) continue; originalNetwork.connect({oldNewId[connection.from().layerId()], connection.from().portId()}, {oldNewId[connection.to().layerId()], connection.to().portId()}); } if (layer.getType() == "Convolution") { Builder::ConvolutionLayer(originalNetwork.getLayer(oldNewId[layer.getId()])).setWeights(generateBlob(Precision::FP32, {16, 32, 7, 7}, Layout::OIHW)); } } ASSERT_NO_THROW(originalNetwork.build()); } TEST_F(NetworkBuilderTest, createLayersWithTheSameNames) { InferenceEngine::Builder::Network netBuilder(""); // Connect conolutional layer with it's inputs and outputs. 
InferenceEngine::Builder::InputLayer inpLayer("data"); inpLayer.setPort(InferenceEngine::Port({1, 1, 10, 10})); auto inpLayerId = netBuilder.addLayer(inpLayer); // Create convolutional layer const size_t outCn = 1, inpCn = 1, kernelH = 3, kernelW = 3; InferenceEngine::Builder::ConvolutionLayer ieLayer("conv1"); ieLayer.setKernel({outCn, inpCn, kernelH, kernelW}); ieLayer.setStrides({1, 1, 1, 1}); ieLayer.setDilation({1, 1, 1, 1}); ieLayer.setPaddingsBegin({0, 0, 0, 0}); ieLayer.setPaddingsEnd({0, 0, 0, 0}); ieLayer.setGroup(1); ieLayer.setOutDepth(outCn); auto convLayerId = netBuilder.addLayer({inpLayerId}, ieLayer); // Connect convolution layer with it's output InferenceEngine::Builder::OutputLayer outLayer("conv1"); auto convOutLayerId = netBuilder.addLayer({convLayerId}, outLayer); ASSERT_NE(netBuilder.getLayer(convLayerId).getName(), netBuilder.getLayer(convOutLayerId).getName()); InferenceEngine::Builder::ReLULayer reLULayer("relu1"); reLULayer.setNegativeSlope(0); auto reluLayerId = netBuilder.addLayer({convLayerId}, reLULayer); InferenceEngine::Builder::OutputLayer outReLULayer("relu1"); auto reluOutLayerId = netBuilder.addLayer({reluLayerId}, outReLULayer); ASSERT_NE(netBuilder.getLayer(reluLayerId).getName(), netBuilder.getLayer(reluOutLayerId).getName()); ASSERT_NO_THROW(netBuilder.build()); } TEST_F(NetworkBuilderTest, RemoveLayerAndBuild) { auto builder = prepateAlexnetBuilder(); builder.removeLayer(builder.getLayers()[2].getId()); ASSERT_THROW(builder.build(), InferenceEngine::details::InferenceEngineException); } TEST_F(NetworkBuilderTest, DocumentationExample) { // Create graph with name InferenceEngine::Builder::Network graph("Example1"); // Create network // In-place add input layer idx_t inputLayerId = graph.addLayer(Builder::InputLayer("in").setPort(Port({1, 3, 22, 22}))); // In-place add ReLU layer builder with a negative slope 0.1 and connect it with 0 output port of the Input layer builder // In this example layerId is equal new Input layer 
builder ID, port index isn't set because 0 is a default value ({layerId} == {layerId, 0}) idx_t relu1Id = graph.addLayer({{inputLayerId}}, Builder::ReLULayer("relu1").setNegativeSlope(0.1f)); // In-place add ScaleShift layer builder InferenceEngine::Blob::Ptr blobWithScaleShiftBiases = make_shared_blob<float>(TensorDesc(Precision::FP32, {3}, Layout::C)); blobWithScaleShiftBiases->allocate(); auto *data = blobWithScaleShiftBiases->buffer().as<float *>(); data[0] = 1; data[1] = 2; data[2] = 3; idx_t scaleShiftId = graph.addLayer(Builder::ScaleShiftLayer("scaleShift1").setBiases(blobWithScaleShiftBiases)); // Connect ScaleShift layer with relu1 graph.connect({relu1Id}, {scaleShiftId}); // Also port indexes could be defined (0 is default value) builder.connect({layerId, outPortIdx}, {scaleShiftId, inPortIdx}); // Create ReLU layer with a negative slope 0.2 using generic layer builder and connect it with scaleShift idx_t relu2Id = graph.addLayer({{scaleShiftId}}, Builder::Layer("ReLU", "relu2").setParameters({{"negative_slope", 0.2f}}).setOutputPorts({Port()}).setInputPorts({Port()})); // All branches in the graph should be ended by Output layer. Let's create Output layer idx_t outId = graph.addLayer({{relu2Id, 0}}, Builder::OutputLayer("out")); // Build original network InferenceEngine::INetwork::Ptr finalNetwork = graph.build(); std::shared_ptr<InferenceEngine::ICNNNetwork> cnnNetwork = InferenceEngine::Builder::convertToICNNNetwork(finalNetwork); // Modify network // Remove relu2 layer from the topology std::vector<InferenceEngine::Connection> connections = graph.getLayerConnections(relu2Id); for (const auto& connection : connections) { graph.disconnect(connection); } graph.removeLayer(relu2Id); // Connect scaleShift1 and out graph.connect({scaleShiftId}, {outId}); // Build network without relu2 InferenceEngine::INetwork::Ptr changedNetwork = graph.build(); }
EkberAziz/Data_Structures-CSE_222-
HW01_CourseAutomationSystem/AIKEBOER_AIZEZI_131044086_HW01/src/test/java/com/mycompany/aikeboer_aizezi_131044086_hw01/FileTest.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.mycompany.aikeboer_aizezi_131044086_hw01; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; /** * * @author <NAME> */ public class FileTest { public FileTest() { } @BeforeClass public static void setUpClass() { } @AfterClass public static void tearDownClass() { } @Before public void setUp() { } @After public void tearDown() { } /** * Test of setSize method, of class File. */ @Test public void testSetSize() { System.out.println("setSize"); int size = 0; File instance = new File(); instance.setSize(size); // TODO review the generated test code and remove the default call to fail. fail("The test case is a prototype."); } /** * Test of setType method, of class File. */ @Test public void testSetType() { System.out.println("setType"); String type = ""; File instance = new File(); instance.setType(type); // TODO review the generated test code and remove the default call to fail. fail("The test case is a prototype."); } /** * Test of getSize method, of class File. */ @Test public void testGetSize() { System.out.println("getSize"); File instance = new File(); int expResult = 0; int result = instance.getSize(); assertEquals(expResult, result); // TODO review the generated test code and remove the default call to fail. fail("The test case is a prototype."); } /** * Test of getType method, of class File. */ @Test public void testGetType() { System.out.println("getType"); File instance = new File(); String expResult = ""; String result = instance.getType(); assertEquals(expResult, result); // TODO review the generated test code and remove the default call to fail. fail("The test case is a prototype."); } /** * Test of toString method, of class File. 
*/ @Test public void testToString() { System.out.println("toString"); File instance = new File(); String expResult = ""; String result = instance.toString(); assertEquals(expResult, result); // TODO review the generated test code and remove the default call to fail. fail("The test case is a prototype."); } }
GuoSuiming/mindspore
mindspore/nn/layer/combined.py
<filename>mindspore/nn/layer/combined.py
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Combined cells: convenience wrappers that fuse a compute layer with an
optional BatchNorm and an optional activation into a single Cell."""

from mindspore import nn
from mindspore.ops.primitive import Primitive
from mindspore._checkparam import Validator
from .normalization import BatchNorm2d, BatchNorm1d
from .activation import get_activation, LeakyReLU
from ..cell import Cell

__all__ = [
    'Conv2dBnAct',
    'DenseBnAct'
]


class Conv2dBnAct(Cell):
    r"""
    A combination of convolution, Batchnorm, and activation layer.

    This part is a more detailed overview of Conv2d operation.

    Args:
        in_channels (int): The number of input channel :math:`C_{in}`.
        out_channels (int): The number of output channel :math:`C_{out}`.
        kernel_size (Union[int, tuple]): The data type is int or a tuple of 2 integers. Specifies the height
            and width of the 2D convolution window. Single int means the value is for both height and width of
            the kernel. A tuple of 2 ints means the first value is for the height and the other is for the
            width of the kernel.
        stride (int): Specifies stride for all spatial dimensions with the same value. The value of stride must be
            greater than or equal to 1 and lower than any one of the height and width of the input. Default: 1.
        pad_mode (str): Specifies padding mode. The optional values are "same", "valid", "pad". Default: "same".
        padding (int): Implicit paddings on both sides of the input. Default: 0.
        dilation (int): Specifies the dilation rate to use for dilated convolution. If set to be :math:`k > 1`,
            there will be :math:`k - 1` pixels skipped for each sampling location. Its value must be greater than
            or equal to 1 and lower than any one of the height and width of the input. Default: 1.
        group (int): Splits filter into groups, `in_channels` and `out_channels` must be
            divisible by the number of groups. Default: 1.
        has_bias (bool): Specifies whether the layer uses a bias vector. Default: False.
        weight_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the convolution kernel.
            It can be a Tensor, a string, an Initializer or a number. When a string is specified,
            values from 'TruncatedNormal', 'Normal', 'Uniform', 'HeUniform' and 'XavierUniform' distributions as well
            as constant 'One' and 'Zero' distributions are possible. Alias 'xavier_uniform', 'he_uniform', 'ones'
            and 'zeros' are acceptable. Uppercase and lowercase are both acceptable. Refer to the values of
            Initializer for more details. Default: 'normal'.
        bias_init (Union[Tensor, str, Initializer, numbers.Number]): Initializer for the bias vector. Possible
            Initializer and string are the same as 'weight_init'. Refer to the values of
            Initializer for more details. Default: 'zeros'.
        has_bn (bool): Specifies whether to use batchnorm or not. Default: False.
        momentum (float): Momentum for moving average for batchnorm, must be [0, 1]. Default: 0.997.
        eps (float): Term added to the denominator to improve numerical stability for batchnorm, should be greater
            than 0. Default: 1e-5.
        activation (Union[str, Cell, Primitive]): Specifies activation type. The optional values are as following:
            'softmax', 'logsoftmax', 'relu', 'relu6', 'tanh', 'gelu', 'sigmoid', 'prelu', 'leakyrelu', 'hswish',
            'hsigmoid'. Default: None.
        alpha (float): Slope of the activation function at x < 0 for LeakyReLU. Default: 0.2.
        after_fake(bool): Determine whether there must be a fake quantization operation after Conv2dBnAct.
            Default: True.

    Inputs:
        - **input** (Tensor) - Tensor of shape :math:`(N, C_{in}, H_{in}, W_{in})`.

    Outputs:
        Tensor of shape :math:`(N, C_{out}, H_{out}, W_{out})`.

    Supported Platforms:
        ``Ascend`` ``GPU``

    Examples:
        >>> net = nn.Conv2dBnAct(120, 240, 4, has_bn=True, activation='relu')
        >>> input = Tensor(np.ones([1, 120, 1024, 640]), mindspore.float32)
        >>> result = net(input)
        >>> output = result.shape
        >>> print(output)
        (1, 240, 1024, 640)
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 pad_mode='same',
                 padding=0,
                 dilation=1,
                 group=1,
                 has_bias=False,
                 weight_init='normal',
                 bias_init='zeros',
                 has_bn=False,
                 momentum=0.997,
                 eps=1e-5,
                 activation=None,
                 alpha=0.2,
                 after_fake=True):
        super(Conv2dBnAct, self).__init__()

        self.conv = nn.Conv2d(in_channels,
                              out_channels,
                              kernel_size=kernel_size,
                              stride=stride,
                              pad_mode=pad_mode,
                              padding=padding,
                              dilation=dilation,
                              group=group,
                              has_bias=has_bias,
                              weight_init=weight_init,
                              bias_init=bias_init)
        self.has_bn = Validator.check_bool(has_bn, "has_bn")
        self.has_act = activation is not None
        # NOTE(review): after_fake is validated and stored but never read in
        # this file; presumably consumed by quantization export — TODO confirm.
        self.after_fake = Validator.check_bool(after_fake, "after_fake")
        if has_bn:
            self.batchnorm = BatchNorm2d(out_channels, eps, momentum)
        # "leakyrelu" needs the extra alpha argument, so it cannot go through
        # the generic get_activation() lookup used for the other string names.
        if activation == "leakyrelu":
            self.activation = LeakyReLU(alpha)
        else:
            self.activation = get_activation(activation) if isinstance(activation, str) else activation
        if activation is not None and not isinstance(self.activation, (Cell, Primitive)):
            raise TypeError("The activation must be str or Cell or Primitive,"" but got {}.".format(activation))

    def construct(self, x):
        # conv -> (optional) batchnorm -> (optional) activation
        x = self.conv(x)
        if self.has_bn:
            x = self.batchnorm(x)
        if self.has_act:
            x = self.activation(x)
        return x


class DenseBnAct(Cell):
    r"""
    A combination of Dense, Batchnorm, and the activation layer.

    This part is a more detailed overview of Dense op.

    Args:
        in_channels (int): The number of channels in the input space.
        out_channels (int): The number of channels in the output space.
        weight_init (Union[Tensor, str, Initializer, numbers.Number]): The trainable weight_init parameter. The dtype
            is same as input. The values of str refer to the function `initializer`. Default: 'normal'.
        bias_init (Union[Tensor, str, Initializer, numbers.Number]): The trainable bias_init parameter. The dtype is
            same as input. The values of str refer to the function `initializer`. Default: 'zeros'.
        has_bias (bool): Specifies whether the layer uses a bias vector. Default: True.
        has_bn (bool): Specifies whether to use batchnorm or not. Default: False.
        momentum (float): Momentum for moving average for batchnorm, must be [0, 1]. Default: 0.9.
        eps (float): Term added to the denominator to improve numerical stability for batchnorm, should be greater
            than 0. Default: 1e-5.
        activation (Union[str, Cell, Primitive]): Specifies activation type. The optional values are as following:
            'softmax', 'logsoftmax', 'relu', 'relu6', 'tanh', 'gelu', 'sigmoid', 'prelu', 'leakyrelu', 'hswish',
            'hsigmoid'. Default: None.
        alpha (float): Slope of the activation function at x < 0 for LeakyReLU. Default: 0.2.
        after_fake(bool): Determine whether there must be a fake quantization operation after DenseBnAct.
            Default: True.

    Inputs:
        - **input** (Tensor) - Tensor of shape :math:`(N, in\_channels)`.

    Outputs:
        Tensor of shape :math:`(N, out\_channels)`.

    Supported Platforms:
        ``Ascend``

    Examples:
        >>> net = nn.DenseBnAct(3, 4)
        >>> input = Tensor(np.random.randint(0, 255, [2, 3]), mindspore.float32)
        >>> result = net(input)
        >>> output = result.shape
        >>> print(output)
        (2, 4)
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 weight_init='normal',
                 bias_init='zeros',
                 has_bias=True,
                 has_bn=False,
                 momentum=0.9,
                 eps=1e-5,
                 activation=None,
                 alpha=0.2,
                 after_fake=True):
        super(DenseBnAct, self).__init__()
        self.dense = nn.Dense(
            in_channels,
            out_channels,
            weight_init,
            bias_init,
            has_bias)
        self.has_bn = Validator.check_bool(has_bn, "has_bn")
        self.has_act = activation is not None
        # NOTE(review): stored but unused here, mirroring Conv2dBnAct — TODO confirm consumer.
        self.after_fake = Validator.check_bool(after_fake, "after_fake")
        if has_bn:
            self.batchnorm = BatchNorm1d(out_channels, eps, momentum)
        # Same special case as Conv2dBnAct: LeakyReLU needs alpha.
        if activation == "leakyrelu":
            self.activation = LeakyReLU(alpha)
        else:
            self.activation = get_activation(activation) if isinstance(activation, str) else activation
        if activation is not None and not isinstance(self.activation, (Cell, Primitive)):
            raise TypeError("The activation must be str or Cell or Primitive,"" but got {}.".format(activation))

    def construct(self, x):
        # dense -> (optional) batchnorm -> (optional) activation
        x = self.dense(x)
        if self.has_bn:
            x = self.batchnorm(x)
        if self.has_act:
            x = self.activation(x)
        return x
porenes-ledger/ledger-live-common
src/families/algorand/tokens.js
// @flow export const addPrefixToken = (tokenId: string) => `algorand/asa/${tokenId}`; export const extractTokenId = (tokenId: string) => { return tokenId.split("/")[2]; };
clayne/Containers
libraries/datastruct/hash/lookup.c
/* -------------------------------------------------------------------------- * Name: lookup.c * Purpose: Associative array implemented as a hash * ----------------------------------------------------------------------- */ #include <stdlib.h> #include "datastruct/hash.h" #include "impl.h" const void *hash_lookup(hash_t *h, const void *key) { hash__node_t **n; n = hash_lookup_node(h, key); return (*n != NULL) ? (*n)->item.value : h->default_value; }
camrun91/apollos-apps
packages/apollos-ui-kit/src/ActionCard/ActionCard.stories.js
import React from 'react'; import { storiesOf } from '@apollosproject/ui-storybook'; import CenteredView from '../CenteredView'; import BackgroundView from '../BackgroundView'; import Chip from '../Chip'; import Icon from '../Icon'; import { BodyText } from '../typography'; import ActionCard from './index'; storiesOf('ui-kit/ActionCard', module) .addDecorator((story) => ( <BackgroundView> {/* eslint-disable-next-line react-native/no-inline-styles */} <CenteredView style={{ alignItems: 'stretch' }}>{story()}</CenteredView> </BackgroundView> )) .add('default', () => ( <ActionCard> <BodyText> “Mauris non tempor quam, et lacinia sapien. Mauris accumsan eros eget libero posuere vulputate. Etiam elit elitbibendum.” </BodyText> </ActionCard> )) .add('icon + label', () => ( <ActionCard icon={'text'} label={'Key Idea'}> <BodyText> “Mauris non tempor quam, et lacinia sapien. Mauris accumsan eros eget libero posuere vulputate. Etiam elit elitbibendum.” </BodyText> </ActionCard> )) .add('action', () => ( <ActionCard action={ <Chip title={'Share'}> <Icon name="share" /> </Chip> } > <BodyText> “Mauris non tempor quam, et lacinia sapien. Mauris accumsan eros eget libero posuere vulputate. Etiam elit elitbibendum.” </BodyText> </ActionCard> ));
guinardpaul/MERN-MP-Comp-tences
client/src/components/UI/Table/TableItems.js
import React from 'react'; /** Not Used */ const tableItems = props => <td>{props.data}</td>; export default tableItems;
tech-advantage/edc-client-java
src/main/java/fr/techad/edc/client/internal/DocumentationManagerImpl.java
/*
 * Copyright (c) 2017. All rights reserved
 */
package fr.techad.edc.client.internal;

import fr.techad.edc.client.DocumentationManager;
import fr.techad.edc.client.io.EdcReader;
import fr.techad.edc.client.model.ContextItem;
import fr.techad.edc.client.model.InvalidUrlException;
import fr.techad.edc.client.util.KeyUtil;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.inject.Inject;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * TECH ADVANTAGE
 * All right reserved
 * Created by cochon on 19/06/2017.
 *
 * Default {@link DocumentationManager}: lazily loads the context map from an
 * {@link EdcReader} and resolves {@link ContextItem}s by (mainKey, subKey,
 * language) with a fallback to each publication's default language.
 */
public class DocumentationManagerImpl implements DocumentationManager {
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentationManagerImpl.class);
    // Source of the context definitions; read once and cached in 'contexts'.
    private EdcReader reader;
    // Builds/parses the composite lookup keys (mainKey + subKey + language).
    private KeyUtil keyUtil;
    // Lazily populated cache; null means "not loaded yet" (see loadContext/forceReload).
    private Map<String, ContextItem> contexts;

    @Inject
    public DocumentationManagerImpl(EdcReader reader, KeyUtil keyBuilder) {
        this.reader = reader;
        this.keyUtil = keyBuilder;
    }

    /**
     * Look up the context item for the given keys and language. When no item
     * exists for the requested (non-blank) language, falls back to the item in
     * the owning publication's default language. May return null when neither
     * exists.
     */
    @Override
    public ContextItem getContext(String mainKey, String subKey, String languageCode, Map<String, String> defaultLanguages) throws IOException, InvalidUrlException {
        LOGGER.debug("Get Context item with mainkey: {}, subKey: {}, languageCode:{}", mainKey, subKey, languageCode);
        loadContext();
        ContextItem contextItem = contexts.get(keyUtil.getKey(mainKey, subKey, languageCode));
        LOGGER.debug("Context item with mainkey: {}, subKey: {}, languageCode:{}", mainKey, subKey, languageCode);
        if (contextItem == null && StringUtils.isNotBlank(languageCode)) {
            LOGGER.debug("Context item was null, getting from defaultLanguages:{}", defaultLanguages);
            contextItem = this.findDefaultContextItem(mainKey, subKey, defaultLanguages);
        }
        LOGGER.debug("Returning context item {}", contextItem);
        return contextItem;
    }

    /** Drop the cached context map so the next lookup re-reads it. */
    @Override
    public void forceReload() {
        LOGGER.debug("Force reload on next call");
        contexts = null;
    }

    /** Populate the context cache on first use (no-op when already loaded). */
    @Override
    public void loadContext() throws IOException, InvalidUrlException {
        if (contexts == null) {
            LOGGER.debug("No contexts defined, read it");
            contexts = reader.readContext();
        }
    }

    /**
     * Return the context item in the default language for the given keys.
     * Will find the publication from the key and subKey, identify its default language,
     * and then return the context item corresponding to that language.
     *
     * @param mainKey the mainKey
     * @param subKey the subKey
     * @param defaultLangCodes a map containing the publication id as key and default language code as value
     * @return the context item in the default language of the publication
     */
    private ContextItem findDefaultContextItem(String mainKey, String subKey, Map<String, String> defaultLangCodes) {
        ContextItem defaultContext = null;
        // Find the item corresponding to the mainKey and subKey
        Set<ContextItem> presentItems = contexts.entrySet().stream()
                .filter(e -> keyUtil.containsKey(e.getKey(), mainKey, subKey))
                .map(Map.Entry::getValue)
                .collect(Collectors.toSet());
        if (!presentItems.isEmpty()) {
            // All matches belong to the same publication, so any item's
            // publication id identifies the default language to use.
            String exportId = presentItems.stream().map(ContextItem::getPublicationId).findFirst().orElse("");
            String defaultLang = defaultLangCodes.get(exportId);
            // Find the context item in the default language
            defaultContext = contexts.get(keyUtil.getKey(mainKey, subKey, defaultLang));
        }
        return defaultContext;
    }
}
wangsun1983/Obotcha
io/FileInputStream.cpp
/**
 * @file FileInputStream.cpp
 * @brief FileInputStream obtains input bytes from a file in a file system.
 * @details none
 * @mainpage none
 * @author sunli.wang
 * @email <EMAIL>
 * @version 0.0.1
 * @date 2019-07-12
 * @license none
 */

#include "FileInputStream.hpp"
#include "FileNotFoundException.hpp"
#include "Log.hpp"

#include <iostream>

namespace obotcha {

// Construct from a File object; delegates to the path-based constructor.
_FileInputStream::_FileInputStream(File f) : _FileInputStream(f->getAbsolutePath()) {}

// Construct from a C string path; delegates to the String-based constructor.
_FileInputStream::_FileInputStream(const char *path) : _FileInputStream(createString(path)) {}

// Construct from a path. The descriptor stays -1 until open() is called.
_FileInputStream::_FileInputStream(String path) {
    mPath = createString(path);
    this->fd = -1;
    isFdImport = false;
}

// Wrap an already-open descriptor. isFdImport marks it as caller-owned, so
// the destructor will not close it.
_FileInputStream::_FileInputStream(int fd) {
    mPath = nullptr;
    this->fd = fd;
    isFdImport = true;
}

// Read up to 'size' bytes from the current offset. Returns nullptr on EOF or
// error; on a short read the buffer is shrunk to the bytes actually read.
ByteArray _FileInputStream::read(int size) {
    ByteArray data = createByteArray(size);
    int length = ::read(fd, data->toValue(), data->size());
    if (length <= 0) {
        return nullptr;
    } else if (length < data->size()) {
        data->quickShrink(length);
    }
    return data;
}

// Absolute seek from the start of the file; returns the new offset, or -1
// (lseek's error value) on failure.
int _FileInputStream::seekTo(int index) {
    return lseek(fd, index, SEEK_SET);
}

// Read into buff at byte offset 'pos'. length == 0 means "use buff->size()".
// NOTE(review): when pos > 0 and length == 0, len is the FULL buffer size, so
// the read can write past the end of buff (buff + pos + size) — looks like it
// should be buff->size() - pos; TODO confirm with callers.
long _FileInputStream::readTo(ByteArray buff, int pos, int length) {
    int len = (length == 0) ? buff->size() : length;
    return ::read(fd, buff->toValue() + pos, len);
}

// Fill the whole buffer from the current offset; returns bytes read.
long _FileInputStream::read(ByteArray data) {
    return ::read(fd, data->toValue(), data->size());
}

// Fill the buffer from byte offset 'start' onward; returns bytes read.
long _FileInputStream::read(ByteArray data, int start) {
    return ::read(fd, &data->toValue()[start], data->size() - start);
}

// Read the remainder of a regular file. Returns nullptr for non-regular files
// or on fstat failure. Note: reads st_size bytes starting from the CURRENT
// offset (read(int) above), not necessarily from the beginning.
ByteArray _FileInputStream::readAll() {
    struct stat stbuf;
    if ((fstat(fd, &stbuf) != 0) || (!S_ISREG(stbuf.st_mode))) {
        return nullptr;
    }
    return this->read(stbuf.st_size);
}

// Open mPath read-only. Returns false when already open (fd >= 0) or when
// ::open fails.
bool _FileInputStream::open() {
    if (fd >= 0) {
        return false;
    }
    fd = ::open(mPath->toChars(), O_RDONLY);
    return (fd >= 0);
}

// Close the descriptor if open; safe to call repeatedly.
void _FileInputStream::close() {
    if (fd >= 0) {
        ::close(fd);
        fd = -1;
    }
}

// Rewind to the beginning of the file without closing it.
void _FileInputStream::reset() {
    if (fd >= 0) {
        lseek(fd, 0, SEEK_SET);
    }
}

_FileInputStream::~_FileInputStream() {
    //if fd is transfer by called function,no need to close.
    if(!isFdImport) {
        close();
    }
}

} // namespace obotcha
lechium/iOS1351Headers
System/Library/Frameworks/PassKit.framework/PKExtensionVendorContext.h
/*
* This header is generated by classdump-dyld 1.5
* on Friday, April 30, 2021 at 11:37:30 AM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/Frameworks/PassKit.framework/PassKit
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/

#import <PassKit/PKExtensionBaseContext.h>
#import <libobjc.A.dylib/PKExtensionVendorContextProtocol.h>

@class PKEntitlementWhitelist, NSString;

// Reverse-engineered (classdump) declarations only — no implementation is
// visible here, so method semantics below are inferred from names and should
// be treated as unverified.
@interface PKExtensionVendorContext : PKExtensionBaseContext <PKExtensionVendorContextProtocol> {

	PKEntitlementWhitelist* _whitelist;

}

// NSObject-protocol properties required by the adopted protocol.
@property (readonly) unsigned long long hash; 
@property (readonly) Class superclass; 
@property (copy,readonly) NSString * description; 
@property (copy,readonly) NSString * debugDescription; 
-(id)hostContext;
-(void)handleHostApplicationWillResignActive:(BOOL)arg1 ;
-(void)handleHostApplicationDidBecomeActive;
-(void)authorizationDidRequestMerchantSessionCompleteWithSession:(id)arg1 error:(id)arg2 ;
-(void)authorizationDidAuthorizePaymentCompleteWithResult:(id)arg1 ;
-(void)authorizationDidAuthorizePurchaseCompleteWithStatus:(long long)arg1 ;
-(void)authorizationDidAuthorizePeerPaymentQuoteCompleteWithResult:(id)arg1 ;
-(void)authorizationDidAuthorizeDisbursementWithResult:(id)arg1 ;
-(void)authorizationDidAuthorizeApplePayTrustSignatureCompleteWithResult:(id)arg1 ;
-(void)authorizationDidUpdateAccountServicePaymentMethodCompleteWithUpdate:(id)arg1 signatureRequest:(id)arg2 ;
-(void)authorizationDidSelectShippingMethodCompleteWithUpdate:(id)arg1 ;
-(void)authorizationDidSelectShippingAddressCompleteWithUpdate:(id)arg1 ;
-(void)authorizationDidSelectPaymentMethodCompleteWithUpdate:(id)arg1 ;
-(void)handleHostApplicationDidCancel;
-(void)handleDismissWithCompletion:(/*^block*/id)arg1 ;
-(void)prepareWithPaymentRequest:(id)arg1 completion:(/*^block*/id)arg2 ;
-(void)authorizationDidAuthorizePaymentCompleteWithStatus:(long long)arg1 ;
-(void)authorizationDidSelectShippingMethodCompleteWithStatus:(long long)arg1 paymentSummaryItems:(id)arg2 ;
-(void)authorizationDidSelectShippingAddressCompleteWithStatus:(long long)arg1 shippingMethods:(id)arg2 paymentSummaryItems:(id)arg3 ;
-(void)authorizationDidSelectPaymentMethodCompleteWithPaymentSummaryItems:(id)arg1 ;
-(id)entitlementWhitelist;
-(id)hostContextWithErrorHandler:(/*^block*/id)arg1 ;
@end
DonaldPeat/ether-campaign
node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js
<reponame>DonaldPeat/ether-campaign<filename>node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author <NAME> @sokra
*/
"use strict";

// Webpack optimization plugin: on each compilation, repeatedly evaluates all
// pairs of non-initial chunks and merges the single best pair whenever the
// size reduction ratio reaches options.minSizeReduce (default 1.5).
class AggressiveMergingPlugin {
	constructor(options) {
		// Accept either nothing (use defaults) or a plain options object.
		if(options !== undefined && typeof options !== "object" || Array.isArray(options)) {
			throw new Error("Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/");
		}
		this.options = options || {};
	}

	apply(compiler) {
		const options = this.options;
		const minSizeReduce = options.minSizeReduce || 1.5;

		// Weight of a chunk's parents: initial (entry) parents count as
		// entryChunkMultiplicator (default 10), others as 1. Used only in
		// moveToParents mode to penalize duplicating modules into parents.
		function getParentsWeight(chunk) {
			return chunk.parents.map((p) => {
				return p.isInitial() ? options.entryChunkMultiplicator || 10 : 1;
			}).reduce((a, b) => {
				return a + b;
			}, 0);
		}
		compiler.plugin("this-compilation", (compilation) => {
			compilation.plugin("optimize-chunks-advanced", (chunks) => {
				// Build every unordered pair of non-initial chunks.
				let combinations = [];
				chunks.forEach((a, idx) => {
					if(a.isInitial()) return;
					for(let i = 0; i < idx; i++) {
						const b = chunks[i];
						if(b.isInitial()) continue;
						combinations.push({
							a,
							b,
							improvement: undefined
						});
					}
				});

				// Score each pair: improvement = (size(a) + size(b)) / merged size.
				// Note: locals 'a'/'b' below are intentionally swapped relative to
				// pair.a/pair.b — 'a' is pair.b's size and 'b' is pair.a's.
				combinations.forEach((pair) => {
					const a = pair.b.size({
						chunkOverhead: 0
					});
					const b = pair.a.size({
						chunkOverhead: 0
					});
					const ab = pair.b.integratedSize(pair.a, {
						chunkOverhead: 0
					});
					let newSize;
					// integratedSize() returns false when the chunks cannot be merged.
					if(ab === false) {
						pair.improvement = false;
						return;
					} else if(options.moveToParents) {
						// Account for the cost of copying non-shared modules into
						// every parent chunk (weighted by getParentsWeight).
						const aOnly = ab - b;
						const bOnly = ab - a;
						const common = a + b - ab;
						newSize = common + getParentsWeight(pair.b) * aOnly + getParentsWeight(pair.a) * bOnly;
					} else {
						newSize = ab;
					}

					pair.improvement = (a + b) / newSize;
				});
				// Drop unmergeable pairs and pick the best-scoring one.
				combinations = combinations.filter((pair) => {
					return pair.improvement !== false;
				});
				combinations.sort((a, b) => {
					return b.improvement - a.improvement;
				});

				const pair = combinations[0];

				if(!pair) return;
				if(pair.improvement < minSizeReduce) return;

				if(options.moveToParents) {
					// Hoist modules unique to each chunk into that chunk's parents
					// so only the common modules remain to be merged.
					const commonModules = pair.b.modules.filter((m) => {
						return pair.a.modules.indexOf(m) >= 0;
					});
					const aOnlyModules = pair.b.modules.filter((m) => {
						return commonModules.indexOf(m) < 0;
					});
					const bOnlyModules = pair.a.modules.filter((m) => {
						return commonModules.indexOf(m) < 0;
					});
					aOnlyModules.forEach((m) => {
						pair.b.removeModule(m);
						m.removeChunk(pair.b);
						pair.b.parents.forEach((c) => {
							c.addModule(m);
							m.addChunk(c);
						});
					});
					bOnlyModules.forEach((m) => {
						pair.a.removeModule(m);
						m.removeChunk(pair.a);
						pair.a.parents.forEach((c) => {
							c.addModule(m);
							m.addChunk(c);
						});
					});
				}
				// Merge pair.a into pair.b; returning true makes webpack rerun
				// this optimization pass so further merges can happen.
				if(pair.b.integrate(pair.a, "aggressive-merge")) {
					chunks.splice(chunks.indexOf(pair.a), 1);
					return true;
				}
			});
		});
	}
}
module.exports = AggressiveMergingPlugin;
BantorSchwanzVor/plotscanner-leak
net/minecraft/entity/ai/EntityAIRestrictOpenDoor.java
package net.minecraft.entity.ai; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityCreature; import net.minecraft.pathfinding.PathNavigateGround; import net.minecraft.util.math.BlockPos; import net.minecraft.village.Village; import net.minecraft.village.VillageDoorInfo; public class EntityAIRestrictOpenDoor extends EntityAIBase { private final EntityCreature entityObj; private VillageDoorInfo frontDoor; public EntityAIRestrictOpenDoor(EntityCreature creatureIn) { this.entityObj = creatureIn; if (!(creatureIn.getNavigator() instanceof PathNavigateGround)) throw new IllegalArgumentException("Unsupported mob type for RestrictOpenDoorGoal"); } public boolean shouldExecute() { if (this.entityObj.world.isDaytime()) return false; BlockPos blockpos = new BlockPos((Entity)this.entityObj); Village village = this.entityObj.world.getVillageCollection().getNearestVillage(blockpos, 16); if (village == null) return false; this.frontDoor = village.getNearestDoor(blockpos); if (this.frontDoor == null) return false; return (this.frontDoor.getDistanceToInsideBlockSq(blockpos) < 2.25D); } public boolean continueExecuting() { if (this.entityObj.world.isDaytime()) return false; return (!this.frontDoor.getIsDetachedFromVillageFlag() && this.frontDoor.isInsideSide(new BlockPos((Entity)this.entityObj))); } public void startExecuting() { ((PathNavigateGround)this.entityObj.getNavigator()).setBreakDoors(false); ((PathNavigateGround)this.entityObj.getNavigator()).setEnterDoors(false); } public void resetTask() { ((PathNavigateGround)this.entityObj.getNavigator()).setBreakDoors(true); ((PathNavigateGround)this.entityObj.getNavigator()).setEnterDoors(true); this.frontDoor = null; } public void updateTask() { this.frontDoor.incrementDoorOpeningRestrictionCounter(); } } /* Location: C:\Users\BSV\AppData\Local\Temp\Rar$DRa6216.20396\Preview\Preview.jar!\net\minecraft\entity\ai\EntityAIRestrictOpenDoor.class * Java compiler version: 8 (52.0) * JD-Core Version: 1.1.3 */
freedomloveme/Aspose.Words-for-Java
ApiExamples/src/main/java/ExHtmlLoadOptions.java
<reponame>freedomloveme/Aspose.Words-for-Java<gh_stars>0
//////////////////////////////////////////////////////////////////////////
// Copyright (c) 2001-2018 Aspose Pty Ltd. All Rights Reserved.
//
// This file is part of Aspose.Words. The source code in this file
// is only intended as a supplement to the documentation, and is provided
// "as is", without warranty of any kind, either expressed or implied.
//////////////////////////////////////////////////////////////////////////

import org.testng.annotations.Test;
import com.aspose.words.HtmlLoadOptions;
import com.aspose.words.Document;
import org.testng.Assert;
import com.aspose.words.HtmlControlType;
import com.aspose.words.NodeCollection;
import com.aspose.words.NodeType;
import com.aspose.words.StructuredDocumentTag;
import com.aspose.words.FormField;

import java.io.ByteArrayInputStream;

/**
 * API examples/tests for {@code HtmlLoadOptions}: VML conditional-comment
 * parsing, the web-request timeout default, and how HTML form controls are
 * imported (as structured document tags vs. form fields).
 */
public class ExHtmlLoadOptions extends ApiExampleBase {
    @Test
    public void supportVml() throws Exception {
        //ExStart
        //ExFor:HtmlLoadOptions.SupportVml
        //ExSummary:Shows how to parse HTML document with conditional comments like "<!--[if gte vml 1]>" and "<![if !vml]>"
        HtmlLoadOptions loadOptions = new HtmlLoadOptions();

        //If value is true, then we parse "<!--[if gte vml 1]>", else parse "<![if !vml]>"
        loadOptions.setSupportVml(true);
        //Wait for a response, when loading external resources
        loadOptions.setWebRequestTimeout(1000);

        Document doc = new Document(getMyDir() + "Shape.VmlAndDml.htm", loadOptions);
        doc.save(getArtifactsDir() + "Shape.VmlAndDml.docx");
        //ExEnd
    }

    // Guards the documented default of 100 seconds (in milliseconds).
    @Test
    public void webRequestTimeoutDefaultValue() {
        HtmlLoadOptions loadOptions = new HtmlLoadOptions();
        Assert.assertEquals(loadOptions.getWebRequestTimeout(), 100000);
    }

    @Test
    public void getSelectAsSdt() throws Exception {
        //ExStart
        //ExFor:HtmlLoadOptions.PreferredControlType
        //ExSummary:Shows how to set preffered type of document nodes that will represent imported <input> and <select> elements.
        final String HTML = "\r\n <html>\r\n <select name='ComboBox' size='1'>\r\n <option value='val1'>item1</option>\r\n <option value='val2'></option> \r\n </select>\r\n </html>\r\n ";

        HtmlLoadOptions htmlLoadOptions = new HtmlLoadOptions();
        htmlLoadOptions.setPreferredControlType(HtmlControlType.STRUCTURED_DOCUMENT_TAG);

        Document doc = new Document(new ByteArrayInputStream(HTML.getBytes("UTF-8")), htmlLoadOptions);
        NodeCollection nodes = doc.getChildNodes(NodeType.STRUCTURED_DOCUMENT_TAG, true);

        StructuredDocumentTag tag = (StructuredDocumentTag)nodes.get(0);
        //ExEnd

        // The two <option> elements become the tag's list items.
        Assert.assertEquals(tag.getListItems().getCount(), 2);
        Assert.assertEquals(tag.getListItems().get(0).getValue(), "val1");
        Assert.assertEquals(tag.getListItems().get(1).getValue(), "val2");
    }

    @Test
    public void getInputAsFormField() throws Exception {
        final String HTML = "\r\n <html>\r\n <input type='text' value='Input value text' />\r\n </html>\r\n ";

        // By default "HtmlLoadOptions.PreferredControlType" value is "HtmlControlType.FormField"
        // So, we do not set this value
        HtmlLoadOptions htmlLoadOptions = new HtmlLoadOptions();

        Document doc = new Document(new ByteArrayInputStream(HTML.getBytes("UTF-8")), htmlLoadOptions);
        NodeCollection nodes = doc.getChildNodes(NodeType.FORM_FIELD, true);

        Assert.assertEquals(nodes.getCount(), 1);

        FormField formField = (FormField)nodes.get(0);
        Assert.assertEquals(formField.getResult(), "Input value text");
    }
}
lechium/iOS1351Headers
usr/libexec/installcoordinationd/IXClientProtocolInterface.h
<filename>usr/libexec/installcoordinationd/IXClientProtocolInterface.h // // Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20). // // Copyright (C) 1997-2019 <NAME>. // #import <objc/NSObject.h> @interface IXClientProtocolInterface : NSObject { } + (void)configureInterface:(id)arg1; // IMP=0x000000010005a460 + (id)interfaceProtocol; // IMP=0x000000010005a454 + (id)interface; // IMP=0x000000010005a3d4 + (id)new; // IMP=0x000000010005a3c8 - (id)init; // IMP=0x000000010005a3bc @end
sphereio/jvm-sdk-deploy-tests
commercetools-models/src/test/java/io/sphere/sdk/models/AddressTest.java
package io.sphere.sdk.models;

import com.neovisionaries.i18n.CountryCode;
import org.junit.Test;

import static org.assertj.core.api.Assertions.*;

/**
 * Tests for {@link Address}: id-insensitive equality and the
 * {@code withFax}/{@code withExternalId} wither methods.
 */
public class AddressTest {

    @Test
    public void equalsIgnoreId() {
        // Two addresses that differ only in their id: equals() must tell them
        // apart, while equalsIgnoreId() must treat them as the same address.
        final Address plain = Address.of(CountryCode.DE);
        final Address identified = plain.withId("foo");

        assertThat(plain).isNotEqualTo(identified);
        assertThat(plain.equalsIgnoreId(identified)).isTrue();
    }

    @Test
    public void fax() {
        final String faxNumber = "030000000";
        final Address updated = Address.of(CountryCode.DE).withFax(faxNumber);
        assertThat(updated.getFax()).isEqualTo(faxNumber);
    }

    @Test
    public void externalId() {
        final String id = "030000000";
        final Address updated = Address.of(CountryCode.DE).withExternalId(id);
        assertThat(updated.getExternalId()).isEqualTo(id);
    }
}
michalstutzmann/scala-util
src/main/scala/com/github/mwegrz/scalautil/kafka/KafkaClient.scala
<reponame>michalstutzmann/scala-util
package com.github.mwegrz.scalautil.kafka

import akka.{ Done, NotUsed }
import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
import akka.kafka.scaladsl.{ Consumer, Producer }
import akka.kafka.{ ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ BidiFlow, Flow, Keep, Sink, Source }
import com.github.mwegrz.scalautil.akka.kafka.scaladsl.{ KafkaCommitableFlow, KafkaFlow }
import com.typesafe.config.Config
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, ByteArraySerializer }
import com.github.mwegrz.scalautil.ConfigOps

import scala.concurrent.Future

/** Factory for [[KafkaClient]] instances backed by Alpakka Kafka. */
object KafkaClient {

  /** Creates a client from `config`, which must contain `producer` and
    * `consumer` sections (overlaid on the `akka.kafka.*` reference defaults
    * inside [[DefaultKafkaClient]]).
    */
  def apply(
      config: Config
  )(implicit actorSystem: ActorSystem, actorMaterializer: ActorMaterializer): KafkaClient =
    new DefaultKafkaClient(config)
}

/** Byte-array based Kafka access: sources, sinks and request/response style
  * flows. Callers supply `toBinary`/`fromBinary` codecs that map their payload
  * type to and from a (key, value) pair of byte arrays.
  */
trait KafkaClient {

  /** Source of deserialized records from `topic`; no offsets are committed. */
  def source[A](topic: String)(fromBinary: (Array[Byte], Array[Byte]) => A): Source[A, NotUsed]

  /** Like [[source]], but pairs every element with its [[CommittableOffset]]
    * so the caller can commit after processing (at-least-once delivery).
    */
  def committableSource[A](topic: String)(
      fromBinary: (Array[Byte], Array[Byte]) => A
  ): Source[(A, CommittableOffset), NotUsed]

  /** Sink producing each element to `topic`; materializes a [[Future]] that
    * completes when the stream terminates.
    */
  def sink[A](topic: String)(toBinary: A => (Array[Byte], Array[Byte])): Sink[A, Future[Done]]

  /** Sink producing each element to `topic` and committing the offset that
    * travels alongside it.
    */
  def committableSink[A](topic: String)(
      toBinary: A => (Array[Byte], Array[Byte])
  ): Sink[(A, CommittableOffset), Future[Done]]

  /** Flow that writes its inputs to `inTopic` and emits records consumed from `outTopics`. */
  def flow[A, B](inTopic: String, outTopics: Set[String])(
      toBinary: A => (Array[Byte], Array[Byte]),
      fromBinary: (Array[Byte], Array[Byte]) => B
  ): Flow[A, B, NotUsed]

  /** Offset-aware variant of [[flow]]. */
  def committableFlow[A, B](inTopic: String, outTopics: Set[String])(
      toBinary: A => (Array[Byte], Array[Byte]),
      fromBinary: (Array[Byte], Array[Byte]) => B
  ): Flow[(A, CommittableOffset), (B, CommittableOffset), NotUsed]
}

/** Default [[KafkaClient]] implementation on top of Alpakka Kafka's
  * `Consumer`/`Producer` stages. Payloads travel as raw byte arrays;
  * (de)serialization is delegated entirely to the caller-supplied codecs.
  */
class DefaultKafkaClient private[kafka] (config: Config)(implicit actorSystem: ActorSystem,
                                                         actorMaterializer: ActorMaterializer)
    extends KafkaClient {

  // Settings: the given config sections are merged over the akka.kafka.*
  // reference defaults via ConfigOps.withReferenceDefaults.
  private implicit val producerSettings: ProducerSettings[Array[Byte], Array[Byte]] = ProducerSettings(
    config.getConfig("producer").withReferenceDefaults("akka.kafka.producer"),
    new ByteArraySerializer,
    new ByteArraySerializer
  )

  private implicit val consumerSettings: ConsumerSettings[Array[Byte], Array[Byte]] = ConsumerSettings(
    config.getConfig("consumer").withReferenceDefaults("akka.kafka.consumer"),
    new ByteArrayDeserializer,
    new ByteArrayDeserializer
  )

  override def source[A](
      topic: String
  )(fromBinary: (Array[Byte], Array[Byte]) => A): Source[A, NotUsed] =
    Consumer
      .plainSource(consumerSettings, Subscriptions.topics(topic))
      .map { r =>
        fromBinary(r.key(), r.value())
      }
      // Discards the materialized value, so callers of this API have no handle
      // to stop the consumer.
      .mapMaterializedValue(_ => NotUsed)

  // NOTE(review): implements the trait method but lacks the `override`
  // modifier (compare with source/sink above); adding it would be a no-op.
  def committableSource[A](
      topic: String
  )(fromBinary: (Array[Byte], Array[Byte]) => A): Source[(A, CommittableOffset), NotUsed] =
    Consumer
      .committableSource(consumerSettings, Subscriptions.topics(topic))
      .map { m =>
        val b = fromBinary(m.record.key(), m.record.value())
        (b, m.committableOffset)
      }
      .mapMaterializedValue(_ => NotUsed)

  override def sink[A](
      topic: String
  )(toBinary: A => (Array[Byte], Array[Byte])): Sink[A, Future[Done]] =
    Flow[A]
      .map { a =>
        val (key, value) = toBinary(a)
        new ProducerRecord[Array[Byte], Array[Byte]](
          topic,
          key,
          value
        )
      }
      .toMat(Producer.plainSink(producerSettings))(Keep.right)

  // NOTE(review): missing `override`, same as committableSource above.
  def committableSink[A](
      topic: String
  )(toBinary: A => (Array[Byte], Array[Byte])): Sink[(A, CommittableOffset), Future[Done]] =
    Flow[(A, CommittableOffset)]
      .map {
        case (a, offset) =>
          val (key, value) = toBinary(a)
          // The offset rides along as pass-through so the sink can commit it
          // once the record has been produced.
          ProducerMessage.Message(
            new ProducerRecord[Array[Byte], Array[Byte]](
              topic,
              key,
              value
            ),
            offset
          )
      }
      // NOTE(review): `commitableSink` (sic) is the actual, historically
      // misspelled name in the Alpakka Kafka API this compiles against --
      // do not "fix" the spelling without upgrading the library.
      .toMat(Producer.commitableSink(producerSettings))(Keep.right)

  override def flow[A, B](inTopic: String, outTopics: Set[String])(
      toBinary: A => (Array[Byte], Array[Byte]),
      fromBinary: (Array[Byte], Array[Byte]) => B
  ): Flow[A, B, NotUsed] = {
    // KafkaFlow is the transport (produce what comes in, emit what is consumed
    // from outTopics); downlink/uplink adapt user types on the way in and out,
    // and joining the BidiFlow with the transport yields the public Flow.
    val kafkaFlow = KafkaFlow(producerSettings, consumerSettings, Subscriptions.topics(outTopics))
    val downlink = Flow[A].map { a =>
      val (key, value) = toBinary(a)
      new ProducerRecord[Array[Byte], Array[Byte]](
        inTopic,
        key,
        value
      )
    }
    val uplink: Flow[ConsumerRecord[Array[Byte], Array[Byte]], B, NotUsed] =
      Flow[ConsumerRecord[Array[Byte], Array[Byte]]].map { r =>
        val b = fromBinary(r.key(), r.value())
        b
      }
    val bidiFlow = BidiFlow.fromFlows(downlink, uplink)
    bidiFlow.joinMat(kafkaFlow)(Keep.left)
  }

  override def committableFlow[A, B](inTopic: String, outTopics: Set[String])(
      toBinary: A => (Array[Byte], Array[Byte]),
      fromBinary: (Array[Byte], Array[Byte]) => B
  ): Flow[(A, CommittableOffset), (B, CommittableOffset), NotUsed] = {
    // Same shape as flow(), but committable offsets ride along in both
    // directions so callers can commit after processing.
    val kafkaFlow =
      KafkaCommitableFlow(producerSettings, consumerSettings, Subscriptions.topics(outTopics))
    val downlink = Flow[(A, CommittableOffset)].map {
      case (a, offset) =>
        val (key, value) = toBinary(a)
        ProducerMessage.Message(
          new ProducerRecord[Array[Byte], Array[Byte]](
            inTopic,
            key,
            value
          ),
          offset
        )
    }
    val uplink: Flow[CommittableMessage[Array[Byte], Array[Byte]], (B, CommittableOffset), NotUsed] =
      Flow[CommittableMessage[Array[Byte], Array[Byte]]].map { m =>
        val b = fromBinary(m.record.key(), m.record.value())
        (b, m.committableOffset)
      }
    val bidiFlow = BidiFlow.fromFlows(downlink, uplink)
    bidiFlow.joinMat(kafkaFlow)(Keep.left)
  }
}
dev-nm/CommunityServer
web/studio/ASC.Web.Studio/js/asc/core/clipboard.js
<filename>web/studio/ASC.Web.Studio/js/asc/core/clipboard.js /* * * (c) Copyright Ascensio System Limited 2010-2020 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ window.ASC.Clipboard = (function () { var isInit = false; var enable = false; var init = function () { if (isInit === false) { isInit = true; } ASC.Clipboard.enable = (typeof Clipboard != "undefined" && Clipboard.isSupported()); }; var create = function (text, buttonId, options) { if (!ASC.Clipboard.enable) { return null; } var opt = { onComplete: null, textareaId: null, }; jq.extend(opt, options); if (opt.textareaId) { var cfg = { target: function () { return jq("#" + opt.textareaId)[0]; } }; } else { cfg = { text: function () { return text; } }; } var clip = new Clipboard("#" + buttonId, cfg); if (opt.onComplete) { clip.on("success", opt.onComplete); } return clip; }; var destroy = function (clip) { if (clip) { clip.destroy(); } return null; }; return { init: init, enable: enable, create: create, destroy: destroy, }; })(); (function ($) { $(function () { ASC.Clipboard.init(); }); })(jQuery);
alexbibov/ra_framework
ra/src/ray_generator_with_output_buffer.h
<gh_stars>1-10
#ifndef RA_RAY_GENERATOR_WITH_OUTPUT_BUFFER_H
#define RA_RAY_GENERATOR_WITH_OUTPUT_BUFFER_H

#include "ray_generator.h"
#include "ray_casters/ra_ray_casters_fwd.h"

namespace ra {

//! Ray generator that additionally attaches a caller-supplied buffer, exposed
//! to the ray-generation shader under a named binding, for the shader to write
//! its output into.
class RaRayGeneratorWithOutputBuffer : public RaRayGenerator {
public:
    //! Constructs the generator without a miss shader assembly.
    //! @param ray_generation_shader      program that generates the rays.
    //! @param output_buffer              buffer receiving the shader output; copied into m_output_buffer.
    //! @param output_buffer_binding_name binding name under which the buffer is exposed to the shader.
    //! @param num_rays_x                 ray grid extent along x.
    //! @param num_rays_y                 ray grid extent along y (defaults to 1 for 1-D launches).
    //! @param num_rays_z                 ray grid extent along z (defaults to 1 for 1-D/2-D launches).
    //! @param entry_point_index          shader entry point to invoke (defaults to 0).
    RaRayGeneratorWithOutputBuffer(RaProgram const& ray_generation_shader,
        RaAbstractBuffer const& output_buffer,
        std::string const& output_buffer_binding_name,
        uint32_t num_rays_x, uint32_t num_rays_y = 1U, uint32_t num_rays_z = 1U,
        uint32_t entry_point_index = 0U);

    //! Same as above, but also attaches a miss shader assembly -- presumably
    //! invoked for rays that hit nothing; NOTE(review): inferred from the
    //! name, see RaMissShaderAssembly for the exact semantics.
    RaRayGeneratorWithOutputBuffer(RaProgram const& ray_generation_shader,
        RaMissShaderAssembly const& miss_shader_assembly,
        RaAbstractBuffer const& output_buffer,
        std::string const& output_buffer_binding_name,
        uint32_t num_rays_x, uint32_t num_rays_y = 1U, uint32_t num_rays_z = 1U,
        uint32_t entry_point_index = 0U);

    //! Read-only access to the buffer that receives the generator's output.
    RaAbstractBuffer const& outputBuffer() const override;

private:
    RaAbstractBuffer m_output_buffer;    //!< by-value copy of the output buffer object passed at construction

};

}

#endif
ihmcrobotics/ihmc-open-robotics-software
ihmc-trajectory-optimization/src/main/java/us/ihmc/trajectoryOptimization/DiscreteHybridDynamics.java
package us.ihmc.trajectoryOptimization;

import org.ejml.data.DMatrixRMaj;

/**
 * Discrete-time dynamics of a hybrid system, x_{k+1} = f(x_k, u_k, c), together
 * with the first- and second-order partial derivatives that trajectory
 * optimization solvers require (the f_x / f_u / f_xx / ... labels below follow
 * the existing terse comments; NOTE(review): confirm conventions against the
 * solver classes in this package).
 *
 * <p>The "hybrid" state {@code E} selects the active dynamics mode and is
 * passed to every evaluation. All results are written into the caller-supplied
 * {@code matrixToPack}; expected dimensions follow from
 * {@link #getStateVectorSize()}, {@link #getControlVectorSize()} and
 * {@link #getConstantVectorSize()}.
 */
public interface DiscreteHybridDynamics<E extends Enum>
{
   /** Sets the discretization period {@code deltaT} used by the discrete dynamics. */
   void setTimeStepSize(double deltaT);

   /** Number of entries in the state vector x. */
   int getStateVectorSize();

   /** Number of entries in the control vector u. */
   int getControlVectorSize();

   /** Number of entries in the constant (parameter) vector c. */
   int getConstantVectorSize();

   /** f: packs the next state f(x_k, u_k, c) for the given dynamics mode. */
   void getNextState(E hybridState, DMatrixRMaj currentState, DMatrixRMaj currentControl, DMatrixRMaj constants, DMatrixRMaj matrixToPack);

   /** f_x: packs the gradient of the dynamics with respect to the state. */
   void getDynamicsStateGradient(E hybridState, DMatrixRMaj currentState, DMatrixRMaj currentControl, DMatrixRMaj constants, DMatrixRMaj matrixToPack);

   /** f_u: packs the gradient of the dynamics with respect to the control. */
   void getDynamicsControlGradient(E hybridState, DMatrixRMaj currentState, DMatrixRMaj currentControl, DMatrixRMaj constants, DMatrixRMaj matrixToPack);

   /** f_xx: second derivative w.r.t. the state; {@code stateVariable}
    * presumably selects which state component's Hessian slice is packed --
    * TODO confirm with an implementing class. */
   void getDynamicsStateHessian(E hybridState, int stateVariable, DMatrixRMaj currentState, DMatrixRMaj currentControl, DMatrixRMaj constants, DMatrixRMaj matrixToPack);

   /** f_uu: second derivative w.r.t. the control, sliced by {@code controlVariable}
    * (same indexing convention as {@link #getDynamicsStateHessian}). */
   void getDynamicsControlHessian(E hybridState, int controlVariable, DMatrixRMaj currentState, DMatrixRMaj currentControl, DMatrixRMaj constants, DMatrixRMaj matrixToPack);

   /** f_ux: state derivative of the control gradient. */
   void getDynamicsStateGradientOfControlGradient(E hybridState, int stateVariable, DMatrixRMaj currentState, DMatrixRMaj currentControl, DMatrixRMaj constants, DMatrixRMaj matrixToPack);

   /** f_xu: control derivative of the state gradient. */
   void getDynamicsControlGradientOfStateGradient(E hybridState, int controlVariable, DMatrixRMaj currentState, DMatrixRMaj currentControl, DMatrixRMaj constants, DMatrixRMaj matrixToPack);

   /** Packs the A (state) matrix of the underlying continuous-time linearization. */
   void getContinuousAMatrix(DMatrixRMaj A);

   /** Packs the B (control) matrix of the underlying continuous-time
    * linearization. NOTE(review): the parameter is named {@code A}, apparently
    * a copy-paste slip from the method above -- implementers should treat it
    * as the B matrix. */
   void getContinuousBMatrix(DMatrixRMaj A);
}
webos21/xi
java/jcl/src/java/java/util/concurrent/ConcurrentLinkedDeque.java
<filename>java/jcl/src/java/java/util/concurrent/ConcurrentLinkedDeque.java /* * Written by <NAME> and <NAME> with assistance from members of * JCP JSR-166 Expert Group and released to the public domain, as explained * at http://creativecommons.org/licenses/publicdomain */ package java.util.concurrent; import java.util.AbstractCollection; import java.util.ArrayList; import java.util.Collection; import java.util.ConcurrentModificationException; import java.util.Deque; import java.util.Iterator; import java.util.NoSuchElementException; /** * A concurrent linked-list implementation of a {@link Deque} (double-ended * queue). Concurrent insertion, removal, and access operations execute safely * across multiple threads. Iterators are <i>weakly consistent</i>, returning * elements reflecting the state of the deque at some point at or since the * creation of the iterator. They do <em>not</em> throw * {@link ConcurrentModificationException}, and may proceed concurrently with * other operations. * * <p> * This class and its iterators implement all of the <em>optional</em> methods * of the {@link Collection} and {@link Iterator} interfaces. Like most other * concurrent collection implementations, this class does not permit the use of * {@code null} elements. because some null arguments and return values cannot * be reliably distinguished from the absence of elements. Arbitrarily, the * {@link Collection#remove} method is mapped to {@code removeFirstOccurrence}, * and {@link Collection#add} is mapped to {@code addLast}. * * <p> * Beware that, unlike in most collections, the {@code size} method is * <em>NOT</em> a constant-time operation. Because of the asynchronous nature of * these deques, determining the current number of elements requires a traversal * of the elements. * * <p> * This class is {@code Serializable}, but relies on default serialization * mechanisms. 
Usually, it is a better idea for any serializable class using a * {@code ConcurrentLinkedDeque} to instead serialize a snapshot of the elements * obtained by method {@code toArray}. * * @hide * * @author Doug Lea * @author <NAME> * @param <E> * the type of elements held in this collection */ public class ConcurrentLinkedDeque<E> extends AbstractCollection<E> implements Deque<E>, java.io.Serializable { /* * This is an implementation of a concurrent lock-free deque supporting * interior removes but not interior insertions, as required to fully * support the Deque interface. * * We extend the techniques developed for ConcurrentLinkedQueue and * LinkedTransferQueue (see the internal docs for those classes). * * At any time, there is precisely one "first" active node with a null prev * pointer. Similarly there is one "last" active node with a null next * pointer. New nodes are simply enqueued by null-CASing. * * A node p is considered "active" if it either contains an element, or is * an end node and neither next nor prev pointers are self-links: * * p.item != null || (p.prev == null && p.next != p) || (p.next == null && * p.prev != p) * * The head and tail pointers are only approximations to the start and end * of the deque. The first node can always be found by following prev * pointers from head; likewise for tail. However, head and tail may be * pointing at deleted nodes that have been unlinked and so may not be * reachable from any live node. * * There are 3 levels of node deletion: - logical deletion atomically * removes the element - "unlinking" makes a deleted node unreachable from * active nodes, and thus eventually reclaimable by GC - "gc-unlinking" * further does the reverse of making active nodes unreachable from deleted * nodes, making it easier for the GC to reclaim future deleted nodes * * TODO: find a better name for "gc-unlinked" * * Logical deletion of a node simply involves CASing its element to null. 
* Physical deletion is merely an optimization (albeit a critical one), and * can be performed at our convenience. At any time, the set of * non-logically-deleted nodes maintained by prev and next links are * identical, that is the live elements found via next links from the first * node is equal to the elements found via prev links from the last node. * However, this is not true for nodes that have already been logically * deleted - such nodes may only be reachable in one direction. * * When a node is dequeued at either end, e.g. via poll(), we would like to * break any references from the node to live nodes, to stop old garbage * from causing retention of new garbage with a generational or conservative * GC. We develop further the self-linking trick that was very effective in * other concurrent collection classes. The idea is to replace prev and next * pointers to active nodes with special values that are interpreted to mean * off-the-list-at-one-end. These are approximations, but good enough to * preserve the properties we want in our traversals, e.g. we guarantee that * a traversal will never hit the same element twice, but we don't guarantee * whether a traversal that runs out of elements will be able to see more * elements later after more elements are added at that end. Doing * gc-unlinking safely is particularly tricky, since any node can be in use * indefinitely (for example by an iterator). We must make sure that the * nodes pointed at by head/tail do not get gc-unlinked, since head/tail are * needed to get "back on track" by other nodes that are gc-unlinked. * gc-unlinking accounts for much of the implementation complexity. * * Since neither unlinking nor gc-unlinking are necessary for correctness, * there are many implementation choices regarding frequency (eagerness) of * these operations. Since volatile reads are likely to be much cheaper than * CASes, saving CASes by unlinking multiple adjacent nodes at a time may be * a win. 
gc-unlinking can be performed rarely and still be effective, since * it is most important that long chains of deleted nodes are occasionally * broken. * * The actual representation we use is that p.next == p means to goto the * first node, and p.next == null && p.prev == p means that the iteration is * at an end and that p is a (final static) dummy node, NEXT_TERMINATOR, and * not the last active node. Finishing the iteration when encountering such * a TERMINATOR is good enough for read-only traversals. When the last * active node is desired, for example when enqueueing, goto tail and * continue traversal. * * The implementation is completely directionally symmetrical, except that * most public methods that iterate through the list follow next pointers * ("forward" direction). * * There is one desirable property we would like to have, but don't: it is * possible, when an addFirst(A) is racing with pollFirst() removing B, for * an iterating observer to see A B C and subsequently see A C, even though * no interior removes are ever performed. I believe this wart can only be * removed at significant runtime cost. * * Empirically, microbenchmarks suggest that this class adds about 40% * overhead relative to ConcurrentLinkedQueue, which feels as good as we can * hope for. */ /** * A node from which the first node on list (that is, the unique node with * node.prev == null) can be reached in O(1) time. 
Invariants: - the first * node is always O(1) reachable from head via prev links - all live nodes * are reachable from the first node via succ() - head != null - (tmp = * head).next != tmp || tmp != head Non-invariants: - head.item may or may * not be null - head may not be reachable from the first or last node, or * from tail */ private transient volatile Node<E> head = new Node<E>(null); private final static Node<Object> PREV_TERMINATOR, NEXT_TERMINATOR; static { PREV_TERMINATOR = new Node<Object>(null); PREV_TERMINATOR.next = PREV_TERMINATOR; NEXT_TERMINATOR = new Node<Object>(null); NEXT_TERMINATOR.prev = NEXT_TERMINATOR; } @SuppressWarnings("unchecked") Node<E> prevTerminator() { return (Node<E>) PREV_TERMINATOR; } @SuppressWarnings("unchecked") Node<E> nextTerminator() { return (Node<E>) NEXT_TERMINATOR; } /** * A node from which the last node on list (that is, the unique node with * node.next == null) can be reached in O(1) time. Invariants: - the last * node is always O(1) reachable from tail via next links - all live nodes * are reachable from the last node via pred() - tail != null * Non-invariants: - tail.item may or may not be null - tail may not be * reachable from the first or last node, or from head */ private transient volatile Node<E> tail = head; static final class Node<E> { volatile Node<E> prev; volatile E item; volatile Node<E> next; Node(E item) { // Piggyback on imminent casNext() or casPrev() lazySetItem(item); } boolean casItem(E cmp, E val) { return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val); } void lazySetItem(E val) { UNSAFE.putOrderedObject(this, itemOffset, val); } void lazySetNext(Node<E> val) { UNSAFE.putOrderedObject(this, nextOffset, val); } boolean casNext(Node<E> cmp, Node<E> val) { return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val); } void lazySetPrev(Node<E> val) { UNSAFE.putOrderedObject(this, prevOffset, val); } boolean casPrev(Node<E> cmp, Node<E> val) { return UNSAFE.compareAndSwapObject(this, 
prevOffset, cmp, val); } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE = sun.misc.Unsafe .getUnsafe(); private static final long prevOffset = objectFieldOffset(UNSAFE, "prev", Node.class); private static final long itemOffset = objectFieldOffset(UNSAFE, "item", Node.class); private static final long nextOffset = objectFieldOffset(UNSAFE, "next", Node.class); } /** * Links e as first element. */ private void linkFirst(E e) { checkNotNull(e); final Node<E> newNode = new Node<E>(e); retry: for (;;) { for (Node<E> h = head, p = h;;) { Node<E> q = p.prev; if (q == null) { if (p.next == p) continue retry; newNode.lazySetNext(p); // CAS piggyback if (p.casPrev(null, newNode)) { if (p != h) // hop two nodes at a time casHead(h, newNode); return; } else { p = p.prev; // lost CAS race to another thread } } else if (p == q) continue retry; else p = q; } } } /** * Links e as last element. */ private void linkLast(E e) { checkNotNull(e); final Node<E> newNode = new Node<E>(e); retry: for (;;) { for (Node<E> t = tail, p = t;;) { Node<E> q = p.next; if (q == null) { if (p.prev == p) continue retry; newNode.lazySetPrev(p); // CAS piggyback if (p.casNext(null, newNode)) { if (p != t) // hop two nodes at a time casTail(t, newNode); return; } else { p = p.next; // lost CAS race to another thread } } else if (p == q) continue retry; else p = q; } } } // TODO: Is there a better cheap way of performing some cleanup // operation "occasionally"? static class Count { int count = 0; } private final static ThreadLocal<Count> tlc = new ThreadLocal<Count>() { protected Count initialValue() { return new Count(); } }; private static boolean shouldGCUnlinkOccasionally() { return (tlc.get().count++ & 0x3) == 0; } private final static int HOPS = 2; /** * Unlinks non-null node x. 
*/ void unlink(Node<E> x) { assert x != null; assert x.item == null; assert x != PREV_TERMINATOR; assert x != NEXT_TERMINATOR; final Node<E> prev = x.prev; final Node<E> next = x.next; if (prev == null) { unlinkFirst(x, next); } else if (next == null) { unlinkLast(x, prev); } else { // Unlink interior node. // // This is the common case, since a series of polls at the // same end will be "interior" removes, except perhaps for // the first one, since end nodes cannot be physically removed. // // At any time, all active nodes are mutually reachable by // following a sequence of either next or prev pointers. // // Our strategy is to find the unique active predecessor // and successor of x. Try to fix up their links so that // they point to each other, leaving x unreachable from // active nodes. If successful, and if x has no live // predecessor/successor, we additionally try to leave // active nodes unreachable from x, by rechecking that // the status of predecessor and successor are unchanged // and ensuring that x is not reachable from tail/head, // before setting x's prev/next links to their logical // approximate replacements, self/TERMINATOR. Node<E> activePred, activeSucc; boolean isFirst, isLast; int hops = 1; // Find active predecessor for (Node<E> p = prev;; ++hops) { if (p.item != null) { activePred = p; isFirst = false; break; } Node<E> q = p.prev; if (q == null) { if (p == p.next) return; activePred = p; isFirst = true; break; } else if (p == q) return; else p = q; } // Find active successor for (Node<E> p = next;; ++hops) { if (p.item != null) { activeSucc = p; isLast = false; break; } Node<E> q = p.next; if (q == null) { if (p == p.prev) return; activeSucc = p; isLast = true; break; } else if (p == q) return; else p = q; } // TODO: better HOP heuristics if (hops < HOPS // always squeeze out interior deleted nodes && (isFirst | isLast)) return; // Squeeze out deleted nodes between activePred and // activeSucc, including x. 
skipDeletedSuccessors(activePred); skipDeletedPredecessors(activeSucc); // Try to gc-unlink, if possible if ((isFirst | isLast) && // shouldGCUnlinkOccasionally() && // Recheck expected state of predecessor and successor (activePred.next == activeSucc) && (activeSucc.prev == activePred) && (isFirst ? activePred.prev == null : activePred.item != null) && (isLast ? activeSucc.next == null : activeSucc.item != null)) { // Ensure x is not reachable from head or tail updateHead(); updateTail(); x.lazySetPrev(isFirst ? prevTerminator() : x); x.lazySetNext(isLast ? nextTerminator() : x); } } } /** * Unlinks non-null first node. */ private void unlinkFirst(Node<E> first, Node<E> next) { assert first != null && next != null && first.item == null; Node<E> o = null, p = next; for (int hops = 0;; ++hops) { Node<E> q; if (p.item != null || (q = p.next) == null) { if (hops >= HOPS) { if (p == p.prev) return; if (first.casNext(next, p)) { skipDeletedPredecessors(p); if (// shouldGCUnlinkOccasionally() && first.prev == null && (p.next == null || p.item != null) && p.prev == first) { updateHead(); updateTail(); o.lazySetNext(o); o.lazySetPrev(prevTerminator()); } } } return; } else if (p == q) return; else { o = p; p = q; } } } /** * Unlinks non-null last node. 
*/ private void unlinkLast(Node<E> last, Node<E> prev) { assert last != null && prev != null && last.item == null; Node<E> o = null, p = prev; for (int hops = 0;; ++hops) { Node<E> q; if (p.item != null || (q = p.prev) == null) { if (hops >= HOPS) { if (p == p.next) return; if (last.casPrev(prev, p)) { skipDeletedSuccessors(p); if (// shouldGCUnlinkOccasionally() && last.next == null && (p.prev == null || p.item != null) && p.next == last) { updateHead(); updateTail(); o.lazySetPrev(o); o.lazySetNext(nextTerminator()); } } } return; } else if (p == q) return; else { o = p; p = q; } } } private final void updateHead() { first(); } private final void updateTail() { last(); } private void skipDeletedPredecessors(Node<E> x) { whileActive: do { Node<E> prev = x.prev; assert prev != null; assert x != NEXT_TERMINATOR; assert x != PREV_TERMINATOR; Node<E> p = prev; findActive: for (;;) { if (p.item != null) break findActive; Node<E> q = p.prev; if (q == null) { if (p.next == p) continue whileActive; break findActive; } else if (p == q) continue whileActive; else p = q; } // found active CAS target if (prev == p || x.casPrev(prev, p)) return; } while (x.item != null || x.next == null); } private void skipDeletedSuccessors(Node<E> x) { whileActive: do { Node<E> next = x.next; assert next != null; assert x != NEXT_TERMINATOR; assert x != PREV_TERMINATOR; Node<E> p = next; findActive: for (;;) { if (p.item != null) break findActive; Node<E> q = p.next; if (q == null) { if (p.prev == p) continue whileActive; break findActive; } else if (p == q) continue whileActive; else p = q; } // found active CAS target if (next == p || x.casNext(next, p)) return; } while (x.item != null || x.prev == null); } /** * Returns the successor of p, or the first node if p.next has been linked * to self, which will only be true if traversing with a stale pointer that * is now off the list. */ final Node<E> succ(Node<E> p) { // TODO: should we skip deleted nodes here? 
Node<E> q = p.next; return (p == q) ? first() : q; } /** * Returns the predecessor of p, or the last node if p.prev has been linked * to self, which will only be true if traversing with a stale pointer that * is now off the list. */ final Node<E> pred(Node<E> p) { Node<E> q = p.prev; return (p == q) ? last() : q; } /** * Returns the first node, the unique node which has a null prev link. The * returned node may or may not be logically deleted. Guarantees that head * is set to the returned node. */ Node<E> first() { retry: for (;;) { for (Node<E> h = head, p = h;;) { Node<E> q = p.prev; if (q == null) { if (p == h // It is possible that p is PREV_TERMINATOR, // but if so, the CAS will fail. || casHead(h, p)) return p; else continue retry; } else if (p == q) { continue retry; } else { p = q; } } } } /** * Returns the last node, the unique node which has a null next link. The * returned node may or may not be logically deleted. Guarantees that tail * is set to the returned node. */ Node<E> last() { retry: for (;;) { for (Node<E> t = tail, p = t;;) { Node<E> q = p.next; if (q == null) { if (p == t // It is possible that p is NEXT_TERMINATOR, // but if so, the CAS will fail. || casTail(t, p)) return p; else continue retry; } else if (p == q) { continue retry; } else { p = q; } } } } // Minor convenience utilities /** * Throws NullPointerException if argument is null. * * @param v * the element */ private static void checkNotNull(Object v) { if (v == null) throw new NullPointerException(); } /** * Returns element unless it is null, in which case throws * NoSuchElementException. * * @param v * the element * @return the element */ private E screenNullResult(E v) { if (v == null) throw new NoSuchElementException(); return v; } /** * Creates an array list and fills it with elements of this list. Used by * toArray. 
* * @return the arrayList */ private ArrayList<E> toArrayList() { ArrayList<E> c = new ArrayList<E>(); for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null) c.add(item); } return c; } // Fields and constructors private static final long serialVersionUID = 876323262645176354L; /** * Constructs an empty deque. */ public ConcurrentLinkedDeque() { } /** * Constructs a deque initially containing the elements of the given * collection, added in traversal order of the collection's iterator. * * @param c * the collection of elements to initially contain * @throws NullPointerException * if the specified collection or any of its elements are null */ public ConcurrentLinkedDeque(Collection<? extends E> c) { this(); addAll(c); } /** * Inserts the specified element at the front of this deque. * * @throws NullPointerException * {@inheritDoc} */ public void addFirst(E e) { linkFirst(e); } /** * Inserts the specified element at the end of this deque. This is identical * in function to the {@code add} method. * * @throws NullPointerException * {@inheritDoc} */ public void addLast(E e) { linkLast(e); } /** * Inserts the specified element at the front of this deque. * * @return {@code true} always * @throws NullPointerException * {@inheritDoc} */ public boolean offerFirst(E e) { linkFirst(e); return true; } /** * Inserts the specified element at the end of this deque. * * <p> * This method is equivalent to {@link #add}. 
* * @return {@code true} always * @throws NullPointerException * {@inheritDoc} */ public boolean offerLast(E e) { linkLast(e); return true; } public E peekFirst() { for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null) return item; } return null; } public E peekLast() { for (Node<E> p = last(); p != null; p = pred(p)) { E item = p.item; if (item != null) return item; } return null; } /** * @throws NoSuchElementException * {@inheritDoc} */ public E getFirst() { return screenNullResult(peekFirst()); } /** * @throws NoSuchElementException * {@inheritDoc} */ public E getLast() { return screenNullResult(peekLast()); } public E pollFirst() { for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null && p.casItem(item, null)) { unlink(p); return item; } } return null; } public E pollLast() { for (Node<E> p = last(); p != null; p = pred(p)) { E item = p.item; if (item != null && p.casItem(item, null)) { unlink(p); return item; } } return null; } /** * @throws NoSuchElementException * {@inheritDoc} */ public E removeFirst() { return screenNullResult(pollFirst()); } /** * @throws NoSuchElementException * {@inheritDoc} */ public E removeLast() { return screenNullResult(pollLast()); } // *** Queue and stack methods *** /** * Inserts the specified element at the tail of this deque. * * @return {@code true} (as specified by {@link java.util.Queue#offer}) * @throws NullPointerException * if the specified element is null */ public boolean offer(E e) { return offerLast(e); } /** * Inserts the specified element at the tail of this deque. 
* * @return {@code true} (as specified by {@link Collection#add}) * @throws NullPointerException * if the specified element is null */ public boolean add(E e) { return offerLast(e); } public E poll() { return pollFirst(); } public E remove() { return removeFirst(); } public E peek() { return peekFirst(); } public E element() { return getFirst(); } public void push(E e) { addFirst(e); } public E pop() { return removeFirst(); } /** * Removes the first element {@code e} such that {@code o.equals(e)}, if * such an element exists in this deque. If the deque does not contain the * element, it is unchanged. * * @param o * element to be removed from this deque, if present * @return {@code true} if the deque contained the specified element * @throws NullPointerException * if the specified element is {@code null} */ public boolean removeFirstOccurrence(Object o) { checkNotNull(o); for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null && o.equals(item) && p.casItem(item, null)) { unlink(p); return true; } } return false; } /** * Removes the last element {@code e} such that {@code o.equals(e)}, if such * an element exists in this deque. If the deque does not contain the * element, it is unchanged. * * @param o * element to be removed from this deque, if present * @return {@code true} if the deque contained the specified element * @throws NullPointerException * if the specified element is {@code null} */ public boolean removeLastOccurrence(Object o) { checkNotNull(o); for (Node<E> p = last(); p != null; p = pred(p)) { E item = p.item; if (item != null && o.equals(item) && p.casItem(item, null)) { unlink(p); return true; } } return false; } /** * Returns {@code true} if this deque contains at least one element * {@code e} such that {@code o.equals(e)}. 
* * @param o * element whose presence in this deque is to be tested * @return {@code true} if this deque contains the specified element */ public boolean contains(Object o) { if (o == null) return false; for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null && o.equals(item)) return true; } return false; } /** * Returns {@code true} if this collection contains no elements. * * @return {@code true} if this collection contains no elements */ public boolean isEmpty() { return peekFirst() == null; } /** * Returns the number of elements in this deque. If this deque contains more * than {@code Integer.MAX_VALUE} elements, it returns * {@code Integer.MAX_VALUE}. * * <p> * Beware that, unlike in most collections, this method is <em>NOT</em> a * constant-time operation. Because of the asynchronous nature of these * deques, determining the current number of elements requires traversing * them all to count them. Additionally, it is possible for the size to * change during execution of this method, in which case the returned result * will be inaccurate. Thus, this method is typically not very useful in * concurrent applications. * * @return the number of elements in this deque */ public int size() { long count = 0; for (Node<E> p = first(); p != null; p = succ(p)) if (p.item != null) ++count; return (count >= Integer.MAX_VALUE) ? Integer.MAX_VALUE : (int) count; } /** * Removes the first element {@code e} such that {@code o.equals(e)}, if * such an element exists in this deque. If the deque does not contain the * element, it is unchanged. 
* * @param o * element to be removed from this deque, if present * @return {@code true} if the deque contained the specified element * @throws NullPointerException * if the specified element is {@code null} */ public boolean remove(Object o) { return removeFirstOccurrence(o); } /** * Appends all of the elements in the specified collection to the end of * this deque, in the order that they are returned by the specified * collection's iterator. The behavior of this operation is undefined if the * specified collection is modified while the operation is in progress. * (This implies that the behavior of this call is undefined if the * specified Collection is this deque, and this deque is nonempty.) * * @param c * the elements to be inserted into this deque * @return {@code true} if this deque changed as a result of the call * @throws NullPointerException * if {@code c} or any element within it is {@code null} */ public boolean addAll(Collection<? extends E> c) { Iterator<? extends E> it = c.iterator(); if (!it.hasNext()) return false; do { addLast(it.next()); } while (it.hasNext()); return true; } /** * Removes all of the elements from this deque. */ public void clear() { while (pollFirst() != null) ; } /** * Returns an array containing all of the elements in this deque, in proper * sequence (from first to last element). * * <p> * The returned array will be "safe" in that no references to it are * maintained by this deque. (In other words, this method must allocate a * new array). The caller is thus free to modify the returned array. * * <p> * This method acts as bridge between array-based and collection-based APIs. * * @return an array containing all of the elements in this deque */ public Object[] toArray() { return toArrayList().toArray(); } /** * Returns an array containing all of the elements in this deque, in proper * sequence (from first to last element); the runtime type of the returned * array is that of the specified array. 
If the deque fits in the specified * array, it is returned therein. Otherwise, a new array is allocated with * the runtime type of the specified array and the size of this deque. * * <p> * If this deque fits in the specified array with room to spare (i.e., the * array has more elements than this deque), the element in the array * immediately following the end of the deque is set to {@code null}. * * <p> * Like the {@link #toArray()} method, this method acts as bridge between * array-based and collection-based APIs. Further, this method allows * precise control over the runtime type of the output array, and may, under * certain circumstances, be used to save allocation costs. * * <p> * Suppose {@code x} is a deque known to contain only strings. The following * code can be used to dump the deque into a newly allocated array of * {@code String}: * * <pre> * String[] y = x.toArray(new String[0]); * </pre> * * Note that {@code toArray(new Object[0])} is identical in function to * {@code toArray()}. * * @param a * the array into which the elements of the deque are to be * stored, if it is big enough; otherwise, a new array of the * same runtime type is allocated for this purpose * @return an array containing all of the elements in this deque * @throws ArrayStoreException * if the runtime type of the specified array is not a supertype * of the runtime type of every element in this deque * @throws NullPointerException * if the specified array is null */ public <T> T[] toArray(T[] a) { return toArrayList().toArray(a); } /** * Returns an iterator over the elements in this deque in proper sequence. * The elements will be returned in order from first (head) to last (tail). 
* * <p> * The returned {@code Iterator} is a "weakly consistent" iterator that will * never throw {@link java.util.ConcurrentModificationException * ConcurrentModificationException}, and guarantees to traverse elements as * they existed upon construction of the iterator, and may (but is not * guaranteed to) reflect any modifications subsequent to construction. * * @return an iterator over the elements in this deque in proper sequence */ public Iterator<E> iterator() { return new Itr(); } /** * Returns an iterator over the elements in this deque in reverse sequential * order. The elements will be returned in order from last (tail) to first * (head). * * <p> * The returned {@code Iterator} is a "weakly consistent" iterator that will * never throw {@link java.util.ConcurrentModificationException * ConcurrentModificationException}, and guarantees to traverse elements as * they existed upon construction of the iterator, and may (but is not * guaranteed to) reflect any modifications subsequent to construction. */ public Iterator<E> descendingIterator() { return new DescendingItr(); } private abstract class AbstractItr implements Iterator<E> { /** * Next node to return item for. */ private Node<E> nextNode; /** * nextItem holds on to item fields because once we claim that an * element exists in hasNext(), we must return it in the following * next() call even if it was in the process of being removed when * hasNext() was called. */ private E nextItem; /** * Node returned by most recent call to next. Needed by remove. Reset to * null if this element is deleted by a call to remove. */ private Node<E> lastRet; abstract Node<E> startNode(); abstract Node<E> nextNode(Node<E> p); AbstractItr() { advance(); } /** * Sets nextNode and nextItem to next valid node, or to null if no such. */ private void advance() { lastRet = nextNode; Node<E> p = (nextNode == null) ? 
startNode() : nextNode(nextNode); for (;; p = nextNode(p)) { if (p == null) { // p might be active end or TERMINATOR node; both are OK nextNode = null; nextItem = null; break; } E item = p.item; if (item != null) { nextNode = p; nextItem = item; break; } } } public boolean hasNext() { return nextItem != null; } public E next() { E item = nextItem; if (item == null) throw new NoSuchElementException(); advance(); return item; } public void remove() { Node<E> l = lastRet; if (l == null) throw new IllegalStateException(); l.item = null; unlink(l); lastRet = null; } } /** Forward iterator */ private class Itr extends AbstractItr { Node<E> startNode() { return first(); } Node<E> nextNode(Node<E> p) { return succ(p); } } /** Descending iterator */ private class DescendingItr extends AbstractItr { Node<E> startNode() { return last(); } Node<E> nextNode(Node<E> p) { return pred(p); } } /** * Save the state to a stream (that is, serialize it). * * @serialData All of the elements (each an {@code E}) in the proper order, * followed by a null * @param s * the stream */ private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { // Write out any hidden stuff s.defaultWriteObject(); // Write out all elements in the proper order. for (Node<E> p = first(); p != null; p = succ(p)) { Object item = p.item; if (item != null) s.writeObject(item); } // Use trailing null as sentinel s.writeObject(null); } /** * Reconstitute the Queue instance from a stream (that is, deserialize it). 
* * @param s * the stream */ private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { // Read in capacity, and any hidden stuff s.defaultReadObject(); tail = head = new Node<E>(null); // Read in all elements and place in queue for (;;) { @SuppressWarnings("unchecked") E item = (E) s.readObject(); if (item == null) break; else offer(item); } } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE = sun.misc.Unsafe.getUnsafe(); private static final long headOffset = objectFieldOffset(UNSAFE, "head", ConcurrentLinkedDeque.class); private static final long tailOffset = objectFieldOffset(UNSAFE, "tail", ConcurrentLinkedDeque.class); private boolean casHead(Node<E> cmp, Node<E> val) { return UNSAFE.compareAndSwapObject(this, headOffset, cmp, val); } private boolean casTail(Node<E> cmp, Node<E> val) { return UNSAFE.compareAndSwapObject(this, tailOffset, cmp, val); } static long objectFieldOffset(sun.misc.Unsafe UNSAFE, String field, Class<?> klazz) { try { return UNSAFE.objectFieldOffset(klazz.getDeclaredField(field)); } catch (NoSuchFieldException e) { // Convert Exception to corresponding Error NoSuchFieldError error = new NoSuchFieldError(field); error.initCause(e); throw error; } } }
drpicox/david-rodenas.com
src/shared/Container.js
import React from "react" import { css } from "@emotion/core" const containerCss = css` margin: 0 auto; max-width: 37rem; padding: 0 0.5rem; ` export default function Container({ children, className }) { return ( <div css={containerCss} className={className}> {children} </div> ) }
DanielMBytes/2D_PlatformerGame_Dino
src/KeyboardController.h
//
// Created by <NAME> on 6/29/20.
//

#ifndef KeyboardController_H
#define KeyboardController_H

#include <SDL2/SDL.h>

// Lazily-created singleton wrapping SDL keyboard/mouse input for the game.
// NOTE(review): the instance is allocated with `new` and never deleted; it
// lives for the whole process — confirm no shutdown cleanup is required.
class KeyboardController {
public:
    // Returns the shared instance, creating it on first use (not thread-safe).
    static KeyboardController *getInstance() {
        return s_Instance = (s_Instance != nullptr) ? s_Instance : new KeyboardController();
    }

    // Processes pending SDL events (implemented in KeyboardController.cpp).
    void listen();
    // Reports the state of `key`; presumably reads m_KeYStates — TODO confirm
    // against the .cpp implementation.
    bool getKeyDown(SDL_Scancode key);

    // Last stored mouse cursor coordinates.
    inline int getX() const { return x; }
    inline int getY() const { return y; }
    // Latched mouse-button flag; setMouseButtonDown lets callers consume a click.
    inline bool getMouseButtonDown() const { return mouseButtonDown; }
    inline void setMouseButtonDown(bool state) { mouseButtonDown = state; }

private:
    KeyboardController();                  // private: construct only via getInstance()
    static KeyboardController *s_Instance; // singleton storage

    int x{}, y{};                 // mouse coordinates
    bool mouseButtonDown = false; // mouse-button state
    SDL_Event event{};            // scratch event used while polling
    // Presumably the array from SDL_GetKeyboardState (SDL hands it out as
    // const Uint8*) — TODO confirm in KeyboardController.cpp.
    Uint8 *m_KeYStates;

    void keyDown(); // internal key-press handler
};

#endif //KeyboardController_H
tanhleno/pegparser
test/c89/test/no/invalidExpr01.c
<gh_stars>1-10 /* invalid expression */ enum { a = }; /*OK: enum { a = 1 }; enum { a }; */
yuanjiejiahui/background_Management
back/src/main/java/com/vector/manager/sys/entity/Role.java
package com.vector.manager.sys.entity;

import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.annotation.IdType;
import java.util.Date;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.TableField;
import java.io.Serializable;
import java.util.List;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

/**
 * <p>
 * Role information entity, mapped to the {@code sys_role} table.
 * Field-level semantics are documented via the Swagger annotations below.
 * </p>
 *
 * @author YuYue
 * @since 2020-01-12
 */
@Data
@TableName("sys_role")
@ApiModel(value="Role对象", description="角色信息")
public class Role implements Serializable {

    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "主键")
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;

    @ApiModelProperty(value = "角色名称")
    @TableField("role_name")
    private String roleName;

    @ApiModelProperty(value = "角色描述")
    @TableField("description")
    private String description;

    @ApiModelProperty(value = "状态(0 停用, 1 启用, 默认 1)")
    @TableField("status")
    private Integer status;

    @ApiModelProperty(value = "数据权限(1 用户ID, 2 部门ID, 3 全部,默认 1)")
    @TableField("data_perm")
    private Integer dataPerm;

    @ApiModelProperty(value = "数据权限ID集合")
    @TableField("data_perm_ids")
    private String dataPermIds;

    @ApiModelProperty(value = "创建人ID")
    @TableField(value = "create_by", fill = FieldFill.INSERT)
    private Long createBy;

    @ApiModelProperty(value = "创建时间")
    @TableField(value = "create_time", fill = FieldFill.INSERT)
    private Date createTime;

    @ApiModelProperty(value = "更新人ID")
    @TableField(value = "update_by", fill = FieldFill.UPDATE)
    private Long updateBy;

    @ApiModelProperty(value = "更新时间")
    @TableField(value = "update_time", fill = FieldFill.UPDATE)
    private Date updateTime;

    // Not a table column: presumably the tree keys checked in the role-edit UI
    // (menu/permission ids) — TODO confirm against the controller that fills it.
    @TableField(exist = false)
    private List<Long> checkedKeys;

    // Not a table column: presumably the count of users holding this role,
    // filled by a separate query — TODO confirm.
    @TableField(exist = false)
    private Integer userCount;
}
Shalini180/LeetCode-Solutions
Matrix/1351. Count Negative Numbers in a Sorted Matrix/Brute Force/Solution.java
class Solution {
    /**
     * Counts the strictly negative entries of the grid by visiting every cell.
     * Deliberately brute force: O(rows * cols), ignoring the sorted property
     * of LeetCode 1351's input (grid is assumed rectangular per the problem).
     *
     * @param grid matrix of integers
     * @return number of values strictly less than zero
     */
    public int countNegatives(int[][] grid) {
        int negatives = 0;
        for (int[] row : grid) {
            for (int value : row) {
                if (value < 0) {
                    negatives++;
                }
            }
        }
        return negatives;
    }
}
Michael1117/java
maven_mysql/src/main/java/com/dao/ItemsDao.java
package com.dao;

import com.shoo.domain.Items;

import java.util.List;

/**
 * Data-access interface for {@link Items} records.
 */
public interface ItemsDao {
    /**
     * Loads every {@link Items} row.
     *
     * @return all items; presumably an empty list when none exist — TODO
     *         confirm against the MyBatis mapper backing this interface
     */
    public List<Items> findAll();
}
peterson79/pycom-micropython-sigfox
tests/float/builtin_float_round.py
# Exercise float round() and print each result for expected-output comparison.

# Basic values, optionally paired with an ndigits argument.
cases = (
    (0.0,), (1.0,), (0.1,), (-0.1,),
    (123.4,), (123.6,), (-123.4,), (-123.6,),
    (1.234567, 5), (1.23456, 1), (1.23456, 0), (1234.56, -2),
)
for args in cases:
    print(round(*args))

# Exact-halfway cases from -2.5 to 2.5 in steps of 0.5 (round-half-to-even).
for half in range(-5, 6):
    print(round(half / 2))

# Explicit ndigits of 0.
# TODO uPy currently only supports second arg being 0
print(round(1.4, 0))
Domiii/sdt-for-javac
compiler/src/edu/ntu/compilers/lab4/tokens/TokenEmitter.java
package edu.ntu.compilers.lab4.tokens;

/**
 * Used to emit a token, based on its category and content.
 */
public interface TokenEmitter {
    /**
     * Builds a {@link Token} from the matched input text.
     *
     * @param content the raw lexeme text the token is built from
     * @return the token produced for {@code content}
     */
    Token emitToken(String content);
}
ingot-cloud/ingot-go
pkg/framework/security/oauth2/provider/accessor/default_security_context_accessor.go
package accessor import ( "github.com/ingot-cloud/ingot-go/pkg/framework/security/core" "github.com/ingot-cloud/ingot-go/pkg/framework/security/core/ingot" "github.com/ingot-cloud/ingot-go/pkg/framework/security/oauth2/provider/authentication" ) // DefaultSecurityContextAccessor 默认实现 type DefaultSecurityContextAccessor struct { } // IsUser 当前身份验证信息是否为用户身份验证信息 func (a *DefaultSecurityContextAccessor) IsUser(ctx *ingot.Context) bool { auth := ctx.GetAuthentication() if auth == nil { return false } if oauth, ok := auth.(*authentication.OAuth2Authentication); ok { return oauth.UserAuthentication != nil } return true } // GetAuthorities 获取权限 func (a *DefaultSecurityContextAccessor) GetAuthorities(ctx *ingot.Context) []core.GrantedAuthority { auth := ctx.GetAuthentication() if auth == nil { return nil } return auth.GetAuthorities() }
curtiscai/curtis-base
curtis-apache/src/main/java/com/curtis/apache/lang3/enums/CoordinateSystemEnum.java
<gh_stars>1-10 package com.curtis.apache.lang3.enums; /** * @author curtis.cai * @desc TODO * @date 2021-11-24 * @email <EMAIL> * @reference */ public enum CoordinateSystemEnum { WGS84("wgs84", 1), GCJ02("gcj02", 2), BD09("bd09", 3); private String coordinateSystemLabel; private Integer coordinateSystemValue; private CoordinateSystemEnum(String coordinateSystemLabel, Integer coordinateSystemValue) { this.coordinateSystemLabel = coordinateSystemLabel; this.coordinateSystemValue = coordinateSystemValue; } public String getCoordinateSystemLabel() { return coordinateSystemLabel; } public Integer getCoordinateSystemValue() { return coordinateSystemValue; } }
CourtHive/competitionFactory
src/tournamentEngine/tests/drawDefinitions/finishingPositions.test.js
import { getAppliedPolicies } from '../../../drawEngine/governors/policyGovernor/getAppliedPolicies';
import { eventConstants } from '../../../constants/eventConstants';
import { drawEngine } from '../../../drawEngine/sync';
import mocksEngine from '../../../mocksEngine';
import { tournamentEngine } from '../../sync';

import { MISSING_ASSIGNMENTS } from '../../../constants/errorConditionConstants';
import SEEDING_ITF_POLICY from '../../../fixtures/policies/POLICY_SEEDING_ITF';

const { SINGLES } = eventConstants;

// End-to-end check of participant finishing-position aggregation: build a
// 16-position draw, score every match, then verify that each round's losers
// carry the expected finishingPositionRange.
it('can aggrgate participant finishingPositions', () => {
  // Mock tournament with 14 participants (drawSize 16 below, so two byes).
  const { tournamentRecord } = mocksEngine.generateTournamentRecord({
    participantsProfile: { participantsCount: 14 },
  });
  const { participants } = tournamentRecord;
  tournamentEngine.setState(tournamentRecord);

  const event = {
    eventName: 'Test Event',
    eventType: SINGLES,
  };
  let result = tournamentEngine.addEvent({ event });
  const { event: eventResult, success } = result;
  const { eventId } = eventResult;
  expect(success).toEqual(true);

  // Enter every mock participant into the event.
  const participantIds = participants.map((p) => p.participantId);
  result = tournamentEngine.addEventEntries({ eventId, participantIds });
  expect(result.success).toEqual(true);

  // Automated 16-position draw with 4 seeds under the ITF seeding policy.
  const values = {
    automated: true,
    drawSize: 16,
    eventId,
    seedsCount: 4,
    event: eventResult,
    policyDefinitions: { ...SEEDING_ITF_POLICY },
  };
  const { drawDefinition } = tournamentEngine.generateDrawDefinition(values);
  const { drawId } = drawDefinition;
  result = tournamentEngine.addDrawDefinition({ eventId, drawDefinition });
  expect(result.success).toEqual(true);
  drawEngine.setState(drawDefinition);

  // Two extensions are expected on the generated draw — presumably the
  // applied policies plus positioning detail; TODO confirm their names.
  const { extensions } = drawDefinition;
  expect(extensions.length).toEqual(2);

  const { appliedPolicies } = getAppliedPolicies({ drawDefinition });
  expect(appliedPolicies.seeding.policyName).toEqual('ITF');

  // find main structureId more intelligently
  const mainStructureId = drawDefinition.structures[0].structureId;

  const { seedAssignments } = drawEngine.getStructureSeedAssignments({
    structureId: mainStructureId,
  });
  expect(seedAssignments.length).toEqual(4);

  // Calling assignSeedPositions with no assignments must fail loudly.
  result = tournamentEngine.assignSeedPositions({
    structureId: mainStructureId,
    eventId,
    drawId,
  });
  expect(result?.error).toEqual(MISSING_ASSIGNMENTS);

  let { matchUps } = tournamentEngine.allTournamentMatchUps();

  // [roundNumber, roundPosition, winningSide] for every playable matchUp.
  // Round 1 positions 1 and 8 are absent — presumably byes from the 14/16
  // field; TODO confirm against the generated draw.
  const outcomes = [
    [1, 2, 1],
    [1, 3, 2],
    [1, 4, 2],
    [1, 5, 1],
    [1, 6, 2],
    [1, 7, 1],
    [2, 1, 1],
    [2, 2, 1],
    [2, 3, 2],
    [2, 4, 2],
    [3, 1, 1],
    [3, 2, 1],
    [4, 1, 1],
  ];
  outcomes.forEach((outcome) => {
    const [roundNumber, roundPosition, winningSide] = outcome;
    scoreMatchUp({
      roundNumber,
      roundPosition,
      matchUps,
      winningSide,
      drawId,
    });
  });

  // Map of participantId -> aggregated finishing-position info.
  let idMap = tournamentEngine.getParticipantIdFinishingPositions({
    drawId,
  });
  // Refresh matchUps so they reflect the outcomes recorded above.
  ({ matchUps } = tournamentEngine.allTournamentMatchUps());

  // Losing in round N of a 16-draw pins the loser to these position ranges.
  const expectations = [
    { roundNumber: 1, finishingPositionRange: [9, 16] },
    { roundNumber: 2, finishingPositionRange: [5, 8] },
    { roundNumber: 3, finishingPositionRange: [3, 4] },
    { roundNumber: 4, finishingPositionRange: [2, 2] },
  ];
  expectations.forEach(({ roundNumber, finishingPositionRange }) => {
    const losingParticipantIds = getRoundLosingParticipantIds({
      matchUps,
      roundNumber,
    });
    losingParticipantIds.forEach((id) => {
      expect(idMap[id].finishingPositionRange).toEqual(finishingPositionRange);
    });
  });
});

// Collects the participantIds of the losers of every completed matchUp in the
// given round (matchUps without a winningSide are skipped).
function getRoundLosingParticipantIds({ matchUps, roundNumber }) {
  return matchUps
    .filter(
      (matchUp) => matchUp.roundNumber === roundNumber && matchUp.winningSide
    )
    .map(
      ({ winningSide, sides }) =>
        sides.find(({ sideNumber }) => sideNumber !== winningSide).participantId
    );
}

// Finds the matchUp at (roundNumber, roundPosition) and records winningSide as
// its outcome via tournamentEngine, asserting the update succeeded.
function scoreMatchUp({
  roundNumber,
  roundPosition,
  matchUps,
  drawId,
  winningSide,
}) {
  const matchUp = matchUps.find(
    (matchUp) =>
      matchUp.roundNumber === roundNumber &&
      matchUp.roundPosition === roundPosition
  );
  const { matchUpId } = matchUp || {};
  const result = tournamentEngine.setMatchUpStatus({
    drawId,
    matchUpId,
    outcome: { winningSide },
  });
  expect(result.success).toEqual(true);
}
wlliuCode/myth-blog
myth-auth/src/main/java/com/myth/auth/entity/SysPermission.java
package com.myth.auth.entity;

import com.myth.common.core.base.BaseEntity;
import lombok.Data;

import java.io.Serializable;
import java.util.List;

/**
 * Permission entity: a named, URL-guarding permission node with a parent id
 * (tree structure) and the roles that hold it.
 */
@Data
public class SysPermission implements Serializable {
    // Fix: serialVersionUID was declared without implementing Serializable,
    // which made the field meaningless; sibling entities (e.g. Role) do
    // implement Serializable, so this now matches that convention.
    private static final long serialVersionUID = 8197268080561607208L;

    // Primary key.
    private Integer id;

    // Display name of the permission.
    private String permissionName;

    // URL (or URL pattern) this permission guards.
    private String permissionUrl;

    // Parent permission id in the tree — presumably 0 or null for roots;
    // TODO confirm against the schema.
    private Integer parentId;

    // Roles that hold this permission.
    private List<SysRole> roles;
}