repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
woof84/mirador
|
__tests__/src/components/WorkspaceControlPanel.test.js
|
import React from 'react';
import { shallow } from 'enzyme';
import AppBar from '@material-ui/core/AppBar';
import createStore from '../../../src/state/createStore';
import * as actions from '../../../src/state/actions';
import WorkspaceAddButton from '../../../src/containers/WorkspaceAddButton';
import WorkspaceControlPanelButtons from '../../../src/containers/WorkspaceControlPanelButtons';
import Branding from '../../../src/containers/Branding';
import { WorkspaceControlPanel } from '../../../src/components/WorkspaceControlPanel';
import fixture from '../../fixtures/version-2/002.json';
// Unit tests for the (unconnected) WorkspaceControlPanel component.
describe('WorkspaceControlPanel', () => {
  let wrapper;
  // One store shared across tests; manifests are re-dispatched in beforeEach.
  const store = createStore();
  beforeEach(() => {
    // Seed the store with two manifests so the panel has workspace content.
    store.dispatch(actions.receiveManifest('foo', fixture));
    store.dispatch(actions.receiveManifest('bar', fixture));
    // Shallow render; `t` is an identity stub for the translation function.
    wrapper = shallow(
      <WorkspaceControlPanel
        classes={{}}
        store={store}
        t={k => k}
      />,
    );
  });
  it('renders without an error', () => {
    // Panel renders an AppBar plus the add button, control buttons and branding.
    expect(wrapper.find(AppBar).length).toBe(1);
    expect(wrapper.find(WorkspaceAddButton).length).toBe(1);
    expect(wrapper.find(WorkspaceControlPanelButtons).length).toBe(1);
    expect(wrapper.find(Branding).length).toBe(1);
  });
});
|
meterXu/DogIcon
|
src/packages/dog-icon/components/iconPark/ColorCard.js
|
/**
* @file ColorCard 色卡
* @author Auto Generated by IconPark
*/
/* tslint:disable: max-line-length */
/* eslint-disable max-len */
import {IconWrapper} from '../index';
// Icon render function: props.colors[0] is the stroke color, props.colors[1]
// the fill color; stroke width/cap/join come straight from props.
// NOTE(review): the generator emitted near-duplicate geometry — the
// "M24.4853 32H44V44H12.4999" and "...H12.5" strokes overlap, and the handle
// shape ("M10 44C13.3137...") is drawn both as fill-only and again as
// fill+stroke at the end. Redundant but kept: pruning could subtly change
// antialiasing/z-order of the rendered icon — confirm before cleaning up.
export default IconWrapper(
  'ColorCard',
  true,
  (h, props) => (
    <svg
      width={props.size}
      height={props.size}
      viewBox="0 0 48 48"
      fill="none"
    >
      <path
        d="M10 44C13.3137 44 16 41.3137 16 38V23.5147V4H4V38C4 41.3137 6.68629 44 10 44Z"
        fill={props.colors[1]}
      />
      <path
        d="M10 44C13.3137 44 16 41.3137 16 38V23.5147M10 44C6.68629 44 4 41.3137 4 38V4H16V23.5147M10 44H44V32H24.4853M5.75736 42.2426C8.10051 44.5858 11.8995 44.5858 14.2426 42.2426L24.4853 32M16 23.5147L35.0147 4.5L35.4853 4L43.9853 12.5L24.4853 32"
        stroke={props.colors[0]}
        stroke-width={props.strokeWidth}
        stroke-linecap={props.strokeLinecap}
        stroke-linejoin={props.strokeLinejoin}
      />
      <path
        d="M14.2427 42.2426L43.9853 12.5L35.4853 4L16 23.5147"
        stroke={props.colors[0]}
        stroke-width={props.strokeWidth}
        stroke-linecap={props.strokeLinecap}
        stroke-linejoin={props.strokeLinejoin}
      />
      <path
        d="M24.4853 32H44V44H12.4999"
        stroke={props.colors[0]}
        stroke-width={props.strokeWidth}
        stroke-linecap={props.strokeLinecap}
        stroke-linejoin={props.strokeLinejoin}
      />
      <path
        d="M24.4853 32H44V44H12.5"
        stroke={props.colors[0]}
        stroke-width={props.strokeWidth}
        stroke-linecap={props.strokeLinecap}
        stroke-linejoin={props.strokeLinejoin}
      />
      <path
        d="M10 44C13.3137 44 16 41.3137 16 38V23.5147V4H4V38C4 41.3137 6.68629 44 10 44Z"
        fill={props.colors[1]}
        stroke={props.colors[0]}
        stroke-width={props.strokeWidth}
        stroke-linecap={props.strokeLinecap}
        stroke-linejoin={props.strokeLinejoin}
      />
    </svg>
  )
);
|
oktadev/okta-java-spring-k8s-istio-microservices-example
|
product/src/main/java/com/okta/developer/product/web/rest/package-info.java
|
/**
* Spring MVC REST controllers.
*/
package com.okta.developer.product.web.rest;
|
jlmayfield/Cirq
|
cirq/contrib/acquaintance/devices_test.py
|
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import cirq
import cirq.contrib.acquaintance as cca
def test_acquaintance_device():
    """Non-acquaintance ops are rejected; acquaintance ops pass validation."""
    device = cca.UnconstrainedAcquaintanceDevice

    # A plain Pauli operation is not an acquaintance operation.
    with pytest.raises(ValueError):
        device.validate_operation(cirq.X(cirq.NamedQubit('q')))

    line = cirq.LineQubit.range(4)
    device.validate_operation(cca.acquaint(*line[:2]))
    device.validate_operation(cca.SwapNetworkGate((1, 2, 1))(*line))
def test_get_acquaintance_size():
    """get_acquaintance_size rejects inputs not tied to an acquaintance device."""
    # A device-less circuit and a plain int both raise TypeError.
    for bad_input in (cirq.Circuit(), 3):
        with pytest.raises(TypeError):
            cca.get_acquaintance_size(bad_input)

    # A circuit on the unconstrained acquaintance device is accepted.
    circuit = cirq.Circuit(device=cca.UnconstrainedAcquaintanceDevice)
    cca.get_acquaintance_size(circuit)
|
hamdiahmedamin/VoxelPlugin
|
Source/Voxel/Public/VoxelMath.h
|
<reponame>hamdiahmedamin/VoxelPlugin
// Copyright 2019 Phyronnaz
#pragma once
#include "CoreMinimal.h"
namespace FVoxelMath
{
	/**
	 * Floor division of two ints: the largest integer <= Dividend / Divisor.
	 * Unlike C++ operator/ (which truncates toward zero), this rounds toward
	 * negative infinity, matching mathematical floor semantics.
	 * Divisor must be nonzero.
	 */
	inline int DivideFloor(int Dividend, int Divisor)
	{
		const int Quotient = Dividend / Divisor;
		const int Remainder = Dividend % Divisor;
		// Truncation already equals floor unless there is a nonzero remainder
		// and the operands disagree in sign; then step one down.
		const bool bNeedsAdjust = (Remainder != 0) && ((Remainder < 0) != (Divisor < 0));
		return bNeedsAdjust ? Quotient - 1 : Quotient;
	}
}
|
Pandinosaurus/DGM
|
tests/TestPDF.h
|
#pragma once
#include "gtest/gtest.h"
#include "types.h"
#include "DGM.h"
// Google Test fixture grouping PDF (probability density function) tests for
// the DGM library. Intentionally empty: no shared setup/teardown is needed,
// it exists only so TEST_F cases share a common name.
class CTestPDF : public ::testing::Test {
public:
	CTestPDF(void) = default;
	~CTestPDF(void) = default;
};
|
32th-System/LuaSTG-EX-Plus_Archive
|
LuaSTGExPlus/LuaSTG/LuaCustomLoader.cpp
|
#include <string>
#include "LuaCustomLoader.hpp"
#include "Global.h"
#include "AppFrame.h"
// Returns 1 when `filename` exists in the LuaSTG file manager (virtual file
// system), 0 otherwise. Any exception thrown by the probe is deliberately
// treated as "not readable" rather than propagated.
static int readable(const char* filename) {
	try {
		if (LFMGR.FileExistEx(filename))
			return 1;
	}
	catch (...) {
		// best-effort existence check: swallow and fall through
	}
	return 0;
}
// Extracts the next template from a LUA_PATHSEP-separated search path.
// Pushes the template string onto the Lua stack and returns a pointer just
// past it, or NULL when the path is exhausted.
static const char* pushnexttemplate(lua_State* L, const char* path) {
	while (*path == *LUA_PATHSEP) {
		++path;  // skip leading separators
	}
	if (*path == '\0') {
		return NULL;  // no templates left
	}
	const char* end = strchr(path, *LUA_PATHSEP);
	if (end == NULL) {
		end = path + strlen(path);  // last template runs to end of string
	}
	lua_pushlstring(L, path, (size_t)(end - path));
	return end;
}
// Walks the templates in `path`, substituting `name` (with `sep` replaced by
// `dirsep`) for each LUA_PATH_MARK, and returns the first candidate that
// readable() accepts — that file name is left on the Lua stack. On failure,
// returns NULL with a concatenated "no file ..." message pushed instead.
// Mirrors Lua's stock searchpath, but probes the LuaSTG virtual file system.
static const char* searchpath(lua_State* L, const char* name, const char* path, const char* sep, const char* dirsep) {
	luaL_Buffer msg; /* to build error message */
	luaL_buffinit(L, &msg);
	if (*sep != '\0') /* non-empty separator? */
		name = luaL_gsub(L, name, sep, dirsep); /* replace it by 'dirsep' */
	while ((path = pushnexttemplate(L, path)) != NULL) {
		const char* filename = luaL_gsub(L, lua_tostring(L, -1),
			LUA_PATH_MARK, name);
		lua_remove(L, -2); /* remove path template */
		if (readable(filename)) /* does file exist and is readable? */
			return filename; /* return that file name */
		lua_pushfstring(L, "\n\tno file " LUA_QS, filename);
		lua_remove(L, -2); /* remove file name */
		luaL_addvalue(&msg); /* concatenate error msg. entry */
	}
	luaL_pushresult(&msg); /* create error message */
	return NULL; /* not found */
}
// Resolves module `name` against the search path stored in package.path and
// delegates to searchpath() with '.' -> '/' module-name mapping. Raises a Lua
// error when `package` or `package.path` has the wrong type. The trailing
// "// ??? t" comments describe the Lua stack after each call ("???" = the
// caller's stack, each letter = one value pushed by this function).
// NOTE(review): the field read is hard-coded to "path" while the error
// message interpolates `pname`; fine for the current caller (which passes
// "path"), but worth confirming if more search paths are ever added.
static const char* findfile(lua_State* L, const char* name, const char* pname) {
	std::string path;
	lua_getglobal(L, "package"); // ??? t
	if (lua_istable(L, -1)) {
		lua_getfield(L, -1, "path"); // ??? t s
		if (lua_isstring(L, -1)) {
			path = lua_tostring(L, -1);
		}
		else {
			luaL_error(L, LUA_QL("package.%s") " must be a string", pname);
		}
		lua_pop(L, 1); // ??? t
	}
	else {
		luaL_error(L, LUA_QL("package") " must be a table");
	}
	lua_pop(L, 1); // ???
	return searchpath(L, name, path.c_str(), ".", "/");
}
// Raises a Lua error for a module that was located but failed to load.
// Expects the module name at stack index 1 (require's argument) and the
// loader's error message on top of the stack. Does not return.
static void loaderror(lua_State* L, const char* filename) {
	luaL_error(L, "error loading module " LUA_QS " from file " LUA_QS ":\n\t%s",
		lua_tostring(L, 1), filename, lua_tostring(L, -1));
}
// package.loaders entry that resolves modules through LuaSTG's virtual file
// system (resource packs) instead of the OS file system.
// Leaves exactly one value on the stack: the compiled chunk on success, or
// the "no file ..." message findfile() pushed when the module was not found
// (require() treats a non-function as "this loader failed, try the next").
// Raises via loaderror() when the file exists but cannot be read or compiled.
static int package_loader_luastg(lua_State* L) {
	const char* name = luaL_checkstring(L, 1);
	const char* filename = findfile(L, name, "path");
	if (filename == NULL) return 1; /* library not found in this path */
	fcyRefPointer<fcyStream> stream; {
		fcyStream* p = LAPP.GetFileManager().LoadFile(filename);
		stream.DirectSet(p);
	}
	if (*stream == nullptr)
		loaderror(L, filename);
	else {
		fLen length = stream->GetLength();
		std::string str;
		str.resize((size_t)length);
		stream->SetPosition(FCYSEEKORIGIN_BEG, 0);
		fLen rd = 0;
		stream->ReadBytes((fData)str.data(), length, &rd);
		if (rd != length) /* short read: file vanished or stream error */
			loaderror(L, filename);
		// luaL_loadbuffer is binary-safe — luaL_loadstring would stop at the
		// first embedded '\0' (e.g. precompiled chunks) — and records the
		// file name as the chunk name for better error messages.
		if (luaL_loadbuffer(L, str.data(), str.size(), filename) != 0)
			loaderror(L, filename);
	}
	return 1; /* library loaded successfully */
}
namespace LuaSTGPlus {
	// Appends package_loader_luastg to package.loaders (at index #loaders + 1)
	// so that `require` falls back to the LuaSTG virtual file system after the
	// stock loaders have run. Quietly does nothing when `package` or
	// `package.loaders` is missing or not a table. The "// ??? ..." comments
	// track the Lua stack state after each call.
	void lua_register_custom_loader(lua_State* L) {
		lua_getglobal(L, "package"); // ??? t
		if (lua_istable(L, -1)) {
			lua_getfield(L, -1, "loaders"); // ??? t t
			if (lua_istable(L, -1)) {
				lua_pushinteger(L, lua_objlen(L, -1) + 1); // ??? t t i
				lua_pushcfunction(L, package_loader_luastg); // ??? t t i f
				lua_settable(L, -3); // ??? t t
			}
			lua_pop(L, 1); // ??? t
		}
		lua_pop(L, 1); // ???
	}
};
|
Insafin/iroha
|
irohad/consensus/yac/storage/impl/yac_common.cpp
|
<filename>irohad/consensus/yac/storage/impl/yac_common.cpp
/**
* Copyright Soramitsu Co., Ltd. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/
#include "consensus/yac/storage/yac_common.hpp"
#include <algorithm>
#include "consensus/yac/outcome_messages.hpp"
namespace yac = iroha::consensus::yac;
bool yac::sameKeys(const std::vector<VoteMessage> &votes) {
if (votes.empty()) {
return false;
}
auto first = votes.at(0);
return std::all_of(votes.begin(), votes.end(), [&first](const auto ¤t) {
return first.hash.vote_round == current.hash.vote_round;
});
}
/**
 * Extracts the consensus round shared by the given votes.
 * @return the common round, or boost::none when the votes are empty or
 *         belong to different rounds
 */
boost::optional<iroha::consensus::Round> yac::getKey(
    const std::vector<VoteMessage> &votes) {
  // The round is only well defined when every vote agrees on it.
  if (sameKeys(votes)) {
    return votes.at(0).hash.vote_round;
  }
  return boost::none;
}
/**
 * Extracts the YacHash of the given votes.
 * @return the first vote's hash, or boost::none when the votes are empty or
 *         do not all share the same round
 */
boost::optional<iroha::consensus::yac::YacHash> yac::getHash(
    const std::vector<VoteMessage> &votes) {
  if (not sameKeys(votes)) {
    return boost::none;
  }
  return votes.front().hash;
}
|
wxun7619/suf
|
usersystem/src/main/java/team/benchem/usersystem/entity/Permission.java
|
<filename>usersystem/src/main/java/team/benchem/usersystem/entity/Permission.java
package team.benchem.usersystem.entity;
import javax.persistence.*;
import java.util.UUID;
/**
 * JPA entity mapped to table {@code t_sys_funcpermission}: one functional
 * permission entry belonging to a functional module of the user system.
 */
@Entity
@Table(name="t_sys_funcpermission")
public class Permission {
    // Primary key: a 36-char UUID string, assigned at construction time.
    @Id
    @Column(name="frowid", length = 36)
    String rowId;
    // Display/sort order within the owning functional module; defaults to 0.
    @Column(name="forderindex")
    Integer orderIndex;
    // Machine-readable permission key (used for permission checks).
    @Column(name="fpermissionkey")
    String permissionKey;
    // Human-readable permission name.
    @Column(name="fpermissionname")
    String permissionName;
    // Id of the functional module that owns this permission.
    @Column(name = "fownerfunctionalid")
    String ownerFunctionalId;
    public Permission() {
        rowId = UUID.randomUUID().toString();
        orderIndex = 0;
    }
    public String getRowId() {
        return rowId;
    }
    public void setRowId(String rowId) {
        this.rowId = rowId;
    }
    public Integer getOrderIndex() {
        return orderIndex;
    }
    public void setOrderIndex(Integer orderIndex) {
        this.orderIndex = orderIndex;
    }
    public String getPermissionKey() {
        return permissionKey;
    }
    public void setPermissionKey(String permissionKey) {
        this.permissionKey = permissionKey;
    }
    public String getPermissionName() {
        return permissionName;
    }
    public void setPermissionName(String permissionName) {
        this.permissionName = permissionName;
    }
    public String getOwnerFunctionalId() {
        return ownerFunctionalId;
    }
    public void setOwnerFunctionalId(String ownerFunctionalId) {
        this.ownerFunctionalId = ownerFunctionalId;
    }
}
|
dewdew/project1InPods
|
Frameworks/CreativeSDK/AdobeCreativeSDKCore.framework/Versions/A/Headers/AdobeAuthUserProfile.h
|
<gh_stars>0
/******************************************************************************
*
* ADOBE CONFIDENTIAL
* ___________________
*
* Copyright 2013 Adobe Systems Incorporated
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains the property of
* Adobe Systems Incorporated and its suppliers, if any. The intellectual and
* technical concepts contained herein are proprietary to Adobe Systems
* Incorporated and its suppliers and are protected by trade secret or
* copyright law. Dissemination of this information or reproduction of this
* material is strictly forbidden unless prior written permission is obtained
* from Adobe Systems Incorporated.
*
* THIS FILE IS PART OF THE CREATIVE SDK PUBLIC API
*
******************************************************************************/
#import <Foundation/Foundation.h>
/**
*
* AdobeAuthUserProfile contains the properties of the currently logged in user as obtained by calling
* [AdobeUXAuthManager sharedManager].userProfile.
*
* See AdobeUXAuthManager
*
*/
// Read-only snapshot of the authenticated user's profile (see the class
// description above). All properties are populated by the SDK; clients do
// not mutate this object.
@interface AdobeAuthUserProfile : NSObject
/**
 * The user ID.
 */
@property (nonatomic, readonly, strong) NSString *adobeID;
/**
 * The country code for the user.
 */
@property (nonatomic, readonly, strong) NSString *countryCode;
/**
 * Whether the application key is in developer mode.
 */
@property (nonatomic, readonly) BOOL developerMode;
/**
 * The display name of the user.
 */
@property (nonatomic, readonly, strong) NSString *displayName;
/**
 * The email address of the user.
 */
@property (nonatomic, readonly, strong) NSString *email;
/**
 * Whether or not the email address has been verified.
 */
@property (nonatomic, readonly) BOOL emailVerified;
/**
 * The first name of the user.
 */
@property (nonatomic, readonly, strong) NSString *firstName;
/**
 * The full name of the user.
 */
@property (nonatomic, readonly, strong) NSString *fullName;
/**
 * Whether the user is an enterprise user.
 */
@property (nonatomic, readonly) BOOL isEnterpriseUser;
/**
 * The last name of the user.
 */
@property (nonatomic, readonly, strong) NSString *lastName;
/**
 * The preferred languages of the user.
 */
@property (nonatomic, readonly, strong) NSArray *preferredLanguages;
/**
 * The object in a readable format. Useful for debugging but should not be used publicly.
 */
- (NSString *)description;
@end
|
julpark-rh/cephci
|
tests/cephfs/nfs/nfs_export_path.py
|
<gh_stars>0
import json
import secrets
import string
import traceback
from ceph.ceph import CommandFailed
from tests.cephfs.cephfs_utilsV1 import FsUtils
from tests.cephfs.cephfs_volume_management import wait_for_process
from utility.log import Log
log = Log(__name__)
def run(ceph_cluster, **kw):
    """
    CEPH-83574028 - Ensure the path of the nfs export is displayed properly.

    Pre-requisites:
    1. Create cephfs volume
       ceph fs volume create <vol_name>
    2. Create nfs cluster
       ceph nfs cluster create <nfs_name> <nfs_server>

    Test operation:
    1. Create cephfs nfs export
       ceph nfs export create cephfs <fs_name> <nfs_name> <nfs_export_name> path=<export_path>
    2. Verify path of cephfs nfs export
       ceph nfs export get <nfs_name> <nfs_export_name>

    Clean-up:
    1. Remove cephfs nfs export

    Returns 0 on success, 1 on failure.
    """
    try:
        tc = "CEPH-83574028"
        log.info(f"Running cephfs {tc} test case")
        config = kw["config"]
        build = config.get("build", config.get("rhbuild"))
        fs_util = FsUtils(ceph_cluster)
        # Fix: the client list and client1 were fetched twice; fetch once.
        clients = ceph_cluster.get_ceph_objects("client")
        client1 = clients[0]
        fs_util.prepare_clients(clients, build)
        fs_util.auth_list(clients)
        # NOTE(review): mon_node_ip is computed but never used below — kept
        # for parity with sibling tests; confirm before removing entirely.
        mon_node_ip = fs_util.get_mon_node_ips()
        mon_node_ip = ",".join(mon_node_ip)
        rhbuild = config.get("rhbuild")
        nfs_servers = ceph_cluster.get_ceph_objects("nfs")
        nfs_server = nfs_servers[0].node.hostname
        nfs_name = "cephfs-nfs"
        client1.exec_command(sudo=True, cmd="ceph mgr module enable nfs")
        out, rc = client1.exec_command(
            sudo=True, cmd=f"ceph nfs cluster create {nfs_name} {nfs_server}"
        )
        if wait_for_process(client=client1, process_name=nfs_name, ispresent=True):
            log.info("ceph nfs cluster created successfully")
        else:
            raise CommandFailed("Failed to create nfs cluster")
        # Randomized export name, e.g. "/export_042".
        nfs_export_name = "/export_" + "".join(
            secrets.choice(string.digits) for i in range(3)
        )
        export_path = "/"
        fs_name = "cephfs"
        # The export-create CLI argument order changed after RHCS 5.0.
        if "5.0" in rhbuild:
            client1.exec_command(
                sudo=True,
                cmd=f"ceph nfs export create cephfs {fs_name} {nfs_name} "
                f"{nfs_export_name} path={export_path}",
            )
        else:
            client1.exec_command(
                sudo=True,
                cmd=f"ceph nfs export create cephfs {nfs_name} "
                f"{nfs_export_name} {fs_name} path={export_path}",
            )
        out, rc = client1.exec_command(sudo=True, cmd=f"ceph nfs export ls {nfs_name}")
        if nfs_export_name not in out:
            raise CommandFailed("Failed to create nfs export")
        log.info("ceph nfs export created successfully")
        out, rc = client1.exec_command(
            sudo=True, cmd=f"ceph nfs export get {nfs_name} {nfs_export_name}"
        )
        output = json.loads(out)
        export_get_path = output["path"]
        if export_get_path != export_path:
            log.error("Export path is not correct")
            return 1
        log.info("Test completed successfully")
        return 0
    except Exception as e:
        # Fix: failures were logged at info level; use error so they surface.
        log.error(e)
        log.error(traceback.format_exc())
        return 1
    finally:
        log.info("Cleaning up")
        # Fix: an early failure (before nfs_export_name exists) used to raise
        # NameError here, masking the original exception. Guard the cleanup.
        if "nfs_export_name" in locals():
            client1.exec_command(
                sudo=True,
                cmd=f"ceph nfs export delete {nfs_name} {nfs_export_name}",
                check_ec=False,
            )
|
SINTEF-Infosec/Incident-Information-Sharing-Tool
|
incidents/migrations/0001_initial.py
|
<gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import uuid
class Migration(migrations.Migration):
    """Initial schema for the incidents app.

    Auto-generated by Django's makemigrations. All models use a UUID primary
    key. Models are created first, then mutually-referencing ForeignKey /
    ManyToMany fields are wired up in a second pass of AddField operations
    (required because the models reference each other).
    """

    # First migration of the app: no dependencies.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Attachment',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('format', models.CharField(max_length=255, choices=[('iodef', 'IODEF'), ('imdef', 'IMDEF'), ('cybox', 'CybOX'), ('zip', 'ZIP'), ('tar', 'TAR')])),
                ('file', models.FileField(upload_to='')),
            ],
        ),
        migrations.CreateModel(
            name='CustomField',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('name', models.CharField(max_length=255)),
                ('type', models.CharField(max_length=6, choices=[('string', 'String'), ('int', 'Integer'), ('bool', 'Boolean')])),
            ],
        ),
        migrations.CreateModel(
            name='CustomFieldValue',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('value', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Incident',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('language', models.CharField(max_length=5)),
                ('status', models.CharField(max_length=10, choices=[('resolved', 'Resolved'), ('unresolved', 'Unresolved')])),
                ('impact', models.CharField(max_length=6, choices=[('high', 'High'), ('medium', 'Medium'), ('low', 'Low')])),
                ('summary', models.TextField()),
                ('description', models.TextField()),
                ('occurrence_time', models.DateTimeField()),
                ('detection_time', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='IncidentType',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('consequence', models.FloatField()),
            ],
        ),
        migrations.CreateModel(
            name='Liaison',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('name', models.CharField(max_length=255)),
                ('email', models.EmailField(max_length=254)),
                ('phone', models.CharField(max_length=255)),
                ('address', models.CharField(max_length=255)),
                ('zip', models.CharField(max_length=19)),
                ('city', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('generation_time', models.DateTimeField()),
                ('sent', models.DateTimeField()),
                ('sender', models.CharField(max_length=255)),
                ('hmac', models.CharField(max_length=255)),
                ('incidents', models.ManyToManyField(to='incidents.Incident')),
            ],
        ),
        migrations.CreateModel(
            name='NotificationIncident',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
            ],
        ),
        migrations.CreateModel(
            name='NotificationTrigger',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('method', models.CharField(max_length=255, choices=[('and', 'AND'), ('or', 'OR'), ('none', 'NONE')])),
                ('threshold', models.FloatField()),
                ('comparator', models.CharField(max_length=1)),
            ],
        ),
        migrations.CreateModel(
            name='NotificationType',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('name', models.CharField(max_length=255)),
                ('endpoint', models.URLField()),
                ('incidents', models.ManyToManyField(to='incidents.NotificationIncident')),
            ],
        ),
        migrations.CreateModel(
            name='TriggerType',
            fields=[
                ('id', models.UUIDField(primary_key=True, serialize=False, editable=False, default=uuid.uuid4)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('threshold_type', models.CharField(max_length=255)),
                ('comparators', models.CharField(max_length=255)),
                ('incident_type', models.ForeignKey(to='incidents.IncidentType')),
            ],
        ),
        # Second pass: relations between the models created above.
        migrations.AddField(
            model_name='notificationtrigger',
            name='type',
            field=models.ForeignKey(to='incidents.TriggerType'),
        ),
        migrations.AddField(
            model_name='notificationincident',
            name='triggers',
            field=models.ManyToManyField(to='incidents.NotificationTrigger'),
        ),
        migrations.AddField(
            model_name='notificationincident',
            name='type',
            field=models.ForeignKey(to='incidents.IncidentType'),
        ),
        migrations.AddField(
            model_name='notification',
            name='type',
            field=models.ForeignKey(to='incidents.NotificationType'),
        ),
        migrations.AddField(
            model_name='incident',
            name='liaison',
            field=models.ForeignKey(to='incidents.Liaison'),
        ),
        migrations.AddField(
            model_name='incident',
            name='parent',
            field=models.ForeignKey(to='incidents.Incident'),
        ),
        migrations.AddField(
            model_name='incident',
            name='type',
            field=models.ForeignKey(to='incidents.IncidentType'),
        ),
        migrations.AddField(
            model_name='customfieldvalue',
            name='incident',
            field=models.ForeignKey(to='incidents.Incident'),
        ),
        migrations.AddField(
            model_name='customfieldvalue',
            name='type',
            field=models.ForeignKey(to='incidents.CustomField'),
        ),
        migrations.AddField(
            model_name='customfield',
            name='incident_type',
            field=models.ForeignKey(to='incidents.IncidentType'),
        ),
        migrations.AddField(
            model_name='attachment',
            name='incident',
            field=models.ForeignKey(to='incidents.Incident'),
        ),
    ]
|
phatblat/macOSPrivateFrameworks
|
PrivateFrameworks/WebInspector/RWIDebuggable.h
|
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "NSObject.h"
@class NSDictionary, NSNumber, NSString, RWIApplication;
// Reverse-engineered (class-dump) declaration of a remote Web Inspector
// debuggable target: a page/context identified by uuid + pageId within an
// RWIApplication, with url/title metadata and the current debugger state.
// Ivar/property semantics beyond what the names show are not recoverable
// from a dumped header — treat descriptions as tentative.
@interface RWIDebuggable : NSObject
{
    id <RWIDebuggableDelegate> _delegate;
    NSString *_uuid;
    NSNumber *_pageId;
    NSString *_url;
    NSString *_title;
    NSDictionary *_userInfo;
    RWIApplication *_application;
    long long _type;
    long long _debuggerState;
    RWIApplication *_proxyApplication;
    id <RWIDebugger> _debugger;
}
@property(nonatomic) __weak id <RWIDebugger> debugger; // @synthesize debugger=_debugger;
@property(readonly, nonatomic) RWIApplication *proxyApplication; // @synthesize proxyApplication=_proxyApplication;
@property(readonly, nonatomic) long long debuggerState; // @synthesize debuggerState=_debuggerState;
@property(readonly, nonatomic) long long type; // @synthesize type=_type;
@property(readonly, nonatomic) RWIApplication *application; // @synthesize application=_application;
@property(readonly, copy, nonatomic) NSDictionary *userInfo; // @synthesize userInfo=_userInfo;
@property(readonly, copy, nonatomic) NSString *title; // @synthesize title=_title;
@property(readonly, copy, nonatomic) NSString *url; // @synthesize url=_url;
@property(readonly, copy, nonatomic) NSNumber *pageId; // @synthesize pageId=_pageId;
@property(readonly, copy, nonatomic) NSString *uuid; // @synthesize uuid=_uuid;
@property(nonatomic) __weak id <RWIDebuggableDelegate> delegate; // @synthesize delegate=_delegate;
- (void).cxx_destruct;
- (void)hostApplicationNowAvailable:(id)arg1;
- (void)changeURL:(id)arg1 title:(id)arg2 debuggerState:(long long)arg3 userInfo:(id)arg4;
@property(readonly, copy, nonatomic) NSString *name; // @dynamic name;
@property(readonly, nonatomic) RWIApplication *owningApplication; // @dynamic owningApplication;
- (void)setIndicating:(BOOL)arg1;
- (id)openInspectorPaused:(BOOL)arg1;
- (id)openInspector;
- (id)initWithApplication:(id)arg1 proxyApplication:(id)arg2 pageId:(id)arg3 url:(id)arg4 title:(id)arg5 type:(long long)arg6 debuggerState:(long long)arg7 userInfo:(id)arg8;
@end
|
maritimeconnectivity/G1128-Schemas
|
src/main/java/_int/iho/s100base/ObjectFactory.java
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.3.2
// See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2021.10.20 at 12:23:53 PM BST
//
package _int.iho.s100base;
import javax.xml.bind.annotation.XmlRegistry;
/**
* This object contains factory methods for each
* Java content interface and Java element interface
* generated in the _int.iho.s100base package.
* <p>An ObjectFactory allows you to programatically
* construct new instances of the Java representation
* for XML content. The Java representation of XML
* content can consist of schema derived interfaces
* and classes representing the binding of schema
* type definitions, element declarations and model
* groups. Factory methods for each of these are
* provided in this class.
*
*/
// JAXB-generated registry of no-arg factory methods; do not hand-edit beyond
// comments — regenerated from the schema (see file header above).
@XmlRegistry
public class ObjectFactory {
    /**
     * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: _int.iho.s100base
     *
     */
    public ObjectFactory() {
    }
    /**
     * Create an instance of {@link UnlimitedInteger }
     *
     */
    public UnlimitedInteger createUnlimitedInteger() {
        return new UnlimitedInteger();
    }
    /**
     * Create an instance of {@link S100Multiplicity }
     *
     */
    public S100Multiplicity createS100Multiplicity() {
        return new S100Multiplicity();
    }
    /**
     * Create an instance of {@link S100NumericRange }
     *
     */
    public S100NumericRange createS100NumericRange() {
        return new S100NumericRange();
    }
    /**
     * Create an instance of {@link Locale }
     *
     */
    public Locale createLocale() {
        return new Locale();
    }
    /**
     * Create an instance of {@link S100UnitOfMeasure }
     *
     */
    public S100UnitOfMeasure createS100UnitOfMeasure() {
        return new S100UnitOfMeasure();
    }
}
|
num-codex/codex-processes-ap1
|
codex-process-data-transfer/src/main/java/de/netzwerk_universitaetsmedizin/codex/processes/data_transfer/DataTransferProcessPluginDefinition.java
|
package de.netzwerk_universitaetsmedizin.codex.processes.data_transfer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.highmed.dsf.bpe.ProcessPluginDefinition;
import org.highmed.dsf.fhir.resources.AbstractResource;
import org.highmed.dsf.fhir.resources.ActivityDefinitionResource;
import org.highmed.dsf.fhir.resources.CodeSystemResource;
import org.highmed.dsf.fhir.resources.NamingSystemResource;
import org.highmed.dsf.fhir.resources.ResourceProvider;
import org.highmed.dsf.fhir.resources.StructureDefinitionResource;
import org.highmed.dsf.fhir.resources.ValueSetResource;
import org.springframework.core.env.PropertyResolver;
import ca.uhn.fhir.context.FhirContext;
import de.netzwerk_universitaetsmedizin.codex.processes.data_transfer.spring.config.TransferDataConfig;
import de.netzwerk_universitaetsmedizin.codex.processes.data_transfer.spring.config.TransferDataSerializerConfig;
/**
 * DSF process plugin definition for the NUM-CODEX data transfer processes
 * (trigger, send, translate, receive): declares the BPMN files, Spring
 * configuration classes, and per-process FHIR resources this plugin ships.
 */
public class DataTransferProcessPluginDefinition implements ProcessPluginDefinition
{
	public static final String VERSION = "0.4.1";

	@Override
	public String getName()
	{
		return "codex-process-data-transfer";
	}

	@Override
	public String getVersion()
	{
		return VERSION;
	}

	@Override
	public Stream<String> getBpmnFiles()
	{
		return Stream.of("bpe/trigger.bpmn", "bpe/send.bpmn", "bpe/translate.bpmn", "bpe/receive.bpmn");
	}

	@Override
	public Stream<Class<?>> getSpringConfigClasses()
	{
		return Stream.of(TransferDataConfig.class, TransferDataSerializerConfig.class);
	}

	@Override
	public ResourceProvider getResourceProvider(FhirContext fhirContext, ClassLoader classLoader,
			PropertyResolver propertyResolver)
	{
		// Activity definitions, one per BPMN process.
		var actTrigger = ActivityDefinitionResource.file("fhir/ActivityDefinition/num-codex-data-trigger.xml");
		var actSend = ActivityDefinitionResource.file("fhir/ActivityDefinition/num-codex-data-send.xml");
		var actTranslate = ActivityDefinitionResource.file("fhir/ActivityDefinition/num-codex-data-translate.xml");
		var actReceive = ActivityDefinitionResource.file("fhir/ActivityDefinition/num-codex-data-receive.xml");

		var codeSystem = CodeSystemResource.file("fhir/CodeSystem/num-codex-data-transfer.xml");

		// Identifier naming systems for the pseudonyms and the bloom filter.
		var nsDicPseudonym = NamingSystemResource.file("fhir/NamingSystem/num-codex-dic-pseudonym-identifier.xml");
		var nsCrrPseudonym = NamingSystemResource.file("fhir/NamingSystem/num-codex-crr-pseudonym-identifier.xml");
		var nsBloomFilter = NamingSystemResource.file("fhir/NamingSystem/num-codex-bloom-filter-identifier.xml");

		// Task profiles used to start/stop the individual processes.
		var taskStartTrigger = StructureDefinitionResource
				.file("fhir/StructureDefinition/num-codex-task-start-data-trigger.xml");
		var taskStopTrigger = StructureDefinitionResource
				.file("fhir/StructureDefinition/num-codex-task-stop-data-trigger.xml");
		var taskStartSend = StructureDefinitionResource.file("fhir/StructureDefinition/num-codex-task-start-data-send.xml");
		var taskStartTranslate = StructureDefinitionResource
				.file("fhir/StructureDefinition/num-codex-task-start-data-translate.xml");
		var taskStartReceive = StructureDefinitionResource
				.file("fhir/StructureDefinition/num-codex-task-start-data-receive.xml");

		var valueSet = ValueSetResource.file("fhir/ValueSet/num-codex-data-transfer.xml");

		// One resource bundle per process key, all versioned with this plugin.
		Map<String, List<AbstractResource>> resourcesByProcessKeyAndVersion = Map.of(
				"wwwnetzwerk-universitaetsmedizinde_dataTrigger/" + VERSION,
				Arrays.asList(actTrigger, codeSystem, nsDicPseudonym, taskStartTrigger, taskStopTrigger, valueSet),
				"wwwnetzwerk-universitaetsmedizinde_dataSend/" + VERSION,
				Arrays.asList(actSend, codeSystem, nsDicPseudonym, nsBloomFilter, taskStartSend, valueSet),
				"wwwnetzwerk-universitaetsmedizinde_dataTranslate/" + VERSION,
				Arrays.asList(actTranslate, codeSystem, nsDicPseudonym, nsCrrPseudonym, taskStartTranslate, valueSet),
				"wwwnetzwerk-universitaetsmedizinde_dataReceive/" + VERSION,
				Arrays.asList(actReceive, codeSystem, nsCrrPseudonym, taskStartReceive, valueSet));

		return ResourceProvider.read(VERSION, () -> fhirContext.newXmlParser().setStripVersionsFromReferences(false),
				classLoader, propertyResolver, resourcesByProcessKeyAndVersion);
	}
}
|
dongy6/type-inference
|
ICC/SOOT-Nightly/soot-github/src/soot/jimple/spark/pag/PAG.java
|
/* Soot - a J*va Optimization Framework
* Copyright (C) 2002 <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
package soot.jimple.spark.pag;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import soot.Context;
import soot.FastHierarchy;
import soot.G;
import soot.Kind;
import soot.Local;
import soot.PointsToAnalysis;
import soot.PointsToSet;
import soot.RefLikeType;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootField;
import soot.SootMethod;
import soot.Type;
import soot.Value;
import soot.jimple.AssignStmt;
import soot.jimple.ClassConstant;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.InvokeExpr;
import soot.jimple.NullConstant;
import soot.jimple.Stmt;
import soot.jimple.spark.builder.GlobalNodeFactory;
import soot.jimple.spark.builder.MethodNodeFactory;
import soot.jimple.spark.internal.TypeManager;
import soot.jimple.spark.sets.BitPointsToSet;
import soot.jimple.spark.sets.DoublePointsToSet;
import soot.jimple.spark.sets.EmptyPointsToSet;
import soot.jimple.spark.sets.HashPointsToSet;
import soot.jimple.spark.sets.HybridPointsToSet;
import soot.jimple.spark.sets.P2SetFactory;
import soot.jimple.spark.sets.P2SetVisitor;
import soot.jimple.spark.sets.PointsToSetInternal;
import soot.jimple.spark.sets.SharedHybridSet;
import soot.jimple.spark.sets.SharedListSet;
import soot.jimple.spark.sets.SortedArraySet;
import soot.jimple.spark.solver.OnFlyCallGraph;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.pointer.util.NativeMethodDriver;
import soot.options.SparkOptions;
import soot.tagkit.LinkTag;
import soot.tagkit.StringTag;
import soot.tagkit.Tag;
import soot.toolkits.scalar.Pair;
import soot.util.ArrayNumberer;
import soot.util.HashMultiMap;
import soot.util.LargeNumberedMap;
import soot.util.queue.ChunkedQueue;
import soot.util.queue.QueueReader;
/** Pointer assignment graph.
* @author <NAME>
*/
public class PAG implements PointsToAnalysis {
    /** Builds a pointer-assignment graph configured by the given Spark
     * options: selects the points-to-set implementation, sets up the type
     * manager, and optionally enables node tagging and the geometric PTA.
     * @param opts Spark command-line options driving this analysis */
    public PAG( final SparkOptions opts ) {
        this.opts = opts;
        if( opts.add_tags() ) {
            // Tags let output attribute graph nodes back to source methods.
            nodeToTag = new HashMap<Node, Tag>();
        }
        typeManager = new TypeManager(this);
        if( !opts.ignore_types() ) {
            typeManager.setFastHierarchy( Scene.v().getOrMakeFastHierarchy() );
        }
        // Choose the set implementation used for every points-to set.
        switch( opts.set_impl() ) {
            case SparkOptions.set_impl_hash:
                setFactory = HashPointsToSet.getFactory();
                break;
            case SparkOptions.set_impl_hybrid:
                setFactory = HybridPointsToSet.getFactory();
                break;
            case SparkOptions.set_impl_heintze:
                setFactory = SharedHybridSet.getFactory();
                break;
            case SparkOptions.set_impl_sharedlist:
                setFactory = SharedListSet.getFactory();
                break;
            case SparkOptions.set_impl_array:
                setFactory = SortedArraySet.getFactory();
                break;
            case SparkOptions.set_impl_bit:
                setFactory = BitPointsToSet.getFactory();
                break;
            case SparkOptions.set_impl_double:
                // Double sets keep separate "old" (propagated) and "new"
                // (pending) parts, each with its own implementation.
                P2SetFactory oldF;
                P2SetFactory newF;
                switch( opts.double_set_old() ) {
                    case SparkOptions.double_set_old_hash:
                        oldF = HashPointsToSet.getFactory();
                        break;
                    case SparkOptions.double_set_old_hybrid:
                        oldF = HybridPointsToSet.getFactory();
                        break;
                    case SparkOptions.double_set_old_heintze:
                        oldF = SharedHybridSet.getFactory();
                        break;
                    case SparkOptions.double_set_old_sharedlist:
                        oldF = SharedListSet.getFactory();
                        break;
                    case SparkOptions.double_set_old_array:
                        oldF = SortedArraySet.getFactory();
                        break;
                    case SparkOptions.double_set_old_bit:
                        oldF = BitPointsToSet.getFactory();
                        break;
                    default:
                        throw new RuntimeException();
                }
                switch( opts.double_set_new() ) {
                    case SparkOptions.double_set_new_hash:
                        newF = HashPointsToSet.getFactory();
                        break;
                    case SparkOptions.double_set_new_hybrid:
                        newF = HybridPointsToSet.getFactory();
                        break;
                    case SparkOptions.double_set_new_heintze:
                        newF = SharedHybridSet.getFactory();
                        break;
                    case SparkOptions.double_set_new_sharedlist:
                        newF = SharedListSet.getFactory();
                        break;
                    case SparkOptions.double_set_new_array:
                        newF = SortedArraySet.getFactory();
                        break;
                    case SparkOptions.double_set_new_bit:
                        newF = BitPointsToSet.getFactory();
                        break;
                    default:
                        throw new RuntimeException();
                }
                setFactory = DoublePointsToSet.getFactory( newF, oldF );
                break;
            default:
                throw new RuntimeException();
        }
        runGeomPTA = opts.geom_pta();
    }
/** Returns the set of objects pointed to by variable l. */
public PointsToSet reachingObjects( Local l ) {
VarNode n = findLocalVarNode( l );
if( n == null ) {
return EmptyPointsToSet.v();
}
return n.getP2Set();
}
/** Returns the set of objects pointed to by variable l in context c. */
public PointsToSet reachingObjects( Context c, Local l ) {
VarNode n = findContextVarNode( l, c );
if( n == null ) {
return EmptyPointsToSet.v();
}
return n.getP2Set();
}
/** Returns the set of objects pointed to by static field f. */
public PointsToSet reachingObjects( SootField f ) {
if( !f.isStatic() )
throw new RuntimeException( "The parameter f must be a *static* field." );
VarNode n = findGlobalVarNode( f );
if( n == null ) {
return EmptyPointsToSet.v();
}
return n.getP2Set();
}
/** Returns the set of objects pointed to by instance field f
* of the objects in the PointsToSet s. */
public PointsToSet reachingObjects( PointsToSet s, final SootField f ) {
if( f.isStatic() )
throw new RuntimeException( "The parameter f must be an *instance* field." );
return reachingObjectsInternal( s, f );
}
/** Returns the set of objects pointed to by elements of the arrays
* in the PointsToSet s. */
public PointsToSet reachingObjectsOfArrayElement( PointsToSet s ) {
return reachingObjectsInternal( s, ArrayElement.v() );
}
    /** Shared implementation for instance-field and array-element queries:
     * returns the union over every base object n in s of the points-to
     * set of n.f. */
    private PointsToSet reachingObjectsInternal( PointsToSet s, final SparkField f ) {
        if( getOpts().field_based() || getOpts().vta() ) {
            // Field-based/VTA: a single global node represents the field.
            VarNode n = findGlobalVarNode( f );
            if( n == null ) {
                return EmptyPointsToSet.v();
            }
            return n.getP2Set();
        }
        if( (getOpts()).propagator() == SparkOptions.propagator_alias ) {
            throw new RuntimeException( "The alias edge propagator does not compute points-to information for instance fields! Use a different propagator." );
        }
        PointsToSetInternal bases = (PointsToSetInternal) s;
        // ArrayElement is a SparkField but not a SootField, hence null type.
        final PointsToSetInternal ret = setFactory.newSet(
            (f instanceof SootField) ? ((SootField)f).getType() : null, this );
        // Union in the points-to set of <n, f> for each base object n.
        bases.forall( new P2SetVisitor() {
        public final void visit( Node n ) {
            Node nDotF = ((AllocNode) n).dot( f );
            if(nDotF != null) ret.addAll( nDotF.getP2Set(), null );
        }} );
        return ret;
    }
    /** Returns the factory used to create points-to sets in this graph. */
    public P2SetFactory getSetFactory() {
        return setFactory;
    }
    /** Re-canonicalizes every edge map after node merging: looking up each
     * key forces lookup() to replace merged nodes with their current
     * representatives in place. */
    public void cleanUpMerges() {
        if( opts.verbose() ) {
            G.v().out.println( "Cleaning up graph for merged nodes" );
        }
        Map[] maps = { simple, alloc, store, load,
            simpleInv, allocInv, storeInv, loadInv };
        for (Map<Object, Object> m : maps) {
            for (Object object : m.keySet()) {
                // Called purely for its side effect of compacting values.
                lookup( m, object );
            }
        }
        somethingMerged = false;
        if( opts.verbose() ) {
            G.v().out.println( "Done cleaning up graph for merged nodes" );
        }
    }
    /** Adds a simple (assignment) edge to the forward and inverse maps.
     * Non-short-circuiting | is deliberate: both maps must be updated. */
    public boolean doAddSimpleEdge( VarNode from, VarNode to ) {
        return addToMap( simple, from, to ) | addToMap( simpleInv, to, from );
    }
    /** Adds a field-store edge to the forward and inverse maps. */
    public boolean doAddStoreEdge( VarNode from, FieldRefNode to ) {
        return addToMap( store, from, to ) | addToMap( storeInv, to, from );
    }
    /** Adds a field-load edge to the forward and inverse maps. */
    public boolean doAddLoadEdge( FieldRefNode from, VarNode to ) {
        return addToMap( load, from, to ) | addToMap( loadInv, to, from );
    }
    /** Adds an allocation edge to the forward and inverse maps. */
    public boolean doAddAllocEdge( AllocNode from, VarNode to ) {
        return addToMap( alloc, from, to ) | addToMap( allocInv, to, from );
    }
    /** Node uses this to notify PAG that n2 has been merged into n1.
     * Folds n2's entry in every edge map into n1's, picking a merged
     * representation (Node[] array vs. HashSet) based on the sizes. */
    void mergedWith( Node n1, Node n2 ) {
        if( n1.equals( n2 ) ) throw new RuntimeException( "oops" );
        somethingMerged = true;
        if( ofcg() != null ) ofcg().mergedWith( n1, n2 );
        Map[] maps = { simple, alloc, store, load,
            simpleInv, allocInv, storeInv, loadInv };
        for (Map<Node, Object> m : maps) {
            if( !m.keySet().contains( n2 ) ) continue;
            // os[0] = n1's edges, os[1] = n2's edges (either may be
            // null, a HashSet, or a compacted Node[] array).
            Object[] os = { m.get( n1 ), m.get( n2 ) };
            int size1 = getSize(os[0]); int size2 = getSize(os[1]);
            if( size1 == 0 ) {
                // n1 had no edges: adopt n2's value wholesale.
                if( os[1] != null ) m.put( n1, os[1] );
            } else if( size2 == 0 ) {
                // nothing needed
            } else if( os[0] instanceof HashSet ) {
                if( os[1] instanceof HashSet ) {
                    ((HashSet) os[0]).addAll( (HashSet) os[1] );
                } else {
                    Node[] ar = (Node[]) os[1];
                    for (Node element0 : ar) {
                        ( (HashSet<Node>) os[0] ).add( element0 );
                    }
                }
            } else if( os[1] instanceof HashSet ) {
                // Pour n1's array into n2's set and reuse that set for n1.
                Node[] ar = (Node[]) os[0];
                for (Node element0 : ar) {
                    ((HashSet<Node>) os[1]).add( element0 );
                }
                m.put( n1, os[1] );
            } else if( size1*size2 < 1000 ) {
                // Two small arrays: merge with a quadratic duplicate scan.
                Node[] a1 = (Node[]) os[0];
                Node[] a2 = (Node[]) os[1];
                Node[] ret = new Node[size1+size2];
                System.arraycopy( a1, 0, ret, 0, a1.length );
                int j = a1.length;
                outer: for (Node rep : a2) {
                    for( int k = 0; k < j; k++ )
                        if( rep == ret[k] ) continue outer;
                    ret[j++] = rep;
                }
                Node[] newArray = new Node[j];
                System.arraycopy( ret, 0, newArray, 0, j );
                m.put( n1, ret = newArray );
            } else {
                // Two large arrays: deduplicate through a HashSet.
                HashSet<Node> s = new HashSet<Node>( size1+size2 );
                for (Object o : os) {
                    if( o == null ) continue;
                    if( o instanceof Set ) {
                        s.addAll( (Set) o );
                    } else {
                        Node[] ar = (Node[]) o;
                        for (Node element1 : ar) {
                            s.add( element1 );
                        }
                    }
                }
                m.put( n1, s );
            }
            m.remove( n2 );
        }
    }
    // Shared empty array returned by lookup() for keys with no edges.
    protected final static Node[] EMPTY_NODE_ARRAY = new Node[0];
    /** Returns the out-edges of key in map m as a Node[] array. Set values
     * are compacted to arrays on first lookup; if nodes were merged since,
     * stale entries are rewritten to their representatives (dropping
     * self-edges back to key) and the map is updated in place. */
    protected Node[] lookup( Map<Object, Object> m, Object key ) {
        Object valueList = m.get( key );
        if( valueList == null ) {
            return EMPTY_NODE_ARRAY;
        }
        if( valueList instanceof Set ) {
            try {
                m.put( key, valueList =
                    ( (Set) valueList ).toArray( EMPTY_NODE_ARRAY ) );
            } catch( Exception e ) {
                // Diagnostic dump before failing: print the set contents.
                for( Iterator it = ((Set)valueList).iterator(); it.hasNext(); ) {
                    G.v().out.println( ""+it.next() );
                }
                throw new RuntimeException( ""+valueList+e );
            }
        }
        Node[] ret = (Node[]) valueList;
        if( somethingMerged ) {
            for( int i = 0; i < ret.length; i++ ) {
                Node reti = ret[i];
                Node rep = reti.getReplacement();
                // Stale entry: either merged away or now a self-edge.
                if( rep != reti || rep == key ) {
                    Set<Node> s;
                    if( ret.length <= 75 ) {
                        // Small array: dedupe in place with a nested scan.
                        int j = i;
                        outer: for( ; i < ret.length; i++ ) {
                            reti = ret[i];
                            rep = reti.getReplacement();
                            if( rep == key ) continue;
                            for( int k = 0; k < j; k++ )
                                if( rep == ret[k] ) continue outer;
                            ret[j++] = rep;
                        }
                        Node[] newArray = new Node[j];
                        System.arraycopy( ret, 0, newArray, 0, j );
                        m.put( key, ret = newArray );
                    } else {
                        // Large array: dedupe through a HashSet instead.
                        s = new HashSet<Node>( ret.length * 2 );
                        for( int j = 0; j < i; j++ ) s.add( ret[j] );
                        for( int j = i; j < ret.length; j++ ) {
                            rep = ret[j].getReplacement();
                            if( rep != key ) {
                                s.add( rep );
                            }
                        }
                        m.put( key, ret = s.toArray( EMPTY_NODE_ARRAY ) );
                    }
                    break;
                }
            }
        }
        return ret;
    }
    // Typed convenience wrappers over lookup() for each edge relation
    // (forward and inverse directions).
    public Node[] simpleLookup( VarNode key )
    { return lookup( simple, key ); }
    public Node[] simpleInvLookup( VarNode key )
    { return lookup( simpleInv, key ); }
    public Node[] loadLookup( FieldRefNode key )
    { return lookup( load, key ); }
    public Node[] loadInvLookup( VarNode key )
    { return lookup( loadInv, key ); }
    public Node[] storeLookup( VarNode key )
    { return lookup( store, key ); }
    public Node[] storeInvLookup( FieldRefNode key )
    { return lookup( storeInv, key ); }
    public Node[] allocLookup( AllocNode key )
    { return lookup( alloc, key ); }
    public Node[] allocInvLookup( VarNode key )
    { return lookup( allocInv, key ); }
    // Source-node key sets (and iterator variants) for each edge relation.
    public Set<Object> simpleSources() { return simple.keySet(); }
    public Set<Object> allocSources() { return alloc.keySet(); }
    public Set<Object> storeSources() { return store.keySet(); }
    public Set<Object> loadSources() { return load.keySet(); }
    public Set<Object> simpleInvSources() { return simpleInv.keySet(); }
    public Set<Object> allocInvSources() { return allocInv.keySet(); }
    public Set<Object> storeInvSources() { return storeInv.keySet(); }
    public Set<Object> loadInvSources() { return loadInv.keySet(); }
    public Iterator<Object> simpleSourcesIterator() { return simple.keySet().iterator(); }
    public Iterator<Object> allocSourcesIterator() { return alloc.keySet().iterator(); }
    public Iterator<Object> storeSourcesIterator() { return store.keySet().iterator(); }
    public Iterator<Object> loadSourcesIterator() { return load.keySet().iterator(); }
    public Iterator<Object> simpleInvSourcesIterator() { return simpleInv.keySet().iterator(); }
    public Iterator<Object> allocInvSourcesIterator() { return allocInv.keySet().iterator(); }
    public Iterator<Object> storeInvSourcesIterator() { return storeInv.keySet().iterator(); }
    public Iterator<Object> loadInvSourcesIterator() { return loadInv.keySet().iterator(); }
static private int getSize( Object set ) {
if( set instanceof Set ) return ((Set) set).size();
else if( set == null ) return 0;
else return ((Object[]) set).length;
}
    // Factory producing all points-to sets; chosen in the constructor.
    protected P2SetFactory setFactory;
    // True once any nodes have been merged since the last map cleanup.
    protected boolean somethingMerged = false;
    /** Returns the set of objects pointed to by instance field f
     * of the objects pointed to by l. */
    public PointsToSet reachingObjects( Local l, SootField f ) {
        // Two-step query: objects of l, then their f fields.
        return reachingObjects( reachingObjects(l), f );
    }
    /** Returns the set of objects pointed to by instance field f
     * of the objects pointed to by l in context c. */
    public PointsToSet reachingObjects( Context c, Local l, SootField f ) {
        return reachingObjects( reachingObjects(c, l), f );
    }
private void addNodeTag( Node node, SootMethod m ) {
if( nodeToTag != null ) {
Tag tag;
if( m == null ) {
tag = new StringTag( node.toString() );
} else {
tag = new LinkTag( node.toString(), m, m.getDeclaringClass().getName() );
}
nodeToTag.put( node, tag );
}
}
public AllocNode makeAllocNode( Object newExpr, Type type, SootMethod m ) {
if( opts.types_for_sites() || opts.vta() ) newExpr = type;
AllocNode ret = valToAllocNode.get( newExpr );
if( ret == null ) {
valToAllocNode.put( newExpr, ret = new AllocNode( this, newExpr, type, m ) );
newAllocNodes.add( ret );
addNodeTag( ret, m );
} else if( !( ret.getType().equals( type ) ) ) {
throw new RuntimeException( "NewExpr "+newExpr+" of type "+type+
" previously had type "+ret.getType() );
}
return ret;
}
public AllocNode makeStringConstantNode( String s ) {
if( opts.types_for_sites() || opts.vta() )
return makeAllocNode( RefType.v( "java.lang.String" ),
RefType.v( "java.lang.String" ), null );
StringConstantNode ret = (StringConstantNode) valToAllocNode.get( s );
if( ret == null ) {
valToAllocNode.put( s, ret = new StringConstantNode( this, s ) );
newAllocNodes.add( ret );
addNodeTag( ret, null );
}
return ret;
}
public AllocNode makeClassConstantNode( ClassConstant cc ) {
if( opts.types_for_sites() || opts.vta() )
return makeAllocNode( RefType.v( "java.lang.Class" ),
RefType.v( "java.lang.Class" ), null );
ClassConstantNode ret = (ClassConstantNode) valToAllocNode.get(cc);
if( ret == null ) {
valToAllocNode.put(cc, ret = new ClassConstantNode(this, cc));
newAllocNodes.add( ret );
addNodeTag( ret, null );
}
return ret;
}
    // Queue of freshly created AllocNodes, consumed via reader handles.
    ChunkedQueue newAllocNodes = new ChunkedQueue();
    /** Returns a reader delivering every AllocNode created from now on. */
    public QueueReader allocNodeListener() { return newAllocNodes.reader(); }
/** Finds the GlobalVarNode for the variable value, or returns null. */
public GlobalVarNode findGlobalVarNode( Object value ) {
if( opts.rta() ) {
value = null;
}
return valToGlobalVarNode.get( value );
}
/** Finds the LocalVarNode for the variable value, or returns null. */
public LocalVarNode findLocalVarNode( Object value ) {
if( opts.rta() ) {
value = null;
} else if( value instanceof Local ) {
return (LocalVarNode) localToNodeMap.get( (Local) value );
}
return valToLocalVarNode.get( value );
}
/** Finds or creates the GlobalVarNode for the variable value, of type type. */
public GlobalVarNode makeGlobalVarNode( Object value, Type type ) {
if( opts.rta() ) {
value = null;
type = RefType.v("java.lang.Object");
}
GlobalVarNode ret = valToGlobalVarNode.get( value );
if( ret == null ) {
valToGlobalVarNode.put( value,
ret = new GlobalVarNode( this, value, type ) );
addNodeTag( ret, null );
} else if( !( ret.getType().equals( type ) ) ) {
throw new RuntimeException( "Value "+value+" of type "+type+
" previously had type "+ret.getType() );
}
return ret;
}
    /** Finds or creates the LocalVarNode for the variable value, of type type.
     * Locals use a dedicated numbered map; under RTA everything collapses
     * into a single node of type java.lang.Object.
     * @throws RuntimeException if value was seen with another type */
    public LocalVarNode makeLocalVarNode( Object value, Type type, SootMethod method ) {
        if( opts.rta() ) {
            value = null;
            type = RefType.v("java.lang.Object");
            method = null;
        } else if( value instanceof Local ) {
            Local val = (Local) value;
            // Register the local with the scene-wide numberer if needed.
            if( val.getNumber() == 0 ) Scene.v().getLocalNumberer().add(val);
            LocalVarNode ret = (LocalVarNode) localToNodeMap.get( val );
            if( ret == null ) {
                localToNodeMap.put( (Local) value,
                    ret = new LocalVarNode( this, value, type, method ) );
                addNodeTag( ret, method );
            } else if( !( ret.getType().equals( type ) ) ) {
                throw new RuntimeException( "Value "+value+" of type "+type+
                    " previously had type "+ret.getType() );
            }
            return ret;
        }
        LocalVarNode ret = valToLocalVarNode.get( value );
        if( ret == null ) {
            valToLocalVarNode.put( value,
                ret = new LocalVarNode( this, value, type, method ) );
            addNodeTag( ret, method );
        } else if( !( ret.getType().equals( type ) ) ) {
            throw new RuntimeException( "Value "+value+" of type "+type+
                " previously had type "+ret.getType() );
        }
        return ret;
    }
/** Finds the ContextVarNode for base variable value and context
* context, or returns null. */
public ContextVarNode findContextVarNode( Object baseValue, Context context ) {
LocalVarNode base = findLocalVarNode( baseValue );
if( base == null ) return null;
return base.context( context );
}
/** Finds or creates the ContextVarNode for base variable baseValue and context
* context, of type type. */
public ContextVarNode makeContextVarNode( Object baseValue, Type baseType,
Context context, SootMethod method ) {
LocalVarNode base = makeLocalVarNode( baseValue, baseType, method );
return makeContextVarNode( base, context );
}
/** Finds or creates the ContextVarNode for base variable base and context
* context, of type type. */
public ContextVarNode makeContextVarNode( LocalVarNode base, Context context ) {
ContextVarNode ret = base.context( context );
if( ret == null ) {
ret = new ContextVarNode( this, base, context );
addNodeTag( ret, base.getMethod() );
}
return ret;
}
/** Finds the FieldRefNode for base variable value and field
* field, or returns null. */
public FieldRefNode findLocalFieldRefNode( Object baseValue, SparkField field ) {
VarNode base = findLocalVarNode( baseValue );
if( base == null ) return null;
return base.dot( field );
}
/** Finds the FieldRefNode for base variable value and field
* field, or returns null. */
public FieldRefNode findGlobalFieldRefNode( Object baseValue, SparkField field ) {
VarNode base = findGlobalVarNode( baseValue );
if( base == null ) return null;
return base.dot( field );
}
    /** Finds or creates the FieldRefNode for base variable baseValue and field
     * field, of type type. */
    public FieldRefNode makeLocalFieldRefNode( Object baseValue, Type baseType,
        SparkField field, SootMethod method ) {
        // Build the local base node first, then attach the field ref.
        VarNode base = makeLocalVarNode( baseValue, baseType, method );
        return makeFieldRefNode( base, field );
    }
    /** Finds or creates the FieldRefNode for base variable baseValue and field
     * field, of type type. */
    public FieldRefNode makeGlobalFieldRefNode( Object baseValue, Type baseType,
        SparkField field ) {
        VarNode base = makeGlobalVarNode( baseValue, baseType );
        return makeFieldRefNode( base, field );
    }
/** Finds or creates the FieldRefNode for base variable base and field
* field, of type type. */
public FieldRefNode makeFieldRefNode( VarNode base, SparkField field ) {
FieldRefNode ret = base.dot( field );
if( ret == null ) {
ret = new FieldRefNode( this, base, field );
if( base instanceof LocalVarNode ) {
addNodeTag( ret, ((LocalVarNode) base).getMethod() );
} else {
addNodeTag( ret, null );
}
}
return ret;
}
    /** Finds the AllocDotField for base AllocNode an and field
     * field, or returns null. */
    public AllocDotField findAllocDotField( AllocNode an, SparkField field ) {
        return an.dot( field );
    }
/** Finds or creates the AllocDotField for base variable baseValue and field
* field, of type t. */
public AllocDotField makeAllocDotField( AllocNode an, SparkField field ) {
AllocDotField ret = an.dot( field );
if( ret == null ) {
ret = new AllocDotField( this, an, field );
}
return ret;
}
public boolean addSimpleEdge( VarNode from, VarNode to ) {
boolean ret = false;
if( doAddSimpleEdge( from, to ) ) {
edgeQueue.add( from );
edgeQueue.add( to );
ret = true;
}
if( opts.simple_edges_bidirectional() ) {
if( doAddSimpleEdge( to, from ) ) {
edgeQueue.add( to );
edgeQueue.add( from );
ret = true;
}
}
return ret;
}
public boolean addStoreEdge( VarNode from, FieldRefNode to ) {
if( !opts.rta() ) {
if( doAddStoreEdge( from, to ) ) {
edgeQueue.add( from );
edgeQueue.add( to );
return true;
}
}
return false;
}
public boolean addLoadEdge( FieldRefNode from, VarNode to ) {
if( !opts.rta() ) {
if( doAddLoadEdge( from, to ) ) {
edgeQueue.add( from );
edgeQueue.add( to );
return true;
}
}
return false;
}
public boolean addAllocEdge( AllocNode from, VarNode to ) {
FastHierarchy fh = typeManager.getFastHierarchy();
if( fh == null || to.getType() == null
|| fh.canStoreType( from.getType(), to.getType() ) ) {
if( doAddAllocEdge( from, to ) ) {
edgeQueue.add( from );
edgeQueue.add( to );
return true;
}
}
return false;
}
/** Adds an edge to the graph, returning false if it was already there. */
public final boolean addEdge( Node from, Node to ) {
from = from.getReplacement();
to = to.getReplacement();
if( from instanceof VarNode ) {
if( to instanceof VarNode ) {
return addSimpleEdge( (VarNode) from, (VarNode) to );
} else {
return addStoreEdge( (VarNode) from, (FieldRefNode) to );
}
} else if( from instanceof FieldRefNode ) {
return addLoadEdge( (FieldRefNode) from, (VarNode) to );
} else {
return addAllocEdge( (AllocNode) from, (VarNode) to );
}
}
    // Queue of nodes whose edge sets changed; solvers read incrementally.
    protected ChunkedQueue edgeQueue = new ChunkedQueue();
    /** Returns a reader delivering nodes with newly added edges. */
    public QueueReader edgeReader() { return edgeQueue.reader(); }
    /** Returns the number of allocation nodes created so far. */
    public int getNumAllocNodes() {
        return allocNodeNumberer.size();
    }
    /** Returns the type manager used for type filtering in this graph. */
    public TypeManager getTypeManager() {
        return typeManager;
    }
    /** Installs the on-the-fly call graph used during propagation. */
    public void setOnFlyCallGraph( OnFlyCallGraph ofcg ) { this.ofcg = ofcg; }
    /** Returns the on-the-fly call graph, or null if none installed. */
    public OnFlyCallGraph getOnFlyCallGraph() { return ofcg; }
    /** Shorthand accessor for the on-the-fly call graph. */
    public OnFlyCallGraph ofcg() { return ofcg; }
    /** Adds the base of a dereference to the list of dereferenced
     * variables. */
    public void addDereference( VarNode base ) {
        dereferences.add( base );
    }
    /** Returns the list of dereferenced variables. */
    public List<VarNode> getDereferences() {
        return dereferences;
    }
    /** Returns the node-to-tag map, or null when tagging is disabled. */
    public Map<Node, Tag> getNodeTags() {
        return nodeToTag;
    }
    // Numberers assigning dense integer ids to each node category.
    private final ArrayNumberer allocNodeNumberer = new ArrayNumberer();
    /** Returns the numberer for AllocNodes. */
    public ArrayNumberer getAllocNodeNumberer() { return allocNodeNumberer; }
    private final ArrayNumberer varNodeNumberer = new ArrayNumberer();
    /** Returns the numberer for VarNodes. */
    public ArrayNumberer getVarNodeNumberer() { return varNodeNumberer; }
    private final ArrayNumberer fieldRefNodeNumberer = new ArrayNumberer();
    /** Returns the numberer for FieldRefNodes. */
    public ArrayNumberer getFieldRefNodeNumberer() { return fieldRefNodeNumberer; }
    private final ArrayNumberer allocDotFieldNodeNumberer = new ArrayNumberer();
    /** Returns the numberer for AllocDotField nodes. */
    public ArrayNumberer getAllocDotFieldNodeNumberer() { return allocDotFieldNodeNumberer; }
    /** Returns SparkOptions for this graph. */
    public SparkOptions getOpts() { return opts; }
// Must be simple edges
public Pair<Node, Node> addInterproceduralAssignment(Node from, Node to, Edge e)
{
Set<Edge> sets;
Pair<Node, Node> val = new Pair<Node, Node>(from, to);
if ( runGeomPTA ) {
sets = assign2edges.get(val);
if ( sets == null ) {
sets = new HashSet<Edge>();
assign2edges.put(val, sets);
}
sets.add(e);
}
return val;
}
    /** Returns the call edges recorded for the inter-procedural assignment
     * val, or null (populated only when the geometric PTA is enabled). */
    public Set<Edge> lookupEdgesForAssignment(Pair<Node, Node> val)
    {
        return assign2edges.get(val);
    }
    /** Wires the parameter/return flow for one call-graph edge e.
     * Explicit, thread and async-task edges go through the generic
     * overload; the remaining kinds (privileged actions, finalizers,
     * reflective instantiation and invocation) get special-case wiring.
     * @throws RuntimeException for an unrecognized edge kind */
    final public void addCallTarget( Edge e ) {
        // Edges that pass no parameters need no data-flow wiring.
        if( !e.passesParameters() ) return;
        MethodPAG srcmpag = MethodPAG.v( this, e.src() );
        MethodPAG tgtmpag = MethodPAG.v( this, e.tgt() );
        Pair<Node, Node> pval;
        if( e.isExplicit() || e.kind() == Kind.THREAD || e.kind() == Kind.ASYNCTASK ) {
            addCallTarget( srcmpag, tgtmpag, (Stmt) e.srcUnit(),
                e.srcCtxt(), e.tgtCtxt(), e );
        } else {
            if( e.kind() == Kind.PRIVILEGED ) {
                // Flow from first parameter of doPrivileged() invocation
                // to this of target, and from return of target to the
                // return of doPrivileged()
                InvokeExpr ie = e.srcStmt().getInvokeExpr();
                Node parm = srcmpag.nodeFactory().getNode( ie.getArg(0) );
                parm = srcmpag.parameterize( parm, e.srcCtxt() );
                parm = parm.getReplacement();
                Node thiz = tgtmpag.nodeFactory().caseThis();
                thiz = tgtmpag.parameterize( thiz, e.tgtCtxt() );
                thiz = thiz.getReplacement();
                addEdge( parm, thiz );
                pval = addInterproceduralAssignment(parm, thiz, e);
                callAssigns.put(ie, pval);
                callToMethod.put(ie, srcmpag.getMethod());
                if( e.srcUnit() instanceof AssignStmt ) {
                    AssignStmt as = (AssignStmt) e.srcUnit();
                    Node ret = tgtmpag.nodeFactory().caseRet();
                    ret = tgtmpag.parameterize( ret, e.tgtCtxt() );
                    ret = ret.getReplacement();
                    Node lhs = srcmpag.nodeFactory().getNode(as.getLeftOp());
                    lhs = srcmpag.parameterize( lhs, e.srcCtxt() );
                    lhs = lhs.getReplacement();
                    addEdge( ret, lhs );
                    pval = addInterproceduralAssignment(ret, lhs, e);
                    callAssigns.put(ie, pval);
                    callToMethod.put(ie, srcmpag.getMethod());
                }
            } else if( e.kind() == Kind.FINALIZE ) {
                // Finalizer: flow the receiver of the source into the
                // receiver ("this") of the finalize() target.
                Node srcThis = srcmpag.nodeFactory().caseThis();
                srcThis = srcmpag.parameterize( srcThis, e.srcCtxt() );
                srcThis = srcThis.getReplacement();
                Node tgtThis = tgtmpag.nodeFactory().caseThis();
                tgtThis = tgtmpag.parameterize( tgtThis, e.tgtCtxt() );
                tgtThis = tgtThis.getReplacement();
                addEdge( srcThis, tgtThis );
                pval = addInterproceduralAssignment(srcThis, tgtThis, e);
            } else if( e.kind() == Kind.NEWINSTANCE ) {
                // newInstance(): fabricate the new object, wire it into
                // the constructor's "this" and the LHS if one exists.
                Stmt s = (Stmt) e.srcUnit();
                InstanceInvokeExpr iie = (InstanceInvokeExpr) s.getInvokeExpr();
                Node cls = srcmpag.nodeFactory().getNode( iie.getBase() );
                cls = srcmpag.parameterize( cls, e.srcCtxt() );
                cls = cls.getReplacement();
                Node newObject = nodeFactory.caseNewInstance( (VarNode) cls );
                Node initThis = tgtmpag.nodeFactory().caseThis();
                initThis = tgtmpag.parameterize( initThis, e.tgtCtxt() );
                initThis = initThis.getReplacement();
                addEdge( newObject, initThis );
                if (s instanceof AssignStmt) {
                    AssignStmt as = (AssignStmt)s;
                    Node asLHS = srcmpag.nodeFactory().getNode(as.getLeftOp());
                    asLHS = srcmpag.parameterize( asLHS, e.srcCtxt());
                    asLHS = asLHS.getReplacement();
                    addEdge( newObject, asLHS);
                }
                pval = addInterproceduralAssignment(newObject, initThis, e);
                callAssigns.put(s.getInvokeExpr(), pval);
                callToMethod.put(s.getInvokeExpr(), srcmpag.getMethod());
            } else if( e.kind() == Kind.REFL_INVOKE ) {
                // Flow (1) from first parameter of invoke(..) invocation
                // to this of target, (2) from the contents of the second (array) parameter
                // to all parameters of the target, and (3) from return of target to the
                // return of invoke(..)
                //(1)
                InvokeExpr ie = e.srcStmt().getInvokeExpr();
                Value arg0 = ie.getArg(0);
                //if "null" is passed in, omit the edge
                if(arg0!=NullConstant.v()) {
                    Node parm0 = srcmpag.nodeFactory().getNode( arg0 );
                    parm0 = srcmpag.parameterize( parm0, e.srcCtxt() );
                    parm0 = parm0.getReplacement();
                    Node thiz = tgtmpag.nodeFactory().caseThis();
                    thiz = tgtmpag.parameterize( thiz, e.tgtCtxt() );
                    thiz = thiz.getReplacement();
                    addEdge( parm0, thiz );
                    pval = addInterproceduralAssignment(parm0, thiz, e);
                    callAssigns.put(ie, pval);
                    callToMethod.put(ie, srcmpag.getMethod());
                }
                //(2)
                Value arg1 = ie.getArg(1);
                SootMethod tgt = e.getTgt().method();
                //if "null" is passed in, or target has no parameters, omit the edge
                if(arg1!=NullConstant.v() && tgt.getParameterCount()>0) {
                    Node parm1 = srcmpag.nodeFactory().getNode( arg1 );
                    parm1 = srcmpag.parameterize( parm1, e.srcCtxt() );
                    parm1 = parm1.getReplacement();
                    FieldRefNode parm1contents = makeFieldRefNode( (VarNode) parm1, ArrayElement.v() );
                    for(int i=0;i<tgt.getParameterCount(); i++) {
                        //if no reference type, create no edge
                        if(!(tgt.getParameterType(i) instanceof RefLikeType)) continue;
                        Node tgtParmI = tgtmpag.nodeFactory().caseParm( i );
                        tgtParmI = tgtmpag.parameterize( tgtParmI, e.tgtCtxt() );
                        tgtParmI = tgtParmI.getReplacement();
                        addEdge( parm1contents, tgtParmI );
                        pval = addInterproceduralAssignment(parm1contents, tgtParmI, e);
                        callAssigns.put(ie, pval);
                    }
                }
                //(3)
                //only create return edge if we are actually assigning the return value and
                //the return type of the callee is actually a reference type
                if( e.srcUnit() instanceof AssignStmt && (tgt.getReturnType() instanceof RefLikeType)) {
                    AssignStmt as = (AssignStmt) e.srcUnit();
                    Node ret = tgtmpag.nodeFactory().caseRet();
                    ret = tgtmpag.parameterize( ret, e.tgtCtxt() );
                    ret = ret.getReplacement();
                    Node lhs = srcmpag.nodeFactory().getNode(as.getLeftOp());
                    lhs = srcmpag.parameterize( lhs, e.srcCtxt() );
                    lhs = lhs.getReplacement();
                    addEdge( ret, lhs );
                    pval = addInterproceduralAssignment(ret, lhs, e);
                    callAssigns.put(ie, pval);
                }
            } else if( e.kind() == Kind.REFL_CLASS_NEWINSTANCE || e.kind() == Kind.REFL_CONSTR_NEWINSTANCE) {
                // (1) create a fresh node for the new object
                // (2) create edge from this object to "this" of the constructor
                // (3) if this is a call to Constructor.newInstance and not Class.newInstance,
                //     create edges passing the contents of the arguments array of the call
                //     to all possible parameters of the target
                // (4) if we are inside an assign statement,
                //     assign the fresh object from (1) to the LHS of the assign statement
                Stmt s = (Stmt) e.srcUnit();
                InstanceInvokeExpr iie = (InstanceInvokeExpr) s.getInvokeExpr();
                //(1)
                Node cls = srcmpag.nodeFactory().getNode( iie.getBase() );
                cls = srcmpag.parameterize( cls, e.srcCtxt() );
                cls = cls.getReplacement();
                if( cls instanceof ContextVarNode ) cls = findLocalVarNode( ((VarNode)cls).getVariable() );
                VarNode newObject = makeGlobalVarNode( cls, RefType.v( "java.lang.Object" ) );
                SootClass tgtClass = e.getTgt().method().getDeclaringClass();
                RefType tgtType = tgtClass.getType();
                AllocNode site = makeAllocNode( new Pair(cls, tgtClass), tgtType, null );
                addEdge( site, newObject );
                //(2)
                Node initThis = tgtmpag.nodeFactory().caseThis();
                initThis = tgtmpag.parameterize( initThis, e.tgtCtxt() );
                initThis = initThis.getReplacement();
                addEdge( newObject, initThis );
                addInterproceduralAssignment(newObject, initThis, e);
                //(3)
                if(e.kind() == Kind.REFL_CONSTR_NEWINSTANCE) {
                    Value arg = iie.getArg(0);
                    SootMethod tgt = e.getTgt().method();
                    //if "null" is passed in, or target has no parameters, omit the edge
                    if(arg!=NullConstant.v() && tgt.getParameterCount()>0) {
                        Node parm0 = srcmpag.nodeFactory().getNode( arg );
                        parm0 = srcmpag.parameterize( parm0, e.srcCtxt() );
                        parm0 = parm0.getReplacement();
                        FieldRefNode parm1contents = makeFieldRefNode( (VarNode) parm0, ArrayElement.v() );
                        for(int i=0;i<tgt.getParameterCount(); i++) {
                            //if no reference type, create no edge
                            if(!(tgt.getParameterType(i) instanceof RefLikeType)) continue;
                            Node tgtParmI = tgtmpag.nodeFactory().caseParm( i );
                            tgtParmI = tgtmpag.parameterize( tgtParmI, e.tgtCtxt() );
                            tgtParmI = tgtParmI.getReplacement();
                            addEdge( parm1contents, tgtParmI );
                            pval = addInterproceduralAssignment(parm1contents, tgtParmI, e);
                            callAssigns.put(iie, pval);
                        }
                    }
                }
                //(4)
                if (s instanceof AssignStmt) {
                    AssignStmt as = (AssignStmt)s;
                    Node asLHS = srcmpag.nodeFactory().getNode(as.getLeftOp());
                    asLHS = srcmpag.parameterize( asLHS, e.srcCtxt());
                    asLHS = asLHS.getReplacement();
                    addEdge( newObject, asLHS);
                }
                pval = addInterproceduralAssignment(newObject, initThis, e);
                callAssigns.put(s.getInvokeExpr(), pval);
                callToMethod.put(s.getInvokeExpr(), srcmpag.getMethod());
            } else {
                throw new RuntimeException( "Unhandled edge "+e );
            }
        }
    }
    /** Adds method target as a possible target of the invoke expression in s.
     * If target is null, only creates the nodes for the call site,
     * without actually connecting them to any target method.
     * Wires argument->parameter, receiver->this, and return->LHS flows.
     **/
    final public void addCallTarget( MethodPAG srcmpag,
        MethodPAG tgtmpag,
        Stmt s,
        Context srcContext,
        Context tgtContext,
        Edge e ) {
        MethodNodeFactory srcnf = srcmpag.nodeFactory();
        MethodNodeFactory tgtnf = tgtmpag.nodeFactory();
        InvokeExpr ie = s.getInvokeExpr();
        // A previously recorded assignment for this site marks it virtual.
        boolean virtualCall = callAssigns.containsKey(ie);
        int numArgs = ie.getArgCount();
        for( int i = 0; i < numArgs; i++ ) {
            Value arg = ie.getArg( i );
            // Only reference-typed, non-null arguments carry points-to flow.
            if( !( arg.getType() instanceof RefLikeType ) ) continue;
            if( arg instanceof NullConstant ) continue;
            Node argNode = srcnf.getNode( arg );
            argNode = srcmpag.parameterize( argNode, srcContext );
            argNode = argNode.getReplacement();
            Node parm = tgtnf.caseParm( i );
            parm = tgtmpag.parameterize( parm, tgtContext );
            parm = parm.getReplacement();
            addEdge( argNode, parm );
            Pair pval = addInterproceduralAssignment(argNode, parm, e);
            callAssigns.put(ie, pval);
            callToMethod.put(ie, srcmpag.getMethod());
        }
        if( ie instanceof InstanceInvokeExpr ) {
            // Receiver flows into the callee's "this" node.
            InstanceInvokeExpr iie = (InstanceInvokeExpr) ie;
            Node baseNode = srcnf.getNode( iie.getBase() );
            baseNode = srcmpag.parameterize( baseNode, srcContext );
            baseNode = baseNode.getReplacement();
            Node thisRef = tgtnf.caseThis();
            thisRef = tgtmpag.parameterize( thisRef, tgtContext );
            thisRef = thisRef.getReplacement();
            addEdge( baseNode, thisRef );
            Pair pval = addInterproceduralAssignment(baseNode, thisRef, e);
            callAssigns.put(ie, pval);
            callToMethod.put(ie, srcmpag.getMethod());
            if (virtualCall && !virtualCallsToReceivers.containsKey(ie)) {
                virtualCallsToReceivers.put(ie, baseNode);
            }
        }
        if( s instanceof AssignStmt ) {
            // Callee return value flows into the call's LHS (if ref-typed).
            Value dest = ( (AssignStmt) s ).getLeftOp();
            if( dest.getType() instanceof RefLikeType && !(dest instanceof NullConstant) ) {
                Node destNode = srcnf.getNode( dest );
                destNode = srcmpag.parameterize( destNode, srcContext );
                destNode = destNode.getReplacement();
                Node retNode = tgtnf.caseRet();
                retNode = tgtmpag.parameterize( retNode, tgtContext );
                retNode = retNode.getReplacement();
                addEdge( retNode, destNode );
                Pair pval = addInterproceduralAssignment( retNode, destNode, e );
                callAssigns.put(ie, pval);
                callToMethod.put(ie, srcmpag.getMethod());
            }
        }
    }
    /**
     * Delete all the assignment edges: clears the four forward edge maps
     * (simple, load, store, alloc) and their inverse counterparts.
     * The graph nodes themselves are left intact.
     */
    public void cleanPAG()
    {
        simple.clear();
        load.clear();
        store.clear();
        alloc.clear();
        simpleInv.clear();
        loadInv.clear();
        storeInv.clear();
        allocInv.clear();
    }
/* End of package methods. */
    protected SparkOptions opts;
    // Forward edge maps of the pointer-assignment graph, keyed by source node.
    // Values are either a Node[] (small sets) or a HashSet of nodes; see addToMap.
    protected Map<Object, Object> simple = new HashMap<Object, Object>();
    protected Map<Object, Object> load = new HashMap<Object, Object>();
    protected Map<Object, Object> store = new HashMap<Object, Object>();
    protected Map<Object, Object> alloc = new HashMap<Object, Object>();
    // Inverse edge maps, keyed by target node (same value representation).
    protected Map<Object, Object> simpleInv = new HashMap<Object, Object>();
    protected Map<Object, Object> loadInv = new HashMap<Object, Object>();
    protected Map<Object, Object> storeInv = new HashMap<Object, Object>();
    protected Map<Object, Object> allocInv = new HashMap<Object, Object>();
protected boolean addToMap( Map<Object, Object> m, Node key, Node value ) {
Object valueList = m.get( key );
if( valueList == null ) {
m.put( key, valueList = new HashSet(4) );
} else if( !(valueList instanceof Set) ) {
Node[] ar = (Node[]) valueList;
HashSet<Node> vl = new HashSet<Node>(ar.length+4);
m.put( key, vl );
for (Node element : ar)
vl.add( element );
return vl.add( value );
/*
Node[] ar = (Node[]) valueList;
Node[] newar = new Node[ar.length+1];
for( int i = 0; i < ar.length; i++ ) {
Node n = ar[i];
if( n == value ) return false;
newar[i] = n;
}
newar[ar.length] = value;
m.put( key, newar );
return true;
*/
}
return ((Set<Node>) valueList).add( value );
}
    // Whether the geometric points-to analysis post-pass is enabled.
    private boolean runGeomPTA = false;
    // Maps an inter-procedural assignment (src,dst pair) to the call edges
    // that induced it; populated by addInterproceduralAssignment.
    protected Map<Pair, Set<Edge>> assign2edges = new HashMap<Pair, Set<Edge>>();
    // Caches mapping Jimple values to their graph-node representatives.
    private final Map<Object, LocalVarNode> valToLocalVarNode = new HashMap<Object, LocalVarNode>(1000);
    private final Map<Object, GlobalVarNode> valToGlobalVarNode = new HashMap<Object, GlobalVarNode>(1000);
    private final Map<Object, AllocNode> valToAllocNode = new HashMap<Object, AllocNode>(1000);
    private OnFlyCallGraph ofcg;
    // Var nodes that appear as the base of a field dereference.
    private final ArrayList<VarNode> dereferences = new ArrayList<VarNode>();
    protected TypeManager typeManager;
    private final LargeNumberedMap localToNodeMap = new LargeNumberedMap( Scene.v().getLocalNumberer() );
    public int maxFinishNumber = 0;
    private Map<Node, Tag> nodeToTag;
    private final GlobalNodeFactory nodeFactory = new GlobalNodeFactory(this);
    // Accessor for the factory that builds nodes with global (whole-program) scope.
    public GlobalNodeFactory nodeFactory() { return nodeFactory; }
    public NativeMethodDriver nativeMethodDriver;
    // Records, per call site, the argument/return assignment pairs created for it.
    public HashMultiMap /* InvokeExpr -> Set[Pair] */ callAssigns = new HashMultiMap();
    public Map<InvokeExpr, SootMethod> callToMethod = new HashMap<InvokeExpr, SootMethod>();
    public Map<InvokeExpr, Node> virtualCallsToReceivers = new HashMap<InvokeExpr, Node>();
}
|
shinshin86/django
|
tests/template_tests/filter_tests/test_truncatewords.py
|
<gh_stars>1000+
from django.template.defaultfilters import truncatewords
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class TruncatewordsTests(SimpleTestCase):
    """Template-level tests for the ``truncatewords`` filter with and
    without autoescaping."""

    @setup({
        'truncatewords01': '{% autoescape off %}{{ a|truncatewords:"2" }} {{ b|truncatewords:"2"}}{% endautoescape %}'
    })
    def test_truncatewords01(self):
        # With autoescape off, both the plain and the mark_safe value render
        # their ampersand unescaped.
        output = self.engine.render_to_string(
            'truncatewords01', {'a': 'alpha & bravo', 'b': mark_safe('alpha & bravo')}
        )
        self.assertEqual(output, 'alpha & ... alpha & ...')

    @setup({'truncatewords02': '{{ a|truncatewords:"2" }} {{ b|truncatewords:"2"}}'})
    def test_truncatewords02(self):
        # With autoescaping on, the unsafe value is escaped while the
        # mark_safe value passes through unchanged.
        output = self.engine.render_to_string(
            'truncatewords02', {'a': 'alpha & bravo', 'b': mark_safe('alpha & bravo')}
        )
        self.assertEqual(output, 'alpha &amp; ... alpha & ...')
class FunctionTests(SimpleTestCase):
    """Direct unit tests for ``django.template.defaultfilters.truncatewords``."""

    def test_truncate(self):
        result = truncatewords('A sentence with a few words in it', 1)
        self.assertEqual(result, 'A ...')

    def test_truncate2(self):
        result = truncatewords('A sentence with a few words in it', 5)
        self.assertEqual(result, 'A sentence with a few ...')

    def test_overtruncate(self):
        # A limit beyond the word count leaves the text untouched.
        result = truncatewords('A sentence with a few words in it', 100)
        self.assertEqual(result, 'A sentence with a few words in it')

    def test_invalid_number(self):
        # A non-numeric limit is ignored and the text is returned as-is.
        result = truncatewords('A sentence with a few words in it', 'not a number')
        self.assertEqual(result, 'A sentence with a few words in it')

    def test_non_string_input(self):
        # Non-string input is coerced to text before truncation.
        result = truncatewords(123, 2)
        self.assertEqual(result, '123')
|
MrJiangZM/mingblog
|
redis/src/main/java/com/ming/blog/mq/event/executor/BaseTimeWorker.java
|
<gh_stars>0
package com.ming.blog.mq.event.executor;
/**
* @author jiangzaiming
*/
public abstract class BaseTimeWorker {
    // Template method: the public-facing entry point simply delegates to the
    // subclass-supplied process() implementation.
    protected void actualWork() {
        process();
    }

    // Actual unit of work; implemented by concrete workers.
    protected abstract void process();
}
|
bcvsolutions/icewarp-connector
|
src/main/java/eu/bcvsolutions/idm/connector/communication/Connection.java
|
<filename>src/main/java/eu/bcvsolutions/idm/connector/communication/Connection.java
package eu.bcvsolutions.idm.connector.communication;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import org.identityconnectors.common.logging.Log;
import org.identityconnectors.common.security.GuardedString;
import org.identityconnectors.framework.common.exceptions.ConnectionFailedException;
import org.identityconnectors.framework.common.exceptions.ConnectorException;
import org.identityconnectors.framework.common.objects.Attribute;
import org.identityconnectors.framework.common.objects.ObjectClass;
import org.identityconnectors.framework.common.objects.Uid;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import eu.bcvsolutions.idm.connector.IceWarpConfiguration;
import eu.bcvsolutions.idm.connector.IceWarpConnector;
import eu.bcvsolutions.idm.connector.entity.Account;
import eu.bcvsolutions.idm.connector.entity.AccountList;
import eu.bcvsolutions.idm.connector.entity.AddAccountMembers;
import eu.bcvsolutions.idm.connector.entity.Authenticate;
import eu.bcvsolutions.idm.connector.entity.CreateAccount;
import eu.bcvsolutions.idm.connector.entity.DeleteAccountMembers;
import eu.bcvsolutions.idm.connector.entity.DeleteAccounts;
import eu.bcvsolutions.idm.connector.entity.Filter;
import eu.bcvsolutions.idm.connector.entity.GetAccountMemberInfoList;
import eu.bcvsolutions.idm.connector.entity.GetAccountMemberInfoListResponse;
import eu.bcvsolutions.idm.connector.entity.GetAccountsInfoList;
import eu.bcvsolutions.idm.connector.entity.GetAccountsInfoListResponse;
import eu.bcvsolutions.idm.connector.entity.Item;
import eu.bcvsolutions.idm.connector.entity.Logout;
import eu.bcvsolutions.idm.connector.entity.MemberItem;
import eu.bcvsolutions.idm.connector.entity.Members;
import eu.bcvsolutions.idm.connector.entity.PropertyName;
import eu.bcvsolutions.idm.connector.entity.PropertyState;
import eu.bcvsolutions.idm.connector.entity.PropertyVal;
import eu.bcvsolutions.idm.connector.entity.SetAccountPassword;
import eu.bcvsolutions.idm.connector.entity.SetAccountProperties;
import eu.bcvsolutions.idm.connector.wrapper.Iq;
import eu.bcvsolutions.idm.connector.wrapper.IqResponse;
import eu.bcvsolutions.idm.connector.wrapper.IqResponseAccountInfoList;
import eu.bcvsolutions.idm.connector.wrapper.IqResponseMemberInfoList;
import eu.bcvsolutions.idm.connector.wrapper.Query;
import eu.bcvsolutions.idm.connector.wrapper.QueryResponse;
import eu.bcvsolutions.idm.connector.wrapper.QueryResponseAccountInfoList;
import eu.bcvsolutions.idm.connector.wrapper.QueryResponseMemberInfoList;
/**
* @author <NAME>
*/
public class Connection {
    private static final Log log = Log.getLog(IceWarpConnector.class);
    // Connector configuration (host, domain, credentials, debug flag).
    private IceWarpConfiguration configuration;
    // Session id returned by authenticate(); empty until a login succeeds.
    private String sid = "";

    public Connection(IceWarpConfiguration configuration) {
        this.configuration = configuration;
    }
public void logout() {
Logout logout = new Logout();
QueryResponse queryResponse = new QueryResponse();
IqResponse iqResponse = new IqResponse();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(logout));
if (response.getStatus() != 200) {
log.error("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponse) getObject(response.getBody(), new IqResponse());
if (iqResponse.getQueryResponse().getResult().equals("0")) {
log.error("Logout failed");
}
} catch (Exception e) {
e.printStackTrace();
log.error("Logout failed");
}
}
public void authenticate() {
// send request for authentication and set sid to variable for later
Authenticate authenticate = new Authenticate();
authenticate.setEmail(configuration.getUsername());
authenticate.setPassword(getPassword(configuration.getPassword()));
authenticate.setPersistentlogin("1");
authenticate.setAuthtype("0");
QueryResponse queryResponse = new QueryResponse();
IqResponse iqResponse = new IqResponse();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(authenticate));
if (response.getStatus() != 200) {
throw new ConnectionFailedException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponse) getObject(response.getBody(), new IqResponse());
if (iqResponse.getQueryResponse().getResult().equals("0")) {
throw new ConnectionFailedException("Authenticate failed");
} else {
this.sid = iqResponse.getSid();
}
} catch (Exception e) {
e.printStackTrace();
throw new ConnectionFailedException("Authenticate failed");
}
}
public GetAccountsInfoListResponse getAccountsInfoList(Filter filter, int offset) {
GetAccountsInfoList getAccountsInfoList = new GetAccountsInfoList();
getAccountsInfoList.setDomainstr(configuration.getDomain());
getAccountsInfoList.setFilter(filter);
getAccountsInfoList.setCount(String.valueOf(IceWarpConnector.MAX_ROWS));
getAccountsInfoList.setOffset(String.valueOf(offset));
GetAccountsInfoListResponse getAccountsInfoListResponse = new GetAccountsInfoListResponse();
QueryResponseAccountInfoList queryResponse = new QueryResponseAccountInfoList();
queryResponse.setAccountsInfoListResponse(getAccountsInfoListResponse);
IqResponseAccountInfoList iqResponse = new IqResponseAccountInfoList();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(getAccountsInfoList));
if (response.getStatus() != 200) {
throw new ConnectorException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponseAccountInfoList) getObject(response.getBody(), iqResponse);
return iqResponse.getQueryResponse().getAccountsInfoListResponse();
} catch (Exception e) {
e.printStackTrace();
throw new ConnectorException("Cannot get accounts");
}
}
public String createAccount(Set<Attribute> createAttributes) {
Account account = new Account();
// default value = active user and normal user
account.setAccountstate("0");
account.setAdmintype("0");
List<String> groups = new ArrayList<>();
GuardedString password = <PASSWORD>;
for (Attribute attribute : createAttributes) {
if (attribute.getName().equals(IceWarpConnector.NAME)) {
account.setEmail(String.valueOf(attribute.getValue().get(0)));
continue;
}
if (attribute.getName().equals(IceWarpConnector.EMAIL)) {
account.setEmail(String.valueOf(attribute.getValue().get(0)));
continue;
}
if (attribute.getName().equals(IceWarpConnector.FIRST_NAME) && attribute.getValue().get(0) != null) {
if (attribute.getValue() != null && attribute.getValue().get(0) != null) {
account.setFirstName(String.valueOf(attribute.getValue().get(0)));
}
continue;
}
if (attribute.getName().equals(IceWarpConnector.LAST_NAME)) {
if (attribute.getValue() != null && attribute.getValue().get(0) != null) {
account.setLastName(String.valueOf(attribute.getValue().get(0)));
}
continue;
}
if (attribute.getName().equals(IceWarpConnector.ACCOUNT_STATE)) {
if (attribute.getValue() != null && attribute.getValue().get(0) != null) {
account.setAccountstate(String.valueOf(attribute.getValue().get(0)));
}
continue;
}
if (attribute.getName().equals(IceWarpConnector.GROUPS)) {
attribute.getValue().forEach(o -> groups.add(String.valueOf(o)));
continue;
}
if (attribute.getName().equals(IceWarpConnector.PASSWORD)) {
password = (GuardedString) attribute.getValue().get(0);
continue;
}
if (attribute.getName().equals(IceWarpConnector.ADMIN_TYPE)) {
Boolean adminType = (Boolean) attribute.getValue().get(0);
if (adminType != null && adminType) {
account.setAdmintype("1");
} else {
account.setAdmintype("0");
}
}
}
if (configuration.getObject().equals(ObjectClass.ACCOUNT_NAME)) {
account.setAccounttype(IceWarpConnector.USER_TYPE);
} else if (configuration.getObject().equals(ObjectClass.GROUP_NAME)) {
account.setAccounttype(IceWarpConnector.ROLE_TYPE);
}
CreateAccount createAccount = new CreateAccount();
createAccount.setDomainStr(configuration.getDomain());
Item name = new Item("A_Name", new PropertyName(account.getFirstName(), account.getLastName()));
Item type = new Item("U_Type", new PropertyVal("NativeInt", account.getAccounttype()));
Item email = new Item("U_Mailbox", new PropertyVal("TPropertyString", account.getEmail().split("@")[0]));
Item accountState = new Item("A_State", new PropertyState(account.getAccountstate()));
Item adminType = new Item("A_AdminType", new PropertyVal("TPropertyString", account.getAdmintype()));
List<Item> items = Arrays.asList(name, type, email, adminType, accountState);
createAccount.setItems(items);
QueryResponse queryResponse = new QueryResponse();
IqResponse iqResponse = new IqResponse();
iqResponse.setQueryResponse(queryResponse);
int count = 1;
int maxTries = 5;
try {
int configMaxTries = configuration.getMaxTries();
if (configMaxTries > 0 && configMaxTries < 20) {
maxTries = configMaxTries;
}
} catch(NumberFormatException e) {
log.error("Can't parse max tries config input.");
}
while (true) {
try {
if (configuration.getDebug()) {
log.info("REQUEST TRY COUNT: " + count);
}
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(createAccount));
if (response.getStatus() != 200) {
throw new ConnectorException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponse) getObject(response.getBody(), iqResponse);
if (iqResponse.getType().equals(IceWarpConnector.RESPONSE_TYPE_RESULT)) {
if (iqResponse.getQueryResponse().getResult().equals("0")) {
if (count++ >= maxTries) {
throw new ConnectorException("Cannot create user " + account.getEmail());
}
} else if (iqResponse.getQueryResponse().getResult().equals("1") ) {
if (!groups.isEmpty()) {
groups.forEach(group -> addMemberToGroup(account.getEmail(), group));
}
setAccountPassword(account.getEmail(), password);
return account.getEmail();
}
} else {
if (count++ >= maxTries) {
throw new ConnectorException("Cannot create user " + account.getEmail());
}
}
} catch (JAXBException e) {
e.printStackTrace();
throw new ConnectorException("Cannot create");
}
}
}
public void deleteAccount(String account) {
AccountList accountList = new AccountList();
accountList.addAccounts(account);
DeleteAccounts deleteAccounts = new DeleteAccounts();
deleteAccounts.setDomainstr(configuration.getDomain());
deleteAccounts.setAccountList(accountList);
deleteAccounts.setLeavedata("0");
QueryResponse queryResponse = new QueryResponse();
IqResponse iqResponse = new IqResponse();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(deleteAccounts));
if (response.getStatus() != 200) {
throw new ConnectorException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponse) getObject(response.getBody(), iqResponse);
if (iqResponse.getQueryResponse().getResult().equals("0")) {
throw new ConnectorException("Cannot delete member " + account);
}
} catch (JAXBException e) {
e.printStackTrace();
throw new ConnectorException("Cannot delete member " + account);
}
}
    /**
     * Updates account properties from the replace attributes and reconciles
     * group memberships against {@code oldGroups}. Password changes and group
     * add/remove operations are performed via separate API calls.
     *
     * NOTE(review): unlike the other API calls, the response body here is not
     * parsed for a result flag — only the HTTP status is checked. Confirm the
     * server cannot answer 200 with a failed result for this operation.
     *
     * @param uid current account identifier (email)
     * @param replaceAttributes attributes to write
     * @param oldGroups groups the account belonged to before this update
     * @return the (possibly changed) account identifier
     */
    public String setAccountProperties(Uid uid, Set<Attribute> replaceAttributes, List<String> oldGroups) {
        String newUid = uid.getUidValue();
        List<String> groups = null;
        SetAccountProperties setAccountProperties = new SetAccountProperties();
        // identifier which is email must be set
        setAccountProperties.setAccountemail(uid.getUidValue());
        PropertyName propertyName = new PropertyName();
        Item name = new Item();
        name.setPropertyname(propertyName);
        name.setPropname(Collections.singletonList("A_Name"));
        setAccountProperties.addItem(name);
        for (Attribute attribute : replaceAttributes) {
            if (attribute.getName().equals(IceWarpConnector.FIRST_NAME)) {
                propertyName.setFirstname(String.valueOf(attribute.getValue().get(0)));
                continue;
            }
            if (attribute.getName().equals(IceWarpConnector.LAST_NAME)) {
                propertyName.setSurname(String.valueOf(attribute.getValue().get(0)));
                continue;
            }
            if (attribute.getName().equals(IceWarpConnector.EMAIL) || attribute.getName().equals(IceWarpConnector.NAME)) {
                // A new mailbox name implies a rename: the new email becomes the uid.
                Item email = new Item("U_Mailbox", new PropertyVal("TPropertyString", String.valueOf(attribute.getValue().get(0))));
                setAccountProperties.addItem(email);
                newUid = String.valueOf(attribute.getValue().get(0));
                continue;
            }
            if (attribute.getName().equals(IceWarpConnector.ACCOUNT_STATE)) {
                Item state = new Item("A_State", new PropertyState(String.valueOf(attribute.getValue().get(0))));
                setAccountProperties.addItem(state);
                continue;
            }
            if (attribute.getName().equals(IceWarpConnector.ADMIN_TYPE)) {
                Boolean adminTypeValue = (Boolean) attribute.getValue().get(0);
                String type;
                if (adminTypeValue != null && adminTypeValue) {
                    type = "1";
                } else {
                    type = "0";
                }
                Item adminType = new Item("A_AdminType", new PropertyVal("TPropertyString", type));
                setAccountProperties.addItem(adminType);
                continue;
            }
            if (attribute.getName().equals(IceWarpConnector.PASSWORD)) {
                // Password is applied immediately through its own API call.
                setAccountPassword(uid.getUidValue(), (GuardedString) attribute.getValue().get(0));
                continue;
            }
            if (attribute.getName().equals(IceWarpConnector.GROUPS)) {
                groups = new ArrayList<>();
                for (Object o : attribute.getValue()) {
                    groups.add(String.valueOf(o));
                }
            }
        }
        try {
            HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(setAccountProperties));
            if (response.getStatus() != 200) {
                throw new ConnectorException("Can't update account properties" + response.getStatus());
            }
            // groups == null means the GROUPS attribute was absent: membership untouched.
            if (groups != null) {
                Set<String> rolesForRemove = getRolesForRemove(oldGroups, groups);
                Set<String> rolesForAdd = getRolesForAdd(oldGroups, groups);
                if (!rolesForRemove.isEmpty()) {
                    rolesForRemove.forEach(roleForRemove -> deleteMemberFromGroup(uid.getUidValue(), roleForRemove));
                }
                if (!rolesForAdd.isEmpty()) {
                    rolesForAdd.forEach(roleForAdd -> addMemberToGroup(uid.getUidValue(), roleForAdd));
                }
            }
            return newUid;
        } catch (JAXBException e) {
            e.printStackTrace();
            throw new ConnectorException("Can't update account properties " + uid.getUidValue());
        }
    }
public GetAccountMemberInfoListResponse getGroupMembers(String uid, int offset) {
GetAccountMemberInfoList getAccountMemberInfoList = new GetAccountMemberInfoList();
getAccountMemberInfoList.setGroupUid(uid);
getAccountMemberInfoList.setCount(String.valueOf(IceWarpConnector.MAX_ROWS));
getAccountMemberInfoList.setOffset(String.valueOf(offset));
GetAccountMemberInfoListResponse getAccountMemberInfoListResponse = new GetAccountMemberInfoListResponse();
QueryResponseMemberInfoList queryResponse = new QueryResponseMemberInfoList();
queryResponse.setGetAccountMemberInfoListResponse(getAccountMemberInfoListResponse);
IqResponseMemberInfoList iqResponse = new IqResponseMemberInfoList();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(getAccountMemberInfoList));
if (response.getStatus() != 200) {
throw new ConnectorException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponseMemberInfoList) getObject(response.getBody(), iqResponse);
return iqResponse.getQueryResponse().getGetAccountMemberInfoListResponse();
} catch (JAXBException e) {
e.printStackTrace();
throw new ConnectorException("Cannot get group members for " + uid);
}
}
private void addMemberToGroup(String userUid, String groupUid) {
MemberItem member = new MemberItem();
member.setUserUid(userUid);
Members members = new Members();
members.setItems(Collections.singletonList(member));
AddAccountMembers addAccountMembers = new AddAccountMembers();
addAccountMembers.setGroupEmail(groupUid);
addAccountMembers.setMembers(members);
QueryResponse queryResponse = new QueryResponse();
IqResponse iqResponse = new IqResponse();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(addAccountMembers));
if (response.getStatus() != 200) {
throw new ConnectorException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponse) getObject(response.getBody(), iqResponse);
if (iqResponse.getQueryResponse().getResult().equals("0")) {
throw new ConnectorException("Cannot add member " + userUid + " to group " + groupUid);
}
} catch (JAXBException e) {
e.printStackTrace();
throw new ConnectorException("Cannot add member " + userUid + " to group " + groupUid);
}
}
private void deleteMemberFromGroup(String userUid, String groupUid) {
MemberItem member = new MemberItem();
member.setUserUid(userUid);
Members members = new Members();
members.setItems(Collections.singletonList(member));
DeleteAccountMembers deleteAccountMembers = new DeleteAccountMembers();
deleteAccountMembers.setGroupEmail(groupUid);
deleteAccountMembers.setMembers(members);
QueryResponse queryResponse = new QueryResponse();
IqResponse iqResponse = new IqResponse();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(deleteAccountMembers));
if (response.getStatus() != 200) {
throw new ConnectorException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponse) getObject(response.getBody(), iqResponse);
if (iqResponse.getQueryResponse().getResult().equals("0")) {
throw new ConnectorException("Cannot delete member " + userUid + " from group " + groupUid);
}
} catch (JAXBException e) {
e.printStackTrace();
throw new ConnectorException("Cannot delete member " + userUid + " from group " + groupUid);
}
}
private void setAccountPassword(String userUid, GuardedString password) {
SetAccountPassword setAccountPassword = new SetAccountPassword();
setAccountPassword.setAccountemail(userUid);
setAccountPassword.setPassword(getPassword(password));
QueryResponse queryResponse = new QueryResponse();
IqResponse iqResponse = new IqResponse();
iqResponse.setQueryResponse(queryResponse);
try {
HttpResponse<String> response = post(configuration.getHost() + "/icewarpapi/", getWrappedXml(setAccountPassword));
if (response.getStatus() != 200) {
throw new ConnectorException("Can't connect to system, return code " + response.getStatus());
}
iqResponse = (IqResponse) getObject(response.getBody(), iqResponse);
if (iqResponse.getType().equals(IceWarpConnector.RESPONSE_TYPE_RESULT)) {
if (iqResponse.getQueryResponse().getResult().equals("0")) {
throw new ConnectorException("Cannot set password for " + userUid);
}
} else {
throw new ConnectorException("Cannot set password for " + userUid);
}
} catch (JAXBException e) {
e.printStackTrace();
throw new ConnectorException("Cannot set password for " + userUid);
}
}
    /**
     * Wraps an API command in the iq/query envelope (carrying the current
     * session id) and serializes it to XML.
     *
     * @param request the command object to wrap
     * @return the XML request body
     * @throws JAXBException if marshalling fails
     */
    private String getWrappedXml(Object request) throws JAXBException {
        Query query = new Query();
        query.setCommand(request);
        Iq iq = new Iq();
        iq.setSid(this.sid);
        iq.setQuery(query);
        String xmlBody = getXMLBody(iq);
        // log xml body for debug purposes
        if (configuration.getDebug()) {
            log.info("ICEWARP REQUEST XML BODY");
            log.info(xmlBody);
        }
        return xmlBody;
    }
    // TODO try to make all entities to inherit some class and use it here instead of Object?
    /**
     * Marshals the given JAXB-annotated object to a formatted XML string.
     *
     * @throws JAXBException if a context cannot be created or marshalling fails
     */
    private String getXMLBody(Object request) throws JAXBException {
        OutputStream stream = new ByteArrayOutputStream();
        JAXBContext jaxbContext = JAXBContext.newInstance(request.getClass());
        Marshaller marshaller = jaxbContext.createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
        marshaller.marshal(request, stream);
        // NOTE(review): ByteArrayOutputStream.toString() uses the platform
        // default charset — confirm the server always receives ASCII-safe XML.
        return stream.toString();
    }
    /**
     * Unmarshals an XML response into a fresh instance of the same class as
     * {@code response}. Note that the passed instance is used only for its
     * class; its fields are ignored and a new object is returned.
     *
     * @throws JAXBException if a context cannot be created or parsing fails
     */
    private Object getObject(String xml, Object response) throws JAXBException {
        // log xml body for debug purposes
        if (configuration.getDebug()) {
            log.info("ICEWARP RESPONSE XML BODY");
            log.info(xml);
        }
        InputStream inputStream = new ByteArrayInputStream(xml.getBytes());
        JAXBContext jaxbContext = JAXBContext.newInstance(response.getClass());
        Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
        return unmarshaller.unmarshal(inputStream);
    }
private HttpResponse<String> post(String url, String body) {
try {
HttpResponse<String> response = Unirest.post(url)
.header("content-type", "text/xml")
.body(body)
.asString();
return response;
} catch (UnirestException e) {
throw new ConnectionFailedException("Connection failed " + e.getMessage());
}
}
    /**
     * Unwraps a GuardedString into its cleartext value for use in the XML
     * request body.
     *
     * NOTE(review): the cleartext char[] is never zeroed after use — confirm
     * whether the accessor buffer should be cleared once consumed.
     */
    private String getPassword(GuardedString password) {
        GuardedStringAccessor accessor = new GuardedStringAccessor();
        password.access(accessor);
        char[] result = accessor.getArray();
        return new String(result);
    }
private Set<String> getRolesForRemove(List<String> old, List<String> groups) {
Set<String> oldGroups = new HashSet<>(old);
Set<String> newGroups = new HashSet<>(groups);
oldGroups.removeAll(newGroups);
return oldGroups;
}
private Set<String> getRolesForAdd(List<String> old, List<String> groups) {
Set<String> oldGroups = new HashSet<>(old);
Set<String> newGroups = new HashSet<>(groups);
newGroups.removeAll(oldGroups);
return newGroups;
}
}
|
folloze/box-content-preview
|
src/lib/viewers/media/VideoBaseViewer.js
|
<gh_stars>10-100
import throttle from 'lodash/throttle';
import ControlsRoot from '../controls';
import MediaBaseViewer from './MediaBaseViewer';
import { CLASS_HIDDEN, CLASS_IS_BUFFERING, CLASS_DARK } from '../../constants';
import { ICON_PLAY_LARGE } from '../../icons';
// Throttle interval for the mousemove-driven controls display, in ms.
const MOUSE_MOVE_TIMEOUT_IN_MILLIS = 1000;
// CSS class of the large play-button overlay element.
const CLASS_PLAY_BUTTON = 'bp-media-play-button';
class VideoBaseViewer extends MediaBaseViewer {
    /**
     * @inheritdoc
     */
    constructor(options) {
        super(options);

        // Bind context for prototype-method handlers. handleControlsHide,
        // handleControlsShow and handleAutoplayFail are arrow-function class
        // fields and are already lexically bound, so they need no binding here.
        this.loadeddataHandler = this.loadeddataHandler.bind(this);
        this.pointerHandler = this.pointerHandler.bind(this);
        this.waitingHandler = this.waitingHandler.bind(this);
        this.playingHandler = this.playingHandler.bind(this);
        this.pauseHandler = this.pauseHandler.bind(this);
        this.resize = this.resize.bind(this);
    }

    /**
     * @inheritdoc
     */
    setup() {
        if (this.isSetup) {
            return;
        }

        // Call super() to set up common layout
        super.setup();

        // Video element
        this.mediaEl = this.mediaContainerEl.appendChild(document.createElement('video'));
        this.mediaEl.setAttribute('preload', 'auto');

        // Prevents native iOS UI from taking over
        this.mediaEl.setAttribute('playsinline', '');

        // Play button overlay, hidden until media data has loaded
        this.playButtonEl = this.mediaContainerEl.appendChild(document.createElement('div'));
        this.playButtonEl.classList.add(CLASS_PLAY_BUTTON);
        this.playButtonEl.classList.add(CLASS_HIDDEN);
        this.playButtonEl.innerHTML = ICON_PLAY_LARGE;

        this.lowerLights();
    }

    /**
     * [destructor]
     *
     * @override
     * @return {void}
     */
    destroy() {
        if (this.mediaEl) {
            this.mediaEl.removeEventListener('mousemove', this.mousemoveHandler);
            this.mediaEl.removeEventListener('click', this.pointerHandler);
            this.mediaEl.removeEventListener('touchstart', this.pointerHandler);
            this.mediaEl.removeEventListener('waiting', this.waitingHandler);
        }

        if (this.playButtonEl) {
            this.playButtonEl.removeEventListener('click', this.togglePlay);
        }

        // NOTE(review): this.controls (ControlsRoot from loadUIReact) is
        // presumably torn down by super.destroy() — confirm in MediaBaseViewer.
        super.destroy();
    }

    /**
     * Handler for meta data load for the media element.
     *
     * @override
     * @return {void}
     */
    loadeddataHandler() {
        super.loadeddataHandler();
        this.showPlayButton();

        if (this.mediaControls) {
            this.mediaControls.show();
        } else if (this.controls && this.controls.controlsLayer) {
            this.controls.controlsLayer.show();
            this.controls.controlsLayer.hide(); // Show controls briefly after content loads
        }
    }

    /**
     * @inheritdoc
     */
    loadUI() {
        super.loadUI();
    }

    /**
     * @inheritdoc
     */
    loadUIReact() {
        super.loadUIReact();

        this.controls = new ControlsRoot({
            className: 'bp-VideoControlsRoot',
            containerEl: this.mediaContainerEl,
            fileId: this.options.file.id,
            onHide: this.handleControlsHide,
            onShow: this.handleControlsShow,
        });

        this.renderUI();
    }

    /**
     * Handler for a pointer event on the media element.
     *
     * @param {Event} event pointer event, either touch or mouse
     * @return {void}
     */
    pointerHandler(event) {
        if (event.type === 'touchstart') {
            // Prevents 'click' event from firing which would pause the video
            event.preventDefault();
            event.stopPropagation();

            if (this.mediaControls) {
                this.mediaControls.toggle();
            }
        } else if (event.type === 'click') {
            this.togglePlay();
        }
    }

    /**
     * Handler for play state
     *
     * @override
     * @return {void}
     */
    playingHandler() {
        super.playingHandler();
        this.hidePlayButton();
    }

    /**
     * Handler for pause state
     *
     * @override
     * @return {void}
     */
    pauseHandler() {
        super.pauseHandler();
        this.showPlayButton();
        this.hideLoadingIcon();
    }

    /**
     * Shows the buffering indicator while the video stalls.
     *
     * @private
     * @return {void}
     */
    waitingHandler() {
        if (this.containerEl) {
            this.containerEl.classList.add(CLASS_IS_BUFFERING);
            this.hidePlayButton();
        }
    }

    /**
     * Adds event listeners to the media controls.
     * Makes changes to the media element.
     *
     * @override
     * @return {void}
     */
    addEventListenersForMediaControls() {
        super.addEventListenersForMediaControls();

        /* istanbul ignore next */
        this.mediaControls.on('togglefullscreen', () => {
            this.toggleFullscreen();
        });
    }

    /**
     * Adds event listeners to the media element.
     * Makes changes to the media controls.
     *
     * @override
     * @return {void}
     */
    addEventListenersForMediaElement() {
        super.addEventListenersForMediaElement();

        /* istanbul ignore next */
        this.mousemoveHandler = throttle(() => {
            if (this.mediaControls) {
                this.mediaControls.show();
            }
        }, MOUSE_MOVE_TIMEOUT_IN_MILLIS);

        this.mediaEl.addEventListener('mousemove', this.mousemoveHandler);

        if (this.hasTouch) {
            this.mediaEl.addEventListener('touchstart', this.pointerHandler);
        }

        this.mediaEl.addEventListener('click', this.pointerHandler);
        this.mediaEl.addEventListener('waiting', this.waitingHandler);
        this.playButtonEl.addEventListener('click', this.togglePlay);
    }

    /**
     * @inheritdoc
     */
    showLoadingIcon() {
        super.showLoadingIcon();
    }

    /**
     * Overriden method to handle resizing of the window.
     * Adjusts the size of the time scrubber since its
     * senstive to the containers width.
     *
     * @override
     * @return {void}
     */
    resize() {
        if (this.mediaControls) {
            this.mediaControls.resizeTimeScrubber();
        }

        super.resize();
    }

    /**
     * Function to tell preview if navigation arrows
     * should be shown and won't intefere with viewer
     *
     * @protected
     * @return {boolean} true if arrows should be shown
     */
    allowNavigationArrows() {
        return !this.mediaControls || !this.mediaControls.isSettingsVisible();
    }

    /**
     * Darkens the background of preview.
     * Good for having high contrast videos.
     *
     * @protected
     * @return {void}
     */
    lowerLights() {
        if (this.rootEl) {
            this.rootEl.classList.add(CLASS_DARK);
        }
    }

    /**
     * @inheritdoc
     */
    onKeydown(key) {
        return super.onKeydown(key);
    }

    /**
     * Auto-play was prevented, try muted play
     *
     * @override
     */
    handleAutoplayFail = () => {
        this.setVolume(0);
        this.play().catch(this.pause);
    };

    // Keeps a CSS hook in sync with controls visibility (React controls).
    handleControlsHide = () => {
        this.mediaContainerEl.classList.remove('bp-media-controls-is-visible');
    };

    handleControlsShow = () => {
        this.mediaContainerEl.classList.add('bp-media-controls-is-visible');
    };
}
export default VideoBaseViewer;
|
CodeWater404/JavaCode
|
Jvm/src/JVMDemo1/chapter05/src/com/atguigu/java1/StackFrameTest.java
|
<filename>Jvm/src/JVMDemo1/chapter05/src/com/atguigu/java1/StackFrameTest.java
package JVMDemo1.chapter05.src.com.atguigu.java1;
/**
 * Demonstrates how JVM stack frames are pushed and popped as methods call
 * one another.
 *
 * A method leaves its frame in one of two ways: (1) normally, via return,
 * or (2) abruptly, by throwing an exception it does not handle itself.
 */
public class StackFrameTest {

    public static void main(String[] args) {
        try {
            new StackFrameTest().method1();
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("main()正常结束");
    }

    public void method1() {
        System.out.println("method1()开始执行...");
        method2();
        System.out.println("method1()执行结束...");
    }

    public int method2() {
        System.out.println("method2()开始执行...");
        int base = 10;
        int extra = (int) method3();
        System.out.println("method2()即将结束...");
        return base + extra;
    }

    public double method3() {
        System.out.println("method3()开始执行...");
        double result = 20.0;
        System.out.println("method3()即将结束...");
        return result;
    }
}
|
SMiThRos/java-design-patterns
|
src/main/java/com/gof/example/structural/decorator/Developer.java
|
<reponame>SMiThRos/java-design-patterns
package com.gof.example.structural.decorator;
/**
 * Component interface for the Decorator pattern example: anything that can
 * perform a unit of development work and describe it as a string.
 */
public interface Developer {

    /** @return a human-readable description of the work performed */
    String makeJob();
}
|
zyzil/cm_ext
|
cm-schema/src/main/java/com/cloudera/validation/DescriptorRunner.java
|
// Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.validation;
import com.cloudera.common.Parser;
import com.fasterxml.jackson.databind.JsonMappingException.Reference;
import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.Writer;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
/**
 * Runs validations on entities using a matched {@link Parser} and
 * {@link DescriptorValidator}.
 *
 * @param <T> descriptor type produced by the parser and consumed by the
 *            validator
 */
public class DescriptorRunner<T> implements ValidationRunner {

  private final Parser<T> parser;
  private final DescriptorValidator<T> validator;

  public DescriptorRunner(Parser<T> parser,
                          DescriptorValidator<T> validator) {
    this.parser = parser;
    this.validator = validator;
  }

  /**
   * Reads the target file and validates its contents.
   *
   * @param target path of the file to validate
   * @param writer destination for validation messages
   * @return true if validation passed, false otherwise
   * @throws IOException if the writer cannot be written to
   */
  @Override
  public boolean run(String target, Writer writer)
      throws IOException {
    // try-with-resources replaces the former IOUtils.closeQuietly() cleanup;
    // a failure while opening/reading is reported exactly as before, and a
    // failure to close is now reported instead of silently discarded.
    try (FileInputStream stream = new FileInputStream(target)) {
      return run(target, IOUtils.toByteArray(stream), writer);
    } catch (Exception e) {
      writer.write(String.format("==> %s\n", e.getMessage()));
      return false;
    }
  }

  /**
   * Run the validation against a byte array.
   *
   * @param name The name of the target that was loaded into the byte array.
   * @param data The byte array
   * @param writer to write validation errors to
   * @return true if validation passed, false otherwise
   * @throws IOException if we can't write to the outputStream
   */
  public boolean run(String name, byte[] data, Writer writer)
      throws IOException {
    try {
      writer.write("Validating: " + name + "\n");
      T descriptor = parser.parse(data);
      Set<String> errors = validator.validate(descriptor);
      for (String error : errors) {
        writer.write(String.format("==> %s\n", error));
      }
      return errors.isEmpty();
    } catch (UnrecognizedPropertyException e) {
      // Produce a friendlier message for unknown-field errors, naming the
      // full dotted path of the offending field.
      List<String> elements = Lists.newArrayList();
      for (Reference r : e.getPath()) {
        elements.add(r.getFieldName());
      }
      writer.write(String.format(
          "==> Unrecognized field \"%s\". Recognized fields are \"%s\"\n",
          Joiner.on('.').join(elements),
          e.getKnownPropertyIds().toString()));
      return false;
    } catch (Exception e) {
      writer.write(String.format("==> %s\n", e.getMessage()));
      return false;
    }
  }
}
|
stevewallone/cfdev
|
provision/services.go
|
<reponame>stevewallone/cfdev<filename>provision/services.go
package provision
import (
"code.cloudfoundry.org/cfdev/bosh"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"time"
)
// Service describes one deployable CF Dev service as declared in the
// services YAML metadata.
type Service struct {
	Name          string `yaml:"name"`           // display name; also used to build the log file name
	Flagname      string `yaml:"flag_name"`      // matched against the white-list expression; "always-include" is special-cased
	DefaultDeploy bool   `yaml:"default_deploy"` // deployed when no white list is given
	Handle        string `yaml:"handle"`         //TODO <-- remove
	Script        string `yaml:"script"`         // deployment script file name (".ps1" is appended on Windows)
	Deployment    string `yaml:"deployment"`     // BOSH deployment name — presumably used by report(); confirm
	IsErrand      bool   `yaml:"errand"`         // whether the service is a BOSH errand
}
// WhiteListServices returns the subset of services selected by the whiteList
// expression. Services flagged "always-include" are always selected; beyond
// that, "all" selects everything, "none" selects nothing extra, an empty
// string selects the default-deploy services, and any other value selects
// services whose flag name appears (case-insensitively) in the expression.
func (c *Controller) WhiteListServices(whiteList string, services []Service) ([]Service, error) {
	var selected []Service
	for _, svc := range services {
		if svc.Flagname == "always-include" {
			selected = append(selected, svc)
		}
	}

	switch strings.TrimSpace(strings.ToLower(whiteList)) {
	case "all":
		return services, nil
	case "none":
		return selected, nil
	case "":
		for _, svc := range services {
			if svc.DefaultDeploy && !contains(selected, svc.Name) {
				selected = append(selected, svc)
			}
		}
		return selected, nil
	default:
		lowered := strings.ToLower(whiteList)
		for _, svc := range services {
			if strings.Contains(lowered, strings.ToLower(svc.Flagname)) && !contains(selected, svc.Name) {
				selected = append(selected, svc)
			}
		}
		return selected, nil
	}
}
// contains reports whether a service with the given name is present in the
// slice.
func contains(services []Service, name string) bool {
	for i := range services {
		if services[i].Name == name {
			return true
		}
	}
	return false
}
// DeployServices deploys each of the given services in order, reporting
// progress through the UI. It stops and returns at the first error.
func (c *Controller) DeployServices(ui UI, services []Service) error {
	b, err := bosh.New(c.Config)
	if err != nil {
		return err
	}

	errChan := make(chan error, 1)
	for _, service := range services {
		start := time.Now()
		ui.Say("Deploying %s...", service.Name)

		// Pass the loop variable into the goroutine explicitly: the original
		// closure captured `service` from the loop (while ignoring the
		// handle/script parameters it actually passed), which is a shared,
		// racy binding in Go before 1.22.
		go func(svc Service) {
			errChan <- c.DeployService(svc)
		}(service)

		err = c.report(start, ui, b, service, errChan)
		if err != nil {
			return err
		}
	}
	return nil
}
// DeployService runs the deployment script for a single service, writing the
// script's combined stdout/stderr to a per-service log file.
//
// On Windows the script is executed through PowerShell as "<Script>.ps1";
// elsewhere the script file is executed directly. The BOSH environment
// variables derived from the controller's config are appended to the current
// process environment before running.
func (c *Controller) DeployService(service Service) error {
	var cmd *exec.Cmd
	if runtime.GOOS == "windows" {
		cmd = exec.Command("powershell.exe", "-ExecutionPolicy", "Bypass", "-File", filepath.Join(c.Config.ServicesDir, service.Script+".ps1"))
	} else {
		cmd = exec.Command(filepath.Join(c.Config.ServicesDir, service.Script))
	}
	cmd.Env = os.Environ()
	cmd.Env = append(cmd.Env, bosh.Envs(c.Config)...)

	// One log file per service, e.g. "deploy-mysql.log".
	logFile, err := os.Create(filepath.Join(c.Config.LogDir, "deploy-"+strings.ToLower(service.Name)+".log"))
	if err != nil {
		return err
	}
	defer logFile.Close()
	cmd.Stdout = logFile
	cmd.Stderr = logFile

	return cmd.Run()
}
|
paulocsilvajr/controle_pessoal_de_financas
|
API/v1/controller/tipo_conta_handler_test.go
|
<filename>API/v1/controller/tipo_conta_handler_test.go
package controller
import (
"encoding/json"
"fmt"
"testing"
)
func TestTipoContaCreate(t *testing.T) {
// criar TipoConta como administrador - 201 ou 500(chave duplicada)
rota := fmt.Sprintf("/tipos_conta")
res, body, err := post(rota, `{"nome":"tipo conta teste 01", "descricao_debito":"saída", "descricao_credito":"entrada"}`, testTokenAdmin)
if err != nil {
t.Error(err)
return
}
status := res.StatusCode
if !(status == 201 || status == 500) {
t.Error(res, string(body))
}
res, body, err = post(rota, `{"nome":"tipo conta teste 02", "descricao_debito":"saída", "descricao_credito":"entrada"}`, testTokenAdmin)
status = res.StatusCode
if !(status == 201 || status == 500) {
t.Error(res, string(body))
}
res, body, err = post(rota, `{"nome":"tipo conta teste 03", "descricao_debito":"saída", "descricao_credito":"entrada"}`, testTokenAdmin)
status = res.StatusCode
if !(status == 201 || status == 500) {
t.Error(res, string(body))
}
// criar TipoConta como usuário comum - 201 ou 500(chave duplicada)
res, body, err = post(rota, `{"nome":"base", "descricao_debito":"-", "descricao_credito":"+"}`, testTokenComum)
status = res.StatusCode
if !(status == 201 || status == 500) {
t.Error(res, string(body))
}
// chave duplicada na inclusão de tipo conta como admin - 500
res, body, err = post(rota, `{"nome":"base", "descricao_debito":"-", "descricao_credito":"+"}`, testTokenAdmin)
status = res.StatusCode
if status != 500 {
t.Error(res, string(body))
}
// erro ao criar tipo conta com json inválido - 422
res, body, err = post(rota, `"nome":"tipo conta teste 04", "descricao_debito":"saída", "descricao_credito":"entrada"`, testTokenAdmin)
status = res.StatusCode
if status != 422 {
t.Error(res, string(body))
}
}
// TestTipoContaEstado exercises PUT /tipos_conta/{nome}/estado: toggling the
// active flag of an account type as admin and as a regular user.
func TestTipoContaEstado(t *testing.T) {
	// deactivate account type "tipo conta teste 01" as admin - 200
	tipoConta := "tipo conta teste 01"
	rota := fmt.Sprintf("/tipos_conta/%s/estado", tipoConta)
	res, body, err := put(rota, `{"estado": false}`, testTokenAdmin)
	if err != nil {
		t.Error(err)
		return
	}
	status := res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// deactivate account type "base" as a regular user - 200
	tipoConta = "base"
	rota = fmt.Sprintf("/tipos_conta/%s/estado", tipoConta)
	// NOTE(review): err from this and the following put calls is never
	// re-checked; a transport failure would leave res == nil and panic below.
	res, body, err = put(rota, `{"estado": false}`, testTokenComum)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// activate account type "tipo conta teste 01" as a regular user - 500
	tipoConta = "tipo conta teste 01"
	rota = fmt.Sprintf("/tipos_conta/%s/estado", tipoConta)
	res, body, err = put(rota, `{"estado": true}`, testTokenComum)
	status = res.StatusCode
	if status != 500 {
		t.Error(res, string(body))
	}
	// activate account type "tipo conta teste 01" as admin - 200
	tipoConta = "tipo conta teste 01"
	rota = fmt.Sprintf("/tipos_conta/%s/estado", tipoConta)
	res, body, err = put(rota, `{"estado": true}`, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
}
// TestTipoContaIndex exercises GET /tipos_conta: the admin listing must
// contain at least one more row than the regular-user listing (inactive
// account types are hidden from regular users).
func TestTipoContaIndex(t *testing.T) {
	var retorno ReturnData
	var quantAdmin, quantComum int
	// list registered account types using an admin token - 200
	rota := "/tipos_conta"
	res, body, err := get(rota, testTokenAdmin)
	if err != nil {
		t.Error(err)
		return
	}
	status := res.StatusCode
	json.Unmarshal(body, &retorno)
	quantAdmin = retorno.Count
	if status != 200 {
		t.Error(res, string(body))
	}
	// list account types using a regular user's token - 200
	res, body, _ = get(rota, testTokenComum)
	status = res.StatusCode
	json.Unmarshal(body, &retorno)
	quantComum = retorno.Count
	if status != 200 {
		t.Error(res, string(body))
	}
	diferenca := quantAdmin - quantComum
	if diferenca < 1 {
		t.Error("Diferença entre a quantidade de registros na busca de Tipo de Conta como adminitrador está menor do que a quantidade de registros como usuário comum")
	}
}
// TestTipoContaShow exercises GET /tipos_conta/{nome}: fetching a single
// account type as admin and as a regular user, including the inactive case
// (regular users may not see inactive account types).
func TestTipoContaShow(t *testing.T) {
	// fetch account-type data as admin - 200
	tipoConta := "tipo conta teste 01"
	rota := fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, err := get(rota, testTokenAdmin)
	if err != nil {
		t.Error(err)
		return
	}
	status := res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// fetch account-type data as a regular user - 200
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = get(rota, testTokenComum)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// fetch an inactive account type ("base") as admin - 200
	tipoConta = "base"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = get(rota, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// fetch an inactive account type as a regular user - 500
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = get(rota, testTokenComum)
	status = res.StatusCode
	if status != 500 {
		t.Error(res, string(body))
	}
}
// TestTipoContaAlter exercises PUT /tipos_conta/{nome}: renaming and editing
// account types as admin and regular user, plus the not-found (304) and
// bad-JSON (422) failure cases.
func TestTipoContaAlter(t *testing.T) {
	// alter account type "tipo conta teste 01" as admin - 200
	tipoConta := "tipo conta teste 01"
	rota := fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, err := put(rota, `{"nome":"tipo conta teste 04", "descricao_debito":"<", "descricao_credito":">"}`, testTokenAdmin)
	if err != nil {
		t.Error(err)
		return
	}
	status := res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// rename it back to "tipo conta teste 01" - 200
	// NOTE(review): err from this and the following put calls is never
	// re-checked; a transport failure would leave res == nil and panic.
	tipoConta = "tipo conta teste 04"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, err = put(rota, `{"nome":"tipo conta teste 01", "descricao_debito":"diminuir", "descricao_credito":"aumentar"}`, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// alter an account type as a regular user - 200
	tipoConta = "tipo conta teste 02"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, err = put(rota, `{"nome":"tipo conta teste 02", "descricao_debito":"-", "descricao_credito":"+"}`, testTokenComum)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// alter as admin without the "nome" field in the JSON - 200
	tipoConta = "tipo conta teste 01"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, err = put(rota, `{"descricao_debito":"-", "descricao_credito":"+"}`, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	// account type cannot be altered because it does not exist - 304
	tipoConta = "tipo conta teste 05"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, err = put(rota, `{"descricao_debito":"menos", "descricao_credito":"mais"}`, testTokenAdmin)
	status = res.StatusCode
	if status != 304 {
		t.Error(res, string(body))
	}
	// JSON payload cannot be processed - 422
	tipoConta = "tipo conta teste 01"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, err = put(rota, `"descricao_debito":"-", "descricao_credito":"+"`, testTokenAdmin)
	status = res.StatusCode
	if status != 422 {
		t.Error(res, string(body))
	}
}
// TestTipoContaRemove exercises DELETE /tipos_conta/{nome}: regular users may
// not delete, unknown names fail, and finally the fixtures created by the
// other tests in this file are cleaned up.
//
// NOTE(review): `delete` below shadows the Go builtin with a package-level
// HTTP helper — legal, but worth renaming (e.g. httpDelete).
func TestTipoContaRemove(t *testing.T) {
	// remove an account type as a regular user - 500
	tipoConta := "tipo conta teste 01"
	rota := fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ := delete(rota, testTokenComum)
	status := res.StatusCode
	if status != 500 {
		t.Error(res, string(body))
	}
	// remove an account type that does not exist, as admin - 500
	tipoConta = "tipo conta teste 05"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = delete(rota, testTokenAdmin)
	status = res.StatusCode
	if status != 500 {
		t.Error(res, string(body))
	}
	// remove the remaining account types as admin - 200 each
	tipoConta = "tipo conta teste 01"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = delete(rota, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	tipoConta = "tipo conta teste 02"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = delete(rota, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	tipoConta = "tipo conta teste 03"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = delete(rota, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
	tipoConta = "base"
	rota = fmt.Sprintf("/tipos_conta/%s", tipoConta)
	res, body, _ = delete(rota, testTokenAdmin)
	status = res.StatusCode
	if status != 200 {
		t.Error(res, string(body))
	}
}
|
devgateway/gtp
|
persistence/src/main/java/org/devgateway/toolkit/persistence/service/SpringContext.java
|
package org.devgateway.toolkit.persistence.service;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
/**
 * Bridge that exposes the Spring {@link ApplicationContext} to non-managed
 * code through a static accessor.
 */
@Component
public class SpringContext implements ApplicationContextAware {

    // Written once by Spring during context startup (setApplicationContext),
    // then only read. NOTE(review): the static write from an instance method
    // is the usual price of this pattern; it assumes a single refresh of a
    // single application context.
    private static ApplicationContext applicationContext;

    /**
     * Looks up the unique bean of the given type from the application
     * context.
     *
     * @param beanClass type of the bean to fetch
     * @param <T>       bean type (the redundant {@code extends Object} bound
     *                  of the original was dropped)
     * @return the bean instance
     */
    public static <T> T getBean(final Class<T> beanClass) {
        return applicationContext.getBean(beanClass);
    }

    @Override
    public void setApplicationContext(final ApplicationContext applicationContext) throws BeansException {
        SpringContext.applicationContext = applicationContext;
    }
}
|
timkpaine/arrow
|
cpp/src/parquet/encryption/local_wrap_kms_client.cc
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "arrow/json/object_parser.h"
#include "arrow/json/object_writer.h"
#include "parquet/encryption/key_toolkit_internal.h"
#include "parquet/encryption/local_wrap_kms_client.h"
#include "parquet/exception.h"
using ::arrow::json::internal::ObjectParser;
using ::arrow::json::internal::ObjectWriter;
namespace parquet {
namespace encryption {
constexpr const char LocalWrapKmsClient::kLocalWrapNoKeyVersion[];
constexpr const char LocalWrapKmsClient::LocalKeyWrap::kLocalWrapKeyVersionField[];
constexpr const char LocalWrapKmsClient::LocalKeyWrap::kLocalWrapEncryptedKeyField[];
// Value holder pairing an encrypted+encoded key with the version of the
// master key that wrapped it.
LocalWrapKmsClient::LocalKeyWrap::LocalKeyWrap(std::string master_key_version,
                                               std::string encrypted_encoded_key)
    : encrypted_encoded_key_(std::move(encrypted_encoded_key)),
      master_key_version_(std::move(master_key_version)) {}
// Serializes an encrypted key into the local-wrap JSON format. The version
// field is always the kLocalWrapNoKeyVersion sentinel: master key versions
// are not supported for local wrapping (see UnwrapKey).
std::string LocalWrapKmsClient::LocalKeyWrap::CreateSerialized(
    const std::string& encrypted_encoded_key) {
  ObjectWriter json_writer;

  json_writer.SetString(kLocalWrapKeyVersionField, kLocalWrapNoKeyVersion);
  json_writer.SetString(kLocalWrapEncryptedKeyField, encrypted_encoded_key);

  return json_writer.Serialize();
}
// Deserializes a wrapped-key JSON document (keyed by the kLocalWrap* field
// constants) back into a LocalKeyWrap. Throws ParquetException when the
// document cannot be parsed or a required field is missing.
LocalWrapKmsClient::LocalKeyWrap LocalWrapKmsClient::LocalKeyWrap::Parse(
    const std::string& wrapped_key) {
  ObjectParser json_parser;
  auto status = json_parser.Parse(wrapped_key);
  if (!status.ok()) {
    throw ParquetException("Failed to parse local key wrap json " + wrapped_key);
  }
  PARQUET_ASSIGN_OR_THROW(const auto master_key_version,
                          json_parser.GetString(kLocalWrapKeyVersionField));
  PARQUET_ASSIGN_OR_THROW(const auto encrypted_encoded_key,
                          json_parser.GetString(kLocalWrapEncryptedKeyField));

  return LocalWrapKmsClient::LocalKeyWrap(std::move(master_key_version),
                                          std::move(encrypted_encoded_key));
}
// Constructs the client; the master key cache starts empty, so master keys
// are fetched lazily from the KMS server on first use.
LocalWrapKmsClient::LocalWrapKmsClient(const KmsConnectionConfig& kms_connection_config)
    : kms_connection_config_(kms_connection_config) {
  master_key_cache_.Clear();
}
// Wraps (encrypts) key_bytes locally with the master key named by
// master_key_identifier and returns the serialized local-wrap JSON. The
// master key is fetched from the server at most once per identifier and
// memoized in master_key_cache_; the identifier doubles as the AAD for the
// local encryption.
std::string LocalWrapKmsClient::WrapKey(const std::string& key_bytes,
                                        const std::string& master_key_identifier) {
  const auto master_key = master_key_cache_.GetOrInsert(
      master_key_identifier, [this, master_key_identifier]() -> std::string {
        return this->GetKeyFromServer(master_key_identifier);
      });
  const auto& aad = master_key_identifier;

  const auto encrypted_encoded_key =
      internal::EncryptKeyLocally(key_bytes, master_key, aad);
  return LocalKeyWrap::CreateSerialized(encrypted_encoded_key);
}
// Unwraps (decrypts) a wrapped-key JSON document produced by WrapKey. Local
// wrapping records no real master key version, so any version other than the
// kLocalWrapNoKeyVersion sentinel is rejected.
std::string LocalWrapKmsClient::UnwrapKey(const std::string& wrapped_key,
                                          const std::string& master_key_identifier) {
  LocalKeyWrap key_wrap = LocalKeyWrap::Parse(wrapped_key);
  const std::string& master_key_version = key_wrap.master_key_version();
  if (kLocalWrapNoKeyVersion != master_key_version) {
    throw ParquetException("Master key versions are not supported for local wrapping: " +
                           master_key_version);
  }
  const std::string& encrypted_encoded_key = key_wrap.encrypted_encoded_key();
  // Same memoized master-key fetch as in WrapKey.
  const std::string master_key = master_key_cache_.GetOrInsert(
      master_key_identifier, [this, master_key_identifier]() -> std::string {
        return this->GetKeyFromServer(master_key_identifier);
      });

  const std::string& aad = master_key_identifier;
  return internal::DecryptKeyLocally(encrypted_encoded_key, master_key, aad);
}
// Fetches a master key from the KMS server and verifies it has a supported
// length before handing it back.
std::string LocalWrapKmsClient::GetKeyFromServer(const std::string& key_identifier) {
  std::string master_key = GetMasterKeyFromServer(key_identifier);
  const auto key_length_bits = static_cast<int32_t>(master_key.size() * 8);
  if (internal::ValidateKeyLength(key_length_bits)) {
    return master_key;
  }
  std::ostringstream ss;
  ss << "Wrong master key length : " << key_length_bits;
  throw ParquetException(ss.str());
}
} // namespace encryption
} // namespace parquet
|
knuu/competitive-programming
|
atcoder/abc/abc031_c.py
|
<filename>atcoder/abc/abc031_c.py
def solve(scores):
    """Return Takahashi's best achievable score (AtCoder ABC031 C).

    Takahashi picks a starting cell ``i``; Aoki then picks a distinct cell
    ``j`` so as to maximize *his own* total — the sum of the odd positions of
    ``scores[min(i, j):max(i, j) + 1]`` — breaking ties toward the smaller
    ``j``.  Takahashi scores the even positions of the same segment.

    :param list[int] scores: the score strip ``a_1 .. a_N`` (N >= 2)
    :return: the maximum score Takahashi can guarantee
    """
    n = len(scores)
    # Lower bound kept from the original: |a_k| <= 50 and N <= 50, so no
    # achievable score can be below -2500.
    best = -2500
    for i in range(n):
        outcomes = []
        for j in range(n):
            if i == j:
                continue
            lo, hi = min(i, j), max(i, j)
            segment = scores[lo:hi + 1]
            # (Aoki's total, -j, Takahashi's total): max() picks Aoki's best
            # move, ties broken toward the smaller j via the -j term.
            outcomes.append((sum(segment[1::2]), -j, sum(segment[::2])))
        _, _, takahashi = max(outcomes)
        best = max(best, takahashi)
    return best


if __name__ == "__main__":
    input()  # N — implied by the length of the next line
    A = [int(x) for x in input().split()]
    print(solve(A))
|
xaviertorgerson/North-Shore-Extension
|
CodeBase/Builds/Build_2/CTCTrainManager.java
|
/**
 * CTC-side bookkeeping for a single train: the track block it currently
 * occupies and the block it is routed to. Blocks are identified by integer
 * ids.
 */
public class CTCTrainManager{
    // TODO (kept from original notes): either add a line field or keep
    // separate lists for the red and green lines.

    // Id of the destination track block.
    private int destination;
    // Id of the track block the train currently occupies.
    private int currentBlock;

    /**
     * @param destination  id of the destination block
     * @param currentBlock id of the block the train is currently on
     */
    public CTCTrainManager(int destination, int currentBlock)
    {
        this.destination = destination;
        this.currentBlock = currentBlock;
    }
}
|
deeprave/pylib
|
pylib/timer.py
|
<gh_stars>0
# -*- coding: utf-8 -*-
"""
Implement a simple function timer
"""
from types import FunctionType
import datetime
def timer(func, *args, **kwargs):
    """Call ``func(*args, **kwargs)`` and measure how long the call takes.

    :param callable func: function to call
    :param args: positional arguments forwarded to ``func``
    :param kwargs: keyword arguments forwarded to ``func``
    :return: tuple ``(return_value, duration)`` where ``duration`` is a
        :class:`datetime.timedelta`
    """
    start = datetime.datetime.now()
    value = func(*args, **kwargs)
    end = datetime.datetime.now()
    # Keep full sub-second resolution: the original stripped microseconds
    # from both endpoints, which reported a duration of 0:00:00 for any call
    # faster than one second — useless for a function timer.
    return value, end - start
|
cebartling/gummi-bears-web
|
src/stories/storybookApolloClient.js
|
<reponame>cebartling/gummi-bears-web<gh_stars>0
import { ApolloClient, HttpLink, InMemoryCache } from "@apollo/client";
// `setContext` ships inside @apollo/client; the standalone apollo-link-context
// package is the deprecated Apollo Client 2.x edition of the same link.
import { setContext } from "@apollo/client/link/context";

// HTTP transport pointing at the GraphQL endpoint configured for Storybook.
const httpLink = new HttpLink({
  uri: process.env.REACT_APP_GRAPHQL_URL,
});

// Auth link that injects a static token taken from the environment into
// every request's Authorization header (empty header when unset).
const storybookAuthLink = setContext((_, { headers }) => {
  // get the authentication token from environment variable
  const token = process.env.REACT_APP_AUTH_TOKEN;
  // return the headers to the context so httpLink can read them
  return {
    headers: {
      ...headers,
      Authorization: token ? `Token ${token}` : "",
    },
  };
});

// Apollo client instance used by Storybook stories.
const client = new ApolloClient({
  cache: new InMemoryCache(),
  link: storybookAuthLink.concat(httpLink),
});

export default client;
|
Priba91/Meeting
|
Convoy Caravan Code parts/UIElements/TableView Cells/Chat/RightChatTableViewCell.h
|
<reponame>Priba91/Meeting
//
// RightChatTableViewCell.h
// <NAME>
//
// Created by Priba on 10/20/18.
// Copyright © 2018 Priba. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "CommentModel.h"
#import "MessageModel.h"

NS_ASSUME_NONNULL_BEGIN

/// Table-view cell for a chat entry rendered on the right-hand side
/// (presumably a message authored by the current user — confirm in the nib).
/// A cell can be populated either from a CommentModel or a MessageModel.
@interface RightChatTableViewCell : UITableViewCell

/// Avatar image of the message author.
@property (weak, nonatomic) IBOutlet UIImageView *profileImageView;
/// Display name of the message author.
@property (weak, nonatomic) IBOutlet UILabel *profileNameLbl;
/// Message body text.
@property (weak, nonatomic) IBOutlet UILabel *messageLbl;
/// Timestamp of the message.
@property (weak, nonatomic) IBOutlet UILabel *timeLbl;
/// Background/bubble view behind the message content.
@property (weak, nonatomic) IBOutlet UIView *backView;
/// Optional image attachment shown with the message.
@property (weak, nonatomic) IBOutlet UIImageView *chatImageView;
/// Height constraint for chatImageView — presumably collapsed when there is
/// no attachment; confirm in the implementation.
@property (weak, nonatomic) IBOutlet NSLayoutConstraint *chatImageHeight;

/// Fills the cell's outlets from a comment.
- (void)populateWithComment:(CommentModel*)comment;
/// Fills the cell's outlets from a direct message.
- (void)populateWithMessage:(MessageModel*)message;

@end

NS_ASSUME_NONNULL_END
|
wyan/ack
|
lang/cem/libcc/stdio/getchar.c
|
<gh_stars>100-1000
/* $Id$ */
#include <stdio.h>

#undef getchar

/*
 * getchar - read and return the next character from standard input, or EOF
 * at end of file / on error. The macro version from <stdio.h> is #undef'd
 * above so this out-of-line definition exists for callers that take the
 * address of getchar. The original K&R implicit-int declaration is invalid
 * in modern C, so the ANSI prototype form is used.
 */
int
getchar(void)
{
	return getc(stdin);
}
|
real-digital/esque-wire
|
esque_wire/protocol/serializers/header.py
|
from ..structs.header import RequestHeader, ResponseHeader
from .constants import apiKeySerializer
from .generic import ClassSerializer, Schema
from .primitive import int16Serializer, int32Serializer, nullableStringSerializer

# Wire schema for the Kafka request header — presumably serialized in
# declaration order (api_key, api_version, correlation_id, client_id);
# confirm against ClassSerializer.
requestHeaderSchema: Schema = [
    ("api_key", apiKeySerializer),
    ("api_version", int16Serializer),
    ("correlation_id", int32Serializer),
    ("client_id", nullableStringSerializer),
]

# Serializer mapping the schema onto RequestHeader instances.
requestHeaderSerializer = ClassSerializer(RequestHeader, requestHeaderSchema)

# The response header carries only the correlation id echoing the request.
responseHeaderSchema: Schema = [("correlation_id", int32Serializer)]
responseHeaderSerializer = ClassSerializer(ResponseHeader, responseHeaderSchema)
|
wanxiaolong/evc
|
src/main/java/com/my/evc/mapper/NoticeMapper.java
|
<reponame>wanxiaolong/evc
package com.my.evc.mapper;
import java.util.List;
import org.mybatis.spring.annotation.MapperScan;
import com.my.evc.model.Notice;
@MapperScan
// NOTE(review): @MapperScan normally goes on a Spring @Configuration class to
// scan a package for mappers; on the mapper interface itself one would expect
// @Mapper — confirm this wiring actually registers the bean.
public interface NoticeMapper extends BaseMapper<Notice> {

    /**
     * Queries all notices.
     */
    public List<Notice> findAll();
}
|
Pranshu-Kumbhare/500-Buck-Manipulator
|
sources/com/vuforia/ar/pl/Camera1_Preview.java
|
<filename>sources/com/vuforia/ar/pl/Camera1_Preview.java
package com.vuforia.ar.pl;
import android.app.Activity;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.Area;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import com.vuforia.PIXEL_FORMAT;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Vector;
import org.json.JSONException;
import org.json.JSONObject;
public class Camera1_Preview implements PreviewCallback {
private static final int AR_CAMERA_DIRECTION_BACK = 268443665;
private static final int AR_CAMERA_DIRECTION_FRONT = 268443666;
private static final int AR_CAMERA_DIRECTION_UNKNOWN = 268443664;
private static final int AR_CAMERA_EXPOSUREMODE_AUTO = 805314560;
private static final int AR_CAMERA_EXPOSUREMODE_CONTINUOUSAUTO = 805322752;
private static final int AR_CAMERA_EXPOSUREMODE_LOCKED = 805310464;
private static final int AR_CAMERA_FOCUSMODE_AUTO = 805306400;
private static final int AR_CAMERA_FOCUSMODE_CONTINUOUSAUTO = 805306432;
private static final int AR_CAMERA_FOCUSMODE_FIXED = 805306880;
private static final int AR_CAMERA_FOCUSMODE_INFINITY = 805306624;
private static final int AR_CAMERA_FOCUSMODE_MACRO = 805306496;
private static final int AR_CAMERA_FOCUSMODE_NORMAL = 805306384;
private static final int AR_CAMERA_IMAGE_FORMAT_ARGB32 = 268439813;
private static final int AR_CAMERA_IMAGE_FORMAT_ARGB8888 = 268439813;
private static final int AR_CAMERA_IMAGE_FORMAT_BGR24 = 268439822;
private static final int AR_CAMERA_IMAGE_FORMAT_BGR888 = 268439822;
private static final int AR_CAMERA_IMAGE_FORMAT_BGRA32 = 268439814;
private static final int AR_CAMERA_IMAGE_FORMAT_BGRA8888 = 268439814;
private static final int AR_CAMERA_IMAGE_FORMAT_LUM = 268439809;
private static final int AR_CAMERA_IMAGE_FORMAT_NV12 = 268439815;
private static final int AR_CAMERA_IMAGE_FORMAT_NV16 = 268439816;
private static final int AR_CAMERA_IMAGE_FORMAT_NV21 = 268439817;
private static final int AR_CAMERA_IMAGE_FORMAT_RGB24 = 268439811;
private static final int AR_CAMERA_IMAGE_FORMAT_RGB565 = 268439810;
private static final int AR_CAMERA_IMAGE_FORMAT_RGB888 = 268439811;
private static final int AR_CAMERA_IMAGE_FORMAT_RGBA32 = 268439812;
private static final int AR_CAMERA_IMAGE_FORMAT_RGBA4444 = 268439821;
private static final int AR_CAMERA_IMAGE_FORMAT_RGBA5551 = 268439820;
private static final int AR_CAMERA_IMAGE_FORMAT_RGBA8888 = 268439812;
private static final int AR_CAMERA_IMAGE_FORMAT_UNKNOWN = 268439808;
private static final int AR_CAMERA_IMAGE_FORMAT_YV12 = 268439818;
private static final int AR_CAMERA_IMAGE_FORMAT_YV16 = 268439819;
private static final int AR_CAMERA_PARAMTYPE_BASE = 536870912;
private static final int AR_CAMERA_PARAMTYPE_BRIGHTNESSRANGE = 537133056;
private static final int AR_CAMERA_PARAMTYPE_BRIGHTNESSVALUE = 537001984;
private static final int AR_CAMERA_PARAMTYPE_CONTRASTRANGE = 537919488;
private static final int AR_CAMERA_PARAMTYPE_CONTRASTVALUE = 537395200;
private static final int AR_CAMERA_PARAMTYPE_EXPOSUREMODE = 536870944;
private static final int AR_CAMERA_PARAMTYPE_EXPOSURETIME = 536871168;
private static final int AR_CAMERA_PARAMTYPE_EXPOSURETIMERANGE = 536871424;
private static final int AR_CAMERA_PARAMTYPE_EXPOSUREVALUE = 536871936;
private static final int AR_CAMERA_PARAMTYPE_EXPOSUREVALUERANGE = 536872960;
private static final int AR_CAMERA_PARAMTYPE_FOCUSMODE = 536870914;
private static final int AR_CAMERA_PARAMTYPE_FOCUSRANGE = 536870920;
private static final int AR_CAMERA_PARAMTYPE_FOCUSREGION = 536870928;
private static final int AR_CAMERA_PARAMTYPE_FOCUSVALUE = 536870916;
private static final int AR_CAMERA_PARAMTYPE_ISO = 536870976;
private static final int AR_CAMERA_PARAMTYPE_ISORANGE = 536871040;
private static final int AR_CAMERA_PARAMTYPE_LENS_IS_ADJUSTING = 545259520;
private static final int AR_CAMERA_PARAMTYPE_RECORDING_HINT = 541065216;
private static final int AR_CAMERA_PARAMTYPE_ROTATION = 538968064;
private static final int AR_CAMERA_PARAMTYPE_TORCHMODE = 536870913;
private static final int AR_CAMERA_PARAMTYPE_VIDEO_STABILIZATION = 553648128;
private static final int AR_CAMERA_PARAMTYPE_WHITEBALANCEMODE = 536875008;
private static final int AR_CAMERA_PARAMTYPE_WHITEBALANCERANGE = 536887296;
private static final int AR_CAMERA_PARAMTYPE_WHITEBALANCEVALUE = 536879104;
private static final int AR_CAMERA_PARAMTYPE_ZOOMRANGE = 536936448;
private static final int AR_CAMERA_PARAMTYPE_ZOOMVALUE = 536903680;
private static final int AR_CAMERA_PARAMVALUE_BASE = 805306368;
private static final int AR_CAMERA_STATUS_CAPTURE_RUNNING = 268443651;
private static final int AR_CAMERA_STATUS_OPENED = 268443650;
private static final int AR_CAMERA_STATUS_UNINITIALIZED = 268443649;
private static final int AR_CAMERA_STATUS_UNKNOWN = 268443648;
// --- AR_CAMERA_* constants: parameter values shared with the native PL layer
// (values presumably mirror the native header definitions — TODO confirm).
private static final int AR_CAMERA_TORCHMODE_AUTO = 805306372;
private static final int AR_CAMERA_TORCHMODE_CONTINUOUSAUTO = 805306376;
private static final int AR_CAMERA_TORCHMODE_OFF = 805306369;
private static final int AR_CAMERA_TORCHMODE_ON = 805306370;
private static final int AR_CAMERA_TYPE_MONO = 268447761;
private static final int AR_CAMERA_TYPE_STEREO = 268447762;
private static final int AR_CAMERA_TYPE_UNKNOWN = 268447760;
private static final int AR_CAMERA_WHITEBALANCEMODE_AUTO = 807403520;
private static final int AR_CAMERA_WHITEBALANCEMODE_CONTINUOUSAUTO = 809500672;
private static final int AR_CAMERA_WHITEBALANCEMODE_LOCKED = 806354944;
// --- Indices into the caps array built by getCameraCapabilities() ---
private static final int CAMERA_CAPSINFO_VALUE_NUM_SUPPORTED_FRAMERATES = 4;
private static final int CAMERA_CAPSINFO_VALUE_NUM_SUPPORTED_IMAGEFORMATS = 5;
private static final int CAMERA_CAPSINFO_VALUE_NUM_SUPPORTED_IMAGESIZES = 3;
private static final int CAMERA_CAPSINFO_VALUE_SUPPORTED_PARAMVALUES = 2;
private static final int CAMERA_CAPSINFO_VALUE_SUPPORTED_QUERYABLE_PARAMS = 0;
private static final int CAMERA_CAPSINFO_VALUE_SUPPORTED_SETTABLE_PARAMS = 1;
// --- Indices into the capture-info arrays used by get/setCaptureInfo() ---
private static final int CAMERA_CAPTUREINFO_VALUE_FORMAT = 2;
private static final int CAMERA_CAPTUREINFO_VALUE_FRAMERATE = 3;
private static final int CAMERA_CAPTUREINFO_VALUE_HEIGHT = 1;
private static final int CAMERA_CAPTUREINFO_VALUE_PREVIEWSURFACEENABLED = 4;
private static final int CAMERA_CAPTUREINFO_VALUE_WIDTH = 0;
// Flat table of (Android ImageFormat value, PL AR_CAMERA_IMAGE_FORMAT_* value)
// pairs consumed by translateImageFormat().
private static final int[] CAMERA_IMAGE_FORMAT_CONVERSIONTABLE = new int[]{16, AR_CAMERA_IMAGE_FORMAT_NV16, 17, AR_CAMERA_IMAGE_FORMAT_NV21, 4, AR_CAMERA_IMAGE_FORMAT_RGB565, 842094169, AR_CAMERA_IMAGE_FORMAT_YV12};
// Direction flags for translateImageFormat().
private static boolean CONVERT_FORMAT_TO_ANDROID = false;
private static boolean CONVERT_FORMAT_TO_PL = true;
private static final String FOCUS_MODE_NORMAL = "normal";
private static final String MODULENAME = "Camera1_Preview";
// Preview callback buffer management (see setupPreviewBuffer()).
private static final int NUM_CAPTURE_BUFFERS = 2;
private static final int NUM_CAPTURE_BUFFERS_TO_ADD = 2;
// Camera.open() retry policy (see open()).
private static final int NUM_MAX_CAMERAOPEN_RETRY = 10;
// Samsung vendor parameters used for 60/120 fps capture (see setCameraPreviewFps()).
private static final String SAMSUNG_PARAM_FAST_FPS_MODE = "fast-fps-mode";
private static final String SAMSUNG_PARAM_VRMODE = "vrmode";
private static final String SAMSUNG_PARAM_VRMODE_SUPPORTED = "vrmode-supported";
private static final int TIME_CAMERAOPEN_RETRY_DELAY_MS = 250;
private static final int _NUM_CAMERA_CAPSINFO_VALUE_ = 6;
private static final int _NUM_CAMERA_CAPTUREINFO_VALUE_ = 5;
// One entry per opened camera device (index = the "camera handle" returned by open()).
private Vector<CameraCacheInfo> cameraCacheInfo = null;
// Reverse map: Camera instance -> index into cameraCacheInfo (used by callbacks).
private HashMap<Camera, Integer> cameraCacheInfoIndexCache = null;
// Hosts preview surfaces on pre-API-11 devices; injected via setSurfaceManager().
private SurfaceManager surfaceManager = null;
/* renamed from: com.vuforia.ar.pl.Camera1_Preview$1 */
// Autofocus-completion callback: clears the isAutoFocusing flag on the cache
// entry belonging to the camera whose focus sweep just finished.
class C00681 implements AutoFocusCallback {
C00681() {
}
public void onAutoFocus(boolean z, Camera camera) {
// Map the Camera instance back to its cache slot; ignore unknown cameras.
Object obj = Camera1_Preview.this.cameraCacheInfoIndexCache.get(camera);
if (obj != null) {
// "access$100" is the decompiler's name for the synthetic accessor of getCameraCacheInfo.
CameraCacheInfo access$100 = Camera1_Preview.this.getCameraCacheInfo(((Integer) obj).intValue());
if (access$100 != null) {
access$100.isAutoFocusing = false;
}
}
}
}
// Per-device cache entry: the open Android camera plus the capture
// configuration, preview buffers and capability data associated with it.
public class CameraCacheInfo {
byte[][] buffer; // preview callback buffers handed to the driver (NUM_CAPTURE_BUFFERS of them)
int bufferFormatPL; // pixel format of the buffers, PL (AR_CAMERA_IMAGE_FORMAT_*) encoding
int bufferHeight; // height the callback buffers were sized for
int bufferSize; // byte size of each callback buffer
int bufferWidth; // width the callback buffers were sized for
Camera camera; // open Android camera, null once closed
int[] caps; // packed capability bitmasks + size/fps/format lists (see getCameraCapabilities)
long deviceHandle; // opaque native handle passed back through newFrameAvailable
int deviceID; // Android camera id
boolean isAutoFocusing; // true while an autofocus sweep is in flight
int overrideFormatAndroid; // override capture format (Android encoding), 0 = none
int overrideHeight; // override capture height, 0 = none
int overrideWidth; // override capture width, 0 = none
int requestFormatAndroid; // requested capture format (Android encoding), 0 = none
int requestHeight; // requested capture height, 0 = none
int requestWidth; // requested capture width, 0 = none
int status; // AR_CAMERA_STATUS_* lifecycle state
CameraSurface surface; // pre-API-11 preview surface (via SurfaceManager), else null
SurfaceTexture surfaceTexture; // API-11+ dummy preview texture, else null
}
// Native hook: delivers one preview frame (device handle, cache index, width,
// height, PL pixel format, pixel data, System.nanoTime() timestamp) to the PL layer.
private native void newFrameAvailable(long j, int i, int i2, int i3, int i4, byte[] bArr, long j2);
private boolean checkPermission() {
    // Reports whether the hosting activity currently holds android.permission.CAMERA.
    // Any failure to reach the activity or package manager counts as "not granted".
    try {
        Activity activity = SystemTools.getActivityFromNative();
        if (activity == null) {
            return false;
        }
        int granted = activity.getPackageManager()
                .checkPermission("android.permission.CAMERA", activity.getPackageName());
        return granted == 0;
    } catch (Exception e) {
        return false;
    }
}
// Resolves an Android camera id from the requested index (i; negative = any),
// camera type (i2) and AR facing direction (i3). Returns -1 and sets a system
// error code when no camera matches or the request is invalid.
private int getCameraDeviceIndex(int i, int i2, int i3) {
int i4 = 1;
int i5 = 0;
// NOTE(review): empty body — the camera-type (mono/stereo) handling appears to
// have been lost in decompilation; i2 is otherwise unused here.
if (i2 != AR_CAMERA_TYPE_UNKNOWN) {
}
if (SystemTools.checkMinimumApiLevel(9)) {
// Map the AR direction constant onto CameraInfo.facing (-1 = match any,
// 0 = back, 1 = front; i4 already defaults to 1 for FRONT).
switch (i3) {
case AR_CAMERA_DIRECTION_UNKNOWN /*268443664*/:
i4 = -1;
break;
case AR_CAMERA_DIRECTION_BACK /*268443665*/:
i4 = 0;
break;
case AR_CAMERA_DIRECTION_FRONT /*268443666*/:
break;
default:
SystemTools.setSystemErrorCode(2);
return -1;
}
int numberOfCameras = Camera.getNumberOfCameras();
while (i5 < numberOfCameras) {
CameraInfo cameraInfo = new CameraInfo();
try {
Camera.getCameraInfo(i5, cameraInfo);
// Accept the first camera whose facing matches (or any, when i4 < 0)
// and whose id matches the requested index (or any, when i < 0).
if ((i4 < 0 || i4 == cameraInfo.facing) && (i < 0 || i == i5)) {
return i5;
}
} catch (Exception e) {
// Skip ids the driver refuses to describe.
}
i5++;
}
SystemTools.setSystemErrorCode(6);
return -1;
} else if (i3 == AR_CAMERA_DIRECTION_FRONT) {
// Pre-API-9 devices only expose a single (back) camera.
SystemTools.setSystemErrorCode(2);
return -1;
} else if (i < 1) {
return 0;
} else {
SystemTools.setSystemErrorCode(2);
return -1;
}
}
private Parameters getCameraParameters(Camera camera) {
    // Best-effort fetch of the camera's current parameter set; null on driver failure.
    try {
        return camera.getParameters();
    } catch (Exception e) {
        return null;
    }
}
private CameraCacheInfo getCameraCacheInfo(int i) {
    // Bounds-checked lookup into the per-device cache; null for out-of-range slots.
    boolean inRange = i >= 0 && i < this.cameraCacheInfo.size();
    return inRange ? (CameraCacheInfo) this.cameraCacheInfo.get(i) : null;
}
private boolean setCustomCameraParams(Parameters parameters, String str) {
    // Applies a JSON dictionary of {key: String|Integer} pairs onto the camera
    // parameters. Returns false on malformed JSON or any unsupported value type.
    JSONObject json;
    try {
        json = new JSONObject(str);
    } catch (JSONException e) {
        return false;
    }
    Iterator keys = json.keys();
    while (keys.hasNext()) {
        String key = (String) keys.next();
        Object value;
        try {
            value = json.get(key);
        } catch (JSONException e) {
            return false;
        }
        if (value instanceof String) {
            parameters.set(key, (String) value);
        } else if (value instanceof Integer) {
            parameters.set(key, ((Integer) value).intValue());
        } else {
            // Only string and integer values are representable as camera parameters.
            return false;
        }
    }
    return true;
}
private boolean setCameraPreviewFps(int i, Parameters parameters) {
    // Selects and applies a preview FPS range for the requested frame rate i.
    // Samsung high-speed capture (60/120 fps on devices advertising
    // "vrmode-supported") uses the vendor "vrmode"/"fast-fps-mode" parameters;
    // otherwise the narrowest supported range whose minimum equals i*1000 is
    // chosen. Returns false when no matching range exists.
    List<int[]> supportedPreviewFpsRange = parameters.getSupportedPreviewFpsRange();
    int targetFps = i * 1000; // Android FPS ranges are scaled by 1000
    int[] chosenRange = null;
    if ((i == 60 || i == 120) && "true".equalsIgnoreCase(parameters.get(SAMSUNG_PARAM_VRMODE_SUPPORTED))) {
        chosenRange = new int[2];
        parameters.set(SAMSUNG_PARAM_VRMODE, 1);
        parameters.setRecordingHint(true);
        parameters.set("focus-mode", "continuous-video");
        if (i == 60) {
            parameters.set(SAMSUNG_PARAM_FAST_FPS_MODE, 1);
            chosenRange[0] = 60000;
            chosenRange[1] = 60000;
        }
        if (i == 120) {
            parameters.set(SAMSUNG_PARAM_FAST_FPS_MODE, 2);
            chosenRange[0] = 120000;
            chosenRange[1] = 120000;
        }
    } else {
        // Leaving Samsung high-speed mode: reset the vendor flags if active.
        if (!(!"true".equalsIgnoreCase(parameters.get(SAMSUNG_PARAM_VRMODE_SUPPORTED)) || parameters.get(SAMSUNG_PARAM_FAST_FPS_MODE) == null || parameters.getInt(SAMSUNG_PARAM_FAST_FPS_MODE) == 0)) {
            parameters.set(SAMSUNG_PARAM_VRMODE, 0);
            parameters.set(SAMSUNG_PARAM_FAST_FPS_MODE, 0);
        }
        // BUGFIX: the decompiled loop read an uninitialized local (iArr22),
        // which does not compile. Reconstructed intent (consistent with the
        // Integer.MAX_VALUE sentinel in the original comparison): pick the
        // narrowest supported range that starts exactly at the requested rate.
        int bestDelta = Integer.MAX_VALUE;
        for (int[] range : supportedPreviewFpsRange) {
            int delta = range[1] - range[0];
            if (range[0] == targetFps && delta < bestDelta) {
                bestDelta = delta;
                chosenRange = range;
            }
        }
    }
    if (chosenRange == null) {
        return false;
    }
    parameters.setPreviewFpsRange(chosenRange[0], chosenRange[1]);
    return true;
}
// Applies requested capture settings onto the camera parameters.
// iArr (request) layout per CAMERA_CAPTUREINFO_VALUE_*: [width, height,
// format (PL), fps, previewSurfaceEnabled]; iArr2 (override) supplies
// [width, height, format] values that win over the request when present.
// Returns false on any setup failure.
private boolean setCameraCaptureParams(CameraCacheInfo cameraCacheInfo, Parameters parameters, int[] iArr, int[] iArr2) {
if (!(iArr == null && iArr2 == null)) {
// Record the effective (override-preferred) geometry and format.
cameraCacheInfo.overrideWidth = iArr2 != null ? iArr2[0] : iArr[0];
cameraCacheInfo.overrideHeight = iArr2 != null ? iArr2[1] : iArr[1];
cameraCacheInfo.overrideFormatAndroid = translateImageFormat(iArr2 != null ? iArr2[2] : iArr[2], CONVERT_FORMAT_TO_ANDROID);
}
if (iArr == null) {
return true;
}
cameraCacheInfo.requestWidth = iArr[0];
cameraCacheInfo.requestHeight = iArr[1];
cameraCacheInfo.requestFormatAndroid = translateImageFormat(iArr[2], CONVERT_FORMAT_TO_ANDROID);
int i = iArr[3];
try {
if (cameraCacheInfo.requestWidth > 0 && cameraCacheInfo.requestHeight > 0) {
parameters.setPreviewSize(cameraCacheInfo.requestWidth, cameraCacheInfo.requestHeight);
}
if (i > 0) {
// Prefer the FPS-range API (API 8+); fall back to the legacy fixed rate.
if (!SystemTools.checkMinimumApiLevel(8)) {
parameters.setPreviewFrameRate(i);
} else if (!setCameraPreviewFps(i, parameters)) {
parameters.setPreviewFrameRate(i);
}
}
if (cameraCacheInfo.requestFormatAndroid != 0) {
parameters.setPreviewFormat(cameraCacheInfo.requestFormatAndroid);
}
// 'i' is reused below as the preview-surface-enabled flag (decompiler artifact).
if (iArr[4] > 0) {
i = 1;
} else {
i = 0;
}
if (i != 0) {
if (SystemTools.checkMinimumApiLevel(11)) {
// API 11+: bind a dummy SurfaceTexture so the driver delivers frames
// without a visible preview surface.
try {
cameraCacheInfo.surfaceTexture = new SurfaceTexture(-1);
try {
cameraCacheInfo.camera.setPreviewTexture(cameraCacheInfo.surfaceTexture);
} catch (Exception e) {
// setPreviewTexture failure is tolerated (best effort).
}
} catch (Exception e2) {
return false;
}
} else if (this.surfaceManager == null) {
// Pre-API-11 needs a real preview surface; without a manager we cannot proceed.
return false;
} else {
if (!this.surfaceManager.addCameraSurface(cameraCacheInfo)) {
return false;
}
}
}
return true;
} catch (Exception e3) {
return false;
}
}
// Works around a Samsung camera-driver bug where enabling "fast-fps-mode"
// silently changes the preview size: if the active size no longer matches the
// request, drop back to 30 fps, re-apply the requested size, and verify.
// Returns false when the workaround cannot restore the requested size.
private boolean checkSamsungHighFPS(CameraCacheInfo cameraCacheInfo) {
Parameters cameraParameters = getCameraParameters(cameraCacheInfo.camera);
if (cameraParameters == null) {
SystemTools.setSystemErrorCode(6);
return false;
}
// Only applies on Samsung VR-mode devices with high-fps active and a size mismatch.
if ("true".equalsIgnoreCase(cameraParameters.get(SAMSUNG_PARAM_VRMODE_SUPPORTED)) && cameraCacheInfo.requestWidth > 0 && cameraCacheInfo.requestHeight > 0 && cameraParameters.get(SAMSUNG_PARAM_FAST_FPS_MODE) != null && cameraParameters.getInt(SAMSUNG_PARAM_FAST_FPS_MODE) != 0 && !(cameraCacheInfo.requestWidth == cameraParameters.getPreviewSize().width && cameraCacheInfo.requestHeight == cameraParameters.getPreviewSize().height)) {
DebugLog.LOGW(MODULENAME, "Detected Samsung high fps camera driver bug.");
DebugLog.LOGW(MODULENAME, "Preview size doesn't match request; width " + cameraCacheInfo.requestWidth + "!=" + cameraParameters.getPreviewSize().width + " or height " + cameraCacheInfo.requestHeight + "!=" + cameraParameters.getPreviewSize().height);
setCameraPreviewFps(30, cameraParameters);
cameraParameters.setPreviewSize(cameraCacheInfo.requestWidth, cameraCacheInfo.requestHeight);
try {
cameraCacheInfo.camera.setParameters(cameraParameters);
// Re-read and confirm the driver accepted the corrected size.
cameraParameters = getCameraParameters(cameraCacheInfo.camera);
if (!(cameraCacheInfo.requestWidth == cameraParameters.getPreviewSize().width && cameraCacheInfo.requestHeight == cameraParameters.getPreviewSize().height)) {
DebugLog.LOGE(MODULENAME, "Unable to workaround Samsung high fps camera driver bug.");
DebugLog.LOGE(MODULENAME, "Preview size doesn't match request; width " + cameraCacheInfo.requestWidth + "!=" + cameraParameters.getPreviewSize().width + " or height " + cameraCacheInfo.requestHeight + "!=" + cameraParameters.getPreviewSize().height);
return false;
}
} catch (Exception e) {
SystemTools.setSystemErrorCode(6);
return false;
}
}
return true;
}
// Sizes and installs the preview callback buffers for the camera's effective
// (request or override) preview size/format, then registers this object as the
// buffered preview callback. Existing buffers are reused when large enough.
private boolean setupPreviewBuffer(CameraCacheInfo cameraCacheInfo) {
int i = 0;
Parameters cameraParameters = getCameraParameters(cameraCacheInfo.camera);
if (cameraParameters == null) {
return false;
}
try {
// When an override differs from the request, the override wins; otherwise
// use what the driver actually reports.
cameraCacheInfo.bufferWidth = cameraCacheInfo.requestWidth == cameraCacheInfo.overrideWidth ? cameraParameters.getPreviewSize().width : cameraCacheInfo.overrideWidth;
cameraCacheInfo.bufferHeight = cameraCacheInfo.requestHeight == cameraCacheInfo.overrideHeight ? cameraParameters.getPreviewSize().height : cameraCacheInfo.overrideHeight;
// NOTE(review): 'previewFormat' is reused for the Android format, then the
// bits-per-pixel, then the buffer byte size — a decompiler artifact.
int previewFormat = cameraCacheInfo.requestFormatAndroid == cameraCacheInfo.overrideFormatAndroid ? cameraParameters.getPreviewFormat() : cameraCacheInfo.overrideFormatAndroid;
cameraCacheInfo.bufferFormatPL = translateImageFormat(previewFormat, CONVERT_FORMAT_TO_PL);
try {
PixelFormat pixelFormat = new PixelFormat();
PixelFormat.getPixelFormatInfo(previewFormat, pixelFormat);
previewFormat = pixelFormat.bitsPerPixel;
} catch (Exception e) {
// Fall back to the local bpp table when the platform query fails.
previewFormat = getBitsPerPixel(previewFormat);
if (previewFormat == 0) {
return false;
}
}
// Buffer size = pixels * bpp / 8, plus 4096 bytes of slack.
previewFormat = ((previewFormat * (cameraCacheInfo.bufferWidth * cameraCacheInfo.bufferHeight)) / 8) + 4096;
if (previewFormat <= cameraCacheInfo.bufferSize) {
// Existing buffers are big enough; just (re)install the callback.
cameraCacheInfo.camera.setPreviewCallbackWithBuffer(this);
return true;
}
cameraCacheInfo.buffer = new byte[2][];
while (i < 2) {
cameraCacheInfo.buffer[i] = new byte[previewFormat];
// NOTE(review): this inner check is always true (decompiler artifact).
if (i < 2) {
cameraCacheInfo.camera.addCallbackBuffer(cameraCacheInfo.buffer[i]);
}
i++;
}
cameraCacheInfo.bufferSize = previewFormat;
cameraCacheInfo.camera.setPreviewCallbackWithBuffer(this);
System.gc();
return true;
} catch (Exception e2) {
return false;
}
}
// Sets or clears a single capability bit in caps[i]. The bit index is the
// log2 of the value with its AR_CAMERA_*_BASE prefix masked off: slots 0/1
// (queryable/settable params) use AR_CAMERA_PARAMTYPE_BASE, slot 2 (param
// values) uses AR_CAMERA_PARAMVALUE_BASE. Any other slot index is ignored.
// BUGFIX: the decompiled clear-branch referenced 'iArr' outside the scope
// that declared it, which does not compile; both branches now operate
// directly on cameraCacheInfo.caps with identical bit arithmetic.
private void setCameraCapsBit(CameraCacheInfo cameraCacheInfo, int i, int i2, boolean z) {
    int base;
    switch (i) {
        case 0:
        case 1:
            base = AR_CAMERA_PARAMTYPE_BASE;
            break;
        case 2:
            base = AR_CAMERA_PARAMVALUE_BASE;
            break;
        default:
            return;
    }
    // (base ^ -1) == ~base: strip the base prefix, then take log2 of the flag.
    int bit = (int) (Math.log((double) ((base ^ -1) & i2)) / Math.log(2.0d));
    if (z) {
        cameraCacheInfo.caps[i] |= (1 << bit);
    } else {
        cameraCacheInfo.caps[i] &= ~(1 << bit);
    }
}
private int translateImageFormat(int i, boolean z) {
    // Maps between Android ImageFormat values and PL AR_CAMERA_IMAGE_FORMAT_*
    // values by walking the paired conversion table in the requested direction.
    // Unknown inputs map to AR_CAMERA_IMAGE_FORMAT_UNKNOWN (to PL) or 0 (to Android).
    int pairCount = CAMERA_IMAGE_FORMAT_CONVERSIONTABLE.length / 2;
    boolean toPL = (z == CONVERT_FORMAT_TO_PL);
    for (int pair = 0; pair < pairCount; pair++) {
        int source = toPL
                ? CAMERA_IMAGE_FORMAT_CONVERSIONTABLE[pair * 2]
                : CAMERA_IMAGE_FORMAT_CONVERSIONTABLE[(pair * 2) + 1];
        if (i == source) {
            return toPL
                    ? CAMERA_IMAGE_FORMAT_CONVERSIONTABLE[(pair * 2) + 1]
                    : CAMERA_IMAGE_FORMAT_CONVERSIONTABLE[pair * 2];
        }
    }
    return toPL ? AR_CAMERA_IMAGE_FORMAT_UNKNOWN : 0;
}
int getBitsPerPixel(int i) {
    // Bits-per-pixel for the Android image formats this module understands
    // (see CAMERA_IMAGE_FORMAT_CONVERSIONTABLE); 0 for unknown formats.
    if (i == 4 || i == PIXEL_FORMAT.RGBA8888) {
        // RGB565 (4) and the 16-bit format aliased by PIXEL_FORMAT.RGBA8888 (16).
        return 16;
    }
    if (i == 17 || i == 842094169) {
        // NV21 (17) and YV12 (842094169): 12-bit planar YUV.
        return 12;
    }
    return 0;
}
// Camera PreviewCallback: forwards the frame plus a nanosecond timestamp to the
// native layer, then returns the buffer to the driver for reuse.
public void onPreviewFrame(byte[] bArr, Camera camera) {
Object obj;
long nanoTime = System.nanoTime();
// NOTE(review): the API-18 branches below are all identical — presumably
// android.os.Trace begin/end calls were stripped during decompilation.
if (SystemTools.checkMinimumApiLevel(18)) {
obj = this.cameraCacheInfoIndexCache.get(camera);
} else {
obj = this.cameraCacheInfoIndexCache.get(camera);
}
if (obj != null) {
int intValue = ((Integer) obj).intValue();
CameraCacheInfo cameraCacheInfo = getCameraCacheInfo(intValue);
if (cameraCacheInfo != null) {
newFrameAvailable(cameraCacheInfo.deviceHandle, intValue, cameraCacheInfo.bufferWidth, cameraCacheInfo.bufferHeight, cameraCacheInfo.bufferFormatPL, bArr, nanoTime);
// Hand the buffer back so the driver can fill it with the next frame.
camera.addCallbackBuffer(bArr);
if (!SystemTools.checkMinimumApiLevel(18)) {
}
} else if (!SystemTools.checkMinimumApiLevel(18)) {
}
} else if (!SystemTools.checkMinimumApiLevel(18)) {
}
}
// Initializes the per-device cache and the Camera -> cache-index reverse map.
// Always succeeds. IDIOM: parameterized the raw Vector/HashMap constructions
// to match the fields' declared generic types (removes unchecked assignments).
public boolean init() {
    this.cameraCacheInfo = new Vector<CameraCacheInfo>();
    this.cameraCacheInfoIndexCache = new HashMap<Camera, Integer>();
    return true;
}
// Injects the SurfaceManager that hosts real preview surfaces on pre-API-11
// devices (used as the SurfaceTexture fallback in setCameraCaptureParams).
public void setSurfaceManager(SurfaceManager surfaceManager) {
this.surfaceManager = surfaceManager;
}
public int getNumberOfCameras() {
    // Number of cameras on the device, or -1 when the camera permission is
    // missing or the platform query fails. Pre-API-9 devices report 0 or 1
    // based on the android.hardware.camera system feature.
    if (!checkPermission()) {
        SystemTools.setSystemErrorCode(6);
        return -1;
    }
    if (SystemTools.checkMinimumApiLevel(9)) {
        try {
            return Camera.getNumberOfCameras();
        } catch (Exception e) {
            SystemTools.setSystemErrorCode(6);
            return -1;
        }
    }
    try {
        boolean hasCamera = SystemTools.getActivityFromNative()
                .getPackageManager()
                .hasSystemFeature("android.hardware.camera");
        return hasCamera ? 1 : 0;
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return -1;
    }
}
public int getOrientation(int i) {
    // Mounting orientation (degrees) of Android camera i, or -1 when the
    // permission is missing, the API level is below 9, or the query fails.
    if (!checkPermission() || !SystemTools.checkMinimumApiLevel(9)) {
        SystemTools.setSystemErrorCode(6);
        return -1;
    }
    try {
        CameraInfo info = new CameraInfo();
        Camera.getCameraInfo(i, info);
        return info.orientation;
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return -1;
    }
}
public int getDirection(int i) {
    // Facing direction of Android camera i as an AR_CAMERA_DIRECTION_* value.
    // Pre-API-9 devices are assumed back-facing; failures return -1.
    if (!checkPermission()) {
        SystemTools.setSystemErrorCode(6);
        return -1;
    }
    if (!SystemTools.checkMinimumApiLevel(9)) {
        return AR_CAMERA_DIRECTION_BACK;
    }
    try {
        CameraInfo info = new CameraInfo();
        Camera.getCameraInfo(i, info);
        if (info.facing == 0) {
            return AR_CAMERA_DIRECTION_BACK;
        }
        if (info.facing == 1) {
            return AR_CAMERA_DIRECTION_FRONT;
        }
        return AR_CAMERA_DIRECTION_UNKNOWN;
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return -1;
    }
}
public int getDeviceID(int i) {
    // Android camera id backing cache slot i, or -1 for an unknown slot.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null) {
        SystemTools.setSystemErrorCode(4);
        return -1;
    }
    return entry.deviceID;
}
// Opens the camera selected by (index i, type i2, direction i3), optionally
// applying capture settings (iArr/iArr2, see setCameraCaptureParams) and a JSON
// string of custom parameters (str). Retries Camera.open() up to
// NUM_MAX_CAMERAOPEN_RETRY times, 250 ms apart. Returns the cache-slot handle
// used by the other public methods, or -1 on failure. j is the opaque native
// device handle delivered back through newFrameAvailable().
public int open(long j, int i, int i2, int i3, String str, int[] iArr, int[] iArr2) {
if (checkPermission()) {
int cameraDeviceIndex = getCameraDeviceIndex(i, i2, i3);
if (cameraDeviceIndex < 0) {
return -1;
}
Object obj;
int i4;
CameraCacheInfo cameraCacheInfo = null;
int size = this.cameraCacheInfo.size();
int i5 = 0;
// Look for an existing cache entry for this device id.
while (i5 < size) {
cameraCacheInfo = (CameraCacheInfo) this.cameraCacheInfo.get(i5);
if (cameraCacheInfo.deviceID == cameraDeviceIndex) {
break;
}
i5++;
}
// NOTE(review): unconditionally discarding the search result looks like a
// decompiler artifact — the cache-reuse branch (i5 >= 0) below is therefore
// dead and a fresh entry is always created; verify against original behavior.
i5 = -1;
if (i5 < 0) {
CameraCacheInfo cameraCacheInfo2 = new CameraCacheInfo();
cameraCacheInfo2.deviceID = cameraDeviceIndex;
cameraCacheInfo2.deviceHandle = j;
cameraCacheInfo2.camera = null;
cameraCacheInfo2.surface = null;
cameraCacheInfo2.buffer = (byte[][]) null;
cameraCacheInfo2.overrideWidth = 0;
cameraCacheInfo2.requestWidth = 0;
cameraCacheInfo2.bufferWidth = 0;
cameraCacheInfo2.overrideHeight = 0;
cameraCacheInfo2.requestHeight = 0;
cameraCacheInfo2.bufferHeight = 0;
cameraCacheInfo2.bufferFormatPL = AR_CAMERA_IMAGE_FORMAT_UNKNOWN;
cameraCacheInfo2.overrideFormatAndroid = 0;
cameraCacheInfo2.requestFormatAndroid = 0;
cameraCacheInfo2.caps = null;
cameraCacheInfo2.status = AR_CAMERA_STATUS_UNINITIALIZED;
cameraCacheInfo2.isAutoFocusing = false;
cameraCacheInfo = cameraCacheInfo2;
}
cameraCacheInfo.bufferSize = 0;
// Retry loop: drivers can transiently refuse Camera.open() right after a
// release. Decompiled booleans appear as Object (1 / null) here.
size = NUM_MAX_CAMERAOPEN_RETRY;
Object obj2 = null;
while (true) {
try {
if (SystemTools.checkMinimumApiLevel(9)) {
cameraCacheInfo.camera = Camera.open(cameraCacheInfo.deviceID);
} else if (cameraCacheInfo.deviceID == 0) {
cameraCacheInfo.camera = Camera.open();
}
obj = cameraCacheInfo.camera != null ? 1 : null;
} catch (Exception e) {
obj = obj2;
}
if (obj == null && size > 0) {
try {
synchronized (this) {
wait(250);
}
} catch (Exception e2) {
}
}
if (obj != null) {
break;
}
i4 = size - 1;
if (size <= 0) {
break;
}
size = i4;
obj2 = obj;
}
if (cameraCacheInfo.camera == null) {
SystemTools.setSystemErrorCode(6);
return -1;
}
// obj: capture settings were supplied; obj2: a custom-parameter string was supplied.
obj = ((iArr == null || iArr.length <= 0) && (iArr2 == null || iArr2.length <= 0)) ? null : 1;
obj2 = (str == null || str.length() <= 0) ? null : 1;
if (!(obj == null && obj2 == null)) {
Parameters cameraParameters = getCameraParameters(cameraCacheInfo.camera);
if (cameraParameters == null) {
SystemTools.setSystemErrorCode(6);
return -1;
}
if (obj != null) {
if (iArr != null && iArr.length != 5) {
SystemTools.setSystemErrorCode(2);
return -1;
} else if (!setCameraCaptureParams(cameraCacheInfo, cameraParameters, iArr, iArr2)) {
SystemTools.setSystemErrorCode(6);
return -1;
}
}
if (obj2 == null || setCustomCameraParams(cameraParameters, str)) {
try {
cameraCacheInfo.camera.setParameters(cameraParameters);
if (!checkSamsungHighFPS(cameraCacheInfo)) {
return -1;
}
} catch (Exception e3) {
SystemTools.setSystemErrorCode(6);
return -1;
}
}
// NOTE(review): this error return is reached even after the parameters were
// applied successfully above — an 'else' for the setCustomCameraParams
// failure case appears to have been lost in decompilation; confirm against
// the original binary before relying on this path.
SystemTools.setSystemErrorCode(2);
return -1;
}
cameraCacheInfo.status = AR_CAMERA_STATUS_OPENED;
// Register the (possibly new) entry and the Camera -> slot reverse mapping.
if (i5 < 0) {
this.cameraCacheInfo.add(cameraCacheInfo);
i4 = this.cameraCacheInfo.size() - 1;
} else {
i4 = i5;
}
this.cameraCacheInfoIndexCache.put(cameraCacheInfo.camera, Integer.valueOf(i4));
return i4;
}
SystemTools.setSystemErrorCode(6);
return -1;
}
public boolean close(int i) {
    // Releases the camera behind cache slot i and resets its cached state.
    // Returns false for an unknown slot or when the driver release throws.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null) {
        SystemTools.setSystemErrorCode(4);
        return false;
    }
    this.cameraCacheInfoIndexCache.remove(entry.camera);
    boolean released = false;
    try {
        entry.camera.release();
        released = true;
    } catch (Exception e) {
        // Release failure is reported through the false return value.
    }
    entry.camera = null;
    entry.buffer = (byte[][]) null;
    entry.status = AR_CAMERA_STATUS_UNINITIALIZED;
    System.gc();
    return released;
}
// Builds (and caches on the entry) the packed capability array for slot i.
// Layout: caps[0] = queryable-param bitmask, caps[1] = settable-param bitmask,
// caps[2] = supported-param-value bitmask, caps[3..5] = counts of preview
// sizes / frame rates / formats, followed by (width,height) pairs, the frame
// rates, then the formats translated to PL encoding. Returns null on failure.
public int[] getCameraCapabilities(int i) {
CameraCacheInfo cameraCacheInfo = getCameraCacheInfo(i);
if (cameraCacheInfo == null) {
SystemTools.setSystemErrorCode(4);
return null;
} else if (cameraCacheInfo.caps != null) {
// Already computed for this entry.
return cameraCacheInfo.caps;
} else {
Parameters cameraParameters = getCameraParameters(cameraCacheInfo.camera);
if (cameraParameters == null) {
SystemTools.setSystemErrorCode(6);
return null;
}
int size;
List supportedPreviewSizes = cameraParameters.getSupportedPreviewSizes();
List supportedPreviewFrameRates = cameraParameters.getSupportedPreviewFrameRates();
List supportedPreviewFormats = cameraParameters.getSupportedPreviewFormats();
List supportedFlashModes = cameraParameters.getSupportedFlashModes();
List supportedFocusModes = cameraParameters.getSupportedFocusModes();
int size2 = supportedPreviewSizes != null ? supportedPreviewSizes.size() : 0;
int size3 = supportedPreviewFrameRates != null ? supportedPreviewFrameRates.size() : 0;
if (supportedPreviewFormats != null) {
size = supportedPreviewFormats.size();
} else {
size = 0;
}
// 6 header slots + 2 ints per size + one per frame rate + one per format.
cameraCacheInfo.caps = new int[((((size2 * 2) + 6) + size3) + size)];
// --- caps[0]: parameters that can be queried ---
cameraCacheInfo.caps[0] = AR_CAMERA_PARAMTYPE_BASE;
boolean z = supportedFlashModes != null ? supportedFlashModes.contains("torch") || supportedFlashModes.contains("on") : false;
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_TORCHMODE, z);
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_FOCUSMODE, true);
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_FOCUSVALUE, SystemTools.checkMinimumApiLevel(8));
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_FOCUSREGION, SystemTools.checkMinimumApiLevel(14));
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_EXPOSUREVALUE, SystemTools.checkMinimumApiLevel(8));
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_EXPOSUREVALUERANGE, SystemTools.checkMinimumApiLevel(8));
z = SystemTools.checkMinimumApiLevel(8) && cameraParameters.isZoomSupported();
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_ZOOMVALUE, z);
z = SystemTools.checkMinimumApiLevel(8) && cameraParameters.isZoomSupported();
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_ZOOMRANGE, z);
setCameraCapsBit(cameraCacheInfo, 0, AR_CAMERA_PARAMTYPE_VIDEO_STABILIZATION, SystemTools.checkMinimumApiLevel(15));
// --- caps[1]: parameters that can be set ---
cameraCacheInfo.caps[1] = AR_CAMERA_PARAMTYPE_BASE;
z = supportedFlashModes != null ? supportedFlashModes.contains("torch") || supportedFlashModes.contains("on") : false;
setCameraCapsBit(cameraCacheInfo, 1, AR_CAMERA_PARAMTYPE_TORCHMODE, z);
setCameraCapsBit(cameraCacheInfo, 1, AR_CAMERA_PARAMTYPE_FOCUSMODE, true);
setCameraCapsBit(cameraCacheInfo, 1, AR_CAMERA_PARAMTYPE_FOCUSREGION, SystemTools.checkMinimumApiLevel(14));
setCameraCapsBit(cameraCacheInfo, 1, AR_CAMERA_PARAMTYPE_EXPOSUREVALUE, SystemTools.checkMinimumApiLevel(8));
z = SystemTools.checkMinimumApiLevel(8) && cameraParameters.isZoomSupported();
setCameraCapsBit(cameraCacheInfo, 1, AR_CAMERA_PARAMTYPE_ZOOMVALUE, z);
setCameraCapsBit(cameraCacheInfo, 1, AR_CAMERA_PARAMTYPE_VIDEO_STABILIZATION, SystemTools.checkMinimumApiLevel(15));
// --- caps[2]: supported torch / focus mode values ---
cameraCacheInfo.caps[2] = AR_CAMERA_PARAMVALUE_BASE;
if (supportedFlashModes != null && (supportedFlashModes.contains("torch") || supportedFlashModes.contains("on"))) {
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_TORCHMODE_OFF, true);
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_TORCHMODE_ON, true);
}
if (supportedFocusModes != null) {
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_FOCUSMODE_NORMAL, true);
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_FOCUSMODE_AUTO, supportedFocusModes.contains("auto"));
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_FOCUSMODE_CONTINUOUSAUTO, supportedFocusModes.contains("continuous-video"));
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_FOCUSMODE_MACRO, supportedFocusModes.contains("macro"));
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_FOCUSMODE_INFINITY, supportedFocusModes.contains("infinity"));
setCameraCapsBit(cameraCacheInfo, 2, AR_CAMERA_FOCUSMODE_FIXED, supportedFocusModes.contains("fixed"));
}
// --- caps[3..5]: list lengths, then the flattened lists themselves ---
cameraCacheInfo.caps[3] = size2;
cameraCacheInfo.caps[4] = size3;
cameraCacheInfo.caps[5] = size;
int i2 = 6;
if (size2 > 0) {
ListIterator listIterator = supportedPreviewSizes.listIterator();
size2 = 6;
while (listIterator.hasNext()) {
Size size4 = (Size) listIterator.next();
cameraCacheInfo.caps[size2] = size4.width;
cameraCacheInfo.caps[size2 + 1] = size4.height;
size2 += 2;
}
i2 = size2;
}
if (size3 > 0) {
ListIterator listIterator2 = supportedPreviewFrameRates.listIterator();
size2 = i2;
while (listIterator2.hasNext()) {
cameraCacheInfo.caps[size2] = ((Integer) listIterator2.next()).intValue();
size2++;
}
i2 = size2;
}
if (size > 0) {
ListIterator listIterator3 = supportedPreviewFormats.listIterator();
size = i2;
while (listIterator3.hasNext()) {
// Formats are exported in PL encoding.
cameraCacheInfo.caps[size] = translateImageFormat(((Integer) listIterator3.next()).intValue(), true);
size++;
}
}
return cameraCacheInfo.caps;
}
}
public boolean setCaptureInfo(int i, int[] iArr, int[] iArr2) {
    // Applies new capture settings (iArr: [w, h, format, fps, surfaceEnabled],
    // iArr2: optional overrides) to cache slot i and commits them to the driver.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null) {
        SystemTools.setSystemErrorCode(4);
        return false;
    }
    if (iArr.length != 5) {
        SystemTools.setSystemErrorCode(2);
        return false;
    }
    Parameters params = getCameraParameters(entry.camera);
    if (params == null) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
    if (!setCameraCaptureParams(entry, params, iArr, iArr2)) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
    try {
        entry.camera.setParameters(params);
        return checkSamsungHighFPS(entry);
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
}
public int[] getCaptureInfo(int i) {
    // Returns the active capture configuration of slot i as
    // [width, height, format (PL), fps, previewSurfaceEnabled], or null on failure.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null) {
        SystemTools.setSystemErrorCode(4);
        return null;
    }
    Parameters params = getCameraParameters(entry.camera);
    if (params == null) {
        SystemTools.setSystemErrorCode(6);
        return null;
    }
    try {
        boolean hasSurface = entry.surface != null || entry.surfaceTexture != null;
        return new int[] {
            params.getPreviewSize().width,
            params.getPreviewSize().height,
            translateImageFormat(params.getPreviewFormat(), CONVERT_FORMAT_TO_PL),
            params.getPreviewFrameRate(),
            hasSurface ? 1 : 0,
        };
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return null;
    }
}
public boolean start(int i) {
    // Sizes the preview buffers for slot i and starts the preview stream.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null) {
        SystemTools.setSystemErrorCode(4);
        return false;
    }
    if (!setupPreviewBuffer(entry)) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
    try {
        entry.camera.startPreview();
        entry.status = AR_CAMERA_STATUS_CAPTURE_RUNNING;
        return true;
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
}
public boolean stop(int i) {
    // Stops the preview stream on slot i and marks the camera as merely opened.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null) {
        SystemTools.setSystemErrorCode(4);
        return false;
    }
    try {
        entry.camera.stopPreview();
        entry.status = AR_CAMERA_STATUS_OPENED;
        return true;
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
}
// Applies a JSON dictionary of raw camera parameters (see setCustomCameraParams)
// to cache slot i and commits them to the driver. Returns false on a null
// string, an unknown/closed slot, malformed JSON, or a driver rejection.
// BUGFIX: unlike every sibling (setCaptureInfo, setUntypedCameraParameter, ...),
// the original did not guard camera.setParameters(), so a value the driver
// rejects would throw a RuntimeException out into the native caller instead of
// returning false; now caught and reported via error code 6 for consistency.
public boolean setBatchParameters(int i, String str) {
    if (str == null) {
        return false;
    }
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null || entry.camera == null) {
        SystemTools.setSystemErrorCode(4);
        return false;
    }
    Parameters params = getCameraParameters(entry.camera);
    if (params == null) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
    if (!setCustomCameraParams(params, str)) {
        return false;
    }
    try {
        entry.camera.setParameters(params);
        return true;
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
}
boolean setUntypedCameraParameter(int i, String str, String str2) {
    // Sets a single raw key/value camera parameter on slot i and commits it.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null || entry.camera == null) {
        SystemTools.setSystemErrorCode(4);
        return false;
    }
    Parameters params = getCameraParameters(entry.camera);
    if (params == null) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
    try {
        params.set(str, str2);
        entry.camera.setParameters(params);
        return true;
    } catch (Exception e) {
        SystemTools.setSystemErrorCode(6);
        return false;
    }
}
String getUntypedCameraParameter(int i, String str) {
    // Reads a single raw camera parameter from slot i; null (with an error
    // code set) when the slot, the parameter set, or the key is unavailable.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null || entry.camera == null) {
        SystemTools.setSystemErrorCode(4);
        return null;
    }
    Parameters params = getCameraParameters(entry.camera);
    if (params == null) {
        SystemTools.setSystemErrorCode(6);
        return null;
    }
    String value = params.get(str);
    if (value == null) {
        SystemTools.setSystemErrorCode(6);
    }
    return value;
}
String getFlattenedParameters(int i) {
    // Returns the driver's full parameter set for slot i as a flattened
    // "key=value;..." string; empty string (with an error code set) on failure.
    CameraCacheInfo entry = getCameraCacheInfo(i);
    if (entry == null || entry.camera == null) {
        SystemTools.setSystemErrorCode(4);
        return "";
    }
    Parameters params = getCameraParameters(entry.camera);
    if (params == null) {
        SystemTools.setSystemErrorCode(6);
        return "";
    }
    return params.flatten();
}
boolean setTypedCameraParameter(int i, int i2, Object obj) {
CameraCacheInfo cameraCacheInfo = getCameraCacheInfo(i);
if (cameraCacheInfo == null || cameraCacheInfo.camera == null) {
SystemTools.setSystemErrorCode(4);
return false;
}
Parameters cameraParameters = getCameraParameters(cameraCacheInfo.camera);
if (cameraParameters == null) {
SystemTools.setSystemErrorCode(6);
return false;
}
Object obj2 = null;
int intValue;
switch (i2) {
case AR_CAMERA_PARAMTYPE_TORCHMODE /*536870913*/:
switch (((Number) obj).intValue()) {
case AR_CAMERA_TORCHMODE_OFF /*805306369*/:
cameraParameters.setFlashMode("off");
break;
case AR_CAMERA_TORCHMODE_ON /*805306370*/:
if (!cameraParameters.getSupportedFlashModes().contains("torch")) {
cameraParameters.setFlashMode("on");
break;
}
cameraParameters.setFlashMode("torch");
break;
case AR_CAMERA_TORCHMODE_AUTO /*805306372*/:
SystemTools.setSystemErrorCode(3);
return false;
default:
SystemTools.setSystemErrorCode(3);
return false;
}
case AR_CAMERA_PARAMTYPE_FOCUSMODE /*536870914*/:
cameraCacheInfo.camera.cancelAutoFocus();
switch (((Number) obj).intValue()) {
case AR_CAMERA_FOCUSMODE_NORMAL /*805306384*/:
if (!cameraParameters.getSupportedFocusModes().contains(FOCUS_MODE_NORMAL)) {
cameraParameters.setFocusMode("auto");
obj2 = 1;
break;
}
cameraParameters.setFocusMode(FOCUS_MODE_NORMAL);
break;
case AR_CAMERA_FOCUSMODE_AUTO /*805306400*/:
cameraParameters.setFocusMode("auto");
obj2 = 1;
break;
case AR_CAMERA_FOCUSMODE_CONTINUOUSAUTO /*805306432*/:
if (cameraParameters.getSupportedFocusModes().contains("continuous-video")) {
cameraParameters.setFocusMode("continuous-video");
break;
}
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_FOCUSMODE_MACRO /*805306496*/:
cameraParameters.setFocusMode("macro");
break;
case AR_CAMERA_FOCUSMODE_INFINITY /*805306624*/:
cameraParameters.setFocusMode("infinity");
break;
case AR_CAMERA_FOCUSMODE_FIXED /*805306880*/:
cameraParameters.setFocusMode("fixed");
break;
default:
SystemTools.setSystemErrorCode(3);
return false;
}
case AR_CAMERA_PARAMTYPE_FOCUSVALUE /*536870916*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_FOCUSRANGE /*536870920*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_FOCUSREGION /*536870928*/:
if (SystemTools.checkMinimumApiLevel(14)) {
float[] fArr = (float[]) obj;
if (fArr.length == 5) {
if (fArr[0] >= 0.0f && fArr[0] <= 1.0f && fArr[1] >= 0.0f && fArr[1] <= 1.0f && fArr[2] >= 0.0f && fArr[2] <= 1.0f && fArr[3] >= 0.0f && fArr[3] <= 1.0f && fArr[4] >= 0.0f && fArr[4] <= 1.0f) {
Rect rect = new Rect(((int) (((double) fArr[0]) * 2000.0d)) - 1000, ((int) (((double) fArr[1]) * 2000.0d)) - 1000, ((int) (((double) fArr[2]) * 2000.0d)) - 1000, ((int) (((double) fArr[3]) * 2000.0d)) - 1000);
List arrayList = new ArrayList();
arrayList.add(new Area(rect, (int) (((double) fArr[4]) * 1000.0d)));
if (cameraParameters.getMaxNumFocusAreas() > 0) {
cameraParameters.setFocusAreas(arrayList);
break;
}
}
SystemTools.setSystemErrorCode(2);
return false;
}
SystemTools.setSystemErrorCode(2);
return false;
}
SystemTools.setSystemErrorCode(6);
return false;
break;
case AR_CAMERA_PARAMTYPE_EXPOSUREMODE /*536870944*/:
switch (((Number) obj).intValue()) {
case AR_CAMERA_EXPOSUREMODE_LOCKED /*805310464*/:
if (cameraParameters.isAutoExposureLockSupported()) {
cameraParameters.setAutoExposureLock(true);
break;
}
break;
case AR_CAMERA_EXPOSUREMODE_CONTINUOUSAUTO /*805322752*/:
if (cameraParameters.isAutoExposureLockSupported()) {
cameraParameters.setAutoExposureLock(false);
break;
}
break;
default:
SystemTools.setSystemErrorCode(3);
return false;
}
case AR_CAMERA_PARAMTYPE_ISO /*536870976*/:
try {
String num = Integer.toString(((Number) obj).intValue());
String str = cameraParameters.get("iso-values");
if (str != null) {
String[] split = str.split(",");
int i3 = 0;
while (i3 < split.length) {
if (split[i3].toLowerCase().contains(num.toLowerCase())) {
num = split[i3];
} else {
i3++;
}
}
}
cameraParameters.set("iso", num);
break;
} catch (Exception e) {
SystemTools.setSystemErrorCode(6);
return false;
}
case AR_CAMERA_PARAMTYPE_EXPOSUREVALUE /*536871936*/:
if (SystemTools.checkMinimumApiLevel(8)) {
float floatValue = ((Number) obj).floatValue();
float exposureCompensationStep = cameraParameters.getExposureCompensationStep();
if (exposureCompensationStep != 0.0f) {
cameraParameters.setExposureCompensation(Math.round(floatValue / exposureCompensationStep));
break;
}
SystemTools.setSystemErrorCode(6);
return false;
}
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_EXPOSUREVALUERANGE /*536872960*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_WHITEBALANCEMODE /*536875008*/:
intValue = ((Number) obj).intValue();
switch (intValue) {
case AR_CAMERA_WHITEBALANCEMODE_LOCKED /*806354944*/:
if (cameraParameters.isAutoWhiteBalanceLockSupported()) {
cameraParameters.setAutoWhiteBalanceLock(true);
break;
}
break;
case AR_CAMERA_WHITEBALANCEMODE_CONTINUOUSAUTO /*809500672*/:
if (cameraParameters.isAutoWhiteBalanceLockSupported()) {
cameraParameters.setAutoWhiteBalanceLock(false);
break;
}
break;
default:
SystemTools.setSystemErrorCode(3);
SystemTools.logSystemError("Cannot set unknown white balance mode (" + intValue + ")");
return false;
}
case AR_CAMERA_PARAMTYPE_WHITEBALANCEVALUE /*536879104*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_WHITEBALANCERANGE /*536887296*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_ZOOMVALUE /*536903680*/:
if (SystemTools.checkMinimumApiLevel(8) && cameraParameters.isZoomSupported()) {
cameraParameters.setZoom(((Number) obj).intValue());
break;
}
SystemTools.setSystemErrorCode(6);
return false;
break;
case AR_CAMERA_PARAMTYPE_ZOOMRANGE /*536936448*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_BRIGHTNESSVALUE /*537001984*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_BRIGHTNESSRANGE /*537133056*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_CONTRASTVALUE /*537395200*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_CONTRASTRANGE /*537919488*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_ROTATION /*538968064*/:
SystemTools.setSystemErrorCode(6);
return false;
case AR_CAMERA_PARAMTYPE_RECORDING_HINT /*541065216*/:
intValue = ((Number) obj).intValue();
if (!SystemTools.checkMinimumApiLevel(14)) {
cameraParameters.set("recording-hint", intValue != 0 ? "true" : "false");
break;
}
boolean z;
if (intValue != 0) {
z = true;
} else {
z = false;
}
cameraParameters.setRecordingHint(z);
break;
case AR_CAMERA_PARAMTYPE_VIDEO_STABILIZATION /*553648128*/:
if (!((Boolean) obj).booleanValue()) {
cameraParameters.setVideoStabilization(false);
break;
}
cameraParameters.setVideoStabilization(true);
break;
default:
return false;
}
try {
cameraCacheInfo.camera.setParameters(cameraParameters);
if (obj2 != null) {
switch (i2) {
case AR_CAMERA_PARAMTYPE_FOCUSMODE /*536870914*/:
try {
cameraCacheInfo.isAutoFocusing = true;
cameraCacheInfo.camera.autoFocus(new C00681());
break;
} catch (Exception e2) {
SystemTools.setSystemErrorCode(6);
return false;
}
}
}
return true;
} catch (Exception e3) {
SystemTools.setSystemErrorCode(6);
return false;
}
}
/**
 * Reads the current value of the camera parameter identified by the
 * AR_CAMERA_PARAMTYPE_* code in {@code i2} from the camera cached under
 * handle {@code i}. The boxed return type depends on the parameter
 * (Integer, Float, float[], int[] or Boolean); returns {@code null} on any
 * failure after setting a system error code (4 = unknown camera handle,
 * 6 = unsupported parameter / API level or read failure).
 *
 * NOTE(review): this method appears to be decompiled output; several
 * constructs below (see inline notes) are decompiler artifacts preserved
 * verbatim.
 */
Object getTypedCameraParameter(int i, int i2) {
    CameraCacheInfo cameraCacheInfo = getCameraCacheInfo(i);
    if (cameraCacheInfo == null || cameraCacheInfo.camera == null) {
        SystemTools.setSystemErrorCode(4);
        return null;
    }
    Parameters cameraParameters = getCameraParameters(cameraCacheInfo.camera);
    if (cameraParameters == null) {
        SystemTools.setSystemErrorCode(6);
        return null;
    }
    switch (i2) {
        // Torch: map Android flash-mode strings ("torch"/"on"/"off") to AR codes.
        case AR_CAMERA_PARAMTYPE_TORCHMODE /*536870913*/:
            try {
                String flashMode = cameraParameters.getFlashMode();
                if (flashMode.equals("torch") || flashMode.equals("on")) {
                    return Integer.valueOf(AR_CAMERA_TORCHMODE_ON);
                }
                if (flashMode.equals("off")) {
                    return Integer.valueOf(AR_CAMERA_TORCHMODE_OFF);
                }
                SystemTools.setSystemErrorCode(6);
                return null;
            } catch (Exception e) {
                // getFlashMode() can return null on flashless devices -> NPE caught here.
                SystemTools.setSystemErrorCode(6);
                return null;
            }
        // Focus mode: translate Android focus-mode strings to AR codes.
        // "auto" is reported as AUTO only while an autofocus pass is running.
        case AR_CAMERA_PARAMTYPE_FOCUSMODE /*536870914*/:
            String focusMode = cameraParameters.getFocusMode();
            if (focusMode.equals("auto")) {
                return Integer.valueOf(cameraCacheInfo.isAutoFocusing ? AR_CAMERA_FOCUSMODE_AUTO : AR_CAMERA_FOCUSMODE_NORMAL);
            } else if (focusMode.equals("continuous-video")) {
                return Integer.valueOf(AR_CAMERA_FOCUSMODE_CONTINUOUSAUTO);
            } else {
                if (focusMode.equals("infinity")) {
                    return Integer.valueOf(AR_CAMERA_FOCUSMODE_INFINITY);
                }
                if (focusMode.equals("macro")) {
                    return Integer.valueOf(AR_CAMERA_FOCUSMODE_MACRO);
                }
                if (focusMode.equals("fixed")) {
                    return Integer.valueOf(AR_CAMERA_FOCUSMODE_FIXED);
                }
                SystemTools.setSystemErrorCode(6);
                return null;
            }
        // Focal length in millimeters (API 8+).
        case AR_CAMERA_PARAMTYPE_FOCUSVALUE /*536870916*/:
            if (SystemTools.checkMinimumApiLevel(8)) {
                return Float.valueOf(cameraParameters.getFocalLength());
            }
            SystemTools.setSystemErrorCode(6);
            return null;
        // Focus distances (API 9+): Android fills {near, optimal, far};
        // we return {near, far}.
        case AR_CAMERA_PARAMTYPE_FOCUSRANGE /*536870920*/:
            if (SystemTools.checkMinimumApiLevel(9)) {
                // NOTE(review): `r3` is used without a visible declaration --
                // decompiler artifact; the original presumably declared a
                // local float[3] here.
                r3 = new float[3];
                cameraParameters.getFocusDistances(r3);
                return new float[]{r3[0], r3[2]};
            }
            SystemTools.setSystemErrorCode(6);
            return null;
        // First configured focus area as {left, top, right, bottom, weight}
        // in Android's [-1000, 1000] sensor coordinates (API 14+ with
        // focus-area support).
        case AR_CAMERA_PARAMTYPE_FOCUSREGION /*536870928*/:
            if (SystemTools.checkMinimumApiLevel(14) && cameraParameters.getMaxNumFocusAreas() > 0) {
                List focusAreas = cameraParameters.getFocusAreas();
                if (focusAreas.size() > 0) {
                    Area area = (Area) focusAreas.get(0);
                    return new float[]{(float) area.rect.left, (float) area.rect.top, (float) area.rect.right, (float) area.rect.bottom, (float) area.weight};
                }
            }
            SystemTools.setSystemErrorCode(6);
            return null;
        // Reading the exposure mode is not supported by this backend.
        case AR_CAMERA_PARAMTYPE_EXPOSUREMODE /*536870944*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        // Exposure value in EV = compensation index * step size (API 8+).
        case AR_CAMERA_PARAMTYPE_EXPOSUREVALUE /*536871936*/:
            if (SystemTools.checkMinimumApiLevel(8)) {
                return Float.valueOf(cameraParameters.getExposureCompensationStep() * ((float) cameraParameters.getExposureCompensation()));
            }
            SystemTools.setSystemErrorCode(6);
            return null;
        // Exposure compensation range as {min, max} in EV units (API 8+).
        case AR_CAMERA_PARAMTYPE_EXPOSUREVALUERANGE /*536872960*/:
            if (SystemTools.checkMinimumApiLevel(8)) {
                // NOTE(review): declaring as Object yet indexing with []
                // is not valid Java -- decompiler artifact; original was
                // presumably a float[] local.
                Object obj = new float[2];
                obj[0] = cameraParameters.getExposureCompensationStep() * ((float) cameraParameters.getMinExposureCompensation());
                obj[1] = ((float) cameraParameters.getMaxExposureCompensation()) * cameraParameters.getExposureCompensationStep();
                return obj;
            }
            SystemTools.setSystemErrorCode(6);
            return null;
        // White-balance reads are not supported by this backend.
        case AR_CAMERA_PARAMTYPE_WHITEBALANCEMODE /*536875008*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        case AR_CAMERA_PARAMTYPE_WHITEBALANCEVALUE /*536879104*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        case AR_CAMERA_PARAMTYPE_WHITEBALANCERANGE /*536887296*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        // Current zoom index (API 8+ and zoom-capable hardware only).
        case AR_CAMERA_PARAMTYPE_ZOOMVALUE /*536903680*/:
            if (SystemTools.checkMinimumApiLevel(8) && cameraParameters.isZoomSupported()) {
                return Integer.valueOf(cameraParameters.getZoom());
            }
            SystemTools.setSystemErrorCode(6);
            return null;
        // Zoom range as {0, maxZoom}.
        case AR_CAMERA_PARAMTYPE_ZOOMRANGE /*536936448*/:
            if (SystemTools.checkMinimumApiLevel(8) && cameraParameters.isZoomSupported()) {
                // NOTE(review): `null` inside an int[] initializer is not
                // valid Java -- decompiler artifact; original was likely 0.
                return new int[]{null, cameraParameters.getMaxZoom()};
            }
            SystemTools.setSystemErrorCode(6);
            return null;
        // Brightness/contrast/rotation reads are not supported by this backend.
        case AR_CAMERA_PARAMTYPE_BRIGHTNESSVALUE /*537001984*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        case AR_CAMERA_PARAMTYPE_BRIGHTNESSRANGE /*537133056*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        case AR_CAMERA_PARAMTYPE_CONTRASTVALUE /*537395200*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        case AR_CAMERA_PARAMTYPE_CONTRASTRANGE /*537919488*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        case AR_CAMERA_PARAMTYPE_ROTATION /*538968064*/:
            SystemTools.setSystemErrorCode(6);
            return null;
        // Video stabilization on/off flag.
        case AR_CAMERA_PARAMTYPE_VIDEO_STABILIZATION /*553648128*/:
            if (cameraParameters.getVideoStabilization()) {
                return Boolean.valueOf(true);
            }
            return Boolean.valueOf(false);
        default:
            return null;
    }
    // NOTE(review): unreachable -- every switch arm above returns;
    // retained decompiler artifact.
    SystemTools.setSystemErrorCode(6);
    return null;
}
/**
 * Looks up the AR_CAMERA_STATUS_* value for the camera cached under
 * handle {@code i}.
 *
 * @param i camera handle previously issued by this manager
 * @return the cached status, or AR_CAMERA_STATUS_UNKNOWN (after setting
 *         system error code 4) when no cache entry exists for the handle
 */
int getStatus(int i) {
    CameraCacheInfo info = getCameraCacheInfo(i);
    if (info == null) {
        // Unknown handle: record error 4 and fall back to UNKNOWN.
        SystemTools.setSystemErrorCode(4);
        return AR_CAMERA_STATUS_UNKNOWN;
    }
    return info.status;
}
}
|
touchmii/OpenTCS-4
|
openTCS-CommAdapter-TCP/src/test/java/com/lvsrobot/vehicletcp/UptimeClient.java
|
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.lvsrobot.vehicletcp;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.bytes.ByteArrayDecoder;
import io.netty.handler.codec.bytes.ByteArrayEncoder;
import io.netty.handler.timeout.IdleStateHandler;
import io.netty.util.CharsetUtil;
/**
* Connects to a server periodically to measure and print the uptime of the
* server. This example demonstrates how to implement reliable reconnection
* mechanism in Netty.
*/
/*public final class UptimeClient {
static final String HOST = System.getProperty("host", "192.168.0.101");
static final int PORT = Integer.parseInt(System.getProperty("port", "1024"));
// Sleep 5 seconds before a reconnection attempt.
static final int RECONNECT_DELAY = Integer.parseInt(System.getProperty("reconnectDelay", "5"));
// Reconnect when the server sends nothing for 10 seconds.
private static final int READ_TIMEOUT = Integer.parseInt(System.getProperty("readTimeout", "10"));
private static final UptimeClientHandler handler = new UptimeClientHandler();
private static final Bootstrap bs = new Bootstrap();
static ChannelFuture f;
public static void main(String[] args) throws Exception {
EventLoopGroup group = new NioEventLoopGroup();
bs.group(group)
.channel(NioSocketChannel.class)
.remoteAddress(HOST, PORT)
.handler(new ChannelInitializer<SocketChannel>() {
@Override
protected void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast(new ByteArrayEncoder());
ch.pipeline().addLast(new ByteToMsgDecoder());
ch.pipeline().addLast(new ByteArrayDecoder());
ch.pipeline().addLast(new IdleStateHandler(READ_TIMEOUT, 0, 0), handler);
}
});
f = bs.connect();
while (true) {
// f.channel().writeAndFlush(Unpooled.copiedBuffer("Hello", CharsetUtil.UTF_8));
byte[] query = {0, 1, 2, 1, (byte)253};
f.channel().writeAndFlush(query);
Thread.currentThread().sleep(1000);//毫秒
}
}
static void connect() {
bs.connect().addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
if (future.cause() != null) {
handler.startTime = -1;
handler.println("Failed to connect: " + future.cause());
} else {
f = future;
}
}
});
}
}*/
|
micolore/jsbt-web-ui
|
docs/src/environments/environment.prod.js
|
<reponame>micolore/jsbt-web-ui
/*
* @Author: kubrick
* @LastEditors: kubrick
* @Description: 上线环境的环境配置,上线环境需要过aot打包,命令ng build --aot
* @email:
* @Date: 2019-04-12 16:39:30
* @LastEditTime: 2019-07-11 16:49:04
*/
// The file contents for the current environment will overwrite these during build.
// The build system defaults to the dev environment which uses `environment.ts`, but if you do
// `ng build --env=prod` then `environment.prod.ts` will be used instead.
// The list of which env maps to which file can be found in `.angular-cli.json`.
// 开发环境
// Production runtime configuration, swapped in by Angular's environment
// file replacement during `ng build --aot`.
export const environment = {
  production: true,
  // Project/platform identifier.
  pid: "FC",
  projectName: "fc-angular",
  // Backend base paths (relative to the hosting origin).
  // NOTE(review): apiurl/logurl/authurl all point at the same gateway --
  // kept as separate keys, presumably so they can diverge later; confirm.
  systemurl: "/server/",
  apiurl: "/server/api/",
  logurl: "/server/api/",
  authurl: "/server/api/",
  // WebSocket endpoint; empty here. NOTE(review): confirm whether an empty
  // value disables WS in the consuming code.
  wsurl: "",
  license: ''
};
//# sourceMappingURL=environment.prod.js.map
|
wanyuenmei/quilt
|
api/pb/pb.pb.go
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: pb/pb.proto
/*
Package pb is a generated protocol buffer package.
It is generated from these files:
pb/pb.proto
It has these top-level messages:
DBQuery
QueryReply
DeployRequest
DeployReply
VersionRequest
VersionReply
*/
package pb
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// DBQuery asks the daemon to dump the contents of a single database table.
type DBQuery struct {
	Table string `protobuf:"bytes,1,opt,name=Table" json:"Table,omitempty"`
}

// Standard proto.Message boilerplate emitted by protoc-gen-go.
func (m *DBQuery) Reset() { *m = DBQuery{} }
func (m *DBQuery) String() string { return proto.CompactTextString(m) }
func (*DBQuery) ProtoMessage() {}
func (*DBQuery) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }

// GetTable returns the requested table name; safe on a nil receiver.
func (m *DBQuery) GetTable() string {
	if m != nil {
		return m.Table
	}
	return ""
}

// QueryReply carries the serialized contents of the queried table.
type QueryReply struct {
	TableContents string `protobuf:"bytes,1,opt,name=TableContents" json:"TableContents,omitempty"`
}

func (m *QueryReply) Reset() { *m = QueryReply{} }
func (m *QueryReply) String() string { return proto.CompactTextString(m) }
func (*QueryReply) ProtoMessage() {}
func (*QueryReply) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }

// GetTableContents returns the table dump; safe on a nil receiver.
func (m *QueryReply) GetTableContents() string {
	if m != nil {
		return m.TableContents
	}
	return ""
}

// DeployRequest submits a deployment specification to the daemon.
type DeployRequest struct {
	Deployment string `protobuf:"bytes,1,opt,name=Deployment" json:"Deployment,omitempty"`
}

func (m *DeployRequest) Reset() { *m = DeployRequest{} }
func (m *DeployRequest) String() string { return proto.CompactTextString(m) }
func (*DeployRequest) ProtoMessage() {}
func (*DeployRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }

// GetDeployment returns the deployment spec; safe on a nil receiver.
func (m *DeployRequest) GetDeployment() string {
	if m != nil {
		return m.Deployment
	}
	return ""
}

// DeployReply is an empty acknowledgement for Deploy.
type DeployReply struct {
}

func (m *DeployReply) Reset() { *m = DeployReply{} }
func (m *DeployReply) String() string { return proto.CompactTextString(m) }
func (*DeployReply) ProtoMessage() {}
func (*DeployReply) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }

// VersionRequest is an empty request for the server's version string.
type VersionRequest struct {
}

func (m *VersionRequest) Reset() { *m = VersionRequest{} }
func (m *VersionRequest) String() string { return proto.CompactTextString(m) }
func (*VersionRequest) ProtoMessage() {}
func (*VersionRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }

// VersionReply carries the server's version string.
type VersionReply struct {
	Version string `protobuf:"bytes,1,opt,name=Version" json:"Version,omitempty"`
}

func (m *VersionReply) Reset() { *m = VersionReply{} }
func (m *VersionReply) String() string { return proto.CompactTextString(m) }
func (*VersionReply) ProtoMessage() {}
func (*VersionReply) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }

// GetVersion returns the version string; safe on a nil receiver.
func (m *VersionReply) GetVersion() string {
	if m != nil {
		return m.Version
	}
	return ""
}

// init registers every message type with the proto runtime under its
// fully-qualified name so (un)marshalling can resolve them.
func init() {
	proto.RegisterType((*DBQuery)(nil), "DBQuery")
	proto.RegisterType((*QueryReply)(nil), "QueryReply")
	proto.RegisterType((*DeployRequest)(nil), "DeployRequest")
	proto.RegisterType((*DeployReply)(nil), "DeployReply")
	proto.RegisterType((*VersionRequest)(nil), "VersionRequest")
	proto.RegisterType((*VersionReply)(nil), "VersionReply")
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// Client API for API service
//
// APIClient is the client-side view of the API service: table queries,
// deployment submission, and a version handshake.
type APIClient interface {
	Query(ctx context.Context, in *DBQuery, opts ...grpc.CallOption) (*QueryReply, error)
	Deploy(ctx context.Context, in *DeployRequest, opts ...grpc.CallOption) (*DeployReply, error)
	Version(ctx context.Context, in *VersionRequest, opts ...grpc.CallOption) (*VersionReply, error)
}

// aPIClient is the generated APIClient implementation over a gRPC connection.
type aPIClient struct {
	cc *grpc.ClientConn
}

// NewAPIClient wraps an established connection in an APIClient.
func NewAPIClient(cc *grpc.ClientConn) APIClient {
	return &aPIClient{cc}
}

// Query performs a unary /API/Query RPC.
func (c *aPIClient) Query(ctx context.Context, in *DBQuery, opts ...grpc.CallOption) (*QueryReply, error) {
	out := new(QueryReply)
	err := grpc.Invoke(ctx, "/API/Query", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Deploy performs a unary /API/Deploy RPC.
func (c *aPIClient) Deploy(ctx context.Context, in *DeployRequest, opts ...grpc.CallOption) (*DeployReply, error) {
	out := new(DeployReply)
	err := grpc.Invoke(ctx, "/API/Deploy", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Version performs a unary /API/Version RPC.
func (c *aPIClient) Version(ctx context.Context, in *VersionRequest, opts ...grpc.CallOption) (*VersionReply, error) {
	out := new(VersionReply)
	err := grpc.Invoke(ctx, "/API/Version", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// Server API for API service
//
// APIServer is the interface a server implementation must satisfy; it is
// registered via RegisterAPIServer.
type APIServer interface {
	Query(context.Context, *DBQuery) (*QueryReply, error)
	Deploy(context.Context, *DeployRequest) (*DeployReply, error)
	Version(context.Context, *VersionRequest) (*VersionReply, error)
}

// RegisterAPIServer attaches srv's handlers to a grpc.Server.
func RegisterAPIServer(s *grpc.Server, srv APIServer) {
	s.RegisterService(&_API_serviceDesc, srv)
}

// _API_Query_Handler decodes a DBQuery and dispatches to srv.Query,
// routing through the server interceptor when one is installed.
func _API_Query_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(DBQuery)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(APIServer).Query(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/API/Query",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(APIServer).Query(ctx, req.(*DBQuery))
	}
	return interceptor(ctx, in, info, handler)
}

// _API_Deploy_Handler decodes a DeployRequest and dispatches to srv.Deploy,
// routing through the server interceptor when one is installed.
func _API_Deploy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(DeployRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(APIServer).Deploy(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/API/Deploy",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(APIServer).Deploy(ctx, req.(*DeployRequest))
	}
	return interceptor(ctx, in, info, handler)
}

// _API_Version_Handler decodes a VersionRequest and dispatches to
// srv.Version, routing through the server interceptor when one is installed.
func _API_Version_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(VersionRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(APIServer).Version(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/API/Version",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(APIServer).Version(ctx, req.(*VersionRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// _API_serviceDesc wires the service's method names to their generated
// handlers; it is passed to grpc.Server.RegisterService.
var _API_serviceDesc = grpc.ServiceDesc{
	ServiceName: "API",
	HandlerType: (*APIServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "Query",
			Handler: _API_Query_Handler,
		},
		{
			MethodName: "Deploy",
			Handler: _API_Deploy_Handler,
		},
		{
			MethodName: "Version",
			Handler: _API_Version_Handler,
		},
	},
	Streams: []grpc.StreamDesc{},
	Metadata: "pb/pb.proto",
}

// init registers the raw file descriptor so reflection can locate pb/pb.proto.
func init() { proto.RegisterFile("pb/pb.proto", fileDescriptor0) }

// fileDescriptor0 is the gzipped FileDescriptorProto for pb/pb.proto;
// generated data -- do not edit by hand.
var fileDescriptor0 = []byte{
	// 228 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0x41, 0x4b, 0xc3, 0x40,
	0x10, 0x85, 0x53, 0xa4, 0xad, 0xbe, 0x34, 0x55, 0x06, 0x0f, 0x25, 0x07, 0x2d, 0x83, 0x87, 0x80,
	0xb0, 0x85, 0xfa, 0x0b, 0xd4, 0x5e, 0xbc, 0x69, 0x10, 0xef, 0x06, 0xe6, 0x20, 0xac, 0xbb, 0x6b,
	0xb2, 0x3d, 0xec, 0xc5, 0xdf, 0x2e, 0x66, 0xb7, 0x6d, 0x72, 0x7c, 0x5f, 0x32, 0x33, 0xdf, 0x5b,
	0xe4, 0xae, 0xd9, 0xb8, 0x46, 0xb9, 0xd6, 0x7a, 0xcb, 0xb7, 0x98, 0xef, 0x9e, 0xde, 0xf6, 0xd2,
	0x06, 0xba, 0xc6, 0xf4, 0xfd, 0xb3, 0xd1, 0xb2, 0x9a, 0xac, 0x27, 0xd5, 0x45, 0x1d, 0x03, 0x6f,
	0x81, 0xfe, 0x73, 0x2d, 0x4e, 0x07, 0xba, 0x43, 0xd1, 0xe3, 0x67, 0x6b, 0xbc, 0x18, 0xdf, 0xa5,
	0x7f, 0xc7, 0x90, 0x37, 0x28, 0x76, 0xe2, 0xb4, 0x0d, 0xb5, 0xfc, 0xec, 0xa5, 0xf3, 0x74, 0x03,
	0x44, 0xf0, 0x2d, 0xc6, 0xa7, 0x99, 0x01, 0xe1, 0x02, 0xf9, 0x61, 0xc0, 0xe9, 0xc0, 0x57, 0x58,
	0x7e, 0x48, 0xdb, 0x7d, 0x59, 0x93, 0x16, 0x70, 0x85, 0xc5, 0x91, 0xfc, 0x7b, 0xac, 0x30, 0x4f,
	0x39, 0x6d, 0x3b, 0xc4, 0xed, 0x2f, 0xce, 0x1e, 0x5f, 0x5f, 0x68, 0x8d, 0x69, 0x6c, 0x75, 0xae,
	0x52, 0xbf, 0x32, 0x57, 0xa7, 0x22, 0x9c, 0x51, 0x85, 0x59, 0xbc, 0x49, 0x4b, 0x35, 0xb2, 0x2d,
	0x17, 0x6a, 0x28, 0x93, 0xd1, 0xfd, 0xf1, 0x18, 0x5d, 0xaa, 0xb1, 0x58, 0x59, 0xa8, 0xa1, 0x17,
	0x67, 0xcd, 0xac, 0x7f, 0xd7, 0x87, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xcd, 0x14, 0xe7, 0x70,
	0x66, 0x01, 0x00, 0x00,
}
|
kariya-mitsuru/Sprout
|
sprout/type_traits/is_nothrow_invocable.hpp
|
/*=============================================================================
Copyright (c) 2011-2017 <NAME>
https://github.com/bolero-MURAKAMI/Sprout
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
#ifndef SPROUT_TYPE_TRAITS_IS_NOTHROW_INVOCABLE_HPP
#define SPROUT_TYPE_TRAITS_IS_NOTHROW_INVOCABLE_HPP
#include <sprout/config.hpp>
#include <utility>
#include <sprout/type_traits/integral_constant.hpp>
#include <sprout/type_traits/invoke_result.hpp>
#include <sprout/type_traits/is_invocable.hpp>
#include <sprout/type_traits/detail/invoke.hpp>
namespace sprout {
	//
	// is_nothrow_invocable
	//
	namespace detail {
		// Primary template: selected when Cond is false, i.e. F is not
		// invocable with Args... at all, so it is trivially not
		// nothrow-invocable.
		template<bool Cond, typename F, typename... Args>
		struct is_nothrow_invocable_impl
			: public sprout::false_type
		{};
		// Specialization for Cond == true: the result is whether the
		// INVOKE expression itself is noexcept.
		template<typename F, typename... Args>
		struct is_nothrow_invocable_impl<true, F, Args...>
			: public sprout::bool_constant<
				SPROUT_NOEXCEPT_EXPR(sprout::detail::invoke(std::declval<F>(), std::declval<Args>()...))
			>
		{};
	}	// namespace detail
	// Trait mirroring std::is_nothrow_invocable: true iff F is invocable
	// with Args... and the invocation is declared noexcept. Invocability is
	// checked first so the noexcept probe is only formed for valid calls.
	template<typename F, typename... Args>
	struct is_nothrow_invocable
		: public sprout::detail::is_nothrow_invocable_impl<
			sprout::is_invocable<F, Args...>::value, F, Args...
		>::type
	{};
#if SPROUT_USE_VARIABLE_TEMPLATES
	// Variable-template shorthand (C++14 and later builds only).
	template<typename F, typename... Args>
	SPROUT_STATIC_CONSTEXPR bool is_nothrow_invocable_v = sprout::is_nothrow_invocable<F, Args...>::value;
#endif	// #if SPROUT_USE_VARIABLE_TEMPLATES
}	// namespace sprout
#endif // #ifndef SPROUT_TYPE_TRAITS_IS_NOTHROW_INVOCABLE_HPP
|
svemir/wikipedia-ios
|
Wikipedia/Code/MWKList.h
|
<filename>Wikipedia/Code/MWKList.h
@import Mantle;
#import <WMF/MWKDataObject.h>
#import <WMF/WMFBlockDefinitions.h>
NS_ASSUME_NONNULL_BEGIN
@protocol MWKListObject <NSObject>

// Key under which this entry is looked up within an MWKList.
- (id<NSCopying, NSObject>)listIndex;

@end

typedef id<MWKListObject> MWKListEntry;
typedef id<NSCopying, NSObject> MWKListIndex;

/**
 * Abstract base class for homogeneous lists of model objects.
 *
 * Can be specialized to contain instances of @c EntryType, which are queryable by index or an associated key of type
 * @c IndexType.
 */
@interface MWKList <EntryType : MWKListEntry, IndexType : MWKListIndex> : MTLModel<NSFastEnumeration>

// Designated way to seed the list; pass nil for an empty list.
- (instancetype)initWithEntries:(NSArray<EntryType>* __nullable)entries;

/**
 * Observable - observe to get KVO notifications
 */
@property (nonatomic, strong, readonly) NSArray<EntryType> *entries;

#pragma mark - Querying the List

// Number of entries currently in the list.
- (NSUInteger)countOfEntries;

// Position of the given entry within the list.
- (NSUInteger)indexForEntry:(EntryType)entry;

// Entry at a numeric position.
- (EntryType)entryAtIndex:(NSUInteger)index;

// Entry keyed by its listIndex, or nil when absent.
- (EntryType __nullable)entryForListIndex:(IndexType)listIndex;

// Whether an entry with the given listIndex exists.
- (BOOL)containsEntryForListIndex:(IndexType)listIndex;

#pragma mark - Mutating the List

- (void)addEntry:(EntryType)entry;

- (void)removeEntry:(EntryType)entry;

- (void)removeEntryWithListIndex:(IndexType)listIndex;

- (void)removeAllEntries;

// Trims the list to at most maximumCount entries; returns the removed ones.
- (NSArray *)pruneToMaximumCount:(NSUInteger)maximumCount;

#pragma mark - Persisting Changes to the List

// Persists pending changes, reporting the outcome via the given blocks.
- (void)saveWithFailure:(WMFErrorHandler)failure success:(WMFSuccessHandler)success;

// Fire-and-forget variant of saveWithFailure:success:.
- (void)save;

@end
|
Rastamafugg/modern-democracy
|
app/components/bclaws/lawAlphabeticalList.js
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import BCLawsActList from './lawActList';
/**
 * Letter-index selector over BC laws: renders a dropdown of the alphabet
 * entries found in `props.data.lawsDocumentList` and, once a letter is
 * chosen, mounts a BCLawsActList scoped to that letter.
 */
class BCLawsAlphabeticalList extends Component {
  constructor(props) {
    // Fix: forward props to React.Component so `this.props` is defined
    // during construction (bare `super()` is a React anti-pattern).
    super(props);
    this.state = {};
  }

  /** Records the selected letter; triggers a re-render of the act list. */
  setLetter(letter) {
    this.setState({ letter });
  }

  render() {
    const { path, data: { lawsDocumentList = [] } } = this.props;
    // Only mount the act list after a letter has been picked.
    const lawSelector = (this.state.letter) ? (<BCLawsActList path={[...path, this.state.letter]} />) : undefined;
    return (
      <div>
        <select name="lawsByLetter" onChange={(event) => this.setLetter(event.target.value)}>
          <option key={''} value={''}>Select a letter</option>
          {
            /* Guard kept: the `= []` default only applies when the key is
               undefined, not when it is explicitly null. */
            lawsDocumentList && lawsDocumentList.map(({ title, id }) => (<option key={id} value={id}>{title}</option>))
          }
        </select>
        {lawSelector}
      </div>
    );
  }
}
// Runtime prop validation: `path` is the navigation path accumulated so
// far; `data` carries `lawsDocumentList` (the letter index fetched upstream).
BCLawsAlphabeticalList.propTypes = {
  path: PropTypes.array.isRequired,
  data: PropTypes.object.isRequired,
};

export default BCLawsAlphabeticalList;
|
lihongli528628/yishu
|
medtree/medtree/Groups/HomePage/View/HomeJobChannelHotEnterpriseCollectionViewCell.h
|
//
// HomeJobChannelHotEnterpriseCollectionViewCell.h
// medtree
//
// Created by tangshimi on 11/2/15.
// Copyright © 2015 sam. All rights reserved.
//
#import <UIKit/UIKit.h>
@class HomeJobChannelHotEmploymentDetailDTO;
@interface HomeJobChannelHotEnterpriseCollectionViewCell : UICollectionViewCell
@property (nonatomic, strong) HomeJobChannelHotEmploymentDetailDTO *detailDTO;
@property (nonatomic, assign) BOOL hideReflection;
@end
|
bdleitner/common-annotation-processing
|
src/test/java/com/bdl/annotation/processing/model/TypeMetadataTest.java
|
<gh_stars>0
package com.bdl.annotation.processing.model;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.testing.compile.CompilationRule;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import static com.google.common.truth.Truth.assertThat;
/**
* Tests for the TypeMetadata class.
*
* @author <NAME>
*/
@RunWith(JUnit4.class)
public class TypeMetadataTest {
@Rule public final CompilationRule compilation = new CompilationRule();
private Elements elements;
@Before
// Grabs the Elements utility from the in-process javac provided by
// CompilationRule; available only while a test method is running.
public void before() {
    elements = compilation.getElements();
}
@Test
public void testSimpleInterface() {
    // A plain, non-generic top-level interface maps to package + name only.
    TypeElement simpleElement =
        elements.getTypeElement("com.bdl.annotation.processing.model.Simple");
    TypeMetadata actual = TypeMetadata.fromElement(simpleElement);

    TypeMetadata expected =
        TypeMetadata.builder()
            .setPackageName("com.bdl.annotation.processing.model")
            .setName("Simple")
            .build();
    assertThat(actual).isEqualTo(expected);

    // With no type parameters, the bounded and unbounded renderings agree.
    String qualified = "com.bdl.annotation.processing.model.Simple";
    assertThat(actual.toString(Imports.empty())).isEqualTo(qualified);
    assertThat(actual.toString(Imports.empty(), true)).isEqualTo(qualified);
}
@Test
// A single unbounded type parameter should appear both in the metadata
// (as a type-parameter param) and in the rendered string as "<T>".
public void testParameterizedInterface() {
    TypeElement element =
        elements.getTypeElement("com.bdl.annotation.processing.model.Parameterized");
    TypeMetadata type = TypeMetadata.fromElement(element);
    assertThat(type)
        .isEqualTo(
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("Parameterized")
                .addParam(TypeMetadata.builder().setIsTypeParameter(true).setName("T").build())
                .build());
    // Unbounded parameter: bounded rendering adds nothing.
    assertThat(type.toString(Imports.empty()))
        .isEqualTo("com.bdl.annotation.processing.model.Parameterized<T>");
    assertThat(type.toString(Imports.empty(), true))
        .isEqualTo("com.bdl.annotation.processing.model.Parameterized<T>");
}
@Test
// Self-referential bound (F extends Field<F>): the bound must be captured
// recursively, and only the bounded rendering should print "extends ...".
public void testParameterizedWithBound() {
    TypeElement element = elements.getTypeElement("com.bdl.annotation.processing.model.Field");
    TypeMetadata type = TypeMetadata.fromElement(element);
    assertThat(type)
        .isEqualTo(
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("Field")
                .addParam(
                    TypeMetadata.builder()
                        .setIsTypeParameter(true)
                        .setName("F")
                        .addBound(
                            TypeMetadata.builder()
                                .setPackageName("com.bdl.annotation.processing.model")
                                .setName("Field")
                                .addParam(
                                    TypeMetadata.builder()
                                        .setIsTypeParameter(true)
                                        .setName("F")
                                        .build())
                                .build())
                        .build())
                .build());
    // Unbounded rendering omits the bound; bounded rendering includes it.
    assertThat(type.toString(Imports.empty()))
        .isEqualTo("com.bdl.annotation.processing.model.Field<F>");
    assertThat(type.toString(Imports.empty(), true))
        .isEqualTo(
            "com.bdl.annotation.processing.model.Field<F extends com.bdl.annotation.processing.model.Field<F>>");
}
@Test
// Two parameters where the second has two bounds (an interface and a
// parameterized interface referencing the first parameter); bounds should
// join with "&" in the bounded rendering.
public void testMultipleParamsMultipleBounds() {
    TypeElement element =
        elements.getTypeElement("com.bdl.annotation.processing.model.ParameterizedMultibound");
    TypeMetadata type = TypeMetadata.fromElement(element);
    assertThat(type)
        .isEqualTo(
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("ParameterizedMultibound")
                .addParam(TypeMetadata.builder().setIsTypeParameter(true).setName("S").build())
                .addParam(
                    TypeMetadata.builder()
                        .setIsTypeParameter(true)
                        .setName("T")
                        .addBound(
                            TypeMetadata.builder()
                                .setPackageName("com.bdl.annotation.processing.model")
                                .setName("Simple")
                                .build())
                        .addBound(
                            TypeMetadata.builder()
                                .setPackageName("com.bdl.annotation.processing.model")
                                .setName("Parameterized")
                                .addParam(
                                    TypeMetadata.builder()
                                        .setIsTypeParameter(true)
                                        .setName("S")
                                        .build())
                                .build())
                        .build())
                .build());
    assertThat(type.toString(Imports.empty()))
        .isEqualTo("com.bdl.annotation.processing.model.ParameterizedMultibound<S, T>");
    assertThat(type.toString(Imports.empty(), true))
        .isEqualTo(
            "com.bdl.annotation.processing.model.ParameterizedMultibound"
                + "<S, T extends com.bdl.annotation.processing.model.Simple & com.bdl.annotation.processing.model.Parameterized<S>>");
}
@Test
// Same expectation as testMultipleParamsMultipleBounds, but built from a
// runtime Class object via TypeMetadata.from(...) instead of a TypeElement;
// both construction paths must yield identical metadata.
public void testMultipleParamsMultipleBounds_fromClass() {
    TypeMetadata type = TypeMetadata.from(ParameterizedMultibound.class);
    TypeMetadata expected =
        TypeMetadata.builder()
            .setPackageName("com.bdl.annotation.processing.model")
            .setName("ParameterizedMultibound")
            .addParam(TypeMetadata.builder().setIsTypeParameter(true).setName("S").build())
            .addParam(
                TypeMetadata.builder()
                    .setIsTypeParameter(true)
                    .setName("T")
                    .addBound(
                        TypeMetadata.builder()
                            .setPackageName("com.bdl.annotation.processing.model")
                            .setName("Simple")
                            .build())
                    .addBound(
                        TypeMetadata.builder()
                            .setPackageName("com.bdl.annotation.processing.model")
                            .setName("Parameterized")
                            .addParam(
                                TypeMetadata.builder()
                                    .setIsTypeParameter(true)
                                    .setName("S")
                                    .build())
                            .build())
                    .build())
            .build();
    assertThat(type).isEqualTo(expected);
    assertThat(type.toString(Imports.empty()))
        .isEqualTo("com.bdl.annotation.processing.model.ParameterizedMultibound<S, T>");
    assertThat(type.toString(Imports.empty(), true))
        .isEqualTo(
            "com.bdl.annotation.processing.model.ParameterizedMultibound"
                + "<S, T extends com.bdl.annotation.processing.model.Simple & com.bdl.annotation.processing.model.Parameterized<S>>");
}
@Test
// Doubly-nested class resolved from the element model: outer classes are
// recorded innermost-out, and the rendering re-joins them outermost-first.
public void testNestedClasses() {
    TypeElement element =
        elements.getTypeElement("com.bdl.annotation.processing.model.TopLevel.Outer.Inner");
    TypeMetadata type = TypeMetadata.fromElement(element);
    assertThat(type)
        .isEqualTo(
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("Inner")
                .addOuterClass("Outer")
                .addOuterClass("TopLevel")
                .build());
    assertThat(type.toString(Imports.empty()))
        .isEqualTo("com.bdl.annotation.processing.model.TopLevel.Outer.Inner");
    assertThat(type.toString(Imports.empty(), true))
        .isEqualTo("com.bdl.annotation.processing.model.TopLevel.Outer.Inner");
}
@Test
public void testNestedClasses_fromType() {
    // Same nesting expectation as testNestedClasses, but built from a
    // runtime Class object instead of a TypeElement.
    TypeMetadata actual = TypeMetadata.from(TopLevel.Outer.Inner.class);

    TypeMetadata expected =
        TypeMetadata.builder()
            .setPackageName("com.bdl.annotation.processing.model")
            .setName("Inner")
            .addOuterClass("Outer")
            .addOuterClass("TopLevel")
            .build();
    assertThat(actual).isEqualTo(expected);

    // No type parameters, so both renderings agree.
    String qualified = "com.bdl.annotation.processing.model.TopLevel.Outer.Inner";
    assertThat(actual.toString(Imports.empty())).isEqualTo(qualified);
    assertThat(actual.toString(Imports.empty(), true)).isEqualTo(qualified);
}
  @Test
  public void testAllTypes() {
    // getAllTypes() flattens the type plus all parameter bounds into the set of
    // referenced concrete types; bare type variables (S, T) are not included.
    TypeElement element =
        elements.getTypeElement("com.bdl.annotation.processing.model.ParameterizedMultibound");
    TypeMetadata type = TypeMetadata.fromElement(element);
    assertThat(type.getAllTypes())
        .containsExactly(
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("Parameterized")
                .build(),
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("ParameterizedMultibound")
                .build(),
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("Simple")
                .build());
  }
  @Test
  public void testComplexParameterization() {
    // Models: ComplexParameterized<X, Y extends Comparable<Y>, Z extends List<Y>>.
    // One type variable (Z) is bounded by a parameterized type that references
    // another type variable (Y).
    TypeElement element =
        elements.getTypeElement("com.bdl.annotation.processing.model.ComplexParameterized");
    TypeMetadata type = TypeMetadata.fromElement(element);
    assertThat(type)
        .isEqualTo(
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("ComplexParameterized")
                .addParam(TypeMetadata.builder().setIsTypeParameter(true).setName("X").build())
                .addParam(
                    TypeMetadata.builder()
                        .setIsTypeParameter(true)
                        .setName("Y")
                        .addBound(
                            TypeMetadata.builder()
                                .setPackageName("java.lang")
                                .setName("Comparable")
                                .addParam(
                                    TypeMetadata.builder()
                                        .setIsTypeParameter(true)
                                        .setName("Y")
                                        .build())
                                .build())
                        .build())
                .addParam(
                    TypeMetadata.builder()
                        .setIsTypeParameter(true)
                        .setName("Z")
                        .addBound(
                            TypeMetadata.builder()
                                .setPackageName("java.util")
                                .setName("List")
                                .addParam(
                                    TypeMetadata.builder()
                                        .setIsTypeParameter(true)
                                        .setName("Y")
                                        .build())
                                .build())
                        .build())
                .build());
    assertThat(type.toString(Imports.empty()))
        .isEqualTo("com.bdl.annotation.processing.model.ComplexParameterized<X, Y, Z>");
    // Note: java.lang.Comparable renders unqualified while java.util.List keeps
    // its package prefix under an empty import set.
    assertThat(type.toString(Imports.empty(), true))
        .isEqualTo(
            "com.bdl.annotation.processing.model.ComplexParameterized<"
                + "X, Y extends Comparable<Y>, Z extends java.util.List<Y>>");
  }
  @Test
  public void testTypeConversion() {
    // convertTypeParams renames the declared variables X, Y, Z to A, B, C and
    // must also rewrite every occurrence of the old names inside the bounds
    // (Comparable<Y> -> Comparable<B>, List<Y> -> List<B>).
    TypeElement element =
        elements.getTypeElement("com.bdl.annotation.processing.model.ComplexParameterized");
    TypeMetadata type = TypeMetadata.fromElement(element);
    type =
        type.convertTypeParams(
            ImmutableList.of(
                TypeMetadata.simpleTypeParam("A"),
                TypeMetadata.simpleTypeParam("B"),
                TypeMetadata.simpleTypeParam("C")));
    assertThat(type)
        .isEqualTo(
            TypeMetadata.builder()
                .setPackageName("com.bdl.annotation.processing.model")
                .setName("ComplexParameterized")
                .addParam(TypeMetadata.builder().setIsTypeParameter(true).setName("A").build())
                .addParam(
                    TypeMetadata.builder()
                        .setIsTypeParameter(true)
                        .setName("B")
                        .addBound(
                            TypeMetadata.builder()
                                .setPackageName("java.lang")
                                .setName("Comparable")
                                .addParam(
                                    TypeMetadata.builder()
                                        .setIsTypeParameter(true)
                                        .setName("B")
                                        .build())
                                .build())
                        .build())
                .addParam(
                    TypeMetadata.builder()
                        .setIsTypeParameter(true)
                        .setName("C")
                        .addBound(
                            TypeMetadata.builder()
                                .setPackageName("java.util")
                                .setName("List")
                                .addParam(
                                    TypeMetadata.builder()
                                        .setIsTypeParameter(true)
                                        .setName("B")
                                        .build())
                                .build())
                        .build())
                .build());
    assertThat(type.toString(Imports.empty()))
        .isEqualTo("com.bdl.annotation.processing.model.ComplexParameterized<A, B, C>");
    assertThat(type.toString(Imports.empty(), true))
        .isEqualTo(
            "com.bdl.annotation.processing.model.ComplexParameterized<"
                + "A, B extends Comparable<B>, C extends java.util.List<B>>");
  }
  @Test
  public void testArrayType() {
    // Locates the HasFields.array field by name and checks that its mirror maps
    // to the STRING constant wrapped once by arrayOf().
    TypeElement hasFields =
        elements.getTypeElement("com.bdl.annotation.processing.model.HasFields");
    Element field = null;
    for (Element element : hasFields.getEnclosedElements()) {
      if (element.getSimpleName().toString().equals("array")) {
        field = element;
        break;
      }
    }
    Preconditions.checkState(field != null, "Unable to find field HasFields.array");
    TypeMetadata type = TypeMetadata.fromType(field.asType());
    assertThat(type).isEqualTo(TypeMetadata.STRING.arrayOf());
    assertThat(type.toString(Imports.empty())).isEqualTo("String[]");
  }
  @Test
  public void testMultipleArrayType() {
    // Multi-dimensional arrays nest arrayOf() once per dimension: the int[][][]
    // field must round-trip through three wrappings and render as "int[][][]".
    TypeElement hasFields =
        elements.getTypeElement("com.bdl.annotation.processing.model.HasFields");
    Element field = null;
    for (Element element : hasFields.getEnclosedElements()) {
      if (element.getSimpleName().toString().equals("threeDArray")) {
        field = element;
        break;
      }
    }
    Preconditions.checkState(field != null, "Unable to find field HasFields.threeDArray");
    TypeMetadata type = TypeMetadata.fromType(field.asType());
    assertThat(type).isEqualTo(TypeMetadata.INT.arrayOf().arrayOf().arrayOf());
    assertThat(type.toString(Imports.empty())).isEqualTo("int[][][]");
  }
}
|
SubjeBilisim/apidaora
|
apidaora/exceptions.py
|
<reponame>SubjeBilisim/apidaora
from dataclasses import dataclass
from typing import Any, Dict, Optional, Sequence
from .header import Header
class APIDaoraError(Exception):
    """Base class for every error raised by the apidaora framework."""

    ...
class MethodNotFoundError(APIDaoraError):
    """Raised when a route exists but not for the requested HTTP method."""

    ...
class PathNotFoundError(APIDaoraError):
    """Raised when no route matches the requested path."""

    ...
class InvalidReturnError(APIDaoraError):
    """Raised when a handler returns a value of an unsupported type.

    Expected construction: ``InvalidReturnError(returned_value, handler)``.
    """

    def __str__(self) -> str:
        handler = self.args[1]
        value = self.args[0]
        return (
            f"handler_name='{handler.__name__}', "
            f"return_type='{type(value).__name__}', "
            f"return_value='{value}'"
        )
@dataclass
class BadRequestError(APIDaoraError):
    """Client-side (HTTP 400-style) failure with structured details."""

    # Machine-readable identifier of what failed validation.
    name: str
    # Arbitrary extra context about the failure.
    info: Dict[str, Any]
    # Optional response headers to attach to the error response.
    headers: Optional[Sequence[Header]] = None

    def __str__(self) -> str:
        return f"name='{self.name}', info={self.info}"

    @property
    def dict(self) -> Dict[str, Any]:
        # Serializable view of the error; 'headers' is deliberately excluded.
        return dict(name=self.name, info=self.info)
class InvalidTasksRepositoryError(APIDaoraError):
    """Raised when a background-task repository is misconfigured or unusable."""

    ...
class InvalidRouteArgumentsError(APIDaoraError):
    """Raised when a route is declared with invalid arguments."""

    ...
class InvalidPathError(APIDaoraError):
    """Raised when a declared path pattern is malformed."""

    ...
|
Domaman202/DmNCaCuTi
|
src/main/java/ru/DmN/cacuti/mixin/PlayerManagerMixin.java
|
<filename>src/main/java/ru/DmN/cacuti/mixin/PlayerManagerMixin.java
package ru.DmN.cacuti.mixin;
import com.mojang.authlib.GameProfile;
import net.minecraft.block.Blocks;
import net.minecraft.network.ClientConnection;
import net.minecraft.network.packet.s2c.play.TitleS2CPacket;
import net.minecraft.server.PlayerManager;
import net.minecraft.server.network.ServerPlayerEntity;
import net.minecraft.text.LiteralText;
import net.minecraft.text.Text;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import ru.DmN.cacuti.Main;
import java.net.SocketAddress;
import java.util.concurrent.CompletableFuture;
import static ru.DmN.cacuti.Main.unsafe;
@Mixin(PlayerManager.class)
public class PlayerManagerMixin {
    // Lets players on the cool-down list re-join immediately: returning null
    // from checkCanJoin means "no kick reason", i.e. the join is allowed.
    // The Unsafe write appears to reset a per-player cool-down counter at the
    // stored address -- TODO confirm what the mapped long actually points at.
    @Inject(method = "checkCanJoin", at = @At("HEAD"), cancellable = true)
    void connectJoin(SocketAddress address, GameProfile profile, CallbackInfoReturnable<Text> cir) {
        if (Main.coolDownPlayerList.containsKey(profile.getId())) {
            unsafe.putInt(Main.coolDownPlayerList.get(profile.getId()), 0);
            unsafe.storeFence(); // publish the write to other threads
            cir.setReturnValue(null);
            cir.cancel();
        }
    }

    // Runs after vanilla join handling: flags the player as not yet
    // authenticated, makes them invulnerable until login and sends the
    // /login-or-/register prompt (message text is user-facing Russian).
    @Inject(method = "onPlayerConnect", at = @At("TAIL"))
    public void onPlayerConnect(ClientConnection connection, ServerPlayerEntity player, CallbackInfo ci) {
        Main.getPlayer.get(player).set(false);
        player.setInvulnerable(true);
        player.sendMessage(new LiteralText("§9Добро пожаловать на CaCuTi!\n§eПожалуйста войдите с помощью /login или зарегестрируйтесь /register"), false);
        World world = player.world;
        // If the player joined while standing inside a nether portal, remove
        // the portal block(s) and relight the frame fire 10 s later --
        // presumably to stop unauthenticated players idling in a portal;
        // NOTE(review): block updates happen off the server thread here.
        if (world.getBlockState(player.getBlockPos()).getBlock() == Blocks.NETHER_PORTAL) {
            CompletableFuture.runAsync(() -> {
                try {
                    BlockPos pos = player.getBlockPos();
                    if (world.getBlockState(pos.up()).getBlock() == Blocks.NETHER_PORTAL)
                        world.setBlockState(pos.up(), Blocks.AIR.getDefaultState());
                    world.setBlockState(pos, Blocks.AIR.getDefaultState());
                    Thread.sleep(10000);
                    world.setBlockState(pos, Blocks.FIRE.getDefaultState());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });
        }
    }

    // Clears the authenticated flag when the player disconnects.
    @Inject(method = "remove", at = @At("TAIL"))
    public void remove(ServerPlayerEntity player, CallbackInfo ci) {
        Main.getPlayer.get(player).set(false);
    }
}
|
vicpatel/corteza-server
|
auth/auth.go
|
<reponame>vicpatel/corteza-server
package auth
import (
"context"
"embed"
"fmt"
"github.com/Masterminds/sprig"
"github.com/cortezaproject/corteza-server/auth/external"
"github.com/cortezaproject/corteza-server/auth/handlers"
"github.com/cortezaproject/corteza-server/auth/oauth2"
"github.com/cortezaproject/corteza-server/auth/request"
"github.com/cortezaproject/corteza-server/auth/settings"
"github.com/cortezaproject/corteza-server/pkg/actionlog"
"github.com/cortezaproject/corteza-server/pkg/auth"
"github.com/cortezaproject/corteza-server/pkg/options"
"github.com/cortezaproject/corteza-server/pkg/version"
"github.com/cortezaproject/corteza-server/store"
systemService "github.com/cortezaproject/corteza-server/system/service"
"github.com/cortezaproject/corteza-server/system/types"
"github.com/go-chi/chi"
oauth2def "github.com/go-oauth2/oauth2/v4"
"go.uber.org/zap"
"html/template"
"net/http"
"strconv"
"strings"
"time"
)
type (
	// service wires together the building blocks of the auth subsystem:
	// HTTP handlers, logging, configuration, runtime settings and storage.
	service struct {
		handlers *handlers.AuthHandlers // login/oauth2/etc. request handlers
		log      *zap.Logger
		opt      options.AuthOpt    // static configuration from the environment
		settings *settings.Settings // runtime-updatable settings (see UpdateSettings)
		store    store.Storer
	}
)
//go:embed assets/public
var publicAssets embed.FS
// New initializes Auth service that orchestrates session manager, oauth2 manager and http request handlers
// New initializes the Auth service that orchestrates the session manager,
// oauth2 manager and HTTP request handlers. It loads the configured default
// client (if any) and the HTML templates, either embedded or from disk.
func New(ctx context.Context, log *zap.Logger, s store.Storer, opt options.AuthOpt) (svc *service, err error) {
	var (
		tpls templateExecutor
		defClient *types.AuthClient
	)

	log = log.Named("auth")

	ctx = actionlog.RequestOriginToContext(ctx, actionlog.RequestOrigin_Auth)

	svc = &service{
		opt: opt,
		log: log,
		store: s,
		settings: &settings.Settings{ /* all disabled by default. */ },
	}

	// Use a modified logger for the rest of the initialization.
	if opt.LogEnabled {
		log = log.WithOptions(zap.AddStacktrace(zap.PanicLevel))
	} else {
		log = zap.NewNop()
	}

	sesManager := request.NewSessionManager(s, opt, log)

	oauth2Manager := oauth2.NewManager(
		opt,
		&oauth2.ContextClientStore{},
		&oauth2.CortezaTokenStore{Store: s},
	)

	oauth2Server := oauth2.NewServer(oauth2Manager)

	// Called after oauth2 authorization request is validated
	// We'll try to get valid user out of the session or redirect user to login page
	oauth2Server.SetUserAuthorizationHandler(oauth2.NewUserAuthorizer(
		sesManager,
		handlers.GetLinks().Login,
		handlers.GetLinks().OAuth2AuthorizeClient,
	))

	oauth2Server.SetClientAuthorizedHandler(func(id string, grant oauth2def.GrantType) (bool, error) {
		// this is a bit silly and a bad design of the oauth2 server lib
		// why do we need to keep on load the client??
		var (
			clientID uint64
			client *types.AuthClient
			err error
		)

		clientID, err = strconv.ParseUint(id, 10, 64)
		if err != nil {
			return false, fmt.Errorf("could not authorize client: %w", err)
		}

		client, err = store.LookupAuthClientByID(ctx, s, clientID)
		if err != nil {
			return false, fmt.Errorf("could not authorize client: %w", err)
		}

		// each client only has 1 valid grant type (+ refresh_token)!
		if client.ValidGrant != grant.String() && oauth2def.Refreshing != grant {
			return false, fmt.Errorf("client does not support %s flow", grant)
		}

		return true, nil
	})

	oauth2Server.SetClientScopeHandler(func(id, ss string) (allowed bool, err error) {
		// this is a bit silly and a bad design of the oauth2 server lib
		// why do we need to keep on load the client??
		var (
			clientID uint64
			client *types.AuthClient
		)

		clientID, err = strconv.ParseUint(id, 10, 64)
		if err != nil {
			return false, fmt.Errorf("could not authorize client: %w", err)
		}

		client, err = store.LookupAuthClientByID(ctx, s, clientID)
		if err != nil {
			return false, fmt.Errorf("could not authorize client: %w", err)
		}

		// ensure all requested scopes are allowed on a client
		for _, scope := range strings.Split(ss, " ") {
			if !auth.CheckScope(client.Scope, scope) {
				return false, fmt.Errorf("client does not allow use of '%s' scope", scope)
			}
		}

		return true, nil
	})

	oauth2Server.SetExtensionFieldsHandler(func(ti oauth2def.TokenInfo) (fieldsValue map[string]interface{}) {
		fieldsValue = make(map[string]interface{})
		handlers.SubSplit(ti, fieldsValue)
		fieldsValue["refresh_token_expires_in"] = int(ti.GetRefreshExpiresIn() / time.Second)

		// BUGFIX: use a locally scoped error. This callback runs per-request,
		// long after New has returned; the original code assigned to New's
		// named return value `err`, clobbering it from concurrent requests.
		if perr := handlers.Profile(ctx, ti, fieldsValue); perr != nil {
			log.Error("failed to add profile data", zap.Error(perr))
		}

		return
	})

	if opt.DefaultClient != "" {
		// default client will help streamline authorization with default clients
		defClient, err = store.LookupAuthClientByHandle(ctx, s, opt.DefaultClient)
		if err != nil {
			return nil, fmt.Errorf("cannot load default client: %w", err)
		}
	}

	var (
		tplBase = template.New("").
			Funcs(sprig.FuncMap()).
			Funcs(template.FuncMap{
				"version": func() string { return version.Version },
				"buildtime": func() string { return version.BuildTime },
				"links": handlers.GetLinks,
			})

		tplLoader templateLoader
	)

	if len(opt.AssetsPath) > 0 {
		tplLoader = func(t *template.Template) (tpl *template.Template, err error) {
			if tpl, err = t.Clone(); err != nil {
				return nil, fmt.Errorf("can not clone templates: %w", err)
			} else {
				return tpl.ParseGlob(opt.AssetsPath + "/templates/*.tpl")
			}
		}
		log.Info("loading assets from filesystem", zap.String("path", opt.AssetsPath))
	} else {
		tplLoader = EmbeddedTemplates
		log.Info("using embedded assets")
	}

	if !opt.DevelopmentMode || len(opt.AssetsPath) == 0 {
		log.Info("initializing templates without reloading (production mode)")
		tpls, err = NewStaticTemplates(tplBase, tplLoader)
		if err != nil {
			return nil, fmt.Errorf("can not load templates: %w", err)
		}
	} else {
		log.Info("initializing reloadable templates (development mode)")
		tpls = NewReloadableTemplates(tplBase, tplLoader)
	}

	svc.handlers = &handlers.AuthHandlers{
		Log: log,
		Templates: tpls,
		SessionManager: sesManager,
		OAuth2: oauth2Server,
		AuthService: systemService.DefaultAuth,
		UserService: systemService.DefaultUser,
		ClientService: &clientService{s},
		TokenService: &tokenService{s},
		DefaultClient: defClient,
		Opt: svc.opt,
		Settings: svc.settings,
	}

	external.Init(log, sesManager.Store())

	return
}
// UpdateSettings swaps in a new settings snapshot, logging every field that
// changed and re-registering external (goth) providers when their count differs.
func (svc *service) UpdateSettings(s *settings.Settings) {
	prev := svc.settings

	if prev.LocalEnabled != s.LocalEnabled {
		svc.log.Debug("setting changed", zap.Bool("localEnabled", s.LocalEnabled))
	}

	if prev.SignupEnabled != s.SignupEnabled {
		svc.log.Debug("setting changed", zap.Bool("signupEnabled", s.SignupEnabled))
	}

	if prev.EmailConfirmationRequired != s.EmailConfirmationRequired {
		svc.log.Debug("setting changed", zap.Bool("emailConfirmationRequired", s.EmailConfirmationRequired))
	}

	if prev.PasswordResetEnabled != s.PasswordResetEnabled {
		svc.log.Debug("setting changed", zap.Bool("passwordResetEnabled", s.PasswordResetEnabled))
	}

	if prev.ExternalEnabled != s.ExternalEnabled {
		svc.log.Debug("setting changed", zap.Bool("externalEnabled", s.ExternalEnabled))
	}

	if prev.MultiFactor != s.MultiFactor {
		svc.log.Debug("setting changed", zap.Any("mfa", s.MultiFactor))
	}

	// NOTE: providers are compared by count only; an in-place swap of one
	// provider for another does not trigger re-registration.
	if len(prev.Providers) != len(s.Providers) {
		svc.log.Debug("setting changed", zap.Int("providers", len(s.Providers)))
		external.SetupGothProviders(svc.opt.ExternalRedirectURL, s.Providers...)
	}

	svc.settings = s
	svc.handlers.Settings = s
}
// Watch starts the background garbage-collection loop. It returns
// immediately; the loop stops when ctx is cancelled.
func (svc *service) Watch(ctx context.Context) {
	go svc.gc(ctx)
}
// gc runs one garbage-collection pass at startup and then repeats it every
// GarbageCollectorInterval until ctx is cancelled. Intervals below one minute
// disable the periodic collection entirely (startup collection still runs).
func (svc service) gc(ctx context.Context) {
	svc.log.Info("running startup garbage collection")
	go svc.gcSessions(ctx)
	go svc.gcOAuth2Tokens(ctx)

	i := svc.opt.GarbageCollectorInterval
	if i < time.Minute {
		// BUGFIX: return here as the message promises. The original fell
		// through to time.NewTicker(i), which panics on non-positive
		// durations and would otherwise tick at a pathological rate.
		svc.log.Warn("garbage collection interval less than 1 minute, disabling")
		return
	}
	svc.log.Info("starting garbage collecting process", zap.Duration("interval", i))

	tck := time.NewTicker(i)
	defer tck.Stop()

	for {
		select {
		case <-ctx.Done():
			svc.log.Info("stopping gc", zap.Error(ctx.Err()))
			return

		case <-tck.C:
			svc.log.Info("garbage collector")
			go svc.gcSessions(ctx)
			go svc.gcOAuth2Tokens(ctx)
			// BUGFIX: keep looping. The original returned here, so periodic
			// collection only ever ran once after the first interval.
		}
	}
}
// gcSessions purges expired auth sessions from the store; failures are
// logged but never propagated.
func (svc service) gcSessions(ctx context.Context) {
	if err := store.DeleteExpiredAuthSessions(ctx, svc.store); err != nil {
		svc.log.Error("failed to collect session garbage", zap.Error(err))
	}
}
// gcOAuth2Tokens purges expired oauth2 tokens from the store; failures are
// logged but never propagated.
func (svc service) gcOAuth2Tokens(ctx context.Context) {
	if err := store.DeleteExpiredAuthOA2Tokens(ctx, svc.store); err != nil {
		svc.log.Error("failed to collect oauth2 token garbage", zap.Error(err))
	}
}
// MountHttpRoutes attaches the auth request handlers and the public static
// asset route to the given router.
func (svc service) MountHttpRoutes(r chi.Router) {
	svc.handlers.MountHttpRoutes(r)

	const uriRoot = "/auth/assets/public"

	// Serve assets from the filesystem when a path is configured or we are in
	// development mode; otherwise fall back to the assets embedded in the binary.
	if svc.opt.AssetsPath != "" || svc.opt.DevelopmentMode {
		root := strings.TrimRight(svc.opt.AssetsPath, "/") + "/public"
		r.Handle(uriRoot+"/*", http.StripPrefix(uriRoot, http.FileServer(http.Dir(root))))
		return
	}

	r.Handle(uriRoot+"/*", http.StripPrefix("/auth/", http.FileServer(http.FS(publicAssets))))
}
//func (svc service) WellKnownOpenIDConfiguration() http.HandlerFunc {
// return func(w http.ResponseWriter, r *http.Request) {
// json.NewEncoder(w).Encode(map[string]interface{}{
// "issuer": svc.opt.BaseURL,
// "authorization_endpoint": svc.opt.BaseURL + "/oauth2/authorize",
// "token_endpoint": svc.opt.BaseURL + "/oauth2/token",
// "jwks_uri": svc.opt.BaseURL + "/oauth2/public-keys", // @todo
// "subject_types_supported": []string{"public"},
// "response_types_supported": []string{"public"},
// "id_token_signing_alg_values_supported": []string{"RS256", "HS512"},
// })
//
// w.Header().Set("Content-Type", "application/json")
// }
//}
|
qussarah/declare
|
idea/testData/refactoring/move/kotlin/moveFile/moveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir/before/test/Bar.java
|
<filename>idea/testData/refactoring/move/kotlin/moveFile/moveFileAndDirWithJavaFileReferringToPackageFragementWithUnmatchedDir/before/test/Bar.java
package test;
// Intentionally empty fixture class referenced by the Kotlin move-file refactoring test.
public class Bar {
}
|
99cm/open
|
backend/app/controllers/spree/admin/orders/customer_details_controller.rb
|
# frozen_string_literal: true
module Spree
  module Admin
    module Orders
      # Admin screen for editing an order's customer details: email, user
      # association and the billing/shipping addresses.
      class CustomerDetailsController < Spree::Admin::BaseController
        rescue_from Spree::Order::InsufficientStock, with: :insufficient_stock_error

        before_action :load_order

        def show
          edit
        end

        def edit
          country_iso = default_country_iso
          @order.build_bill_address(country_iso: country_iso) if @order.bill_address.nil?
          @order.build_ship_address(country_iso: country_iso) if @order.ship_address.nil?
          # BUGFIX: the original assigned the undefined local `iso`, which
          # raised NameError whenever an existing address had no country.
          # Use the default ISO code looked up above.
          @order.bill_address.country_iso = country_iso if @order.bill_address.country.nil?
          @order.ship_address.country_iso = country_iso if @order.ship_address.country.nil?
        end

        def update
          if @order.update_cart(order_params)
            if should_associate_user?
              requested_user = Spree.user_class.find(params[:user_id])
              # Second argument: only overwrite the order email when blank.
              @order.associate_user!(requested_user, @order.email.blank?)
            end

            # Advance past the address step and refresh rates once we have one.
            if @order.address?
              @order.next
              @order.refresh_shipment_rates
            end

            flash[:success] = t('spree.customer_details_updated')
            redirect_to edit_admin_order_url(@order)
          else
            render action: :edit
          end
        end

        private

        # Strong parameters for the customer-details form.
        def order_params
          params.require(:order).permit(
            :email, :user_id, :use_billing,
            bill_address_attributes: permitted_address_attributes,
            ship_address_attributes: permitted_address_attributes
          )
        end

        def load_order
          @order = Order.includes(:adjustments).find_by!(number: params[:order_id])
        end

        def model_class
          Spree::Order
        end

        # Associate only non-guest checkouts where a different user was chosen.
        def should_associate_user?
          params[:guest_checkout] == "false" && params[:user_id] && params[:user_id].to_i != @order.user_id
        end

        def insufficient_stock_error
          flash[:error] = t('spree.insufficient_stock_for_order')
          redirect_to edit_admin_order_customer_url(@order)
        end
      end
    end
  end
end
|
ComputerScienceTrolls/simpleGameGL
|
gameEngine/gameEngine/Colliders/AbstractCollider.h
|
<reponame>ComputerScienceTrolls/simpleGameGL<gh_stars>0
#ifndef ABSTRACT_COLLIDER_H
#define ABSTRACT_COLLIDER_H
#include <iostream>
#include <vector>
#include <glm/glm.hpp>
#include "../sprite_renderer.h"
#include "../MovingSceneObject.h"
#include "../DrawSceneObject.h"
//only Needed for poly collider
#include "Edge.h"
// Forward declaration: colliders can be tested against sprites directly.
class AbstractSprite;

// Base interface for every collider shape (box, circle, polygon, ...).
// Virtual inheritance lets concrete colliders combine the movement and
// drawing scene-object hierarchies without duplicated base subobjects.
class AbstractCollider : virtual public MovingSceneObject, virtual public DrawSceneObject
{
	class SpriteRender;
	public:
		AbstractCollider();
		// Collision checks default to "no hit"; each concrete collider only
		// overrides the overloads it actually supports.
		virtual bool collide(std::vector<AbstractCollider*>) { return false; };
		virtual bool collide(AbstractCollider*) { return false; };
		virtual bool collide(AbstractSprite*) { return false; };
		// Position/size of the sprite the collider is attached to.
		virtual glm::vec2 getSpriteCenterPos() { return glm::vec2(0, 0); };
		virtual glm::vec2 getSpritePos() { return glm::vec2(0, 0); };
		virtual glm::vec2 getSpriteSize() { return glm::vec2(0, 0); };
		//virtual void Draw(SpriteRenderer &renderer) = 0;
		//virtual void Update() {};
		// Hooks used by the polygon collider (axis projection / vertex data).
		virtual std::vector<double> project(glm::vec2) { return std::vector<double>(); }
		virtual std::vector<glm::vec2> getVerticies() { return std::vector<glm::vec2>(); }
		virtual std::vector<Edge*> getEdges() { return std::vector<Edge*>(); }
		virtual void updateVecs() { }
		// Hook used by the circle collider.
		virtual float getRadius() {return 0;};
		// Runtime type tag set by subclasses (only defined here).
		virtual std::string getType();
		~AbstractCollider();

	protected:
		std::string type;
		// presumably excludes the collider from movement updates -- confirm
		bool staticState;

	private:
		SpriteRender *render;
};
#endif
|
lingzhou2018/hal_uwp5
|
drivers/include/hal_sfc.h
|
/*
* Copyright (c) 2018, UNISOC Incorporated
*
* SPDX-License-Identifier: Apache-2.0
*/
#ifndef __MARLIN3_HAL_SFC_H
#define __MARLIN3_HAL_SFC_H
#ifdef __cplusplus
extern "C" {
#endif
#include <zephyr/types.h>
#include <arch/arm/cortex_m/exc.h>
#include <irq.h>
#include "uwp_hal.h"
/* Extended SPI-flash configuration: supply voltage, density and padding. */
typedef struct SPIFLASH_ExtCfg {
	int voltage;
	u32_t desity; /* flash density; field name kept as-is (typo in original API) */
	u32_t reserved1;
	u32_t reserved2;
	u32_t reserved3;
	u32_t reserved4;
	u32_t reserved5;
} *Spiflash_ExtCfg_PRT;

/* NOR-flash partition layout: sector geometry plus the start addresses of
 * the well-known regions (NV data, product info, user memory, bootloader). */
typedef struct nor_flash_config_s {
	u32_t bank_num;
	u32_t sect_num;
	u32_t file_sect_num;
	u32_t sect_size;
	u32_t start_addr;
	u32_t efs_start_addr;
	u32_t flash_size;
	u32_t fixnv_addr;
	u32_t prodinfo_addr;
	u32_t mmi_res;
	u32_t umem_addr;
	u32_t umem_size;
	u32_t spload_addr;
	u32_t ps_addr;
} NOR_FLASH_CONFIG_T, *NOR_FLASH_CONFIG_PTR;

/* Download-file layout descriptor, delimited by magic markers at both ends. */
typedef struct DFILE_CONFIG_Tag {
	u32_t magic_first;
	u32_t magic_second;
	u32_t image_addr;
	u32_t res_addr;
	u32_t nv_addr;
	u32_t dsp_addr;
	u32_t reserved2;
	u32_t ext[24];
	u32_t magic_end;
} DFILE_CONFIG_T;

/* A run of equally sized erase regions on the flash device. */
struct spi_flash_region {
	unsigned int count;
	unsigned int size;
};

/* Read command variants: plain/fast SPI, dual/quad I/O and QPI modes. */
typedef enum READ_CMD_TYPE_E_TAG {
	READ_SPI = 0,
	READ_SPI_FAST,
	READ_SPI_2IO,
	READ_SPI_4IO,
	READ_QPI_FAST,
	READ_QPI_4IO,
} READ_CMD_TYPE_E;
/*
 * Runtime descriptor for one SPI-flash chip: identification, geometry and a
 * vtable of driver operations. The *_noxip/*_sec variants presumably address
 * non-XIP and secure access paths respectively -- confirm against the driver.
 */
struct spi_flash {
	u32_t cs;            /* chip-select index */
	const char *name;    /* part name from the params table */
	u32_t size;          /* total capacity in bytes */
	u32_t page_size;
	u32_t sector_size;
	u32_t dummy_bytes;
	u8_t work_mode;
	u8_t support_4addr;  /* non-zero when 4-byte addressing is available */
	int spi_rw_mode;

	/* Read/write/erase operations; all return 0 on success (by convention). */
	int (*read_noxip) (struct spi_flash * flash, u32_t address,
			u8_t * buf, u32_t buf_size, READ_CMD_TYPE_E type);
	int (*read) (struct spi_flash * flash, u32_t offset, u32_t * buf,
			u32_t dump_len, READ_CMD_TYPE_E type);
	int (*write) (struct spi_flash * flash, u32_t offset, u32_t len,
			const void *buf);
	int (*read_sec_noxip) (struct spi_flash * flash, u8_t * buf,
			u32_t buf_size, READ_CMD_TYPE_E type);
	int (*read_sec) (struct spi_flash * flash, u32_t offset, u32_t * buf,
			u32_t dump_len, READ_CMD_TYPE_E type);
	int (*write_sec) (struct spi_flash * flash, u32_t offset, u32_t len,
			const void *buf);
	int (*erase) (struct spi_flash * flash, u32_t offset, u32_t len);
	int (*erase_chip) (struct spi_flash * flash);

	/* Device control: reset, suspend/resume, write-enable, block protection
	 * and mode switches (quad I/O, QPI, on-the-fly encryption). */
	int (*reset) (void);
	int (*suspend) (struct spi_flash * flash);
	int (*resume) (struct spi_flash * flash);
	int (*wren)(struct spi_flash *flash);
	int (*lock) (struct spi_flash * flash, u32_t offset, u32_t len);
	int (*unlock) (struct spi_flash * flash, u32_t offset, u32_t len);
	int (*set_4io) (struct spi_flash * flash, u32_t op);
	int (*set_qpi) (struct spi_flash * flash, u32_t op);
	int (*set_encrypt) (u32_t op);

	void *priv; /* driver-private state */
};
/* Per-manufacturer table of supported parts, keyed by JEDEC manufacturer id. */
struct spi_flash_spec_s {
	u16_t id_manufacturer;
	u16_t table_num;               /* number of entries in `table` */
	struct spi_flash_params *table;
};

/* Static capabilities of one flash part, matched by its JEDEC id codes. */
struct spi_flash_params {
	u16_t idcode1;
	u16_t idcode2;
	u16_t page_size;
	u16_t sector_size;
	u16_t nr_sectors;
	u16_t nr_blocks;
	u16_t support_qpi;     /* non-zero when the part supports QPI mode */
	u16_t read_freq_max;
	u16_t dummy_clocks;
	const char *name;
};

/* Pairs the runtime descriptor with the static parameters it was built from. */
struct spi_flash_struct {
	struct spi_flash flash;
	const struct spi_flash_params *params;
};
void uwp_spi_xip_init(void);
__ramfunc void spiflash_select_xip(u32_t op);
__ramfunc void spiflash_set_clk(void);
__ramfunc int uwp_spi_flash_init(struct spi_flash *flash,
struct spi_flash_params **params);
void spi_flash_free(struct spi_flash *flash);
void uwp_spi_dump(u32_t arg_in);
/*
 * Disable interrupts via the Cortex-M PRIMASK register and return the
 * previous PRIMASK value so the caller can restore it with
 * irq_unlock_primask(). "mrs" reads the old mask, "cpsid i" masks all
 * configurable-priority interrupts; the "memory" clobber keeps memory
 * accesses from being reordered across the critical-section boundary.
 */
static ALWAYS_INLINE unsigned int irq_lock_primask(void)
{
	unsigned int key;

	__asm__ volatile("mrs %0, PRIMASK;"
		"cpsid i"
		: "=r" (key)
		:
		: "memory");

	return key;
}
/*
 * Restore the interrupt state saved by irq_lock_primask(). A non-zero key
 * means interrupts were already disabled before the lock was taken, so they
 * must stay disabled; otherwise re-enable them with "cpsie i".
 */
static ALWAYS_INLINE void irq_unlock_primask(unsigned int key)
{
	if (key) {
		return;
	}

	__asm__ volatile("cpsie i" : : : "memory");
}
#ifdef __cplusplus
}
#endif
#endif
|
dariovillalta/INTEGEVAL
|
app/components/Umbral/Umbral.js
|
<reponame>dariovillalta/INTEGEVAL<filename>app/components/Umbral/Umbral.js<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var _react = _interopRequireDefault(require("react"));
var _mssql = _interopRequireDefault(require("mssql"));
var _VistaUmbral = _interopRequireDefault(require("./VistaUmbral.js"));
var _CrearUmbral = _interopRequireDefault(require("./CrearUmbral.js"));
var _ListaRestoUmbrales = _interopRequireDefault(require("./ListaRestoUmbrales.js"));
// ---------------------------------------------------------------------------
// Babel-generated module/class helpers (compiled output of a JSX source file).
// Do not edit by hand; regenerate from the original component instead.
// ---------------------------------------------------------------------------
// Wraps CommonJS exports so `import x from ...` interop works.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }

// Spec-compliant `typeof` shim that reports "symbol" on older engines.
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }

// Throws when a transpiled class constructor is called without `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

// Defines (static and prototype) members with standard property descriptors.
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }

function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }

// Implements the `super()` return-value semantics of derived constructors.
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }

function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }

// Guards against using `this` before `super()` has been called.
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }

// Wires up the prototype chain for transpiled `class X extends Y`.
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }

function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
// Hard-coded placeholder threshold sections shown before real data is loaded
// from the database. NOTE(review): "MONO 1" appears twice with identical
// color/width -- looks like sample data; confirm against the source JSX.
var secciones = [{
  nombre: "MONO 1",
  color: "#00c853",
  width: "25"
}, {
  nombre: "MONO 2",
  color: "#ffab40",
  width: "50"
}, {
  nombre: "MONO 1",
  color: "#00c853",
  width: "25"
}];
// Accumulators shared by the asynchronous range-loading callbacks below:
// seccionesConRango[umbral][seccion].rangos is filled in as queries resolve.
var seccionesConRango = [];
// Counters used to detect when every pending range query has completed.
var posicionesInsertadasRango = 0,
    posicionesAInsertarRango = 0;
var Umbral =
/*#__PURE__*/
function (_React$Component) {
_inherits(Umbral, _React$Component);
function Umbral(props) {
var _this;
_classCallCheck(this, Umbral);
_this = _possibleConstructorReturn(this, _getPrototypeOf(Umbral).call(this, props));
_this.state = {
umbrales: [],
secciones: []
};
_this.traerUmbrales = _this.traerUmbrales.bind(_assertThisInitialized(_this));
_this.inicioTraerSecciones = _this.inicioTraerSecciones.bind(_assertThisInitialized(_this));
_this.traerSeccion = _this.traerSeccion.bind(_assertThisInitialized(_this));
_this.inicioTraerSeccionRango = _this.inicioTraerSeccionRango.bind(_assertThisInitialized(_this));
_this.traerSeccionRango = _this.traerSeccionRango.bind(_assertThisInitialized(_this));
_this.inicioCrearArregloSeccionRango = _this.inicioCrearArregloSeccionRango.bind(_assertThisInitialized(_this));
_this.ingresarSeccion = _this.ingresarSeccion.bind(_assertThisInitialized(_this));
return _this;
}
_createClass(Umbral, [{
key: "componentDidMount",
value: function componentDidMount() {
this.traerUmbrales();
}
}, {
key: "traerUmbrales",
value: function traerUmbrales() {
var _this2 = this;
var transaction = new _mssql["default"].Transaction(this.props.pool);
transaction.begin(function (err) {
var rolledBack = false;
transaction.on('rollback', function (aborted) {
rolledBack = true;
});
var request = new _mssql["default"].Request(transaction);
request.query("select * from Umbral where variableID = " + _this2.props.idVariable + " and tablaVariable = '" + _this2.props.tablaVariable + "'", function (err, result) {
if (err) {
if (!rolledBack) {
console.log(err);
_this2.props.showMessage("Error", "No se pudo traer valores de la tabla de umbrales.", true, false, {});
transaction.rollback(function (err) {});
}
} else {
transaction.commit(function (err) {
_this2.setState({
umbrales: result.recordset
}, _this2.inicioTraerSecciones);
});
}
});
}); // fin transaction
}
}, {
key: "inicioTraerSecciones",
value: function inicioTraerSecciones() {
var posicionesInsertadas = [];
seccionesConRango = [];
for (var i = 0; i < this.state.umbrales.length; i++) {
this.traerSeccion(this.state.umbrales[i], i, this.state.umbrales.length, posicionesInsertadas);
}
;
}
}, {
key: "traerSeccion",
value: function traerSeccion(umbral, index, ultimoIndex, posicionesInsertadas) {
var _this3 = this;
var transaction = new _mssql["default"].Transaction(this.props.pool);
transaction.begin(function (err) {
var rolledBack = false;
transaction.on('rollback', function (aborted) {
rolledBack = true;
});
var request = new _mssql["default"].Request(transaction);
request.query("select * from SeccionUmbral where umbralID = " + umbral.ID, function (err, result) {
if (err) {
if (!rolledBack) {
console.log(err);
_this3.props.showMessage("Error", "No se pudo traer valores de la tabla de secciones del umbral.", true, false, {});
transaction.rollback(function (err) {});
}
} else {
transaction.commit(function (err) {
if (seccionesConRango[index] == undefined) seccionesConRango[index] = [];
seccionesConRango[index] = result.recordset;
posicionesInsertadas.push(index);
if (posicionesInsertadas.length == ultimoIndex) _this3.inicioTraerSeccionRango();
});
}
});
}); // fin transaction
}
}, {
key: "inicioTraerSeccionRango",
value: function inicioTraerSeccionRango() {
posicionesInsertadasRango = 0, posicionesAInsertarRango = 0;
for (var i = 0; i < seccionesConRango.length; i++) {
for (var j = 0; j < seccionesConRango[i].length; j++) {
posicionesAInsertarRango++;
this.traerSeccionRango(seccionesConRango[i][j], i, j);
}
;
}
;
}
}, {
key: "traerSeccionRango",
// Loads the RangoSeccionUmbral rows for one section and attaches them
// as `.rangos` on seccionesConRango[indexUmbral][indexRango].  When the
// last pending query finishes it chains into
// inicioCrearArregloSeccionRango().
value: function traerSeccionRango(seccionRango, indexUmbral, indexRango) {
  var _this4 = this;
  var transaction = new _mssql["default"].Transaction(this.props.pool);
  transaction.begin(function (err) {
    var rolledBack = false;
    transaction.on('rollback', function (aborted) {
      rolledBack = true;
    });
    var request = new _mssql["default"].Request(transaction);
    // NOTE(review): IDs concatenated into SQL — prefer request.input()
    // parameters if these values can ever be user-controlled.
    request.query("select * from RangoSeccionUmbral where umbralID = " + seccionRango.umbralID + " and seccionUmbralID = " + seccionRango.ID, function (err, result) {
      if (err) {
        // Even on failure the completion counter advances, so the
        // final chaining check below can still fire.
        posicionesInsertadasRango++;
        console.log(err);
        _this4.props.showMessage("Error", "No se pudo traer valores de la tabla de rangos de sección del umbral.", true, false, {});
        if (!rolledBack) {
          transaction.rollback(function (err) {});
        }
      } else {
        transaction.commit(function (err) {
          posicionesInsertadasRango++;
          if (seccionesConRango[indexUmbral] == undefined) seccionesConRango[indexUmbral] = [];
          if (seccionesConRango[indexUmbral][indexRango] == undefined) seccionesConRango[indexUmbral][indexRango] = [];
          // Attach the ranges as a property on the stored section entry.
          seccionesConRango[indexUmbral][indexRango].rangos = result.recordset;
          if (posicionesInsertadasRango == posicionesAInsertarRango) _this4.inicioCrearArregloSeccionRango();
        });
      }
    });
  }); // fin transaction
}
}, {
key: "inicioCrearArregloSeccionRango",
// Flattens seccionesConRango into a single list of ranges sorted by
// value (via ingresarSeccion), computes each range's width as a
// percentage of the total span, and publishes the result to state.
value: function inicioCrearArregloSeccionRango() {
  var arrOrdenado = [];
  for (var i = 0; i < seccionesConRango.length; i++) {
    for (var j = 0; j < seccionesConRango[i].length; j++) {
      for (var k = 0; k < seccionesConRango[i][j].rangos.length; k++) {
        // Insert keeps arrOrdenado sorted by valorMinimo/valorMaximo.
        this.ingresarSeccion(seccionesConRango[i][j].rangos[k], arrOrdenado, seccionesConRango[i][j].nombre, seccionesConRango[i][j].color);
      }
      ;
    }
    ;
  }
  ; //calculando porcentaje dentro del total
  //suma del total
  // The repeated `var i` / `var totSec` declarations below are benign
  // (var hoisting) — artifact of the compiled output.
  var sumTot = 0;
  for (var i = 0; i < arrOrdenado.length; i++) {
    var totSec = arrOrdenado[i].valorMaximo - arrOrdenado[i].valorMinimo;
    sumTot += totSec;
  }
  ;
  // NOTE(review): if sumTot is 0 every width becomes NaN — confirm
  // upstream data guarantees a non-empty, non-degenerate range set.
  for (var i = 0; i < arrOrdenado.length; i++) {
    var totSec = arrOrdenado[i].valorMaximo - arrOrdenado[i].valorMinimo;
    arrOrdenado[i].width = totSec / sumTot * 100;
  }
  ;
  this.setState({
    secciones: arrOrdenado
  });
}
}, {
key: "ingresarSeccion",
value: function ingresarSeccion(seccionNueva, arrSecciones, nombre, color) {
if (arrSecciones.length == 0) {
arrSecciones.push(seccionNueva);
arrSecciones[arrSecciones.length - 1].nombre = nombre;
arrSecciones[arrSecciones.length - 1].color = color;
return;
}
var encontroPos = false;
for (var i = 0; i < arrSecciones.length; i++) {
if (seccionNueva.valorMaximo < arrSecciones[i].valorMinimo) {
encontroPos = true;
break;
}
}
;
arrSecciones.splice(i, 0, seccionNueva);
arrSecciones[i].nombre = nombre;
arrSecciones[i].color = color;
}
}, {
key: "render",
// Renders the navbar, the list of remaining umbrales, the threshold
// visualization (fed from state.secciones built by
// inicioCrearArregloSeccionRango), and the creation form.  Compiled
// output — JSX source lives elsewhere.
value: function render() {
  return _react["default"].createElement("div", null, this.props.navbar, _react["default"].createElement(_ListaRestoUmbrales["default"], {
    lista: this.props.lista
  }, " "), _react["default"].createElement(_VistaUmbral["default"], {
    umbrales: this.state.secciones
  }, " "), _react["default"].createElement(_CrearUmbral["default"], {
    idVariable: this.props.idVariable,
    pool: this.props.pool,
    tablaVariable: this.props.tablaVariable,
    tituloUmbral: this.props.tituloUmbral,
    traerUmbralesPADRE: this.traerUmbrales,
    maximoUmbral: this.props.maximoUmbral,
    showSuccesMessage: this.props.showSuccesMessage,
    showMessage: this.props.showMessage
  }, " "));
}
}]);
return Umbral;
}(_react["default"].Component);
exports["default"] = Umbral;
//# sourceMappingURL=Umbral.js.map
|
naojiangzhalie666/morebit-android-app
|
app/src/main/java/com/zjzy/morebit/adapter/FloorAdapter.java
|
package com.zjzy.morebit.adapter;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v4.app.FragmentManager;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.zjzy.morebit.R;
import com.zjzy.morebit.pojo.FloorChildInfo;
import com.zjzy.morebit.pojo.FloorInfo;
import com.zjzy.morebit.utils.C;
import com.zjzy.morebit.utils.DensityUtil;
import com.zjzy.morebit.view.CustomRecyclerView;
import com.zjzy.morebit.view.GridItemDecoration;
import java.util.ArrayList;
import java.util.List;
/**
 * @Author: wuchaowen
 * @Description: Adapter that renders the home-page "floor" sections.
 * Each {@link FloorInfo} maps to one of four layouts (horizontal banner,
 * 2-column grid, 3-column grid, 2-column "floor four" grid) selected by
 * its showType; unknown types fall back to an empty layout.  An optional
 * nested "recommend" strip (childTwo) is bound below each floor.
 * @Time: $date$ $time$
 **/
public class FloorAdapter extends RecyclerView.Adapter<FloorAdapter.ViewHolder> {
    private LayoutInflater mInflater;
    private List<FloorInfo> mDatas = new ArrayList<>();
    private Context mContext;
    // Kept for parity with callers even though it is not used internally.
    private FragmentManager fm;

    public FloorAdapter(Context mContext, FragmentManager fm) {
        mInflater = LayoutInflater.from(mContext);
        this.mContext = mContext;
        this.fm = fm;
    }

    /** Removes every floor and refreshes the list. */
    public void clearData() {
        mDatas.clear();
        notifyDataSetChanged();
    }

    /** Replaces the current floors with {@code data}; a null list is ignored. */
    public void setData(List<FloorInfo> data) {
        if (data != null) {
            mDatas.clear();
            mDatas.addAll(data);
            notifyDataSetChanged();
        }
    }

    @NonNull
    @Override
    public ViewHolder onCreateViewHolder(@NonNull ViewGroup viewGroup, int viewType) {
        // Every branch inflates a layout, so the view is never null here
        // (the original null-check was dead code).
        View view;
        if (viewType == C.ViewType.FLOOR_ONE) {
            //横 (horizontal banner row)
            view = mInflater.inflate(R.layout.vlayout_floor_one, viewGroup, false);
        } else if (viewType == C.ViewType.FLOOR_TWO) {
            view = mInflater.inflate(R.layout.vlayout_floor_two, viewGroup, false);
        } else if (viewType == C.ViewType.FLOOR_THREE) {
            view = mInflater.inflate(R.layout.vlayout_floor_three, viewGroup, false);
        } else if (viewType == C.ViewType.FLOOR_FOUR) {
            view = mInflater.inflate(R.layout.vlayout_floor_four, viewGroup, false);
        } else {
            // Unknown showType → empty placeholder.
            view = mInflater.inflate(R.layout.view_empty, viewGroup, false);
        }
        return new ViewHolder(view);
    }

    @Override
    public void onBindViewHolder(@NonNull ViewHolder holder, int position) {
        int viewType = getItemViewType(position);
        FloorInfo floorInfo = mDatas.get(position);
        final List<FloorChildInfo> floorChildInfos = mDatas.get(position).getChild();
        FloorInfo childTwo = mDatas.get(position).getChildTwo();
        if (viewType == C.ViewType.FLOOR_ONE) {
            // Horizontal scrolling banner of children.
            if (null != floorChildInfos && floorChildInfos.size() > 0) {
                setFloorTitle(holder, floorInfo);
                FloorOneAdapter floorHorizontalImageAdapter = new FloorOneAdapter(mContext);
                floorHorizontalImageAdapter.setmPosition(position + 1);
                LinearLayoutManager linearLayoutManager = new LinearLayoutManager(mContext);
                linearLayoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
                holder.mBannerRv.setLayoutManager(linearLayoutManager);
                holder.mBannerRv.setAdapter(floorHorizontalImageAdapter);
                floorHorizontalImageAdapter.setData(floorChildInfos);
            } else {
                holder.floorOneLayout.removeAllViews();
            }
        } else if (viewType == C.ViewType.FLOOR_TWO) {
            setFloorTitle(holder, floorInfo);
            if (null != floorChildInfos && floorChildInfos.size() > 0) {
                // Only bind full rows: drop the trailing children that do not
                // fill a complete 2-wide row.
                int count = getRatio(floorChildInfos.size(), 2);
                if (count >= 2) {
                    List<FloorChildInfo> spiltfloorChildInfos = floorChildInfos.subList(0, count);
                    FloorTwoAdapter floorTwoAdapter = new FloorTwoAdapter(mContext);
                    floorTwoAdapter.setmPosition(position + 1);
                    GridLayoutManager floorTwoLayoutManager = new GridLayoutManager(mContext, 2);
                    holder.mTwoRv.setLayoutManager(floorTwoLayoutManager);
                    GridItemDecoration divider = new GridItemDecoration.Builder(mContext)
                            .setHorizontalSpan(R.dimen.grid_line)
                            .setVerticalSpan(R.dimen.grid_line)
                            .setColorResource(R.color.color_ECECEC)
                            .setShowLastLine(false)
                            .setHorizontalPadding(DensityUtil.dip2px(mContext, 12), DensityUtil.dip2px(mContext, 12))
                            .setVerticaPadding(DensityUtil.dip2px(mContext, 26), DensityUtil.dip2px(mContext, 3))
                            .build();
                    // Guard against stacking decorations on recycled holders.
                    if (holder.mTwoRv.getItemDecorationCount() == 0) {
                        holder.mTwoRv.addItemDecoration(divider);
                    }
                    holder.mTwoRv.setAdapter(floorTwoAdapter);
                    floorTwoAdapter.setData(spiltfloorChildInfos);
                } else {
                    holder.floorTwoLayout.removeAllViews();
                }
            } else {
                holder.floorTwoLayout.removeAllViews();
            }
        } else if (viewType == C.ViewType.FLOOR_THREE) {
            setFloorTitle(holder, floorInfo);
            if (null != floorChildInfos && floorChildInfos.size() > 0) {
                int count = getRatio(floorChildInfos.size(), 3);
                if (count >= 3) {
                    List<FloorChildInfo> spiltfloorChildInfos = floorChildInfos.subList(0, count);
                    FloorThreeAdapter floorthreeAdapter = new FloorThreeAdapter(mContext);
                    floorthreeAdapter.setmPosition(position + 1);
                    GridLayoutManager floorThreeLayoutManager = new GridLayoutManager(mContext, 3);
                    holder.mThreeRv.setLayoutManager(floorThreeLayoutManager);
                    GridItemDecoration divider = new GridItemDecoration.Builder(mContext)
                            .setHorizontalSpan(R.dimen.grid_line_floor_three)
                            .setVerticalSpan(R.dimen.grid_line_floor_three)
                            .setColorResource(R.color.color_F8F8F8)
                            .setShowLastLine(false)
                            .setHorizontalPadding(DensityUtil.dip2px(mContext, 12), DensityUtil.dip2px(mContext, 12))
                            .setVerticaPadding(DensityUtil.dip2px(mContext, 12), DensityUtil.dip2px(mContext, 12))
                            .build();
                    if (holder.mThreeRv.getItemDecorationCount() == 0) {
                        holder.mThreeRv.addItemDecoration(divider);
                    }
                    holder.mThreeRv.setAdapter(floorthreeAdapter);
                    floorthreeAdapter.setData(spiltfloorChildInfos);
                } else {
                    holder.floorThreeLayout.removeAllViews();
                }
            } else {
                holder.floorThreeLayout.removeAllViews();
            }
        } else if (viewType == C.ViewType.FLOOR_FOUR) {
            setFloorTitle(holder, floorInfo);
            if (null != floorChildInfos && floorChildInfos.size() > 0) {
                int count = getRatio(floorChildInfos.size(), 2);
                // Fix: reuse the computed count instead of calling
                // getRatio(...) a second time with the same arguments.
                if (count >= 2) {
                    List<FloorChildInfo> spiltfloorChildInfos = floorChildInfos.subList(0, count);
                    FloorFourAdapter floorFourAdapter = new FloorFourAdapter(mContext);
                    floorFourAdapter.setmPosition(position + 1);
                    GridLayoutManager floorFourLayoutManager = new GridLayoutManager(mContext, 2);
                    holder.mfloorFourRv.setLayoutManager(floorFourLayoutManager);
                    GridItemDecoration divider = new GridItemDecoration.Builder(mContext)
                            .setHorizontalSpan(R.dimen.grid_line)
                            .setVerticalSpan(R.dimen.grid_line)
                            .setColorResource(R.color.color_ECECEC)
                            .setShowLastLine(false)
                            .setHorizontalPadding(DensityUtil.dip2px(mContext, 12), DensityUtil.dip2px(mContext, 12))
                            .setVerticaPadding(DensityUtil.dip2px(mContext, 11), DensityUtil.dip2px(mContext, 13))
                            .build();
                    if (holder.mfloorFourRv.getItemDecorationCount() == 0) {
                        holder.mfloorFourRv.addItemDecoration(divider);
                    }
                    holder.mfloorFourRv.setAdapter(floorFourAdapter);
                    floorFourAdapter.setData(spiltfloorChildInfos);
                } else {
                    holder.floorFourLayout.removeAllViews();
                }
            } else {
                holder.floorFourLayout.removeAllViews();
            }
        }
        setRecommodRv(holder, childTwo);
    }

    /**
     * Binds the optional horizontal "recommend" strip below a floor, or
     * hides it when {@code childTwo} has no children.
     */
    private void setRecommodRv(@NonNull ViewHolder holder, FloorInfo childTwo) {
        if (null != childTwo) {
            final List<FloorChildInfo> infosRecommonds = childTwo.getChild();
            String recommodMainTitle = childTwo.getMainTitle();
            String recommodSubTitle = childTwo.getSubTitle();
            if (null != infosRecommonds && infosRecommonds.size() > 0) {
                if (null != holder.recommod_external_layout) {
                    holder.recommod_external_layout.setVisibility(View.VISIBLE);
                }
                holder.recommondLayout.setVisibility(View.VISIBLE);
                //为你推荐
                FloorRecommondAdapter recommondAdapter = new FloorRecommondAdapter(mContext, childTwo.getShowType());
                LinearLayoutManager linearLayoutManager = new LinearLayoutManager(mContext);
                linearLayoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
                holder.recommondFourRv.setLayoutManager(linearLayoutManager);
                holder.recommondFourRv.setAdapter(recommondAdapter);
                recommondAdapter.setData(infosRecommonds);
                if (!TextUtils.isEmpty(recommodMainTitle)) {
                    holder.recommondMainTitle.setText(recommodMainTitle);
                }
                if (!TextUtils.isEmpty(recommodSubTitle)) {
                    holder.recommondSubTitle.setText(recommodSubTitle);
                }
            } else {
                //隐藏 (hide the strip entirely)
                if (null != holder.recommod_external_layout) {
                    holder.recommod_external_layout.setVisibility(View.GONE);
                }
                holder.recommondLayout.setVisibility(View.GONE);
            }
        } else {
            if (null != holder.recommod_external_layout) {
                holder.recommod_external_layout.setVisibility(View.GONE);
            }
        }
    }

    /** Shows/hides the floor's main and sub titles according to the model. */
    private void setFloorTitle(@NonNull ViewHolder holder, FloorInfo floorInfo) {
        if (floorInfo.getMainTitleShow() == 1 && !TextUtils.isEmpty(floorInfo.getMainTitle())) {
            holder.mainTitle.setVisibility(View.VISIBLE);
            holder.mainTitle.setText(floorInfo.getMainTitle());
        } else {
            holder.mainTitle.setVisibility(View.GONE);
        }
        if (!TextUtils.isEmpty(floorInfo.getSubTitle())) {
            holder.subTitle.setVisibility(View.VISIBLE);
            holder.subTitle.setText(floorInfo.getSubTitle());
        } else {
            holder.subTitle.setVisibility(View.GONE);
        }
    }

    @Override
    public int getItemViewType(int position) {
        // showType already stores the C.ViewType constant, so the original
        // switch that re-assigned identical values was a no-op.  Unknown
        // values fall through to the empty layout in onCreateViewHolder.
        return mDatas.get(position).getShowType();
    }

    /**
     * 获取倍数 — returns the largest multiple of {@code ratio} that is
     * less than or equal to {@code size} (0 when {@code size <= 0}).
     * Replaces the original O(n) scan with constant-time arithmetic.
     */
    private int getRatio(int size, int ratio) {
        if (size <= 0) {
            return 0;
        }
        return size - (size % ratio);
    }

    @Override
    public int getItemCount() {
        return mDatas.size();
    }

    /** Caches every sub-view a floor layout may contain; unused ids are null. */
    static class ViewHolder extends RecyclerView.ViewHolder {
        private CustomRecyclerView mBannerRv;
        private TextView mainTitle;
        private TextView subTitle;
        private RecyclerView mTwoRv;
        private RecyclerView mThreeRv;
        private RecyclerView mfloorFourRv;
        private LinearLayout floorTwoLayout;
        private LinearLayout floorThreeLayout;
        private LinearLayout floorFourLayout;
        private LinearLayout floorOneLayout;
        private RecyclerView recommondFourRv;
        private TextView recommondMainTitle;
        private TextView recommondSubTitle;
        private LinearLayout recommondLayout;
        private LinearLayout recommod_external_layout;

        public ViewHolder(View itemView) {
            super(itemView);
            mBannerRv = (CustomRecyclerView) itemView.findViewById(R.id.bannerRv);
            mainTitle = itemView.findViewById(R.id.mainTitle);
            subTitle = itemView.findViewById(R.id.subTitle);
            mTwoRv = itemView.findViewById(R.id.floorTwoRv);
            mThreeRv = itemView.findViewById(R.id.floorThreeRv);
            mfloorFourRv = itemView.findViewById(R.id.floorFourRv);
            floorTwoLayout = itemView.findViewById(R.id.floorTwoLayout);
            floorThreeLayout = itemView.findViewById(R.id.floorThreeLayout);
            floorFourLayout = itemView.findViewById(R.id.floorFourLayout);
            floorOneLayout = itemView.findViewById(R.id.floorOneLayout);
            recommondFourRv = itemView.findViewById(R.id.recommondRv);
            recommondMainTitle = itemView.findViewById(R.id.recommondMainTitle);
            recommondSubTitle = itemView.findViewById(R.id.recommondSubTitle);
            recommondLayout = itemView.findViewById(R.id.recommondLayout);
            recommod_external_layout = itemView.findViewById(R.id.recommod_layout);
        }
    }
}
|
dplbsd/soc2013
|
head/usr.sbin/apmd/contrib/pccardq.c
|
/* $FreeBSD: soc2013/dpl/head/usr.sbin/apmd/contrib/pccardq.c 208118 2010-05-14 14:26:56Z uqs $ */
#include <err.h>
#include <errno.h>
#include <limits.h>
#include <stdarg.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/un.h>
const char *const pccardd_file = "/var/tmp/.pccardd";
const char *prog = "pccardq";
const char *tmp_dir = "/tmp";
unsigned slot_map = ~0;
/*
 * Print a one-line usage synopsis for this program on stderr.
 */
void
usage(void)
{
    static const char *const usage_fmt = "usage: %s [-a] [-n] [-s slot]\n";

    fprintf(stderr, usage_fmt, prog);
}
/*
 * Parse the command line and initialize the globals `prog`, `tmp_dir`
 * and `slot_map`.  Returns 0 on success, -1 on a usage error.
 *
 *   -a       query all slots (slot_map = all bits set; the default)
 *   -n       print only the number of slots (slot_map = 0)
 *   -s slot  query a single slot; may be repeated to accumulate slots
 */
int
proc_arg(int ac, char **av)
{
    int rc = -1;
    int ch;
    /* basename of argv[0]; used in warnings and the socket name */
    char *p = strrchr(av[0], '/');
    prog = p ? p + 1 : av[0];
    /* honor TMPDIR for the temporary socket location */
    tmp_dir = getenv("TMPDIR") ? getenv("TMPDIR") : tmp_dir;
    while ((ch = getopt(ac, av, "ans:")) != -1) {
        switch (ch) {
        case 'a':
            slot_map = ~0;
            break;
        case 'n':
            slot_map = 0;
            break;
        case 's':
            {
                int n = atoi(optarg);
                /* reject slot numbers that would overflow the bitmap */
                if (n < 0 || n >= CHAR_BIT * sizeof slot_map) {
                    warnc(0, "Invalid slot number.");
                    usage();
                    goto out;
                }
                /* first -s switches from "all slots" to an explicit set */
                if (slot_map == ~0)
                    slot_map = 0;
                slot_map |= 1 << n;
            }
            break;
        default:
            usage();
            goto out;
        }
    }
    rc = 0;
out:
    return rc;
}
/*
 * Create a datagram AF_UNIX socket, bind it to a per-process temporary
 * path (returned via *path, strdup()ed — caller unlinks and frees),
 * and connect it to pccardd's well-known socket.  Returns the socket
 * descriptor, or -1 on failure (in which case the socket is closed).
 *
 * NOTE(review): the temporary name is built from pid+time in a shared
 * directory and the snprintf() result is not checked for truncation —
 * confirm this matches the security expectations of the callers.
 */
int
connect_to_pccardd(char **path)
{
    int so = -1;
    int pccardd_len;
    struct sockaddr_un pccardq;    /* our (client) address */
    struct sockaddr_un pccardd;    /* daemon's address */
    if ((so = socket(PF_UNIX, SOCK_DGRAM, 0)) < 0) {
        warn("socket");
        goto err;
    }
    /* unique client socket path: <tmp_dir>/<prog><pid><time> */
    snprintf(pccardq.sun_path, sizeof pccardq.sun_path,
        "%s/%s%ld%ld", tmp_dir, prog, (long) getpid(), (long) time(0));
    pccardq.sun_family = AF_UNIX;
    /* sun_len is the BSD convention: header offset plus path length */
    pccardq.sun_len = offsetof(struct sockaddr_un, sun_path) + strlen(pccardq.sun_path);
    if (bind(so, (struct sockaddr *) &pccardq, pccardq.sun_len) < 0) {
        warn("bind: %s", pccardq.sun_path);
        goto err;
    }
    /* hand the bound path back so main() can unlink it on exit */
    if ((*path = strdup(pccardq.sun_path)) == NULL) {
        warn("strdup");
        goto err;
    }
    pccardd_len = strlen(pccardd_file) + 1;
    if (pccardd_len > sizeof pccardd.sun_path) {
        warnc(0, "%s: too long", pccardd_file);
        goto err;
    }
    pccardd.sun_len = offsetof(struct sockaddr_un, sun_path) + pccardd_len;
    pccardd.sun_family = AF_UNIX;
    strcpy(pccardd.sun_path, pccardd_file);
    if (connect(so, (struct sockaddr *) &pccardd, pccardd.sun_len) < 0) {
        warn("connect: %s", pccardd_file);
        goto err;
    }
    return so;
err:
    if (so >= 0)
        close(so);
    return -1;
}
/*
 * Ask pccardd how many card slots exist.
 *
 * Sends the single-byte "S" command on the connected socket `so` and
 * parses the decimal reply.  Returns the slot count, or -1 on error.
 *
 * Fixes vs. original: the `< 1` write check made the `rv != 1` branch
 * unreachable (and misreported a 0-byte write via errno); the reply
 * was not NUL-terminated at the received length, so sscanf() could
 * scan uninitialized bytes on a short read.
 */
int
get_slot_number(int so)
{
    char buf[8];
    int rv;
    int nslot;
    if ((rv = write(so, "S", 1)) < 0) {
        warn("write");
        goto err;
    } else if (rv != 1) {
        warnc(0, "write: fail.");
        goto err;
    }
    /* leave room for the terminating NUL */
    if ((rv = read(so, buf, sizeof buf - 1)) < 0) {
        warn("read");
        goto err;
    }
    buf[rv] = '\0';
    if (sscanf(buf, "%d", &nslot) != 1) {
        warnc(0, "Invalid response.");
        goto err;
    }
    return nslot;
err:
    return -1;
}
/*
 * Slot states as reported by pccardd (the numeric field at the end of
 * the slot-info reply).  NOTE(review): these constants are currently
 * unreferenced — strstate() below matches the raw values 0/1/2
 * directly; consider using the enum there for consistency.
 */
enum {
    SLOT_EMPTY = 0,
    SLOT_FILLED = 1,
    SLOT_INACTIVE = 2,
    SLOT_UNDEFINED = 9
};
/*
 * Query pccardd for the state of slot `slot`.
 *
 * Sends "N<slot>" and parses the "<slot>~manuf~version~device~state"
 * reply.  On success the three strings are strdup()ed into *manuf,
 * *version and *device (caller frees) and *state receives the numeric
 * slot state.  Returns 0 on success, -1 on error; on error the three
 * output pointers are NULL.
 *
 * Fixes vs. original: the static reply buffer was never NUL-terminated
 * at the received length (stale bytes from a previous call could be
 * parsed); `s` could be NULL when passed to sscanf() (undefined
 * behavior); a partial strdup() failure leaked the earlier copies and
 * was followed by a dead re-check; the diagnostic used `%*s` (field
 * width) where `%.*s` (precision) was intended.
 */
int
get_slot_info(int so, int slot, char **manuf, char **version, char
    **device, int *state)
{
    int rc = -1;
    int rv;
    static char buf[1024];    /* shared reply buffer: not reentrant */
    int slen;
    char *s;
    char *sl;
    char *_manuf;
    char *_version;
    char *_device;
    *manuf = *version = *device = NULL;
    if ((slen = snprintf(buf, sizeof buf, "N%d", slot)) < 0) {
        warnc(0, "write");
        goto err;
    }
    if ((rv = write(so, buf, slen)) < 0) {
        warn("write");
        goto err;
    } else if (rv != slen) {
        warnc(0, "write");
        goto err;
    }
    /* leave room for the terminating NUL */
    if ((rv = read(so, buf, sizeof buf - 1)) < 0) {
        warn("read");
        goto err;
    }
    buf[rv] = '\0';
    /* tokenize "<slot>~manuf~version~device~state" in place */
    s = buf;
    if ((sl = strsep(&s, "~")) == NULL)
        goto parse_err;
    if (atoi(sl) != slot)
        goto parse_err;
    if ((_manuf = strsep(&s, "~")) == NULL)
        goto parse_err;
    if ((_version = strsep(&s, "~")) == NULL)
        goto parse_err;
    if ((_device = strsep(&s, "~")) == NULL)
        goto parse_err;
    if (s == NULL || sscanf(s, "%1d", state) != 1)
        goto parse_err;
    if (strchr(s, '~') != NULL)
        goto parse_err;
    if ((*manuf = strdup(_manuf)) == NULL ||
        (*version = strdup(_version)) == NULL ||
        (*device = strdup(_device)) == NULL) {
        warn("strdup");
        free(*manuf);
        free(*version);
        free(*device);
        *manuf = *version = *device = NULL;
        goto err;
    }
    rc = 0;
err:
    return rc;
parse_err:
    warnc(0, "Invalid response: %.*s", rv, buf);
    return rc;
}
/*
 * Map a numeric slot state to its human-readable name.  Any value
 * outside 0..2 is reported as "unknown".
 */
const char *
strstate(int state)
{
    static const char *const state_names[] = {
        "empty",        /* 0 */
        "filled",       /* 1 */
        "inactive",     /* 2 */
    };

    if (state >= 0 && state <= 2)
        return state_names[state];
    return "unknown";
}
/*
 * pccardq: query pccardd over its UNIX-domain socket.  With -n prints
 * only the slot count; otherwise prints one "~"-separated line per
 * selected slot.  The temporary client socket is unlinked on exit.
 *
 * NOTE(review): exits with status 0 even when a step failed — confirm
 * callers do not rely on the exit status.
 */
int
main(int ac, char **av)
{
    char *path = NULL;
    int so = -1;
    int nslot;
    int i;
    if (proc_arg(ac, av) < 0)
        goto out;
    if ((so = connect_to_pccardd(&path)) < 0)
        goto out;
    if ((nslot = get_slot_number(so)) < 0)
        goto out;
    if (slot_map == 0) {
        /* -n: just report how many slots exist */
        printf("%d\n", nslot);
    } else {
        for (i = 0; i < nslot; i++) {
            if ((slot_map & (1 << i))) {
                char *manuf;
                char *version;
                char *device;
                int state;
                if (get_slot_info(so, i, &manuf, &version, &device,
                    &state) < 0)
                    goto out;
                if (manuf == NULL || version == NULL || device == NULL)
                    goto out;
                printf("%d~%s~%s~%s~%s\n",
                    i, manuf, version, device, strstate(state));
                free(manuf);
                free(version);
                free(device);
            }
        }
    }
out:
    /* remove and release the temporary client socket path */
    if (path) {
        unlink(path);
        free(path);
    }
    if (so >= 0)
        close(so);
    exit(0);
}
|
iam-Legend/Project-Assembly
|
Source/FactoryGame/FGProjectile.h
|
<reponame>iam-Legend/Project-Assembly
// Copyright 1998-2016 Epic Games, Inc. All Rights Reserved.
#pragma once
#include "Array.h"
#include "UObject/Class.h"
#include "GameFramework/Actor.h"
#include "Equipment/FGWeaponProjectileFire.h"
#include "FGProjectile.generated.h"
// Base projectile actor: a sphere-collision + projectile-movement actor
// that reacts to impacts/bounces and can explode.  Declaration only —
// behavior lives in the matching .cpp, which is not part of this file.
UCLASS(config=Game)
class AFGProjectile : public AActor, public IFGSaveInterface
{
	GENERATED_BODY()
public:
	AFGProjectile();
	/** Decide on what properties to replicate */
	virtual void GetLifetimeReplicatedProps( TArray<FLifetimeProperty>& OutLifetimeProps ) const override;
	/** initial setup */
	//MODDING EDIT PostInitialize crashed when creating a blueprint child
	//virtual void PostInitializeComponents() override;
	//** Save Game Interface. Default is to not save, but the save interface is implemented here to allow for enabling in children (eg. FGNobeliskExplosive) */
	virtual bool ShouldSave_Implementation() const override;
	virtual bool NeedTransform_Implementation() override;
	//** End Save Game Interface */
	// Start AActor interface
	virtual float TakeDamage( float DamageAmount, struct FDamageEvent const& DamageEvent, class AController* EventInstigator, AActor* DamageCauser );
	/** Called when the lifespan of an actor expires (if he has one). */
	virtual void LifeSpanExpired();
	// End AActor interface
	/** handle hit */
	UFUNCTION()
	virtual void OnImpact( const FHitResult& hitResult );
	/** handle bounce */
	UFUNCTION()
	virtual void OnBounce( const FHitResult& hitResult, const FVector& hitVelocity );
	/** Returns CollisionComp subobject **/
	FORCEINLINE class USphereComponent* GetCollisionComp() const { return mCollisionComp; }
	/** Returns ProjectileMovement subobject **/
	FORCEINLINE class UProjectileMovementComponent* GetProjectileMovement() const { return mProjectileMovement; }
	/** Returns the location we are aiming for ( if any ) */
	UFUNCTION( BlueprintPure, Category = "Projectile" )
	FORCEINLINE FVector GetProjectileTargetLocation() { return mTargetLocation; }
	/** Returns the location we are aiming for ( if any ) */
	// NOTE(review): comment above looks copy-pasted from the getter —
	// this is a setter for the target location.
	UFUNCTION( BlueprintCallable, Category = "Projectile" )
	void SetTargetLocation( FVector targetLocation ) { mTargetLocation = targetLocation; }
	/** Function to set up explosion effects in Blueprint */
	UFUNCTION( BlueprintImplementableEvent, Category = "Projectile" )
	void PlayExplosionEffects();
	/** Assigns this projectile's data block (see mProjectileData). */
	void SetProjectileData( FProjectileData projectileData );
	/** Called when we attach this actor to something like the world, a factory, a character */
	UFUNCTION( BlueprintImplementableEvent, Category = "Projectile" )
	void PlayAttachEffect();
	/** Returns the collision sphere */
	// NOTE(review): duplicates GetCollisionComp() above; both return
	// mCollisionComp.
	UFUNCTION( BlueprintPure, Category = "Projectile" )
	FORCEINLINE USphereComponent* GetCollisionSphere(){ return mCollisionComp; }
	/** Sets the initial velocity so that it can be replicated to clients */
	UFUNCTION( BlueprintCallable, Category = "Projectile" )
	void SetInitialVelocity( FVector inVelocity );
protected:
	/** trigger explosion */
	virtual void DealExplosionDamage( const FHitResult& impact );
	/** Deal damage from the impact */
	virtual void DealImpactDamage( const FHitResult& impact );
	/** shutdown projectile and prepare for destruction */
	void DisableAndSetLifeSpan();
	/** [client] explosion happened */
	UFUNCTION()
	void OnRep_Exploded();
	/** Virtual function for any additional client side effect handling in child classes*/
	virtual void OnNotifiedExploded();
	/** attach this projectile to an enemy or alike */
	bool AttachProjectileToImpactActor( const FHitResult& impact );
	UFUNCTION()
	void OnRep_InitialVelocity();
public:
	/** This projectile is just used for cosmetics and shouldn't deal damage. Like on remote clients */
	bool mIsCosmeticProjectile;
protected:
	/** projectile data */
	UPROPERTY( SaveGame, EditDefaultsOnly, Category = "Projectile" )
	FProjectileData mProjectileData;
	/** did it explode? */
	UPROPERTY( Transient, ReplicatedUsing = OnRep_Exploded )
	bool mHasExploded;
	/** Used to get the velocity over to clients */
	UPROPERTY( ReplicatedUsing = OnRep_InitialVelocity )
	FVector mInitialVelocity;
private:
	/** Sphere collision component */
	UPROPERTY( VisibleDefaultsOnly, Category = "Projectile" )
	class USphereComponent* mCollisionComp;
	/** Projectile movement component */
	UPROPERTY( VisibleAnywhere, BlueprintReadOnly, Category = Movement, meta = (AllowPrivateAccess = "true") )
	class UProjectileMovementComponent* mProjectileMovement;
	/** Location we are aiming at ( if any ) */
	UPROPERTY( Replicated )
	FVector mTargetLocation;
	/** Indicates if we should explode if we are taking damage from same actor class as ourselves */
	UPROPERTY( EditDefaultsOnly, Category = "Projectile" )
	bool mCanTriggerExplodeBySameClass;
	/** Should the projectile explode when it dies of lifespan? */
	UPROPERTY( EditDefaultsOnly, Category = "Projectile" )
	bool mExplodeAtEndOfLife;
	/** Caching the PC so that we can do damage even without a valid weapon */
	UPROPERTY()
	class AFGPlayerController* mCachedPC;
	/** Was projectile fired by a weapon */
	bool mWasFiredByWeapon;
};
|
shinesolutions/swagger-aem
|
clients/java/generated/src/test/java/com/shinesolutions/swaggeraem4j/model/BundleDataPropTest.java
|
<reponame>shinesolutions/swagger-aem
/*
* Adobe Experience Manager (AEM) API
* Swagger AEM is an OpenAPI specification for Adobe Experience Manager (AEM) API
*
* The version of the OpenAPI document: 3.5.0-pre.0
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.shinesolutions.swaggeraem4j.model;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Model tests for BundleDataProp.
 *
 * NOTE: this file is produced by OpenAPI Generator ("Do not edit the
 * class manually" in the file header) — keep changes documentation-only
 * so the generator can round-trip it.  The test bodies are the
 * generator's TODO stubs and currently assert nothing.
 */
public class BundleDataPropTest {
    // Instantiated to verify the model's default constructor at class load.
    private final BundleDataProp model = new BundleDataProp();
    /**
     * Model tests for BundleDataProp
     */
    @Test
    public void testBundleDataProp() {
        // TODO: test BundleDataProp
    }
    /**
     * Test the property 'key'
     */
    @Test
    public void keyTest() {
        // TODO: test key
    }
    /**
     * Test the property 'value'
     */
    @Test
    public void valueTest() {
        // TODO: test value
    }
}
|
surajkr/azure-sdk-for-java
|
sdk/resourcemanager/azure-resourcemanager-resources/src/test/java/com/azure/resourcemanager/resources/implementation/TypeSerializationTests.java
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.resources.implementation;
import com.azure.core.management.serializer.SerializerFactory;
import com.azure.core.util.serializer.SerializerAdapter;
import com.azure.core.util.serializer.SerializerEncoding;
import com.azure.resourcemanager.resources.models.DeploymentProperties;
import com.azure.resourcemanager.resources.fluent.inner.DeploymentExtendedInner;
import com.azure.resourcemanager.resources.fluent.inner.DeploymentInner;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.lang.reflect.Field;
/**
 * Verifies that template keys containing dots/slashes survive management
 * serialization of a deployment request.
 */
public class TypeSerializationTests {

    @Test
    public void testDeploymentSerialization() throws Exception {
        final String template = "{ \"/subscriptions/<redacted>/resourceGroups/<redacted>/providers/Microsoft.ManagedIdentity/userAssignedIdentities/<redacted>\": {} }";

        DeploymentImpl deployment = new DeploymentImpl(new DeploymentExtendedInner(), "", null);
        deployment.withTemplate(template);

        SerializerAdapter adapter = SerializerFactory.createDefaultManagementSerializerAdapter();
        String serialized = adapter.serialize(createRequestFromInner(deployment), SerializerEncoding.JSON);
        Assertions.assertTrue(serialized.contains("Microsoft.ManagedIdentity"));
    }

    /**
     * Rebuilds the create/update request body from the deployment's private
     * inner state (read reflectively, since it is not exposed).
     */
    private static DeploymentInner createRequestFromInner(DeploymentImpl deployment) throws NoSuchFieldException, IllegalAccessException {
        Field parametersField = DeploymentImpl.class.getDeclaredField("deploymentCreateUpdateParameters");
        parametersField.setAccessible(true);
        DeploymentInner current = (DeploymentInner) parametersField.get(deployment);

        DeploymentProperties properties = new DeploymentProperties();
        properties.withMode(deployment.mode());
        properties.withTemplate(current.properties().template());
        properties.withTemplateLink(deployment.templateLink());
        properties.withParameters(deployment.parameters());
        properties.withParametersLink(deployment.parametersLink());
        return new DeploymentInner().withProperties(properties);
    }
}
|
herrgrossmann/jo-widgets
|
modules/examples/org.jowidgets.examples.common/src/main/java/org/jowidgets/examples/common/workbench/demo1/ImportantViewDemo1.java
|
/*
* Copyright (c) 2011, grossmann
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the jo-widgets.org nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL jo-widgets.org BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*/
package org.jowidgets.examples.common.workbench.demo1;
import org.jowidgets.api.color.Colors;
import org.jowidgets.api.image.IconsSmall;
import org.jowidgets.api.toolkit.Toolkit;
import org.jowidgets.api.widgets.IComposite;
import org.jowidgets.api.widgets.IContainer;
import org.jowidgets.api.widgets.blueprint.factory.IBluePrintFactory;
import org.jowidgets.common.image.IImageConstant;
import org.jowidgets.examples.common.workbench.base.AbstractDemoView;
import org.jowidgets.tools.layout.MigLayoutFactory;
import org.jowidgets.workbench.api.IViewContext;
/**
 * Demo workbench view: fills its container with a white composite that
 * shows a single centered label.
 */
public class ImportantViewDemo1 extends AbstractDemoView {

    public static final String ID = ImportantViewDemo1.class.getName();
    public static final String DEFAULT_LABEL = "View";
    public static final String DEFAULT_TOOLTIP = "View tooltip";
    public static final IImageConstant DEFAULT_ICON = IconsSmall.WARNING;

    public ImportantViewDemo1(final IViewContext context) {
        super(ID);

        final IBluePrintFactory blueprintFactory = Toolkit.getBluePrintFactory();

        final IContainer viewContainer = context.getContainer();
        viewContainer.setLayout(MigLayoutFactory.growingInnerCellLayout());

        final IComposite contentArea = viewContainer.add(
            blueprintFactory.composite().setBackgroundColor(Colors.WHITE),
            MigLayoutFactory.GROWING_CELL_CONSTRAINTS);
        contentArea.setLayout(MigLayoutFactory.growingInnerCellLayout());
        // Label text kept verbatim (including the original's spelling).
        contentArea.add(blueprintFactory.textLabel("Demo for component deacivate listener"), "alignx center, aligny center");
    }
}
|
echoprotocol/echo-studio-tools
|
remix-simulator/test/whisper.js
|
<filename>remix-simulator/test/whisper.js
/* global describe, before, it */
// Mocha suite: the Remix simulator must report whisper protocol version 5.
const Web3 = require('web3')
const RemixSim = require('../index.js')
const assert = require('assert')

const web3 = new Web3()

describe('Whisper', () => {
  before(() => {
    const provider = new RemixSim.Provider()
    web3.setProvider(provider)
  })

  it('should get correct remix simulator version', async () => {
    const version = await web3.shh.getVersion()
    assert.equal(version, 5)
  })
})
|
laffra/pava
|
pava/implementation/natives/sun/awt/Win32GraphicsDevice.py
|
<filename>pava/implementation/natives/sun/awt/Win32GraphicsDevice.py
def add_native_methods(clazz):
    """Attach stub implementations of sun.awt.Win32GraphicsDevice's native
    methods to ``clazz``.

    Every stub keeps the exact arity of the original native method and
    raises ``NotImplementedError`` when invoked.
    """

    def initDevice__int__(a0, a1):
        raise NotImplementedError()

    def isPixFmtSupported__int__int__(a0, a1, a2):
        raise NotImplementedError()

    def enterFullScreenExclusive__int__java_awt_peer_WindowPeer__(a0, a1, a2):
        raise NotImplementedError()

    def exitFullScreenExclusive__int__java_awt_peer_WindowPeer__(a0, a1, a2):
        raise NotImplementedError()

    def getCurrentDisplayMode__int__(a0, a1):
        raise NotImplementedError()

    def configDisplayMode__int__java_awt_peer_WindowPeer__int__int__int__int__(a0, a1, a2, a3, a4, a5, a6):
        raise NotImplementedError()

    def enumDisplayModes__int__java_util_ArrayList__(a0, a1, a2):
        raise NotImplementedError()

    # Each stub's __name__ matches the attribute name used by callers,
    # so a single setattr loop replaces the per-method assignments.
    for native in (
        initDevice__int__,
        isPixFmtSupported__int__int__,
        enterFullScreenExclusive__int__java_awt_peer_WindowPeer__,
        exitFullScreenExclusive__int__java_awt_peer_WindowPeer__,
        getCurrentDisplayMode__int__,
        configDisplayMode__int__java_awt_peer_WindowPeer__int__int__int__int__,
        enumDisplayModes__int__java_util_ArrayList__,
    ):
        setattr(clazz, native.__name__, native)
|
mactrix-markjohn/Stranger-Social-Platform
|
Stranger/app/src/main/java/com/mactrixapp/www/stranger/SearchGroup.java
|
<gh_stars>0
package com.mactrixapp.www.stranger;
import android.icu.util.Freezable;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.View;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import com.mactrixapp.www.stranger.Adapters.SearchGroupAdapter;
import com.mactrixapp.www.stranger.Model.Group;
import com.mactrixapp.www.stranger.Model.IsListContain;
import java.util.ArrayList;
/**
 * Activity that searches across both regular groups ("group") and strangers
 * groups ("strangersgroup") stored in Firebase Realtime Database. Results are
 * shown in a ListView and filtered live as the user types.
 */
public class SearchGroup extends AppCompatActivity
        implements NavigationView.OnNavigationItemSelectedListener {

    private EditText searchfield;   // text input that drives the live search
    private TextView searchcount;   // displays the current number of results
    private ListView searchlist;    // result list backed by SearchGroupAdapter
    private ArrayList<Group> groups;                // accumulated, de-duplicated results
    private DatabaseReference groupReference;       // DB node for regular groups
    private DatabaseReference stranGroupRef;        // DB node for strangers groups
    private IsListContain contain;                  // duplicate-detection helper

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_search_group);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        searchfield = (EditText)findViewById(R.id.searchfield);
        searchcount = (TextView)findViewById(R.id.searchcount);
        searchlist = (ListView)findViewById(R.id.searchlist);
        contain = new IsListContain();
        groupReference = FirebaseDatabase.getInstance().getReference().child(getString(R.string.group));
        stranGroupRef = FirebaseDatabase.getInstance().getReference().child(getString(R.string.strangersgroup));
        groups = new ArrayList<>();
        // Initial (unfiltered) load: mirror every regular group into the list.
        groupReference.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                for (DataSnapshot snapshot:dataSnapshot.getChildren()){
                    Group group = snapshot.getValue(Group.class);
                    if(group != null){
                        group.setGrouptype(getString(R.string.group));
                        // NOTE(review): 'groupname' is computed but unused in this listener.
                        String groupname = group.getName().toLowerCase();
                        if (!contain.isGroupContain(groups,group)) {
                            groups.add(group);
                        }
                    }
                }
                // Rebind the adapter and refresh the visible result count.
                SearchGroupAdapter groupAdapter = new SearchGroupAdapter(SearchGroup.this,groups);
                searchlist.setAdapter(groupAdapter);
                searchcount.setText(String.valueOf(searchlist.getCount()));
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) {
                // Errors are intentionally ignored; the list simply stays unchanged.
            }
        });
        // Initial (unfiltered) load: mirror every strangers group as well.
        stranGroupRef.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                for (DataSnapshot snapshot:dataSnapshot.getChildren()){
                    Group group = snapshot.getValue(Group.class);
                    if(group != null){
                        group.setGrouptype(getString(R.string.strangersgroup));
                        // NOTE(review): 'groupname' is computed but unused in this listener.
                        String groupname = group.getName().toLowerCase();
                        if (!contain.isGroupContain(groups,group)) {
                            groups.add(group);
                        }
                    }
                }
                SearchGroupAdapter groupAdapter = new SearchGroupAdapter(SearchGroup.this,groups);
                searchlist.setAdapter(groupAdapter);
                searchcount.setText(String.valueOf(searchlist.getCount()));
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) {
            }
        });
        // Live filtering: re-query both nodes whenever the search text changes.
        searchfield.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                // Reset the result list for the new query.
                // NOTE(review): each keystroke registers two brand-new ValueEventListeners
                // that are never removed, so listeners accumulate over time — likely a
                // leak; consider addListenerForSingleValueEvent(...) instead. TODO confirm.
                groups = new ArrayList<>();
                groupReference.addValueEventListener(new ValueEventListener() {
                    @Override
                    public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                        // Case-insensitive substring match against the group name.
                        String searchname = s.toString().toLowerCase();
                        for (DataSnapshot snapshot:dataSnapshot.getChildren()){
                            Group group = snapshot.getValue(Group.class);
                            if(group != null){
                                group.setGrouptype(getString(R.string.group));
                                String groupname = group.getName().toLowerCase();
                                if (groupname.contains(searchname)) {
                                    if (!contain.isGroupContain(groups,group)) {
                                        groups.add(group);
                                    }
                                }
                            }
                        }
                        SearchGroupAdapter groupAdapter = new SearchGroupAdapter(SearchGroup.this,groups);
                        searchlist.setAdapter(groupAdapter);
                        searchcount.setText(String.valueOf(searchlist.getCount()));
                    }
                    @Override
                    public void onCancelled(@NonNull DatabaseError databaseError) {
                    }
                });
                stranGroupRef.addValueEventListener(new ValueEventListener() {
                    @Override
                    public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                        String searchname = s.toString().toLowerCase();
                        for (DataSnapshot snapshot:dataSnapshot.getChildren()){
                            Group group = snapshot.getValue(Group.class);
                            if(group != null){
                                group.setGrouptype(getString(R.string.strangersgroup));
                                String groupname = group.getName().toLowerCase();
                                if (groupname.contains(searchname)) {
                                    if (!contain.isGroupContain(groups,group)) {
                                        groups.add(group);
                                    }
                                }
                            }
                        }
                        SearchGroupAdapter groupAdapter = new SearchGroupAdapter(SearchGroup.this,groups);
                        searchlist.setAdapter(groupAdapter);
                        searchcount.setText(String.valueOf(searchlist.getCount()));
                    }
                    @Override
                    public void onCancelled(@NonNull DatabaseError databaseError) {
                    }
                });
            }
            @Override
            public void afterTextChanged(Editable s) {
            }
        });
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Template-generated placeholder action.
                Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
                        .setAction("Action", null).show();
            }
        });
        DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(
                this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
        drawer.addDrawerListener(toggle);
        toggle.syncState();
        /*NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
        navigationView.setNavigationItemSelectedListener(this);*/
    }

    @Override
    public void onBackPressed() {
        // Drawer-aware back handling is disabled; plain back behavior is used.
        super.onBackPressed();
        /* DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        if (drawer.isDrawerOpen(GravityCompat.START)) {
            drawer.closeDrawer(GravityCompat.START);
        } else {
            super.onBackPressed();
        }*/
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.search_group, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @SuppressWarnings("StatementWithEmptyBody")
    @Override
    public boolean onNavigationItemSelected(MenuItem item) {
        // Handle navigation view item clicks here.
        // NOTE(review): template stubs — none of these branches do anything yet.
        int id = item.getItemId();
        if (id == R.id.nav_camera) {
            // Handle the camera action
        } else if (id == R.id.nav_gallery) {
        } else if (id == R.id.nav_slideshow) {
        } else if (id == R.id.nav_manage) {
        } else if (id == R.id.nav_share) {
        } else if (id == R.id.nav_send) {
        }
        DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        drawer.closeDrawer(GravityCompat.START);
        return true;
    }

    /** Layout click handler (wired from XML) that simply navigates back. */
    public void back(View view) {
        onBackPressed();
    }
}
|
sfpd/rlreloaded
|
cpp/3rdparty/ale_0_4/src/emucore/Cart4A50.cxx
|
//============================================================================
//
// SSSS tt lll lll
// SS SS tt ll ll
// SS tttttt eeee ll ll aaaa
// SSSS tt ee ee ll ll aa
// SS tt eeeeee ll ll aaaaa -- "An Atari 2600 VCS Emulator"
// SS SS tt ee ll ll aa aa
// SSSS ttt eeeee llll llll aaaaa
//
// Copyright (c) 1995-2007 by <NAME> and the Stella team
//
// See the file "license" for information on usage and redistribution of
// this file, and for a DISCLAIMER OF ALL WARRANTIES.
//
// $Id: Cart4A50.cxx,v 1.4 2007/01/14 16:17:52 stephena Exp $
//============================================================================
#include <cassert>
#include "System.hxx"
#include "Serializer.hxx"
#include "Deserializer.hxx"
#include "Cart4A50.hxx"
// NOTE: This is a stub implementation. The 4A50 bankswitching scheme is not
// supported by this build: every operation is a no-op, peek() always returns
// 0, and save/load/patch report failure. The ROM image passed to the
// constructor is intentionally ignored.
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Cartridge4A50::Cartridge4A50(const uInt8* image)
{
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Cartridge4A50::~Cartridge4A50()
{
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Human-readable cartridge type name used by the emulator UI/debugger.
const char* Cartridge4A50::name() const
{
  return "Cartridge4A50";
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
void Cartridge4A50::reset()
{
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Would normally map this cartridge into the 6502 address space; no-op here.
void Cartridge4A50::install(System& system)
{
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Reads always return 0 since no ROM is mapped.
uInt8 Cartridge4A50::peek(uInt16 address)
{
  return 0;
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
void Cartridge4A50::poke(uInt16, uInt8)
{
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// State serialization is unsupported: always reports failure.
bool Cartridge4A50::save(Serializer& out)
{
  return false;
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// State deserialization is unsupported: always reports failure.
bool Cartridge4A50::load(Deserializer& in)
{
  return false;
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
void Cartridge4A50::bank(uInt16 b)
{
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Only a single (dummy) bank is reported.
int Cartridge4A50::bank()
{
  return 0;
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
int Cartridge4A50::bankCount()
{
  return 1;
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// ROM patching (debugger pokes) is unsupported: always reports failure.
bool Cartridge4A50::patch(uInt16 address, uInt8 value)
{
  return false;
}

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// No image is retained; reports a zero-length image.
uInt8* Cartridge4A50::getImage(int& size)
{
  size = 0;
  return 0;
}
|
sarang-apps/darshan_browser
|
chrome/browser/accessibility/caption_settings_dialog.h
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_ACCESSIBILITY_CAPTION_SETTINGS_DIALOG_H_
#define CHROME_BROWSER_ACCESSIBILITY_CAPTION_SETTINGS_DIALOG_H_
#include "base/macros.h"
namespace captions {

// An abstraction of a caption settings dialog. This is used for the captions
// sub-section of Settings.
// Static-only utility class: it cannot be instantiated (constructors are
// disallowed below); use the static method directly.
class CaptionSettingsDialog {
 public:
  // Displays the native captions manager dialog.
  static void ShowCaptionSettingsDialog();

 private:
  // Prevents construction, copying, and assignment of this static-only class.
  DISALLOW_IMPLICIT_CONSTRUCTORS(CaptionSettingsDialog);
};

}  // namespace captions
#endif // CHROME_BROWSER_ACCESSIBILITY_CAPTION_SETTINGS_DIALOG_H_
|
turnonline/ecosystem-admin-widgets
|
src/main/java/biz/turnonline/ecosystem/widget/purchase/event/DeleteCategoryEvent.java
|
<filename>src/main/java/biz/turnonline/ecosystem/widget/purchase/event/DeleteCategoryEvent.java<gh_stars>0
package biz.turnonline.ecosystem.widget.purchase.event;
import biz.turnonline.ecosystem.widget.shared.rest.payment.Category;
import com.google.gwt.event.shared.GwtEvent;
/**
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class DeleteCategoryEvent
extends GwtEvent<DeleteCategoryEventHandler>
{
public static Type<DeleteCategoryEventHandler> TYPE = new Type<DeleteCategoryEventHandler>();
private final Category category;
public DeleteCategoryEvent( Category category )
{
this.category = category;
}
public Type<DeleteCategoryEventHandler> getAssociatedType()
{
return TYPE;
}
protected void dispatch( DeleteCategoryEventHandler handler )
{
handler.onDelete( this );
}
public Category getCategory()
{
return category;
}
}
|
rsassi/hivemind1
|
framework/src/java/org/apache/hivemind/schema/rules/InstanceTranslator.java
|
<filename>framework/src/java/org/apache/hivemind/schema/rules/InstanceTranslator.java
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.hivemind.schema.rules;
import org.apache.hivemind.ApplicationRuntimeException;
import org.apache.hivemind.HiveMind;
import org.apache.hivemind.Location;
import org.apache.hivemind.internal.Module;
/**
* Used to translate from a class name to an instance of the class.
*
* @author <NAME>
*/
public class InstanceTranslator extends ClassTranslator
{
    /**
     * Resolves the input value to a class (via the contributing module) and
     * returns a new instance of it, tagging the instance with the given
     * location when possible.
     */
    public Object translate(
        Module contributingModule,
        Class propertyType,
        String inputValue,
        Location location)
    {
        Class instanceClass = extractClass(contributingModule, inputValue);

        // No resolvable class means no instance.
        if (instanceClass == null)
            return null;

        try
        {
            Object instance = instanceClass.newInstance();

            HiveMind.setLocation(instance, location);

            return instance;
        }
        catch (Exception ex)
        {
            // JDK 1.4 produces a good message here, but JDK 1.3 does not, so we
            // create our own.
            throw new ApplicationRuntimeException(
                RulesMessages.unableToInstantiateInstanceOfClass(instanceClass, ex),
                location,
                ex);
        }
    }
}
|
Gkdnz/SfePy
|
sfepy/__init__.py
|
<reponame>Gkdnz/SfePy<gh_stars>0
import os, glob
from config import Config
from version import __version__, in_source_tree, top_dir
# Absolute, symlink-resolved path to the top of the SfePy tree.
data_dir = os.path.realpath(top_dir)
# Directory containing this __init__.py (the installed sfepy package).
base_dir = os.path.dirname(os.path.normpath(os.path.realpath(__file__)))
def get_paths(pattern):
    """
    Get files/paths matching the given pattern in the sfepy source tree.
    """
    # Installed layouts live one directory below the source-tree root, so the
    # pattern must be shifted up one level when not running from the tree.
    relative = pattern if in_source_tree else '../' + pattern
    full_pattern = os.path.normpath(os.path.join(top_dir, relative))
    return glob.glob(full_pattern)
|
SXiaoXu/java-unified-sdk
|
core/src/main/java/cn/leancloud/AVPush.java
|
<gh_stars>0
package cn.leancloud;
import cn.leancloud.callback.SendCallback;
import cn.leancloud.core.PaasClient;
import cn.leancloud.utils.AVUtils;
import cn.leancloud.utils.LogUtil;
import cn.leancloud.utils.StringUtil;
import cn.leancloud.json.JSON;
import cn.leancloud.json.JSONObject;
import io.reactivex.Observable;
import io.reactivex.Observer;
import io.reactivex.disposables.Disposable;
import java.util.*;
public class AVPush {
  private static final AVLogger LOGGER = LogUtil.getLogger(AVPush.class);

  private static final String deviceTypeTag = "deviceType";
  private static final Set<String> DEVICE_TYPES = new HashSet<String>();
  private static final String FlowControlTag = "flow_control";
  private static final String APNsTeamIdTag = "apns_team_id";
  private static final String APNsTopicTag = "topic";
  // NOTE(review): "prod" looks unusual as the REST *key* for the iOS
  // environment field — confirm the expected field name against the push API.
  private static final String iOSEnvironmentTag = "prod";
  private static final String NotificationIdTag = "notification_id";
  private static final String RequestIdTag = "req_id";

  public static final String iOSEnvironmentDev = "dev";
  public static final String iOSEnvironmentProd = "prod";

  private static final int FlowControlMinValue = 1000;

  static {
    DEVICE_TYPES.add("android");
    DEVICE_TYPES.add("ios");
  }

  private final Set<String> channelSet;
  private AVQuery<? extends AVInstallation> pushQuery;
  private String cql;
  private long expirationTime;
  private long expirationTimeInterval;
  private final Set<String> pushTarget;
  private final Map<String, Object> pushData;
  private volatile AVObject notification;
  private Date pushDate = null;
  private int flowControl = 0; // add since v6.1.2
  private String iOSEnvironment = null; // add since v6.5.2
  private String APNsTopic = null; // add since v6.5.2
  private String APNsTeamId = null; // add since v6.5.2
  private String notificationId = null; // add since v6.5.2
  private String requestId = null; // add since v6.5.2

  /**
   * Creates a new push notification. The default channel is the empty string, also known as the
   * global broadcast channel, but this value can be overridden using AVPush.setChannel(String),
   * AVPush.setChannels(Collection) or AVPush.setQuery(AVQuery). Before sending the push
   * notification you must call either AVPush.setMessage(String) or AVPush.setData(JSONObject).
   */
  public AVPush() {
    channelSet = new HashSet<String>();
    pushData = new HashMap<String, Object>();
    pushTarget = new HashSet<String>(DEVICE_TYPES);
    pushQuery = AVInstallation.getQuery();
  }

  /**
   * Return channel set.
   * @return channel set.
   */
  public Set<String> getChannelSet() {
    return channelSet;
  }

  /**
   * Return the instance of _Notification.
   *
   * @return notification instance.
   */
  public AVObject getNotification() {
    return notification;
  }

  /**
   * Return push query instance.
   * @return push query instance.
   */
  public AVQuery<? extends AVInstallation> getPushQuery() {
    return pushQuery;
  }

  /**
   * Get push date.
   * @return push date
   */
  public Date getPushDate() {
    return pushDate;
  }

  /**
   * Get expiration time.
   * @return expiration time
   */
  public long getExpirationTime() {
    return expirationTime;
  }

  /**
   * Get expiration time interval.
   * @return expiration time interval
   */
  public long getExpirationTimeInterval() {
    return expirationTimeInterval;
  }

  /**
   * Get push target.
   * @return push target
   */
  public Set<String> getPushTarget() {
    return pushTarget;
  }

  /**
   * Get push data.
   * @return push data
   */
  public Map<String, Object> getPushData() {
    return pushData;
  }

  /**
   * Get push flow control value.
   * @return flow control value.
   */
  public int getFlowControl() {
    return flowControl;
  }

  /**
   * set flow control for send speed.
   * flow control value indicates how many devices will be pushed per second.
   * the min value is 1000, if flowControl less than 1000, it will be replaced with 1000.
   *
   * @since 6.1.2
   * @param flowControl flow control value which stands for how many devices will be pushed per second.
   */
  public void setFlowControl(int flowControl) {
    if (flowControl < FlowControlMinValue) {
      flowControl = FlowControlMinValue;
    }
    this.flowControl = flowControl;
  }

  /**
   * set iOS Environment(optional, default is production environment).
   * When using Token Authentication, this parameter determines which of environment(dev or prod)
   * will become the push target.
   * @param iOSEnvironment iOS environment, allowed values as following:
   *                       AVPush.iOSEnvironmentDev("dev") - development environment
   *                       AVPush.iOSEnvironmentProd("prod") - production environment
   * @since 6.5.2
   */
  public void setiOSEnvironment(String iOSEnvironment) {
    this.iOSEnvironment = iOSEnvironment;
  }

  /**
   * set APNs Topic(optional, only used by Token Authentication)
   * @param APNsTopic apns topic
   * @since 6.5.2
   */
  public void setAPNsTopic(String APNsTopic) {
    this.APNsTopic = APNsTopic;
  }

  /**
   * set APNs Team Id(optional, only used by Token Authentication)
   * @param APNsTeamId apns team id.
   * @since 6.5.2
   */
  public void setAPNsTeamId(String APNsTeamId) {
    this.APNsTeamId = APNsTeamId;
  }

  /**
   * set notification id(optional).
   * at now, notification id's max length is 16 characters, only letter and number is valid.
   *
   * @param notificationId customized notification id.
   */
  public void setNotificationId(String notificationId) {
    this.notificationId = notificationId;
  }

  /**
   * set customized request id(optional).
   * at now, request id's max length is 16 characters, only letter and number is valid.
   * when many requests with the same request id within 5 minutes, only one request works.
   * @param requestId customized request id.
   */
  public void setRequestId(String requestId) {
    this.requestId = requestId;
  }

  /**
   * Sets the channel on which this push notification will be sent. The channel name must start with
   * a letter and contain only letters, numbers, dashes, and underscores. A push can either have
   * channels or a query. Setting this will unset the query.
   * @param channel channel string.
   */
  public void setChannel(String channel) {
    channelSet.clear();
    channelSet.add(channel);
  }

  /**
   * Sets the collection of channels on which this push notification will be sent. Each channel name
   * must start with a letter and contain only letters, numbers, dashes, and underscores. A push can
   * either have channels or a query. Setting this will unset the query.
   *
   * @param channels channel collection.
   */
  public void setChannels(Collection<String> channels) {
    channelSet.clear();
    channelSet.addAll(channels);
  }

  /**
   * Sets the entire data of the push message. See the push guide for more details on the data
   * format. This will overwrite any data specified in AVPush.setMessage(String).
   *
   * @param data push data.
   * @since 1.4.4
   */
  public void setData(Map<String, Object> data) {
    this.pushData.put("data", data);
  }

  /**
   * Sets the entire data of the push message. See the push guide for more details on the data
   * format. This will overwrite any data specified in AVPush.setMessage(String).
   * @param data push data.
   */
  public void setData(JSONObject data) {
    try {
      Map<String, Object> map = new HashMap<String, Object>();
      Iterator<Map.Entry<String, Object>> iter = data.entrySet().iterator();
      while (iter.hasNext()) {
        Map.Entry<String, Object> entry = iter.next();
        map.put(entry.getKey(), entry.getValue());
      }
      this.pushData.put("data", map);
    } catch (Exception exception) {
      // Best-effort: malformed JSON data is logged and ignored.
      LOGGER.w(exception);
    }
  }

  private Date expirationDateTime() {
    // NOTE(review): setExpirationTime documents a UNIX timestamp in *seconds*,
    // but java.util.Date expects milliseconds — confirm the server contract
    // before changing this long-standing behavior.
    return new Date(expirationTime);
  }

  /**
   * Set the push date at which the push will be sent.
   *
   * @param date The push date.
   */
  public void setPushDate(Date date) {
    this.pushDate = date;
  }

  /**
   * Sets a UNIX epoch timestamp at which this notification should expire, in seconds (UTC). This
   * notification will be sent to devices which are either online at the time the notification is
   * sent, or which come online before the expiration time is reached. Because device clocks are not
   * guaranteed to be accurate, most applications should instead use
   * AVPush.setExpirationTimeInterval(long).
   * @param time timestamp.
   */
  public void setExpirationTime(long time) {
    this.expirationTime = time;
  }

  /**
   * Sets the time interval after which this notification should expire, in seconds. This
   * notification will be sent to devices which are either online at the time the notification is
   * sent, or which come online within the given number of seconds of the notification being
   * received by AVOSCloud's server. An interval which is less than or equal to zero indicates that
   * the message should only be sent to devices which are currently online.
   * @param timeInterval time interval.
   */
  public void setExpirationTimeInterval(long timeInterval) {
    this.expirationTimeInterval = timeInterval;
  }

  /**
   * Sets the message that will be shown in the notification. This will overwrite any data specified
   * in AVPush.setData(JSONObject).
   * @param message push message.
   */
  public void setMessage(String message) {
    pushData.clear();
    Map<String, Object> map = AVUtils.createStringObjectMap("alert", message);
    pushData.put("data", map);
  }

  /**
   * set push target only android device.
   * @param pushToAndroid flag to push to android or not.
   */
  public void setPushToAndroid(boolean pushToAndroid) {
    if (pushToAndroid) {
      this.pushTarget.add("android");
    } else {
      this.pushTarget.remove("android");
    }
  }

  /**
   * set push target only ios device.
   * @param pushToIOS flag to push to iOS or not.
   */
  public void setPushToIOS(boolean pushToIOS) {
    if (pushToIOS) {
      this.pushTarget.add("ios");
    } else {
      this.pushTarget.remove("ios");
    }
  }

  /**
   * set push target only windows phone device.
   * @param pushToWP flag to push to Windows Phone or not.
   */
  public void setPushToWindowsPhone(boolean pushToWP) {
    if (pushToWP) {
      this.pushTarget.add("wp");
    } else {
      this.pushTarget.remove("wp");
    }
  }

  /**
   * Sets the query for this push for which this push notification will be sent. This query will be
   * executed in the AVOSCloud cloud; this push notification will be sent to Installations which
   * this query yields. A push can either have channels or a query. Setting this will unset the
   * channels.
   *
   * @param query A query to which this push should target. This must be a AVInstallation query.
   */
  public void setQuery(AVQuery<? extends AVInstallation> query) {
    this.pushQuery = query;
  }

  /**
   * Filter push targets with a CQL query.
   *
   * Note that the CQL statement must query the _Installation table.
   *
   * When a CQL query is set, do not also set push targets (ios, android, wp).
   *
   * @param cql query cql.
   * @since 2.6.7
   */
  public void setCloudQuery(String cql) {
    this.cql = cql;
  }

  /**
   * Clears both expiration values, indicating that the notification should never expire.
   */
  public void clearExpiration() {
    expirationTime = 0L;
    expirationTimeInterval = 0L;
  }

  /**
   * Sends this push notification while blocking this thread until the push notification has
   * successfully reached the AVOSCloud servers. Typically, you should use AVPush.sendInBackground()
   * instead of this, unless you are managing your own threading.
   */
  public void send() {
    sendInBackground().blockingFirst();
  }

  /**
   * Sends this push notification in a background thread. This is preferable to using send(), unless
   * your code is already running from a background thread.
   * @return observable instance.
   */
  public Observable<JSONObject> sendInBackground() {
    try {
      Map<String, Object> map = postDataMap();
      return PaasClient.getPushClient().sendPushRequest(map);
    } catch (Exception ex) {
      return Observable.error(ex);
    }
  }

  private Map<String, Object> pushChannelsData() {
    return AVUtils.createStringObjectMap("channels", channelSet);
  }

  /**
   * Assembles the REST request body from all configured options.
   *
   * @throws AVException if assembling the query parameters fails
   * @throws IllegalStateException if both an AVQuery and a cloud (CQL) query are set
   */
  private Map<String, Object> postDataMap() throws AVException {
    Map<String, Object> map = new HashMap<String, Object>();

    if (pushQuery != null) {
      if (pushTarget.size() == 0) {
        pushQuery.whereNotContainedIn(deviceTypeTag, DEVICE_TYPES);
      } else if (pushTarget.size() == 1) {
        pushQuery.whereEqualTo(deviceTypeTag, pushTarget.toArray()[0]);
      }
      Map<String, String> pushParameters = pushQuery.assembleParameters();
      if (pushParameters.keySet().size() > 0 && !StringUtil.isEmpty(cql)) {
        throw new IllegalStateException("You can't use AVQuery and Cloud query at the same time.");
      }
      for (Map.Entry<String, String> entry: pushParameters.entrySet()) {
        map.put(entry.getKey(), JSON.parse(entry.getValue()));
      }
    }
    if (!StringUtil.isEmpty(cql)) {
      map.put("cql", cql);
    }
    if (channelSet.size() > 0) {
      map.putAll(pushChannelsData());
    }
    if (this.expirationTime > 0) {
      map.put("expiration_time", this.expirationDateTime());
    }
    if (this.expirationTimeInterval > 0) {
      map.put("push_time", StringUtil.stringFromDate(new Date()));
      map.put("expiration_interval", Long.valueOf(this.expirationTimeInterval));
    }
    if (this.pushDate != null) {
      map.put("push_time", StringUtil.stringFromDate(pushDate));
    }
    if (this.flowControl > 0) {
      map.put(FlowControlTag, this.flowControl);
    }
    if (!StringUtil.isEmpty(this.iOSEnvironment)) {
      map.put(iOSEnvironmentTag, this.iOSEnvironment);
    }
    if (!StringUtil.isEmpty(this.APNsTopic)) {
      map.put(APNsTopicTag, this.APNsTopic);
    }
    if (!StringUtil.isEmpty(this.APNsTeamId)) {
      map.put(APNsTeamIdTag, this.APNsTeamId);
    }
    if (!StringUtil.isEmpty(this.notificationId)) {
      map.put(NotificationIdTag, this.notificationId);
    }
    // Fix: requestId was collected via setRequestId() but never sent;
    // include it in the request body like the other optional fields.
    if (!StringUtil.isEmpty(this.requestId)) {
      map.put(RequestIdTag, this.requestId);
    }
    map.putAll(pushData);
    return map;
  }

  /**
   * Sends this push notification in a background thread. This is preferable to using send(), unless
   * your code is already running from a background thread.
   *
   * @param callback callback.done(e) is called when the send completes.
   */
  public void sendInBackground(final SendCallback callback) {
    sendInBackground().subscribe(new Observer<JSONObject>() {
      @Override
      public void onSubscribe(Disposable disposable) {

      }

      @Override
      public void onNext(JSONObject jsonObject) {
        // Cache the created _Notification object so callers can inspect it.
        notification = new AVObject("_Notification");
        notification.resetServerData(jsonObject.getInnerMap());
        if (null != callback) {
          callback.internalDone(null);
        }
      }

      @Override
      public void onError(Throwable throwable) {
        if (null != callback) {
          callback.internalDone(new AVException(throwable));
        }
      }

      @Override
      public void onComplete() {

      }
    });
  }

  /**
   * A helper method to concisely send a push to a query. This method is equivalent to
   *
   * <pre>
   * AVPush push = new AVPush();
   * push.setData(data);
   * push.setQuery(query);
   * push.sendInBackground(callback);
   * </pre>
   *
   * @param data The entire data of the push message. See the push guide for more details on the
   *          data format.
   * @param query A AVInstallation query which specifies the recipients of a push.
   * @param callback callback.done(e) is called when the send completes.
   */
  public static void sendDataInBackground(JSONObject data, AVQuery<? extends AVInstallation> query,
                                          final SendCallback callback) {
    AVPush push = new AVPush();
    push.setData(data);
    push.setQuery(query);
    push.sendInBackground(callback);
  }

  /**
   * A helper method to concisely send a push to a query. This method is equivalent to
   *
   * <pre>
   * AVPush push = new AVPush();
   * push.setData(data);
   * push.setQuery(query);
   * push.sendInBackground(callback);
   * </pre>
   *
   * @param data The entire data of the push message. See the push guide for more details on the
   *          data format.
   * @param query A AVInstallation query which specifies the recipients of a push.
   * @return observable instance.
   */
  public static Observable<JSONObject> sendDataInBackground(JSONObject data, AVQuery<? extends AVInstallation> query) {
    AVPush push = new AVPush();
    push.setData(data);
    push.setQuery(query);
    return push.sendInBackground();
  }

  /**
   * A helper method to concisely send a push message to a query. This method is equivalent to
   *
   * <pre>
   * AVPush push = new AVPush();
   * push.setMessage(message);
   * push.setQuery(query);
   * push.sendInBackground();
   * </pre>
   *
   * @param message The message that will be shown in the notification.
   * @param query A AVInstallation query which specifies the recipients of a push.
   * @return observable instance.
   */
  public static Observable<JSONObject> sendMessageInBackground(String message, AVQuery<? extends AVInstallation> query) {
    AVPush push = new AVPush();
    push.setMessage(message);
    push.setQuery(query);
    return push.sendInBackground();
  }

  /**
   * A helper method to concisely send a push message to a query. This method is equivalent to
   *
   * <pre>
   * AVPush push = new AVPush();
   * push.setMessage(message);
   * push.setQuery(query);
   * push.sendInBackground(callback);
   * </pre>
   *
   * @param message The message that will be shown in the notification.
   * @param query A AVInstallation query which specifies the recipients of a push.
   * @param callback callback.done(e) is called when the send completes.
   */
  public static void sendMessageInBackground(String message,
                                             AVQuery<? extends AVInstallation> query, final SendCallback callback) {
    AVPush push = new AVPush();
    push.setMessage(message);
    push.setQuery(query);
    push.sendInBackground(callback);
  }
}
|
jgiovaresco/my-home
|
core/src/main/java/my/home/bc/library/query/projection/OnNewBookAddedFillBooksProjection.java
|
<filename>core/src/main/java/my/home/bc/library/query/projection/OnNewBookAddedFillBooksProjection.java<gh_stars>0
package my.home.bc.library.query.projection;
import cqrs.event.EventHandler;
import my.home.bc.library.model.NewBookAdded;
import reactor.core.publisher.Mono;
/**
 * Event handler keeping the books read-model projection up to date: every
 * {@link NewBookAdded} event is folded into the projection.
 */
public class OnNewBookAddedFillBooksProjection implements EventHandler<NewBookAdded> {

    /** Projection updated whenever a new book is added. */
    private final BooksProjection projection;

    public OnNewBookAddedFillBooksProjection(BooksProjection booksProjection) {
        this.projection = booksProjection;
    }

    /** Applies the event to the projection; completes when the update is done. */
    @Override
    public Mono<Void> execute(NewBookAdded event) {
        return projection.update(event);
    }

    /** The concrete event type this handler subscribes to. */
    @Override
    public Class<NewBookAdded> eventType() {
        return NewBookAdded.class;
    }
}
|
joshluisaac/etl-framework
|
etl-server/src/main/java/com/kollect/etl/service/CsvService.java
|
<gh_stars>0
package com.kollect.etl.service;
import com.kollect.etl.util.FileUtils;
import org.springframework.stereotype.Service;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class CsvService {

    // NOTE(review): hard-coded absolute path; this should come from
    // configuration (e.g. @Value / application properties) — TODO confirm
    // the intended source of this file name.
    String pathName = "/home/joshua/martian/kvworkspace/dataconnector-MAHB/sourceFiles/20171118003002/CUSTLIST_20171118003004.csv";

    /**
     * Reads the CSV file at {@link #pathName} and returns its raw lines.
     *
     * @return one string per line of the file
     * @throws IOException if the file cannot be read
     */
    private List<String> readCsv() throws IOException {
        return new FileUtils().readFile(new File(pathName));
    }

    /**
     * Parses each pipe-delimited line of the CSV into a map of named fields.
     * Malformed rows (fewer than four fields) are skipped instead of aborting
     * the whole batch with an ArrayIndexOutOfBoundsException.
     *
     * @return one map per well-formed input line, keyed by column name
     * @throws IOException if the CSV file cannot be read
     */
    public List<Map<String, String>> buildListOfMap() throws IOException {
        List<Map<String, String>> listMap = new ArrayList<>();
        for (String line : readCsv()) {
            // Fields are separated by the pipe character; '|' must be escaped
            // because split() takes a regular expression.
            String[] fields = line.split("\\|");
            if (fields.length < 4) {
                continue; // skip malformed rows rather than crash
            }
            Map<String, String> map = new HashMap<>();
            map.put("name", fields[0]);
            map.put("name1", fields[1]);
            map.put("sname", fields[2]);
            map.put("name2", fields[3]);
            listMap.add(map);
        }
        return listMap;
    }
}
|
donghL-dev/Reactive-Blog
|
src/main/java/com/donghun/reactiveblog/handler/ArticleHandler.java
|
package com.donghun.reactiveblog.handler;
import com.donghun.reactiveblog.service.ArticleService;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.server.ServerRequest;
import org.springframework.web.reactive.function.server.ServerResponse;
import reactor.core.publisher.Mono;
/**
* @author donghL-dev
* @since 2019-12-09
*/
@Component
@RequiredArgsConstructor
public class ArticleHandler {

    private final Logger logger = LoggerFactory.getLogger(ArticleHandler.class);

    private final ArticleService articleService;

    /** Lists articles; logs access and delegates to the service layer. */
    public Mono<ServerResponse> getArticles(ServerRequest request) {
        logger.info("Get Articles Handler Accessed");
        return articleService.getArticlesProcessLogic(request);
    }

    /** Lists the current user's feed articles. */
    public Mono<ServerResponse> getFeedArticles(ServerRequest request) {
        logger.info("Get Feed Articles Handler Accessed");
        return articleService.getFeedArticlesProcessLogic(request);
    }

    /** Fetches a single article. */
    public Mono<ServerResponse> getArticle(ServerRequest request) {
        logger.info("Get Article Handler Accessed");
        return articleService.getArticleProcessLogic(request);
    }

    /** Creates a new article. */
    public Mono<ServerResponse> postArticle(ServerRequest request) {
        logger.info("Post Article Handler Accessed");
        return articleService.postArticleProcessLogic(request);
    }

    /** Updates an existing article. */
    public Mono<ServerResponse> putArticle(ServerRequest request) {
        logger.info("Put Article Handler Accessed");
        return articleService.putArticleProcessLogic(request);
    }

    /** Deletes an article. */
    public Mono<ServerResponse> deleteArticle(ServerRequest request) {
        logger.info("Delete Article Handler Accessed");
        return articleService.deleteArticleProcessLogic(request);
    }

    /** Marks an article as a favorite of the current user. */
    public Mono<ServerResponse> favoriteArticle(ServerRequest request) {
        logger.info("Favorite Article Handler Accessed");
        return articleService.favoriteArticle(request);
    }

    /** Removes an article from the current user's favorites. */
    public Mono<ServerResponse> unFavoriteArticle(ServerRequest request) {
        logger.info("Un Favorite Article Handler Accessed");
        return articleService.unFavoriteArticle(request);
    }
}
|
UeliKurmann/igesture
|
igesture-demo/src/main/java/org/ximtec/igesture/app/showcaseapp/Application.java
|
<reponame>UeliKurmann/igesture<gh_stars>0
/*
* @(#)$Id$
*
* Author : <NAME>, <EMAIL>
*
* Purpose :
*
* -----------------------------------------------------------------------
*
* Revision Information:
*
* Date Who Reason
*
* Nov 15, 2006 ukurmann Initial Release
* Mar 24, 2007 bsigner Cleanup
*
* -----------------------------------------------------------------------
*
* Copyright 1999-2009 ETH Zurich. All Rights Reserved.
*
* This software is the proprietary information of ETH Zurich.
* Use is subject to license terms.
*
*/
package org.ximtec.igesture.app.showcaseapp;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import org.sigtec.ink.Note;
import org.sigtec.util.Constant;
import org.ximtec.igesture.Recogniser;
import org.ximtec.igesture.algorithm.AlgorithmException;
import org.ximtec.igesture.app.showcaseapp.descriptor.ArrowDescriptor;
import org.ximtec.igesture.app.showcaseapp.descriptor.LineDescriptor;
import org.ximtec.igesture.app.showcaseapp.descriptor.RectangleDescriptor;
import org.ximtec.igesture.app.showcaseapp.descriptor.TriangleDescriptor;
import org.ximtec.igesture.app.showcaseapp.eventhandler.DeleteEventHandler;
import org.ximtec.igesture.app.showcaseapp.eventhandler.DrawEventHandler;
import org.ximtec.igesture.app.showcaseapp.eventhandler.RejectEventHandler;
import org.ximtec.igesture.app.showcaseapp.eventhandler.StyleEventHandler;
import org.ximtec.igesture.configuration.Configuration;
import org.ximtec.igesture.core.DigitalDescriptor;
import org.ximtec.igesture.core.Gesture;
import org.ximtec.igesture.core.GestureSet;
import org.ximtec.igesture.core.ResultSet;
import org.ximtec.igesture.event.GestureActionManager;
import org.ximtec.igesture.io.GestureEventListener;
import org.ximtec.igesture.io.mouseclient.SwingMouseReader;
import org.ximtec.igesture.util.XMLTool;
/**
* @version 1.0 Nov 2006
* @author <NAME>, <EMAIL>
* @author <NAME>, <EMAIL>
*/
/**
 * Demo application wiring an iGesture recogniser to mouse input: gestures
 * drawn in the input frame are recognised and their digital descriptors are
 * rendered into the output frame's backing image.
 */
public class Application implements GestureEventListener {

    private static final Logger LOGGER = Logger.getLogger(Application.class
            .getName());

    /** Recogniser built from the Rubine configuration and demo gesture set. */
    private Recogniser recogniser;

    /** Mouse-based gesture input source. */
    private SwingMouseReader client;

    /** Output frame showing the rendered result of recognised gestures. */
    private JFrame frame;

    /** Backing image the event handlers draw into. */
    private BufferedImage bufferedImage;

    public Application() {
        initialiseGUI();
        initGestures();
        initGestureInputFrame();
    }

    /** Opens the frame in which the user draws gestures. */
    private void initGestureInputFrame() {
        // Renamed from "frame": the original local shadowed the output-frame
        // field of the same name.
        JFrame inputFrame = new JFrame();
        inputFrame.setTitle("Input Frame - Draw your Gesture");
        inputFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        inputFrame.setSize(440, 620);
        inputFrame.add(client.getPanel());
        inputFrame.setLocation(440, 0);
        inputFrame.setVisible(true);
    }

    /**
     * Loads the algorithm configuration and gesture set from the classpath,
     * registers the event handlers for each gesture class, and wires the
     * recogniser to the mouse input client.
     */
    private void initGestures() {
        Configuration configuration = XMLTool.importConfiguration(ClassLoader
                .getSystemResourceAsStream("rubineconfiguration.xml"));
        GestureSet gestureSet = XMLTool.importGestureSet(ClassLoader
                .getSystemResourceAsStream("demogestures.xml"));

        // Clear the input canvas after every recognition attempt, whatever
        // the outcome.
        GestureActionManager eventManager = new GestureActionManager() {
            @Override
            public synchronized void handle(ResultSet resultSet) {
                super.handle(resultSet);
                client.clear();
            }
        };
        eventManager.registerRejectEvent(new RejectEventHandler() {
        });

        Style style = new Style();
        StyleEventHandler styleEventHandler = new StyleEventHandler(style);

        // Single long-lived graphics context shared by the draw and delete
        // handlers for the whole session.
        Graphics2D graphic = (Graphics2D) bufferedImage.getGraphics();
        DrawEventHandler drawEventHandler = new DrawEventHandler(graphic, style);

        // Attach a digital descriptor (the rendered shape) to each drawable
        // gesture class.
        gestureSet.getGestureClass("Rectangle").addDescriptor(
                DigitalDescriptor.class, new RectangleDescriptor());
        gestureSet.getGestureClass("LeftRight").addDescriptor(
                DigitalDescriptor.class, new LineDescriptor());
        gestureSet.getGestureClass("Triangle").addDescriptor(
                DigitalDescriptor.class, new TriangleDescriptor());
        gestureSet.getGestureClass("Arrow").addDescriptor(
                DigitalDescriptor.class, new ArrowDescriptor());

        eventManager.registerEventHandler("Rectangle", drawEventHandler);
        eventManager.registerEventHandler("LeftRight", drawEventHandler);
        eventManager.registerEventHandler("Triangle", drawEventHandler);
        eventManager.registerEventHandler("Arrow", drawEventHandler);
        eventManager.registerEventHandler("Delete", new DeleteEventHandler(
                graphic));
        eventManager.registerEventHandler("Red", styleEventHandler);
        eventManager.registerEventHandler("Black", styleEventHandler);
        eventManager.registerEventHandler("Yellow", styleEventHandler);
        eventManager.registerEventHandler("Thin", styleEventHandler);
        eventManager.registerEventHandler("Fat", styleEventHandler);

        configuration.addGestureSet(gestureSet);
        try {
            recogniser = new Recogniser(configuration);
            recogniser.addGestureHandler(eventManager);
        } catch (AlgorithmException e) {
            LOGGER.log(Level.SEVERE, Constant.EMPTY_STRING, e);
        }
        client = new SwingMouseReader();
        client.init();
        client.addGestureHandler(this);
    } // initGestures

    /** Creates the output frame and the white canvas the handlers draw on. */
    private void initialiseGUI() {
        frame = new JFrame();
        frame.setTitle("Output Frame");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setSize(440, 620);
        bufferedImage = new BufferedImage(420, 600, BufferedImage.TYPE_INT_ARGB);
        // BUGFIX: BufferedImage.getGraphics() returns a NEW Graphics object on
        // every call, so the original setColor()/fillRect() pairs each acted
        // on a different, throwaway context and the chosen colors were never
        // reliably applied. Use one context for the whole setup, then release it.
        Graphics2D setup = (Graphics2D) bufferedImage.getGraphics();
        setup.setColor(Color.WHITE);
        setup.fillRect(0, 0, 420, 600);
        setup.setColor(Color.BLACK);
        setup.dispose();
        JLabel label = new JLabel();
        label.setSize(420, 620);
        label.setIcon(new ImageIcon(bufferedImage));
        frame.add(label);
        frame.setVisible(true);
    } // initialiseGUI

    public static void main(String[] args) {
        new Application();
    }

    @Override
    public void handleChunks(List<?> chunks) {
        // Intentionally unused: this demo reacts to complete gestures only.
    }

    /** Recognises a completed note gesture and refreshes the output frame. */
    @Override
    public void handleGesture(Gesture<?> gesture) {
        if (gesture.getGesture() instanceof Note) {
            Note note = (Note) gesture.getGesture();
            // Recognise a copy so recognition cannot mutate the original note.
            Note clone = (Note) note.clone();
            recogniser.recognise(clone);
            frame.repaint();
        }
    }
}
|
archiecobbs/linode-apiv4-java
|
linode-apiv4-java-model/src/main/java/org/dellroad/linode/apiv4/request/package-info.java
|
/*
* Copyright (C) 2017 <NAME>. All rights reserved.
*/
/**
* Linode v4 API request objects.
*/
package org.dellroad.linode.apiv4.request;
|
SpatioTemporal/STAREPandas
|
starepandas/staredataframe.py
|
<gh_stars>1-10
import geopandas
import geopandas.plotting
import pystare
import pandas
import numpy
import starepandas
import netCDF4
import starepandas.tools.trixel_conversions
DEFAULT_SID_COLUMN_NAME = 'sids'
DEFAULT_TRIXEL_COLUMN_NAME = 'trixels'
class STAREDataFrame(geopandas.GeoDataFrame):
"""
A STAREDataFrame object is a pandas.DataFrame that has a special column
with STARE indices and optionally a special column holding the trixel representation.
In addition to the standard DataFrame constructor arguments,
STARE also accepts the following keyword arguments:
Parameters
----------
sids : str or array-like
If str, column to use as stare column. If array, will be set as 'stare' column on STAREDataFrame.
add_sids : bool
If true, STARE index values will be generated using a geometry column
resolution: int
If add_stare is True, then use resolution as the maximum STARE resolution.
trixels : str or array-like
If str, column to use as trixel column. If array, will be set as 'trixel' column on STAREDataFrame.
add_trixels : bool
If true, trixels will be generated from the STARE column.
Examples
---------
>>> cities = ['Buenos Aires', 'Brasilia', 'Santiago', 'Bogota', 'Caracas']
>>> latitudes = [-34.58, -15.78, -33.45, 4.60, 10.48]
>>> longitudes = [-58.66, -47.91, -70.66, -74.08, -66.86]
>>> data = {'City': cities, 'Latitude': latitudes, 'Longitude': longitudes}
>>> sids = starepandas.sids_from_xy(longitudes, latitudes, resolution=5)
>>> sdf = starepandas.STAREDataFrame(data, sids=sids)
"""
_metadata = ['_sid_column_name', '_trixel_column_name', '_geometry_column_name', '_crs']
_sid_column_name = DEFAULT_SID_COLUMN_NAME
_trixel_column_name = DEFAULT_TRIXEL_COLUMN_NAME
def __init__(self, *args,
             sids=None, add_sids=False, resolution=None,
             trixels=None, add_trixels=False, n_workers=1,
             **kwargs):
    # Initialize the underlying GeoDataFrame first, then layer the
    # STARE-specific columns on top.
    super(STAREDataFrame, self).__init__(*args, **kwargs)

    # Copying from another STAREDataFrame: carry over its active geometry
    # column name, which the base constructor does not propagate.
    if args and isinstance(args[0], STAREDataFrame):
        self._geometry_column_name = args[0]._geometry_column_name

    # SIDs: an explicitly supplied column/array wins; otherwise derive them
    # from the geometry column when add_sids is requested.
    if sids is not None:
        self.set_sids(sids, inplace=True)
    elif add_sids:
        if resolution is None:
            raise ValueError('Resolution has to be specified if SIDs are to be added')
        sids = self.make_sids(resolution=resolution, n_workers=n_workers)
        self.set_sids(sids, inplace=True)

    # Trixels: analogous to SIDs — explicit column wins over generation.
    if trixels is not None:
        self.set_trixels(trixels, inplace=True)
    elif add_trixels:
        trixels = self.make_trixels(n_workers=n_workers)
        self.set_trixels(trixels, inplace=True)
def __getitem__(self, key):
    """Column/row access that keeps STARE metadata on DataFrame-shaped results."""
    result = super(STAREDataFrame, self).__getitem__(key)
    # Re-brand DataFrame-shaped results as STAREDataFrame so the SID column
    # name survives slicing; Series and other results are returned untouched.
    if isinstance(result, (geopandas.GeoDataFrame, pandas.DataFrame, starepandas.STAREDataFrame)):
        result.__class__ = STAREDataFrame
        result._sid_column_name = self._sid_column_name
    return result
def __setattr__(self, attr, val):
    # have to special case geometry b/c pandas tries to use as column...
    # NOTE(review): this guards the attribute name "stare", but the SID
    # column attribute used elsewhere in this class is "sids" /
    # _sid_column_name — possibly a stale name from an earlier revision;
    # confirm the intended attribute.
    if attr == "stare":
        object.__setattr__(self, attr, val)
    else:
        super(STAREDataFrame, self).__setattr__(attr, val)
def make_sids(self, resolution, convex=False, force_ccw=True, n_workers=1):
    """Compute the STARE representation of each feature's geometry.

    :param resolution: STARE resolution (0 <= resolution <= 27) for the lookup.
    :param convex: look up SIDs of the convex hull instead of the g-ring.
    :param force_ccw: enforce counter-clockwise orientation of the geometries.
    :param n_workers: number of workers used for the parallel lookup.
    :return: array of (sets of) STARE index values, one entry per feature.
    """
    # All geometry handling (points, polygons, multipolygons) is delegated
    # to the geoseries helper.
    return starepandas.sids_from_geoseries(self.geometry,
                                           resolution=resolution,
                                           convex=convex,
                                           force_ccw=force_ccw,
                                           n_workers=n_workers)
def set_sids(self, col=None, inplace=False):
    """ Set the StareDataFrame stare indices using either an existing column or
    the specified input. By default yields a new object.
    The original stare column is replaced with the input.

    :param col: array of stare indices or column name
    :type col: array-like
    :param inplace: Modify the StareDataFrame in place (do not create a new object)
    :type inplace: Bool
    :return: df
    :rtype: StareDataFrame

    Examples
    --------
    >>> import starepandas
    >>> sdf = starepandas.STAREDataFrame()
    >>> sids = [4611686018427387903, 2299435211084507590, 2299566194809236966]
    >>> sdf.set_sids(sids, inplace=True)
    """
    # Most of the code here is taken from GeoDataFrame.set_geometry()
    if inplace:
        frame = self
    else:
        frame = self.copy()

    if col is None:
        # NOTE(review): make_sids() requires a `resolution` argument, so this
        # call raises TypeError when col is None — confirm intended behavior.
        col = self.make_sids()

    if isinstance(col, (list, numpy.ndarray, pandas.Series)):
        frame[frame._sid_column_name] = col
    elif hasattr(col, "ndim") and col.ndim != 1:
        raise ValueError("Must pass array with one dimension only.")
    elif isinstance(col, str) and col in frame.columns:
        frame._sid_column_name = col
    else:
        raise ValueError("Must pass array-like object or column name")

    if not inplace:
        return frame
def has_trixels(self):
    """Return True when this dataframe carries a trixel column."""
    return self._trixel_column_name in self
def has_sids(self):
    """Return True when this dataframe carries a SID column."""
    return self._sid_column_name in self
def make_trixels(self, sid_column=None, n_workers=1):
    """Build trixel (multi)polygons for every row's SIDs.

    :param sid_column: name of the column holding the SIDs; defaults to this
        dataframe's configured SID column.
    :param n_workers: number of (dask) workers used for the conversion.
    :return: array of shapely Polygons/MultiPolygons, one per row.
    """
    column = sid_column if sid_column is not None else self._sid_column_name
    return starepandas.tools.trixel_conversions.trixels_from_stareseries(
        self[column], n_workers=n_workers)
def set_trixels(self, col=None, inplace=False):
    """Set or generate the trixel column.

    :param col: array-like of trixels, the name of an existing column, or
        None to generate trixels from the SID column.
    :param inplace: modify this dataframe instead of returning a copy.
    :return: the modified STAREDataFrame, or None when inplace is True.
    :raises ValueError: when col is neither array-like nor an existing column.
    """
    frame = self if inplace else self.copy()
    if col is None:
        col = self.make_trixels()
    if isinstance(col, (pandas.Series, list, numpy.ndarray)):
        frame[frame._trixel_column_name] = col
    elif isinstance(col, str) and col in self.columns:
        frame._trixel_column_name = col
    else:
        raise ValueError("Must pass array-like object or column name")
    if not inplace:
        return frame
def trixel_vertices(self):
    """Return corner and center coordinates of this dataframe's trixels.

    Requires the SID column to be set.

    :return: tuple of (corner latitudes, corner longitudes,
        center latitudes, center longitudes).
    """
    return starepandas.tools.trixel_conversions.to_vertices(
        self[self._sid_column_name])
def trixel_centers(self, vertices=None):
    """Return trixel center coordinates as lon/lat pairs.

    :param vertices: optional precomputed vertices structure; when given,
        centers are extracted from it instead of being recomputed from the
        SID column.
    :return: numpy array; first axis are the SIDs, second axis lon/lat.
    """
    conv = starepandas.tools.trixel_conversions
    # Truthiness check (not `is None`) preserves the original dispatch.
    if vertices:
        return conv.vertices2centers(vertices)
    return conv.to_centers(self[self._sid_column_name])
def trixel_centers_ecef(self, vertices=None):
    """Return trixel centers as ECEF (x/y/z) vectors.

    :param vertices: optional precomputed vertices structure; when given,
        centers are extracted from it instead of being recomputed from the
        SID column.
    :return: numpy array; first axis are the SIDs, second axis x/y/z.
    """
    conv = starepandas.tools.trixel_conversions
    if vertices:
        return conv.vertices2centers_ecef(vertices)
    return conv.to_centers_ecef(self[self._sid_column_name])
def trixel_centerpoints(self, vertices=None):
    """Return trixel centers as shapely Points.

    :param vertices: optional precomputed vertices structure; when given,
        centers are extracted from it instead of being recomputed from the
        SID column.
    :return: geometry array of shapely center points.
    """
    conv = starepandas.tools.trixel_conversions
    if vertices:
        return conv.vertices2centerpoints(vertices)
    return conv.to_centerpoints(self[self._sid_column_name])
def trixel_corners(self, vertices=None, from_trixels=False):
    """Return trixel corner coordinates in lon/lat.

    Sources, in order of precedence: a precomputed vertices structure, the
    trixel column (when from_trixels is set and the column exists), or the
    SID column.

    :param vertices: optional precomputed vertices structure.
    :param from_trixels: extract corners from the trixel column when present.
    :return: corners; first axis the SIDs, second the three corners,
        third lon/lat.
    """
    if vertices:
        return starepandas.tools.trixel_conversions.vertices2corners(vertices)
    if from_trixels and self._trixel_column_name in self.columns:
        # Each trixel is a polygon whose first element is its outer ring;
        # take the three distinct corners (the fourth coordinate closes the
        # ring).
        return [tuple(trixel[0].boundary.coords)[0:3]
                for trixel in self[self._trixel_column_name]]
    return starepandas.tools.trixel_conversions.to_corners(
        self[self._sid_column_name])
def trixel_corners_ecef(self, vertices=None):
    """Return trixel corners converted to ECEF (x/y/z) representation.

    :param vertices: optional precomputed vertices structure, forwarded to
        :func:`~trixel_corners`.
    :return: numpy array; first axis the SIDs, second the great circles,
        third x/y/z.
    """
    lonlat_corners = self.trixel_corners(vertices)
    return starepandas.tools.trixel_conversions.corners2ecef(lonlat_corners)
def trixel_grings(self, vertices=None):
    """Return the ECEF g-ring (great-circle) representation of the trixels.

    :param vertices: optional precomputed vertices structure, forwarded to
        :func:`~trixel_corners_ecef`.
    :return: numpy array; first axis the SIDs, second the great circles,
        third x/y/z.
    """
    ecef_corners = self.trixel_corners_ecef(vertices)
    return starepandas.tools.trixel_conversions.corners2gring(ecef_corners)
def plot(self, *args, trixels=True, boundary=False, **kwargs):
    """Plot the dataframe with matplotlib; wraps GeoDataFrame.plot().

    All GeoDataFrame.plot() kwargs are accepted and forwarded.

    :param trixels: plot the trixel column rather than the SF geometry.
    :type trixels: bool
    :param boundary: plot the ring as a linestring rather than the polygon.
    :type boundary: bool
    :return: matplotlib axes object.
    :raises AttributeError: when trixels is True but no trixel column is set.
    """
    if trixels:
        if not self.has_trixels():
            raise AttributeError('No trixels set (expected in "{}" column)'.format(self._trixel_column_name))
        # Trixel polygons are always rendered as outlines.
        boundary = True
        df = self.set_geometry(self._trixel_column_name, inplace=False)
    else:
        df = self.copy()
    if boundary:
        # Idiomatic boolean-mask negation (was `== False`); empty geometries
        # have no boundary and are dropped first.
        df = df[~df.geometry.is_empty]
        df = df.set_geometry(df.geometry.boundary)
    return geopandas.plotting.plot_dataframe(df, *args, **kwargs)
def to_scidb(self, connection):
    # Placeholder: export to SciDB is not implemented yet.
    pass
def stare_intersects(self, other, method='binsearch', n_workers=1):
    """Boolean Series: True where a row's SIDs intersect ``other``.

    A row intersects ``other`` when its ring or interior intersects those of
    the other region in any way.

    :param other: single SID (int) or array-like SID collection describing
        the other spatial object.
    :param method: STARE intersects algorithm: 'skiplist', 'binsearch'
        or 'nn'.
    :param n_workers: number of workers used for the intersects tests.
    :return: pandas.Series of bool, aligned with this dataframe's rows.
    :raises ValueError: when other is neither an int nor array-like.
    """
    if isinstance(other, (int, numpy.int64)):
        # Promote a single SID to a collection of one.
        other = [other]
    elif not isinstance(other, (numpy.ndarray, list)):
        raise ValueError("Other must be array-like object or int64")
    intersects = starepandas.series_intersects(other=other,
                                               series=self[self._sid_column_name],
                                               method=method,
                                               n_workers=n_workers)
    return pandas.Series(intersects)
def stare_disjoint(self, other, method='binsearch', n_workers=1):
    """Boolean Series: True where a row's SIDs do NOT intersect ``other``.

    Inverse of :func:`~stare_intersects`.

    :param other: SID collection describing the other spatial object.
    :param method: STARE intersects algorithm: 'skiplist', 'binsearch'
        or 'nn'.
    :param n_workers: number of workers used for the intersects tests.
    :return: pandas.Series of bool.
    """
    intersecting = self.stare_intersects(other, method, n_workers)
    return ~intersecting
def stare_intersection(self, other):
    """STARE spatial intersection of each row's SIDs with ``other``.

    :param other: array-like of SIDs representing the other region.
    :return: pandas.Series of SID arrays (one intersection per row),
        index-aligned with this dataframe.
    """
    intersections = [pystare.intersection(srange, other)
                     for srange in self[self._sid_column_name]]
    return pandas.Series(intersections, index=self.index)
def stare_dissolve(self, by=None, dissolve_sids=True, n_workers=1,
                   n_chunks=1, geom=False, aggfunc="first", **kwargs):
    """Dissolve (group) rows by a column, merging their SID collections.

    Analogue of GeoDataFrame.dissolve().

    :param by: column to group by; when None, all rows are merged and the
        combined SID collection is returned directly.
    :param dissolve_sids: merge adjacent/contained SIDs rather than merely
        appending them (appending may leave duplicates and mergeable SIDs).
    :param n_workers: workers used for the SID merge.
    :param n_chunks: number of chunks for the merge (performance tuning).
    :param geom: also dissolve the geometry column; when False the geometry
        column is dropped from the result.
    :param aggfunc: aggregation for the remaining columns (e.g. 'first',
        'sum', 'mean').
    :return: merged SIDs when ``by`` is None, otherwise an aggregated
        STAREDataFrame indexed by the group keys.
    """
    if by is None:
        return starepandas.merge_stare(self[self._sid_column_name],
                                       dissolve_sids, n_workers, n_chunks)
    # Aggregate the ordinary columns separately from the SID column.
    data = self.drop(columns=[self._sid_column_name, self._trixel_column_name], errors='ignore')
    if geom:
        aggregated_data = data.dissolve(by=by, aggfunc=aggfunc, **kwargs)
    else:
        data = data.drop(columns=[self._geometry_column_name], errors='ignore')
        aggregated_data = data.groupby(by=by, **kwargs).agg(aggfunc)
    # Merge the SID collections per group, then join the aggregates back on.
    sids = self.groupby(group_keys=True, by=by)[self._sid_column_name].agg(
        starepandas.merge_stare, dissolve_sids, n_workers, n_chunks)
    sdf = starepandas.STAREDataFrame(sids, sids=self._sid_column_name)
    return sdf.join(aggregated_data)
def to_stare_resolution(self, resolution, inplace=False, clear_to_resolution=False):
    """Coerce the SID column to the given resolution.

    Caution: not intended for features represented by *sets* of SIDs.

    :param resolution: target STARE resolution.
    :param inplace: modify this dataframe instead of returning the values.
    :param clear_to_resolution: also zero the location bits below resolution.
    :return: the coerced SIDs, or None when inplace is True.
    """
    sids = self[self._sid_column_name] if inplace else self[self._sid_column_name].copy()
    sids = pystare.spatial_coerce_resolution(sids, resolution)
    if clear_to_resolution:
        # The terminator mask uses the << operator, which requires a numpy
        # array rather than a pandas Series.
        sids = pystare.spatial_clear_to_resolution(numpy.array(sids))
    if inplace:
        self[self._sid_column_name] = sids
    else:
        return sids
def clear_to_resolution(self, inplace=False):
    """
    Clears the location bits of every SID below that SID's resolution.

    Parameters
    -----------
    inplace: bool
        If True, modifies the DataFrame in place (do not create a new object).

    Returns
    --------
    if not inplace, the cleared stare index values, otherwise None

    Examples
    ----------
    >>> sids = [2299437706637111721, 2299435211084507593, 2299566194809236969]
    >>> sdf = starepandas.STAREDataFrame(sids=sids)
    >>> sdf.clear_to_resolution(inplace=False)
    array([2299437254470270985, 2299435055447015433, 2299564797819093001])
    """
    column = self._sid_column_name
    source = self[column] if inplace else self[column].copy()
    # pystare expects a numpy array here (it uses the << operator internally).
    cleared = pystare.spatial_clear_to_resolution(numpy.array(source))
    if inplace:
        self[column] = cleared
        return None
    return cleared
def to_stare_singleres(self, resolution=None, inplace=False):
    """
    Changes the STARE index values to single resolution representation (in contrary to multiresolution).

    Parameters
    -----------
    resolution: int
        Resolution to expand to. If None, each feature is expanded to its
        own maximum native resolution.
    inplace: bool
        If True, modifies the DataFrame in place (do not create a new object).

    Returns
    ------------
    if not inplace, returns stare index values, otherwise None

    Examples
    ---------
    >>> import geopandas
    >>> world = geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres'))
    >>> germany = world[world.name=='Germany']
    >>> germany = starepandas.STAREDataFrame(germany, add_sids=True, resolution=6, add_trixels=False)
    >>> len(germany.sids.iloc[0])
    43
    >>> sids_singleres = germany.to_stare_singleres()
    >>> len(sids_singleres[0])
    46
    """
    if inplace:
        sids_col = self[self._sid_column_name]
    else:
        sids_col = self[self._sid_column_name].copy()
    new_sids_col = []
    for sids in sids_col:
        # Resolution 0 is a valid STARE resolution, so test against None
        # rather than truthiness (a bare `if resolution:` silently ignored 0).
        if resolution is not None:
            r = resolution
        else:
            # No target given: use this feature's finest native resolution.
            r = int(pystare.spatial_resolution(sids).max())
        sids = pystare.expand_intervals(sids, level=r, multi_resolution=False)
        new_sids_col.append(sids)
    if inplace:
        self[self._sid_column_name] = new_sids_col
    else:
        return new_sids_col
def hex(self):
    """
    Returns the hex16 representation of the stare column.

    Examples
    ---------
    >>> sdf = starepandas.STAREDataFrame(sids=[2251799813685252, 4503599627370500])
    >>> sdf.hex()
    ['0x0008000000000004', '0x0010000000000004']

    >>> sdf = starepandas.STAREDataFrame(sids=[[2251799813685252, 4503599627370500],
    ...                                        [4604930618986332164, 4607182418800017412]])
    >>> sdf.hex()
    [['0x0008000000000004', '0x0010000000000004'], ['0x3fe8000000000004', '0x3ff0000000000004']]
    """
    converted = []
    for entry in self[self._sid_column_name]:
        try:
            # Duck-typing: treat the entry as a collection of sids ...
            hexed = [pystare.int2hex(sid) for sid in entry]
        except TypeError:
            # ... and fall back to a single sid when it is not iterable.
            hexed = pystare.int2hex(entry)
        converted.append(hexed)
    return converted
def write_pods(self, pod_root, resolution, chunk_name, hex=True):
    """ Writes dataframe into a starepods hierarchy.

    Rows are grouped by their SID coerced (and cleared) to ``resolution``;
    each group is pickled to ``{pod_root}/{pod}/{chunk_name}``. Pod
    directories are created on demand.

    Parameters
    --------------
    pod_root: str
        Root directory of starepods
    resolution: str
        resolution of starepods
    chunk_name: str
        name of the pod
    hex: bool
        toggle pods being hex vs int
    """
    import os

    grouped = self.groupby(self.to_stare_resolution(resolution=resolution, clear_to_resolution=True))
    for group in grouped.groups:
        g = grouped.get_group(group)
        if hex:
            pod = pystare.int2hex(group)
        else:
            pod = group
        # Create the pod directory if needed; previously a missing
        # directory made to_pickle fail with FileNotFoundError.
        pod_dir = os.path.join(pod_root, str(pod))
        os.makedirs(pod_dir, exist_ok=True)
        g.to_pickle(os.path.join(pod_dir, chunk_name))
@property
def _constructor(self):
    # pandas hook: ensures operations that build new frames from this one
    # (slicing, copy, etc.) return a STAREDataFrame, not a plain DataFrame.
    return STAREDataFrame
def to_array(self, column, shape=None, pivot=False):
    """Converts the 'column' to a numpy array.

    Either a shape argument has to be provided or the dataframe has to
    contain columns x and y holding the original array coordinates.
    If no shape is given it defaults to (max(x)+1, max(y)+1).

    With ``pivot=True`` the frame is pivoted along the x/y columns instead
    of simply reshaped, which is robust against reordered rows.

    Parameters
    ----------
    column: str
        column name to be converted to an array
    shape: tuple
        x and y shape of the array. x*y has to equal the length of the dataframe
    pivot: bool
        if true, rather than simple reshaping, the dataframe is pivoted along the x and y column

    Examples
    ----------
    >>> df = starepandas.STAREDataFrame({'x': [0, 0, 1, 1],
    ...                                  'y': [1, 0, 0, 1],
    ...                                  'a': [1, 2, 3, 4]})
    >>> df.to_array('a', pivot=False)
    array([[1, 2],
           [3, 4]])
    >>> df.to_array('a', pivot=True)
    array([[2, 1],
           [3, 4]])

    See also
    --------
    STAREDataFrame.to_arrays
    """
    if shape is None:
        # Infer the array extent from the stored coordinate columns.
        shape = (max(self['x']) + 1, max(self['y']) + 1)
    if pivot:
        return self.pivot(index='x', columns='y', values=column).to_numpy()
    return self[column].to_numpy().reshape(shape)
def to_sids_array(self, shape=None, pivot=False):
    """Converts the SID column to a numpy array; see :meth:`to_array` for the shape/pivot semantics."""
    return self.to_array(self._sid_column_name, shape, pivot)
def to_arrays(self, shape=None, pivot=False):
    """ Converts a STAREDataFrame into a dictionary of arrays; one array per column/field.

    This may be useful to write data back to granules. Either a shape
    argument has to be provided or the dataframe has to contain columns
    x and y holding the original array coordinates; without a shape,
    (max(x)+1, max(y)+1) is assumed.

    With ``pivot=True`` each column is pivoted along x/y instead of
    reshaped, which is robust against reordered rows.

    Parameters
    ----------
    shape: tuple
        x and y shape of the array. x*y has to equal the length of the dataframe
    pivot: bool
        if true, rather than simple reshaping, the dataframe is pivoted along the x and y column

    See also
    ---------
    STAREDataFrame.to_array
    """
    # The coordinate columns themselves are not exported.
    return {
        name: self.to_array(name, shape=shape, pivot=pivot)
        for name in self.columns
        if name not in ('x', 'y')
    }
def to_sidecar(self, fname, cover=False, shuffle=True, zlib=True):
    """ Writes a STARE sidecar netCDF file.

    Stores the SID column as a 2D ``STARE_index`` variable with
    dimensions ``i`` x ``j`` (recovered from the x/y layout via
    :meth:`to_array`). When ``cover`` is True, the dissolved SIDs are
    additionally written as a 1D ``STARE_cover`` variable.

    Parameters
    ----------
    fname: str
        path of the netCDF file to create (overwritten if it exists)
    cover: bool
        if True, also compute and store the dissolved cover
    shuffle: bool
        toggle the netCDF shuffle filter on the created variables
    zlib: bool
        toggle zlib compression on the created variables
    """
    sids = self.to_array(self._sid_column_name)
    if cover:
        # NOTE(review): relies on self.stare_dissolve(); confirm that is
        # the dissolve method's public name in this class.
        sids_cover = self.stare_dissolve()
        l = sids_cover.size
    i = sids.shape[0]
    j = sids.shape[1]
    with netCDF4.Dataset(fname, 'w', format="NETCDF4") as rootgrp:
        rootgrp.createDimension('i', i)
        rootgrp.createDimension('j', j)
        # One chunk spanning the whole array; 'u8' = unsigned 64-bit SIDs.
        sids_netcdf = rootgrp.createVariable(varname='STARE_index',
                                             datatype='u8',
                                             dimensions=('i', 'j'),
                                             chunksizes=[i, j],
                                             shuffle=shuffle,
                                             zlib=zlib)
        sids_netcdf.long_name = 'SpatioTemporal Adaptive Resolution Encoding (STARE) index'
        sids_netcdf[:, :] = sids
        if cover:
            rootgrp.createDimension('l', l)
            cover_netcdf = rootgrp.createVariable(varname='STARE_cover',
                                                  datatype='u8',
                                                  dimensions=('l'),
                                                  chunksizes=[l],
                                                  shuffle=shuffle,
                                                  zlib=zlib)
            cover_netcdf.long_name = 'SpatioTemporal Adaptive Resolution Encoding (STARE) cover'
            cover_netcdf[:] = sids_cover
def _dataframe_set_sids(self, col, inplace=False):
    """Promote a plain (Geo)DataFrame to a STAREDataFrame with `col` as its SID column.

    Monkey-patched onto pandas/geopandas below so ``df.set_sids(col)``
    works on ordinary frames. Always returns a new STAREDataFrame;
    in-place conversion is impossible because the object type changes.
    """
    # We create a function here so that we can take conventional DataFrames and convert them to sdfs
    if inplace:
        raise ValueError(
            "Can't do inplace setting when converting from (Geo)DataFrame to STAREDataFrame"
        )
    sdf = STAREDataFrame(self)
    # this will copy so that BlockManager gets copied
    return sdf.set_sids(col, inplace=False)
# Attach the converter to both frame types so plain (Geo)DataFrames can be
# promoted to STAREDataFrames via df.set_sids(...).
geopandas.GeoDataFrame.set_sids = _dataframe_set_sids
pandas.DataFrame.set_sids = _dataframe_set_sids
|
zjsrose/zjsheng_blog
|
c/alloc.c
|
<filename>c/alloc.c
#include<stdio.h>
#include<stdlib.h>
#include<string.h>
#define MAX_REMIND 50
#define MSG_LEN 60
/*
 * result = s1 + s2: returns a newly allocated concatenation of s1 and s2.
 * The caller owns the returned buffer and must free() it.
 * Exits the program if the allocation fails.
 */
char *concat(const char *s1, const char *s2){
    char *result = malloc(strlen(s1) + strlen(s2) + 1);
    if(result == NULL){
        /* Diagnostics belong on stderr, not stdout (and need a newline). */
        fprintf(stderr, "Error: malloc failed in concat\n");
        exit(EXIT_FAILURE);
    }
    strcpy(result, s1);
    strcat(result, s2);
    return result;
}
/*
 * Reads characters until newline or EOF, storing at most n of them in str
 * (excess characters are read and discarded). Always NUL-terminates.
 * Returns the number of characters stored.
 */
int read_line(char str[], int n){
    int ch;  /* int, not char: getchar() signals EOF with an out-of-band value */
    int i = 0;
    while((ch = getchar()) != '\n' && ch != EOF)
        if(i < n)
            str[i++] = (char) ch;
    str[i] = '\0';
    return i;
}
/*
 * Reads "day reminder" lines from stdin, keeps them sorted by day, and
 * prints the sorted list. A day of 0 (or EOF / bad input) ends entry.
 */
int main(){
    char *reminders[MAX_REMIND];
    char day_str[3], msg_str[MSG_LEN+1];
    int day, i, j, num_remind = 0;

    for(;;){
        if(num_remind == MAX_REMIND){
            printf("-- No space left --\n");
            break;
        }
        printf("Enter day and reminder:");
        /* Check the scanf result: bad input or EOF previously looped forever. */
        if(scanf("%2d", &day) != 1 || day == 0)
            break;
        sprintf(day_str, "%2d", day);
        read_line(msg_str, MSG_LEN);

        /* Find the insertion point that keeps the list sorted by day. */
        for(i = 0; i < num_remind; i++)
            if(strcmp(day_str, reminders[i]) < 0)
                break;
        /* Shift later entries right to make room. */
        for(j = num_remind; j > i; j--)
            reminders[j] = reminders[j-1];

        /* 2 bytes for the day prefix + message + NUL. */
        reminders[i] = malloc(2 + strlen(msg_str) + 1);
        if(reminders[i] == NULL){
            printf("-- NO space left --\n");
            break;
        }
        strcpy(reminders[i], day_str);
        strcat(reminders[i], msg_str);
        num_remind++;
    }

    printf("\nDay Reminder\n");
    for(i = 0; i < num_remind; i++)
        printf("%s\n", reminders[i]);

    /* Release the heap-allocated reminder strings. */
    for(i = 0; i < num_remind; i++)
        free(reminders[i]);
    return 0;
}
|
gustavonalle/protostream
|
core/src/main/java/org/infinispan/protostream/impl/parser/mappers/package-info.java
|
/**
 * Converters (mappers) from the protoparser model elements to protostream descriptors.
 */
package org.infinispan.protostream.impl.parser.mappers;
|
edwino-stein/elrond-common
|
old/include/module/BaseInputModule.hpp
|
#if !defined  _ELROND_BASE_INPUT_MODULE_HPP
    #define _ELROND_BASE_INPUT_MODULE_HPP

    #include "module/BaseModule.hpp"

    namespace elrond {
        namespace module {

            /*
             * Abstract base class for input modules: extends BaseModule with
             * the ability to register input listeners keyed by an input id.
             */
            class BaseInputModule : public elrond::module::BaseModule {
                public:

                    #ifdef ELROND_WITH_DESTRUCTORS
                        virtual ~BaseInputModule();
                    #endif

                    // Registers `listener` for the input identified by `key`.
                    // Pure virtual: concrete input modules must implement it.
                    virtual void addInputListener(const elrond::sizeT key,
                                                  elrond::input::InputListener* listener)=0;

                    // Reports this module's type (presumably the INPUT type;
                    // the definition lives in the .cpp — confirm there).
                    virtual elrond::ModuleType getType() const override;
            };
        }
    }

#endif
|
BrotherSharper/hey-wait
|
src/module/MacroOperations.js
|
<filename>src/module/MacroOperations.js
export default class MacroOperations {
  /**
   * MacroOperations constructor.
   *
   * @param {User} user
   *   The injected User dependency. The current user.
   * @param {BackgroundLayer} backgroundLayer
   *   The injected BackgroundLayer dependency.
   * @param {Map} macros
   *   The injected game macros map dependency.
   * @param {Notifications} notifications
   *   The injected Notifications dependency.
   */
  constructor(user, backgroundLayer, macros, notifications) {
    this.user = user;
    this.backgroundLayer = backgroundLayer;
    this.macros = macros;
    this.notifications = notifications;
  }

  /**
   * Execute the macro attached to a Hey, Wait! tile, if one is configured.
   *
   * Only the GM client fires macros; all other clients return immediately.
   *
   * @param {string} tileId
   *   The relevant tile ID.
   * @param {TokenDocument} tokenDoc
   *   The relevant TokenDocument that is the protagonist.
   */
  handleTileMacroFiring(tileId, tokenDoc) {
    if (!this.user.isGM) {
      return;
    }

    // Locate the triggering tile on the background layer.
    let matched = null;
    for (const candidate of this.backgroundLayer.tiles) {
      if (candidate.id === tileId) {
        matched = candidate;
        break;
      }
    }
    if (matched === null) {
      return;
    }

    // A missing flag or the sentinel '0' means "no macro configured".
    const macroId = matched.data?.flags?.['hey-wait']?.macro;
    if (!macroId || macroId === '0') {
      return;
    }

    const configuredMacro = this.macros.get(macroId);
    if (!configuredMacro) {
      this.notifications.error(
        'The macro triggered by the Hey, Wait! tile no longer exists.',
      );
      return;
    }

    configuredMacro.execute({
      actor: tokenDoc.getActor(),
      token: tokenDoc.object,
    });
  }
}
|
mguidon/aiohttp-dsm
|
datcore-sdk/python/datcore_sdk/api/tabular_api.py
|
<reponame>mguidon/aiohttp-dsm
# coding: utf-8
"""
Blackfynn Swagger
Swagger documentation for the Blackfynn api # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from datcore_sdk.api_client import ApiClient
class TabularApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    # Use the injected client when given; otherwise fall back to a
    # default-configured ApiClient.
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client
def column_stats(self, package_id, column, **kwargs):  # noqa: E501
    """gets the statistics for a numeric column  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.column_stats(package_id, column, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param str column: the name of the table column for which we will compute statistics (required)
    :return: ColumnStats
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the deserialized body.
    kwargs['_return_http_data_only'] = True
    # The delegate returns a thread when async_req is set and the data
    # otherwise — in either case exactly what we hand back.
    return self.column_stats_with_http_info(package_id, column, **kwargs)  # noqa: E501
def column_stats_with_http_info(self, package_id, column, **kwargs):  # noqa: E501
    """gets the statistics for a numeric column  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.column_stats_with_http_info(package_id, column, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param str column: the name of the table column for which we will compute statistics (required)
    :return: ColumnStats
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: auto-generated by OpenAPI Generator; regenerate rather than hand-edit.
    # locals() snapshots the explicit arguments; **kwargs is folded in below.
    local_var_params = locals()

    all_params = ['package_id', 'column']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument the API does not define.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method column_stats" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'package_id' is set
    if ('package_id' not in local_var_params or
            local_var_params['package_id'] is None):
        raise ValueError("Missing the required parameter `package_id` when calling `column_stats`")  # noqa: E501
    # verify the required parameter 'column' is set
    if ('column' not in local_var_params or
            local_var_params['column'] is None):
        raise ValueError("Missing the required parameter `column` when calling `column_stats`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'package_id' in local_var_params:
        path_params['packageId'] = local_var_params['package_id']  # noqa: E501
    if 'column' in local_var_params:
        path_params['column'] = local_var_params['column']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the HTTP round trip (auth, serialization, threading) to ApiClient.
    return self.api_client.call_api(
        '/tabular/{packageId}/{column}/stats', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ColumnStats',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def count(self, package_id, **kwargs):  # noqa: E501
    """gets the number of rows in a table  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.count(package_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :return: CountResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the deserialized body.
    kwargs['_return_http_data_only'] = True
    # The delegate returns a thread when async_req is set and the data
    # otherwise — in either case exactly what we hand back.
    return self.count_with_http_info(package_id, **kwargs)  # noqa: E501
def count_with_http_info(self, package_id, **kwargs):  # noqa: E501
    """gets the number of rows in a table  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.count_with_http_info(package_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :return: CountResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: auto-generated by OpenAPI Generator; regenerate rather than hand-edit.
    # locals() snapshots the explicit arguments; **kwargs is folded in below.
    local_var_params = locals()

    all_params = ['package_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument the API does not define.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method count" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'package_id' is set
    if ('package_id' not in local_var_params or
            local_var_params['package_id'] is None):
        raise ValueError("Missing the required parameter `package_id` when calling `count`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'package_id' in local_var_params:
        path_params['packageId'] = local_var_params['package_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the HTTP round trip (auth, serialization, threading) to ApiClient.
    return self.api_client.call_api(
        '/tabular/{packageId}/count', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CountResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def distinct_count(self, package_id, column, **kwargs):  # noqa: E501
    """gets the number of distinct values in a column  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.distinct_count(package_id, column, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param str column: the name of the table column for which we will compute the distinct values (required)
    :return: DistinctResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the deserialized body.
    kwargs['_return_http_data_only'] = True
    # The delegate returns a thread when async_req is set and the data
    # otherwise — in either case exactly what we hand back.
    return self.distinct_count_with_http_info(package_id, column, **kwargs)  # noqa: E501
def distinct_count_with_http_info(self, package_id, column, **kwargs):  # noqa: E501
    """gets the number of distinct values in a column  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.distinct_count_with_http_info(package_id, column, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param str column: the name of the table column for which we will compute the distinct values (required)
    :return: DistinctResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: auto-generated by OpenAPI Generator; regenerate rather than hand-edit.
    # locals() snapshots the explicit arguments; **kwargs is folded in below.
    local_var_params = locals()

    all_params = ['package_id', 'column']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument the API does not define.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method distinct_count" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'package_id' is set
    if ('package_id' not in local_var_params or
            local_var_params['package_id'] is None):
        raise ValueError("Missing the required parameter `package_id` when calling `distinct_count`")  # noqa: E501
    # verify the required parameter 'column' is set
    if ('column' not in local_var_params or
            local_var_params['column'] is None):
        raise ValueError("Missing the required parameter `column` when calling `distinct_count`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'package_id' in local_var_params:
        path_params['packageId'] = local_var_params['package_id']  # noqa: E501
    if 'column' in local_var_params:
        path_params['column'] = local_var_params['column']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the HTTP round trip (auth, serialization, threading) to ApiClient.
    return self.api_client.call_api(
        '/tabular/{packageId}/{column}/distinct/count', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DistinctResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def distinct_values(self, package_id, column, limit, **kwargs):  # noqa: E501
    """gets the distinct values and their counts for a column  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.distinct_values(package_id, column, limit, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param str column: the name of the table column for which we will compute the distinct values (required)
    :param str limit: the number of distinct values to include (required)
    :return: DistinctValues
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the deserialized body.
    kwargs['_return_http_data_only'] = True
    # The delegate returns a thread when async_req is set and the data
    # otherwise — in either case exactly what we hand back.
    return self.distinct_values_with_http_info(package_id, column, limit, **kwargs)  # noqa: E501
def distinct_values_with_http_info(self, package_id, column, limit, **kwargs):  # noqa: E501
    """gets the distinct values and their counts for a column  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.distinct_values_with_http_info(package_id, column, limit, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param str column: the name of the table column for which we will compute the distinct values (required)
    :param str limit: the number of distinct values to include (required)
    :return: DistinctValues
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: auto-generated by OpenAPI Generator; regenerate rather than hand-edit.
    # locals() snapshots the explicit arguments; **kwargs is folded in below.
    local_var_params = locals()

    all_params = ['package_id', 'column', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument the API does not define.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method distinct_values" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'package_id' is set
    if ('package_id' not in local_var_params or
            local_var_params['package_id'] is None):
        raise ValueError("Missing the required parameter `package_id` when calling `distinct_values`")  # noqa: E501
    # verify the required parameter 'column' is set
    if ('column' not in local_var_params or
            local_var_params['column'] is None):
        raise ValueError("Missing the required parameter `column` when calling `distinct_values`")  # noqa: E501
    # verify the required parameter 'limit' is set
    if ('limit' not in local_var_params or
            local_var_params['limit'] is None):
        raise ValueError("Missing the required parameter `limit` when calling `distinct_values`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'package_id' in local_var_params:
        path_params['packageId'] = local_var_params['package_id']  # noqa: E501
    if 'column' in local_var_params:
        path_params['column'] = local_var_params['column']  # noqa: E501

    query_params = []
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the HTTP round trip (auth, serialization, threading) to ApiClient.
    return self.api_client.call_api(
        '/tabular/{packageId}/{column}/distinct/values', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DistinctValues',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_table_schema(self, package_id, **kwargs):  # noqa: E501
    """gets the schema for a tabular data package  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_table_schema(package_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :return: GraphNode
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the deserialized body.
    kwargs['_return_http_data_only'] = True
    # The delegate returns a thread when async_req is set and the data
    # otherwise — in either case exactly what we hand back.
    return self.get_table_schema_with_http_info(package_id, **kwargs)  # noqa: E501
def get_table_schema_with_http_info(self, package_id, **kwargs):  # noqa: E501
    """gets the schema for a tabular data package  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_table_schema_with_http_info(package_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :return: GraphNode
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: auto-generated by OpenAPI Generator; regenerate rather than hand-edit.
    # locals() snapshots the explicit arguments; **kwargs is folded in below.
    local_var_params = locals()

    all_params = ['package_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument the API does not define.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_table_schema" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'package_id' is set
    if ('package_id' not in local_var_params or
            local_var_params['package_id'] is None):
        raise ValueError("Missing the required parameter `package_id` when calling `get_table_schema`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'package_id' in local_var_params:
        path_params['packageId'] = local_var_params['package_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the HTTP round trip (auth, serialization, threading) to ApiClient.
    return self.api_client.call_api(
        '/tabular/{packageId}/schema', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='GraphNode',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_tabular_data(self, package_id, **kwargs):  # noqa: E501
    """gets the tabular data for a Tabular package  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_tabular_data(package_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param int limit: limits the number of rows to return (default=1000, max=10000)
    :param int offset: starting row to get data onwards from (default=0)
    :param str order_by: column to order the results by
    :param str order_direction: direction to order by (from values {ASC,DESC}) (default=ASC)
    :return: Table
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the deserialized body.
    kwargs['_return_http_data_only'] = True
    # The delegate returns a thread when async_req is set and the data
    # otherwise — in either case exactly what we hand back.
    return self.get_tabular_data_with_http_info(package_id, **kwargs)  # noqa: E501
def get_tabular_data_with_http_info(self, package_id, **kwargs):  # noqa: E501
    """gets the tabular data for a Tabular package  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_tabular_data_with_http_info(package_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_id: package id (required)
    :param int limit: limits the number of rows to return (default=1000, max=10000)
    :param int offset: starting row to get data onwards from (default=0)
    :param str order_by: column to order the results by
    :param str order_direction: direction to order by (from values {ASC,DESC}) (default=ASC)
    :return: Table
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: auto-generated by OpenAPI Generator; regenerate rather than hand-edit.
    # locals() snapshots the explicit arguments; **kwargs is folded in below.
    local_var_params = locals()

    all_params = ['package_id', 'limit', 'offset', 'order_by', 'order_direction']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument the API does not define.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tabular_data" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'package_id' is set
    if ('package_id' not in local_var_params or
            local_var_params['package_id'] is None):
        raise ValueError("Missing the required parameter `package_id` when calling `get_tabular_data`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'package_id' in local_var_params:
        path_params['packageId'] = local_var_params['package_id']  # noqa: E501

    # Optional paging/ordering parameters are only sent when supplied.
    query_params = []
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'offset' in local_var_params:
        query_params.append(('offset', local_var_params['offset']))  # noqa: E501
    if 'order_by' in local_var_params:
        query_params.append(('orderBy', local_var_params['order_by']))  # noqa: E501
    if 'order_direction' in local_var_params:
        query_params.append(('orderDirection', local_var_params['order_direction']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the HTTP round trip (auth, serialization, threading) to ApiClient.
    return self.api_client.call_api(
        '/tabular/{packageId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Table',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def insert_tabular_data(self, package_id, insert_tabular_data_request, **kwargs): # noqa: E501
"""inserts multiple rows of data into the contents of a tabular package # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.insert_tabular_data(package_id, insert_tabular_data_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str package_id: package id (required)
:param InsertTabularDataRequest insert_tabular_data_request: rows of tabular data to be inserted (required)
:return: Table
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.insert_tabular_data_with_http_info(package_id, insert_tabular_data_request, **kwargs) # noqa: E501
else:
(data) = self.insert_tabular_data_with_http_info(package_id, insert_tabular_data_request, **kwargs) # noqa: E501
return data
    def insert_tabular_data_with_http_info(self, package_id, insert_tabular_data_request, **kwargs):  # noqa: E501
        """inserts multiple rows of data into the contents of a tabular package  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.insert_tabular_data_with_http_info(package_id, insert_tabular_data_request, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str package_id: package id (required)
        :param InsertTabularDataRequest insert_tabular_data_request: rows of tabular data to be inserted (required)
        :return: Table
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot the declared parameters (plus 'self' and the 'kwargs' dict)
        # before any other local is created; the validation loop below relies
        # on this exact snapshot.
        local_var_params = locals()
        all_params = ['package_id', 'insert_tabular_data_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword that is neither a declared parameter nor a
        # recognized transport option, then flatten kwargs into the snapshot.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method insert_tabular_data" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'package_id' is set
        if ('package_id' not in local_var_params or
                local_var_params['package_id'] is None):
            raise ValueError("Missing the required parameter `package_id` when calling `insert_tabular_data`")  # noqa: E501
        # verify the required parameter 'insert_tabular_data_request' is set
        if ('insert_tabular_data_request' not in local_var_params or
                local_var_params['insert_tabular_data_request'] is None):
            raise ValueError("Missing the required parameter `insert_tabular_data_request` when calling `insert_tabular_data`")  # noqa: E501
        collection_formats = {}
        # Substitute {packageId} in the endpoint path.
        path_params = {}
        if 'package_id' in local_var_params:
            path_params['packageId'] = local_var_params['package_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The request body is the InsertTabularDataRequest model itself.
        body_params = None
        if 'insert_tabular_data_request' in local_var_params:
            body_params = local_var_params['insert_tabular_data_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization, and async dispatch to the
        # shared ApiClient; the response is deserialized into a Table model.
        return self.api_client.call_api(
            '/tabular/{packageId}/insert', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Table',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def store_schema(self, package_id, add_schema_request, **kwargs): # noqa: E501
"""stores a table schema for a tabular data package # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.store_schema(package_id, add_schema_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str package_id: package id (required)
:param AddSchemaRequest add_schema_request: the list of columns names, their types, and other properties that describe the table's schema (required)
:return: TableSchema
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.store_schema_with_http_info(package_id, add_schema_request, **kwargs) # noqa: E501
else:
(data) = self.store_schema_with_http_info(package_id, add_schema_request, **kwargs) # noqa: E501
return data
    def store_schema_with_http_info(self, package_id, add_schema_request, **kwargs):  # noqa: E501
        """stores a table schema for a tabular data package  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.store_schema_with_http_info(package_id, add_schema_request, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str package_id: package id (required)
        :param AddSchemaRequest add_schema_request: the list of columns names, their types, and other properties that describe the table's schema (required)
        :return: TableSchema
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot the declared parameters (plus 'self' and the 'kwargs' dict)
        # before any other local is created; the validation loop below relies
        # on this exact snapshot.
        local_var_params = locals()
        all_params = ['package_id', 'add_schema_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword that is neither a declared parameter nor a
        # recognized transport option, then flatten kwargs into the snapshot.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method store_schema" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'package_id' is set
        if ('package_id' not in local_var_params or
                local_var_params['package_id'] is None):
            raise ValueError("Missing the required parameter `package_id` when calling `store_schema`")  # noqa: E501
        # verify the required parameter 'add_schema_request' is set
        if ('add_schema_request' not in local_var_params or
                local_var_params['add_schema_request'] is None):
            raise ValueError("Missing the required parameter `add_schema_request` when calling `store_schema`")  # noqa: E501
        collection_formats = {}
        # Substitute {packageId} in the endpoint path.
        path_params = {}
        if 'package_id' in local_var_params:
            path_params['packageId'] = local_var_params['package_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The request body is the AddSchemaRequest model itself.
        body_params = None
        if 'add_schema_request' in local_var_params:
            body_params = local_var_params['add_schema_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization, and async dispatch to the
        # shared ApiClient; the response is deserialized into a TableSchema.
        return self.api_client.call_api(
            '/tabular/{packageId}/schema', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='TableSchema',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
|
henry4k/apoapsis
|
engine/tests/TestTools.h
|
<reponame>henry4k/apoapsis
#ifndef __KONSTRUKT_TEST_TOOLS__
#define __KONSTRUKT_TEST_TOOLS__

// Thin wrappers around the "dummy" test framework used by the engine tests.
#include <dummy/core.h>
#include <dummy/signal_sandbox.h>
#include <dummy/require.h>
#include <dummy/inline.hpp>

// Assertion macro (alias for dummyRequire).
#define Require dummyRequire
// Registers a named test function; runs it inside dummySignalSandbox —
// presumably so signals raised during the test are caught by the framework
// rather than terminating the runner (see dummy framework docs).
#define AddTest(Name, Function) dummyAddTest(Name, dummySignalSandbox, Function)
// Declares an inline test body, also run inside dummySignalSandbox.
#define InlineTest(Name) DUMMY_INLINE_TEST(Name, dummySignalSandbox)

// Initializes the test runner from main()'s arguments.
void InitTests( int argc, char const * const * argv );
// Sets up the virtual file system used by tests; argv0 is the program path.
void InitTestVfs( const char* argv0 );
// Starts the job manager required by some tests.
void InitTestJobManager();
// Runs all registered tests; returns a status code (0 on success — confirm
// against the dummy framework's documentation).
int RunTests();

#endif
|
SURGroup/UncertaintyQuantification
|
src/UQpy/surrogates/gaussian_process/regression_models/__init__.py
|
<reponame>SURGroup/UncertaintyQuantification<gh_stars>0
from UQpy.surrogates.gaussian_process.regression_models.baseclass import *
from UQpy.surrogates.gaussian_process.regression_models.ConstantRegression import ConstantRegression
from UQpy.surrogates.gaussian_process.regression_models.LinearRegression import LinearRegression
from UQpy.surrogates.gaussian_process.regression_models.QuadraticRegression import QuadraticRegression
|
pratik963/civic311_test3
|
app/models/agent.rb
|
# ActiveRecord model for a real-estate agent profile.
#
# Covers license data, profile-change workflows (full name / company name),
# rating aggregation, geo-based agent lookup, and push-notification fan-out
# to the agent's registered customer devices.
#
# NOTE(review): constants referenced below (PENDING, APPROVED, UNKNOWN,
# CHANGE_PROFILE, CHANGE_COMPANY_NAME, DEFAULT_RADIUS, MAX_RADIUS,
# MAX_AGENTS_TOTAL) have their definitions commented out in this file —
# presumably they are defined elsewhere (e.g. an initializer); confirm
# before relying on this file alone.
class Agent < ActiveRecord::Base
  self.table_name = "agents"
  self.primary_key = 'id'
  #DEFAULT_RADIUS = APP_CONFIG["default_radius"]
  #MAX_RADIUS = APP_CONFIG["max_radius"]
  #MAX_AGENTS_TOTAL = APP_CONFIG["max_agents_total"]
  #PENDING = "pending"
  #APPROVED = "approved"
  #REJECTED = "reject"
  #UNKNOWN = "unknown"
  #CHANGE_COMPANY_NAME = "change_company_name"
  #CHANGE_PROFILE = "change_profile"
  #geocoded_by :address
  #after_validation :geocode
  validates :phone_number, uniqueness: true
  validates :sent_rating, numericality: true,allow_nil: true
  has_many :acceptances, :inverse_of => :agent
  has_many :agent_languages, :inverse_of => :agent
  has_many :agent_specialties, :inverse_of => :agent
  has_many :agent_designations, :inverse_of => :agent
  has_many :agent_zip_codes, :inverse_of => :agent
  has_many :licenses, :inverse_of => :agent
  has_one :rating_request, :inverse_of => :agent
  has_many :payment_transactions, :inverse_of => :agent
  has_one :ratings, :inverse_of => :agent
  # has_many :connections, :dependent => :delete_all
  belongs_to :user, :inverse_of => :agent
  belongs_to :state, :class_name => "State", :foreign_key => "license_state_issued"
  #has_attached_file :avatar, :styles => { :medium => "300x300>", :thumb => "150x200^" }, :default_url => ""
  #validates_attachment_content_type :avatar, :content_type => /\Aimage\/.*\Z/
  #rails_admin do
  #configure :connections do
  #visible(false)
  #end
  #configure :ratings do
  #visible(false)
  #end
  #configure :rating_requests do
  #visible(false)
  #end
  #configure :helpings do
  #visible(false)
  #end
  #end

  # Full name when both parts are present; returns nil otherwise.
  def name
    if first_name.present? && last_name.present?
      "#{first_name} #{last_name}"
    end
  end

  # API representation of the stored profile.  `request` is only used to
  # build an absolute avatar URL.
  def as_json(request)
    avatar = ""
    avatar = request.protocol + request.host_with_port + self.avatar.url if self.avatar.present?
    {
      email: self.user.email,
      first_name: self.first_name,
      last_name: self.last_name,
      company_name: self.company_name,
      phone_number: self.phone_number,
      about: self.about,
      license_number: self.license_number,
      license_state_issued: self.license_state_issued,
      license_issued_date: self.license_issued_date,
      license_url: self.try(:state).try(:main_page_url),
      status: self.status,
      website_url: self.website_url,
      listing_url: self.listing_url,
      video_url: self.video_url,
      avatar: avatar,
      edit_profile_state: self.edit_profile_state,
      edit_company_name_state: self.edit_company_name_state
    }
  end

  # Like #as_json, but prefers values from the latest pending/approved
  # change-request (name, company name) and pending/active license, falling
  # back to the stored columns.
  def as_json2(request)
    avatar = ""
    avatar = request.protocol + request.host_with_port + self.avatar.url if self.avatar.present?
    agent_change_info = self.user.agent_change_info.where(action: CHANGE_PROFILE, status: [PENDING, APPROVED]).last
    agent_change_company_name = self.user.agent_change_info.where(action: CHANGE_COMPANY_NAME , status: [PENDING, APPROVED]).last
    license = self.licenses.where(status: ["Pending", "Actived"]).last
    {
      email: self.user.email,
      first_name: agent_change_info ? agent_change_info.first_name_new : self.first_name,
      last_name: agent_change_info ? agent_change_info.last_name_new : self.last_name,
      company_name: agent_change_company_name ? agent_change_company_name.company_name_new : self.company_name,
      phone_number: self.phone_number,
      about: self.about,
      license_number: license ? license.license_number : self.license_number,
      license_state_issued: license ? license.license_state_issued : self.license_state_issued,
      license_issued_date: license ? license.license_issued_date : self.license_issued_date,
      license_url: self.try(:state).try(:main_page_url),
      status: self.status,
      website_url: self.website_url,
      listing_url: self.listing_url,
      video_url: self.video_url,
      avatar: avatar,
      edit_profile_state: self.edit_profile_state,
      edit_company_name_state: self.edit_company_name_state
    }
  end

  # Aggregates this agent's ratings: distinct rater count, overall average,
  # and averages grouped per question and per question category.
  def get_rating_information
    ratings = Rating.where(agent_id: self.id)
    numCustomerRates = ratings.distinct.count(:customer_id)
    avgAll = ratings.average('rate').to_f
    avgQuestion = ratings.group('rating_question_id').average('rate').map{|question, rate| {question: question, rate: rate}}
    ratings = ratings.joins(:rating_question).group('category')
    avgCategory = ratings.average('rate').map{|category, rate| {category: category, rate: rate}}
    numRateCategory = ratings.distinct.count("customer_id").map{|category, num| {category: category, numCustomerRates: num}}
    avgCategory_temp = ratings.average('rate').map{|category, rate| {category: category, numCustomerRates: rate}}
    {numCustomerRates: numCustomerRates, avgAllRates: avgAll, numRateCategory: numRateCategory,avgCategory_temp: avgCategory_temp, avgCategory: avgCategory, avgQuestion: avgQuestion}
  end

  # Returns the last approved company-name change.  Side effect: when there
  # is no approved change, or the last one is older than a month, resets
  # edit_company_name_state to PENDING before returning.
  def get_agent_info_history
    agent_change_info = AgentChangeInfo.where(user_id: self.user_id,status: APPROVED,action: CHANGE_COMPANY_NAME).last
    if agent_change_info.blank? || agent_change_info.updated_at < Time.now - 1.months
      self.update_attributes({:edit_company_name_state => PENDING})
      return agent_change_info
    end
    return agent_change_info
  end

  # Records a company-name change request (creating or updating the pending
  # AgentChangeInfo row) and updates edit_company_name_state accordingly.
  #
  # NOTE(review): on success this returns a [true/false, message] pair, but
  # the validation-failure path at the bottom returns a bare `false` —
  # confirm callers handle both shapes.
  def save_company_name(company_name_new)
    agent_change_info = AgentChangeInfo.find_by(user_id: self.user_id,status: PENDING,action: CHANGE_COMPANY_NAME)
    if agent_change_info.blank?
      agent_change_info = AgentChangeInfo.new( user_id: self.user_id,company_name_old: self.company_name,
                                               company_name_new: company_name_new, status: PENDING,
                                               action: CHANGE_COMPANY_NAME)
      first_times = true
    else
      first_times = false
      # NOTE(review): the variable is named gt_month but the comparison uses
      # 1.minute — looks like a debugging leftover; confirm the intended window.
      gt_month = agent_change_info.updated_at.utc < Time.now.utc - 1.minute
      agent_change_info.company_name_new = company_name_new
    end
    if agent_change_info.save
      begin
        if first_times
          self.update!(edit_company_name_state: UNKNOWN)
          message = I18n.t 'saveCompanyName_success_gt_30'
        else
          if gt_month
            self.update!(edit_company_name_state: UNKNOWN)
            message = I18n.t 'saveCompanyName_success_gt_30'
          else
            self.update!(edit_company_name_state: PENDING)
            message = I18n.t 'saveCompanyName_success_ltq_30'
          end
        end
        return true, message
      rescue Exception => e
        puts "============errors"
        p e
        return false, ""
      end
    end
    puts "=========error"
    puts agent_change_info.errors.inspect
    return false
  end

  # Records a full-name change request (creating or updating the pending
  # AgentChangeInfo row) and marks the profile edit as PENDING.
  def save_fullname(first_name_new, last_name_new)
    agent_change_info = AgentChangeInfo.find_by(user_id: self.user_id,status: PENDING,action: CHANGE_PROFILE)
    if agent_change_info.blank?
      agent_change_info = AgentChangeInfo.new(first_name_new: first_name_new, first_name_old: self.first_name, last_name_new: last_name_new,
                                              last_name_old: self.last_name, user_id: self.user_id,
                                              status: PENDING,action: CHANGE_PROFILE)
    else
      agent_change_info.first_name_new = first_name_new
      agent_change_info.last_name_new = last_name_new
    end
    if agent_change_info.save
      begin
        self.update!(edit_profile_state: PENDING)
        return true
      rescue Exception => e
        p e
        return false
      end
    end
    return false
  end

  # Notifies nearby customers (with active, unexpired location alerts within
  # DEFAULT_RADIUS miles of this agent) that the agent is online.
  # NOTE(review): `sns` is created but never used — confirm whether the
  # Aws::SNS::Client instantiation is still needed.
  def push_notification_online
    customer_location_alerts = CustomerLocationAlert.where("time_expired > (?) and is_alert IS TRUE",Time.now).within(DEFAULT_RADIUS,:units => :miles, origin: [self.latitude, self.longitude])
    user_ids = customer_location_alerts.select("user_id")
    customer_devices = CustomerDevice.where(user_id: user_ids)
    unless customer_location_alerts.blank? || customer_devices.blank?
      sns = Aws::SNS::Client.new
      p "======================================"
      p customer_devices
      customer_devices.each do |customer_device|
        customer_device.push_notification_agent_online(self)
      end
    end
  end

  # Pushes a name-change notification to all of this agent's own devices.
  def push_notification_apply_change(message,change_info)
    customer_devices = self.user.customer_devices
    unless customer_devices.blank?
      customer_devices.each do |customer_device|
        customer_device.push_notification_change_first_last_name(message,change_info)
      end
    end
  end

  # Pushes a license-status-change notification to all of this agent's devices.
  def push_notification_change_license(status)
    customer_devices = self.user.customer_devices
    unless customer_devices.blank?
      customer_devices.each do |customer_device|
        customer_device.push_notification_change_license(status,self)
      end
    end
  end

  # Pushes a company-name-change notification to all of this agent's devices.
  def push_notification_apply_change_company(message,change_info)
    customer_devices = self.user.customer_devices
    unless customer_devices.blank?
      customer_devices.each do |customer_device|
        customer_device.push_notification_change_company_name(message,change_info)
      end
    end
  end

  # Pushes a background notification of the given type to the agent's devices.
  def push_notification_to_agent_background(type,customer,sharing)
    customer_devices = self.user.customer_devices
    unless customer_devices.blank?
      customer_devices.each do |customer_device|
        customer_device.push_notification_background(type,self,customer,sharing)
      end
    end
  end

  # Counts online agents near a location, widening the search radius in
  # DEFAULT_RADIUS steps until more than MAX_AGENTS_TOTAL agents are found
  # or MAX_RADIUS is reached.  Returns the count as a string.
  def self.get_total_agent_from_current_location(latitude, longtitude)
    radius = DEFAULT_RADIUS
    total_agents = Agent.joins(:user).where("users.is_online = true").near([latitude, longtitude], radius, :units => :mi)
    #total_agents = Agent.joins(:user).near([latitude, longtitude], radius, :units => :mi)
    while total_agents.size.to_i <= MAX_AGENTS_TOTAL && radius < MAX_RADIUS
      p total_agents.size.to_i
      radius += DEFAULT_RADIUS
      total_agents = Agent.joins(:user).where("users.is_online = true").near([latitude, longtitude], radius, :units => :mi)
      #total_agents = Agent.joins(:user).near([latitude, longtitude], radius, :units => :mi)
    end
    total_agents.size.to_s
  end

  # True while either a profile or a company-name change awaits approval.
  def is_holdon?
    self.edit_profile_state == PENDING or self.edit_company_name_state == PENDING
  end
end
|
winbyhuang/demo
|
kafka/src/main/java/com/example/demo/kafka/KafakaTest.java
|
<gh_stars>1-10
package com.example.demo.kafka;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
/**
* Created by Administrator on 2016/10/13.
*/
@RequestMapping("kafka")
@RestController
public class KafakaTest {

    @Autowired
    private MsgProducer msgProducer;

    /**
     * Smoke-test endpoint: publishes one message to each of three topics and
     * logs how long the sends took.
     *
     * @param session unused; kept to preserve the existing mapping signature
     * @param request unused; kept to preserve the existing mapping signature
     * @return always {@code null} (the endpoint is fire-and-forget)
     */
    @RequestMapping(value = "a", method = RequestMethod.GET)
    public Object send(HttpSession session, HttpServletRequest request) {
        try {
            long t1 = System.currentTimeMillis();
            msgProducer.sendMessage("topic1", "topic--------1");
            msgProducer.sendMessage("topic2", "topic--------2");
            msgProducer.sendMessage("topic3", "topic--------3");
            System.out.print("耗时:");
            System.out.println(System.currentTimeMillis() - t1);
            System.out.println("=============================");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
christopher-henderson/PyStream
|
tests/_async/test_take.py
|
import unittest
from pstream import AsyncStream
from tests._async.utils import Driver, Method
class Take(Method):
    """Binds ``AsyncStream.take`` to the shared Method test harness."""

    def __init__(self, args):
        Method.__init__(self, AsyncStream.take, args)
class TestTake(unittest.TestCase):
    """Checks ``AsyncStream.take`` over async (``_a``) and sync (``_s``) sources."""

    def check(self, got, want, exception):
        # The Driver decorator supplies either a (got, want) pair or the
        # exception raised while driving the stream; surface the latter so
        # unittest records it.
        if exception is not None:
            raise exception
        self.assertEqual(got, want)

    @Driver(initial=range(10), method=Take(args=[5]), want=[0, 1, 2, 3, 4])
    def test__a(self, got=None, want=None, exception=None):
        self.check(got, want, exception)

    @Driver(initial=range(10), method=Take(args=[5]), want=[0, 1, 2, 3, 4])
    def test__s(self, got=None, want=None, exception=None):
        self.check(got, want, exception)

    ###############################

    @Driver(initial=range(10), method=Take(args=[10]), want=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    def test1__a(self, got=None, want=None, exception=None):
        self.check(got, want, exception)

    @Driver(initial=range(10), method=Take(args=[10]), want=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    def test1__s(self, got=None, want=None, exception=None):
        self.check(got, want, exception)

    ###############################

    @Driver(initial=[], method=Take(args=[1]), want=[])
    def test2__a(self, got=None, want=None, exception=None):
        self.check(got, want, exception)

    @Driver(initial=[], method=Take(args=[1]), want=[])
    def test2__s(self, got=None, want=None, exception=None):
        self.check(got, want, exception)

    ###############################

    @Driver(initial=range(10), method=Take(args=[0]), want=[])
    def test3__a(self, got=None, want=None, exception=None):
        self.check(got, want, exception)

    @Driver(initial=range(10), method=Take(args=[0]), want=[])
    def test3__s(self, got=None, want=None, exception=None):
        self.check(got, want, exception)
# Allow running this test module directly (python tests/_async/test_take.py).
if __name__ == '__main__':
    unittest.main()
|
npofsi/RMPEScript
|
src/main/java/pro/npofsi/rmpescript/runtime/JSLoader.java
|
package pro.npofsi.rmpescript.runtime;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import pro.npofsi.rmpescript.RMPEScript;
import pro.npofsi.rmpescript.broadcast.ForgeEventHandler;
import pro.npofsi.rmpescript.runtime.utils.JavaXJS;
/**
 * Loader thread that evaluates a JavaScript source string with Mozilla Rhino.
 * All Rhino interaction is serialized on the ScriptManager singleton so only
 * one script touches a Context at a time.
 */
public class JSLoader extends Loader{
    private String script,srcName;
    // Rhino context for this loader's thread; null until run() has entered it.
    private Context cx=null;
//    private Object ScriptManager= pro.npofsi.rmpescript.runtime.ScriptManager.getInstance();

    public JSLoader(String srcName,String script){
        this.srcName=srcName;
        this.script=script;
        this.setName(srcName);
    }

    @Override
    public void run(){
        synchronized(ScriptManager.getInstance()){
            // Associate a Rhino Context with this thread for the script run.
            cx = Context.enter();
            try {
                Scriptable scope = cx.initStandardObjects();
                // The random tag keeps source names unique across evaluations.
                Object result = cx.evaluateString(scope, script, srcName+RMPEScript.randomTag(), 1, null);
                RMPEScript.Log.i("Script end: "+cx.toString(result));
            }catch (Exception e) {
                RMPEScript.Log.s(e,"RunTime");
            }finally {
            }
        }
    }

    // NOTE(review): a fresh top-level scope is created per eval() call, so
    // state from run() or earlier eval() calls is not visible here — confirm
    // that is intended.
    public void eval(String code){
        synchronized(ScriptManager.getInstance()){
            if(cx != null)try {
                Scriptable scope = cx.initStandardObjects();
                Object result = cx.evaluateString(scope, code, srcName+RMPEScript.randomTag(), 1, null);
                RMPEScript.Log.i("Script eval end: "+cx.toString(result));
            }catch (Exception e) {
                RMPEScript.Log.s(e,"EvalRunTime");
            }finally {
            }
        }
    }

    public void remove(){
        synchronized(ScriptManager.getInstance()){
            try {
                // NOTE(review): Context.exit() is static; calling it through
                // the instance reference pops the *calling thread's* current
                // context, which may not be the one entered in run() — confirm.
                if(cx!= null)cx.exit();
                this.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
|
ivospijkerman/tmc-intellij
|
tmc-intellij/tmc-plugin-intellij/src/main/java/fi/helsinki/cs/tmc/intellij/importexercise/NewProjectUtilModified.java
|
<filename>tmc-intellij/tmc-plugin-intellij/src/main/java/fi/helsinki/cs/tmc/intellij/importexercise/NewProjectUtilModified.java
/*
* @author max
*/
package fi.helsinki.cs.tmc.intellij.importexercise;
import static com.intellij.ide.impl.NewProjectUtil.applyJdkToProject;
import com.intellij.ide.impl.ProjectUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.impl.ProjectManagerImpl;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.CompilerProjectExtension;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
/*
* this code is modified form intellij code. url below
* original class: NewProjectUtil
* https://github.com/JetBrains/intellij-community/blob/9132043620dd78fbff9e77e78d00efb8972613bc/java/idea-ui/src/com/intellij/ide/impl/NewProjectUtil.java
*/
public class NewProjectUtilModified {

    private static final Logger logger = LoggerFactory.getLogger(NewProjectUtilModified.class);

    /*
     * Handles importing exercises to intellij only using file root as source of info.
     * In original execution method is named "doImport(param.)"
     * @param path project root dir
     * @throws IOException
     */
    public static void importExercise(String path) {
        logger.info("Started importing exercise.");
        final Project newProject;
        try {
            logger.info("Creates .idea");
            File projectDir = new File(path).getParentFile();
            if (projectDir == null) {
                // NOTE(review): only a warning is logged; execution continues
                // and ensureExists(null) below would throw — confirm whether
                // an early return was intended here.
                logger.warn("Cannot create project in '" + path + "': no parent file exists");
            }
            FileUtil.ensureExists(projectDir);
            // Create the .idea directory inside the project root.
            final File ideaDir = new File(path, Project.DIRECTORY_STORE_FOLDER);
            FileUtil.ensureExists(ideaDir);
            newProject =
                    ProjectManagerImpl.getInstanceEx()
                            .newProject(path, path, true, false);
            logger.info("Setting JDK");
            // Apply the most recently registered Java SDK, if any exists.
            final Sdk jdk =
                    ProjectJdkTable.getInstance().findMostRecentSdkOfType(JavaSdk.getInstance());
            if (jdk != null) {
                CommandProcessor.getInstance()
                        .executeCommand(
                                newProject,
                                () ->
                                        ApplicationManager.getApplication()
                                                .runWriteAction(
                                                        () -> applyJdkToProject(newProject, jdk)),
                                null,
                                null);
            }
            logger.info("Sets compile output path");
            // Compiler output goes to <project root>/out.
            final String compileOutput =
                    StringUtil.endsWithChar(path, '/') ? path + "out" : path + "/out";
            CommandProcessor.getInstance()
                    .executeCommand(
                            newProject,
                            () ->
                                    ApplicationManager.getApplication()
                                            .runWriteAction(
                                                    () -> {
                                                        String canonicalPath = compileOutput;
                                                        try {
                                                            canonicalPath =
                                                                    FileUtil
                                                                            .resolveShortWindowsName(
                                                                                    compileOutput);
                                                        } catch (IOException e) {
                                                            //file doesn't exist
                                                            logger.warn("File doesn't exist.", e);
                                                        }
                                                        canonicalPath =
                                                                FileUtil.toSystemIndependentName(
                                                                        canonicalPath);
                                                        CompilerProjectExtension.getInstance(
                                                                        newProject)
                                                                .setCompilerOutputUrl(
                                                                        VfsUtilCore.pathToUrl(
                                                                                canonicalPath));
                                                    }),
                            null,
                            null);
            logger.info("Saving project created this far");
            // Saving changes seems to write things up.
            if (!ApplicationManager.getApplication().isUnitTestMode()) {
                newProject.save();
            }
            // Detect and commit project structure (modules, source roots).
            ProjectFromSourcesBuilderImplModified.commit(newProject, path);
            logger.info("saving project after builder commit");
            // without save method nothing happens
            ProjectUtil.updateLastProjectLocation(path);
            newProject.save();
            if (!ApplicationManager.getApplication().isUnitTestMode()) {
                newProject.save();
            }
        } catch (Exception e) {
            logger.warn(e.getMessage());
        }
        logger.info("Exercise import progress is finished.");
    }
}
|
mirofedurco/PyAstronomy
|
src/pyasl/asl/localtime.py
|
# -*- coding: utf-8 -*-
import numpy as np
from PyAstronomy.pyaC import pyaErrors as PE
def localTime(utc, lon, diff=True):
    """
    Computes the Local Time for a given UTC at a given geographic longitude.

    The local time is computed as UTC + LONGITUDE/15.

    Parameters
    ----------
    utc : float or array
        The time in UTC in hours.
    lon : float or array
        The geographic (East) longitude in DEGREES for which
        local time should be calculated. Must be in the range 0-360.
    diff : boolean, optional
        If True (default), returns the difference in HOURS between
        UTC and local time.

    Returns
    -------
    Time : float or array
        Local time in HOURS (0 - 24) for given geographic
        longitude and UTC.
    Time difference : float or array
        The difference between local and UTC time in
        hours (only returned if `diff` is True)
    """
    # Promote scalars to 1d arrays so indexing/size checks work uniformly.
    utc = np.array(utc, ndmin=1)
    lon = np.array(lon, ndmin=1)
    if lon.size != utc.size:
        raise(PE.PyAValError("You need to specify the same number of longitudes and times", \
                             solution="Make `lon` and `utc` arrays have the same length.", \
                             where="localTime"))
    # Reject longitudes outside the 0-360 degree (East) convention.
    if np.any((lon < 0.0) | (lon > 360.)):
        raise(PE.PyAValError("Longitude needs to be in the range 0-360.", \
                             solution="Change the input.", \
                             where="localTime"))
    # One hour of time corresponds to 15 degrees of longitude.
    offset = lon / 15.
    # Wrap into the 0-24 hour range.
    localtime = (utc + offset) % 24.0
    if diff:
        return localtime, offset
    return localtime
|
moon-chilled/Ares
|
ares/ngp/vpu/color.cpp
|
<filename>ares/ngp/vpu/color.cpp
// Monochrome Neo Geo Pocket palette entry -> 48-bit RGB (16 bits/channel).
auto VPU::colorNeoGeoPocket(uint32 color) -> uint64 {
  // 3-bit luminance in the low bits; 0 maps to the brightest output
  // because the value is inverted (7 - l) before widening.
  uint3 l = color.bit(0,2);
  natural L = image::normalize(7 - l, 3, 16);
  // Grayscale: identical 16-bit value packed into R, G, and B fields.
  return L << 32 | L << 16 | L << 0;
}
// Neo Geo Pocket Color palette entry -> 48-bit RGB (16 bits/channel).
auto VPU::colorNeoGeoPocketColor(uint32 color) -> uint64 {
  // Four bits per channel, red in the low nibble.
  uint r = color.bit(0, 3);
  uint g = color.bit(4, 7);
  uint b = color.bit(8,11);
  // Widen each 4-bit channel to 16 bits.
  natural R = image::normalize(r, 4, 16);
  natural G = image::normalize(g, 4, 16);
  natural B = image::normalize(b, 4, 16);
  // Pack as R (bits 32-47), G (bits 16-31), B (bits 0-15).
  return R << 32 | G << 16 | B << 0;
}
|
francis-pouatcha/forgelab
|
adpharma/adpharma.modules/adpharma.client.backoffice/src/main/java/org/adorsys/adpharma/client/jpa/procurementordertype/ProcurementOrderType.java
|
package org.adorsys.adpharma.client.jpa.procurementordertype;
import org.adorsys.javaext.description.Description;
// Description keys resolve to localized text via the @Description annotation.
@Description("ProcurementOrderType_description")
public enum ProcurementOrderType {

    @Description("ProcurementOrderType_ORDINARY_description")
    ORDINARY,

    @Description("ProcurementOrderType_PACKAGED_description")
    PACKAGED,

    @Description("ProcurementOrderType_SPECIAL_description")
    SPECIAL
}
|
augustye/muniverse
|
games/injections/StickAvalanche-v0.js
|
<filename>games/injections/StickAvalanche-v0.js
(function () {
  // Flipped to true by the patched game-over hook below.
  let finished = false;

  window.muniverse = {
    init() {
      // Freeze the clock so stepping is fully deterministic.
      window.faketime.pause();

      // Hide the score readout and pin the playfield to the origin.
      document.getElementById('score').style.display = 'none';
      const gameElem = document.getElementById('game');
      gameElem.style.left = '0';
      gameElem.style.top = '0';
      document.body.style.margin = '0';
      document.body.style.padding = '0';

      // Intercept the game-over callback instead of letting the game
      // show its end screen.
      game.gameOver = () => (finished = true);
      game.newGame();
    },
    step(millis) {
      window.faketime.advance(millis);
      return Promise.resolve(finished);
    },
    score() {
      return Promise.resolve(game.score);
    },
  };
})();
|
artback/networkGamingTest
|
internal/jsonwriter/jsonwriter.go
|
<reponame>artback/networkGamingTest<gh_stars>0
package jsonwriter

import "time"

// JsonWriter abstracts the subset of a connection used by the server:
// sending JSON payloads, sending control frames, and closing.
// (Presumably satisfied by *websocket.Conn — confirm against callers.)
type JsonWriter interface {
	// WriteJSON writes v to the connection, encoded as JSON.
	WriteJSON(v interface{}) error
	// WriteControl writes a control message of the given type with a write deadline.
	WriteControl(messageType int, data []byte, deadline time.Time) error
	// Close closes the underlying connection.
	Close() error
}
|
xiabing082/StudyProjectS
|
app/src/main/java/com/hotbitmapgg/studyproject/hcc/adapter/GitHubFollowInfoAdapter.java
|
<gh_stars>0
package com.hotbitmapgg.studyproject.hcc.adapter;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.hotbitmapgg.studyproject.R;
import com.hotbitmapgg.studyproject.hcc.model.GitHubFollowerInfo;
import com.hotbitmapgg.studyproject.hcc.widget.recyclehelper.AbsRecyclerViewAdapter;
import com.hotbitmapgg.studyproject.hcc.widget.CircleImageView;
import java.util.ArrayList;
import java.util.List;
/**
 * RecyclerView adapter that renders a list of GitHub followers
 * (avatar + login name) via the shared AbsRecyclerViewAdapter helper.
 */
public class GitHubFollowInfoAdapter extends AbsRecyclerViewAdapter
{

    // Backing list; supplied by the caller and rendered as-is.
    private List<GitHubFollowerInfo> followerInfos = new ArrayList<>();

    public GitHubFollowInfoAdapter(RecyclerView recyclerView, List<GitHubFollowerInfo> followerInfos)
    {
        super(recyclerView);
        this.followerInfos = followerInfos;
    }

    @Override
    public ClickableViewHolder onCreateViewHolder(ViewGroup parent, int viewType)
    {
        bindContext(parent.getContext());
        return new ItemViewHolder(LayoutInflater.from(getContext()).inflate(R.layout.item_github_follow, parent, false));
    }

    @Override
    public void onBindViewHolder(ClickableViewHolder holder, int position)
    {
        if (holder instanceof ItemViewHolder)
        {
            ItemViewHolder itemViewHolder = (ItemViewHolder) holder;
            GitHubFollowerInfo gitHubFollowerInfo = followerInfos.get(position);
            // Load the avatar with a fallback placeholder; animations are
            // disabled (dontAnimate) so the CircleImageView renders correctly.
            Glide.with(getContext())
                    .load(gitHubFollowerInfo.avatarUrl)
                    .dontAnimate()
                    .placeholder(R.drawable.ic_slide_menu_avatar_no_login)
                    .into(itemViewHolder.mFollowAvatar);
            itemViewHolder.mFollowName.setText(gitHubFollowerInfo.login);
        }
        // Let the base class wire up click handling etc.
        super.onBindViewHolder(holder, position);
    }

    @Override
    public int getItemCount()
    {
        return followerInfos.size();
    }

    // Holds the views of one follower row (avatar image + login text).
    public class ItemViewHolder extends AbsRecyclerViewAdapter.ClickableViewHolder
    {
        public CircleImageView mFollowAvatar;

        public TextView mFollowName;

        public ItemViewHolder(View itemView)
        {
            super(itemView);
            mFollowAvatar = $(R.id.item_follow_avatar);
            mFollowName = $(R.id.item_follow_name);
        }
    }
}
|
rohithsharmaks/cloudbreak
|
redbeams/src/main/java/com/sequenceiq/redbeams/flow/redbeams/provision/handler/AllocateDatabaseHandler.java
|
package com.sequenceiq.redbeams.flow.redbeams.provision.handler;
import com.sequenceiq.cloudbreak.common.event.Selectable;
import com.sequenceiq.flow.event.EventSelectorUtil;
import com.sequenceiq.flow.reactor.api.handler.EventHandler;
import com.sequenceiq.redbeams.flow.redbeams.provision.event.allocate.AllocateDatabaseServerRequest;
import com.sequenceiq.redbeams.flow.redbeams.provision.event.allocate.AllocateDatabaseServerSuccess;
import com.sequenceiq.redbeams.flow.stack.RedbeamsEvent;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import reactor.bus.Event;
import reactor.bus.EventBus;
@Component
public class AllocateDatabaseHandler implements EventHandler<AllocateDatabaseServerRequest> {

    private static final Logger LOGGER = LoggerFactory.getLogger(AllocateDatabaseHandler.class);

    @Inject
    private EventBus eventBus;

    /** Event selector this handler subscribes to (derived from the request class). */
    @Override
    public String selector() {
        return EventSelectorUtil.selector(AllocateDatabaseServerRequest.class);
    }

    /**
     * Handles an allocate-database request by immediately publishing a success
     * event for the same resource, preserving the incoming event headers.
     */
    @Override
    public void accept(Event<AllocateDatabaseServerRequest> event) {
        RedbeamsEvent incoming = event.getData();
        // TODO: Actually allocate databases
        Selectable success = new AllocateDatabaseServerSuccess(incoming.getResourceId());
        eventBus.notify(success.selector(), new Event<>(event.getHeaders(), success));
    }
}
|
janreggie/AdventOfCode
|
aoc2019/day02_test.go
|
<filename>aoc2019/day02_test.go
package aoc2019
import (
"testing"
aoc "github.com/janreggie/aoc/internal"
"github.com/stretchr/testify/assert"
)
// TestDay02 verifies both parts of the Advent of Code 2019 Day 2 solution
// against the author's personal puzzle input (expected answers are the
// known-correct results for that input).
func TestDay02(t *testing.T) {
	assert := assert.New(t)
	testCases := []aoc.TestCase{
		{Details: "Y2019D02 my input",
			Input: day02myInput,
			Result1: "3562672",
			Result2: "8250"}, // my puzzle input
	}
	for _, tt := range testCases {
		tt.Test(Day02, assert)
	}
}
// BenchmarkDay02 measures the Day 2 solver on the author's puzzle input.
func BenchmarkDay02(b *testing.B) {
	aoc.Benchmark(Day02, b, day02myInput)
}
|
WUSTL-CSPL/RT-TEE
|
linux/drivers/staging/fbtft/fb_watterott.c
|
<filename>linux/drivers/staging/fbtft/fb_watterott.c
// SPDX-License-Identifier: GPL-2.0+
/*
* FB driver for the Watterott LCD Controller
*
* Copyright (C) 2013 <NAME>
*/
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/gpio.h>
#include <linux/delay.h>
#include "fbtft.h"
#define DRVNAME "fb_watterott"
#define WIDTH 320
#define HEIGHT 240
#define FPS 5
#define TXBUFLEN 1024
#define DEFAULT_BRIGHTNESS 50
#define CMD_VERSION 0x01
#define CMD_LCD_LED 0x10
#define CMD_LCD_RESET 0x11
#define CMD_LCD_ORIENTATION 0x20
#define CMD_LCD_DRAWIMAGE 0x27
#define COLOR_RGB323 8
#define COLOR_RGB332 9
#define COLOR_RGB233 10
#define COLOR_RGB565 16
static short mode = 565;
module_param(mode, short, 0000);
MODULE_PARM_DESC(mode, "RGB color transfer mode: 332, 565 (default)");
/*
 * fbtft write_register hook: gathers the variadic byte arguments into
 * par->buf and sends them to the controller as one 8-bit bus transfer.
 */
static void write_reg8_bus8(struct fbtft_par *par, int len, ...)
{
	va_list args;
	int i, ret;
	u8 *buf = par->buf;
	va_start(args, len);
	for (i = 0; i < len; i++)
		*buf++ = (u8)va_arg(args, unsigned int); /* u8 varargs promote to int */
	va_end(args);
	fbtft_par_dbg_hex(DEBUG_WRITE_REGISTER, par,
			  par->info->device, u8, par->buf,
			  len, "%s: ", __func__);
	ret = par->fbtftops.write(par, par->buf, len);
	if (ret < 0) {
		/* Log and bail; hook has no way to propagate the error. */
		dev_err(par->info->device,
			"write() failed and returned %d\n", ret);
		return;
	}
}
/*
 * Push the dirty part of video memory to the display one full line per
 * transfer, using CMD_LCD_DRAWIMAGE with 16-bit RGB565 pixels.
 * The tx buffer layout is: [0]=command, [1..8]=x,y,w,h (big-endian u16),
 * [9]=pixel format, [10..]=pixel data.
 * Returns 0 on success or the negative error from the bus write.
 */
static int write_vmem(struct fbtft_par *par, size_t offset, size_t len)
{
	unsigned int start_line, end_line;
	u16 *vmem16 = (u16 *)(par->info->screen_buffer + offset);
	__be16 *pos = par->txbuf.buf + 1;   /* x, y, w, h header fields */
	__be16 *buf16 = par->txbuf.buf + 10; /* pixel payload after 10-byte header */
	int i, j;
	int ret = 0;
	start_line = offset / par->info->fix.line_length;
	end_line = start_line + (len / par->info->fix.line_length) - 1;
	/* Set command header. pos: x, y, w, h */
	((u8 *)par->txbuf.buf)[0] = CMD_LCD_DRAWIMAGE;
	pos[0] = 0;
	pos[2] = cpu_to_be16(par->info->var.xres);
	pos[3] = cpu_to_be16(1); /* height 1: one scanline per command */
	((u8 *)par->txbuf.buf)[9] = COLOR_RGB565;
	for (i = start_line; i <= end_line; i++) {
		pos[1] = cpu_to_be16(i); /* y coordinate of this line */
		for (j = 0; j < par->info->var.xres; j++)
			buf16[j] = cpu_to_be16(*vmem16++); /* controller wants big-endian */
		ret = par->fbtftops.write(par,
			par->txbuf.buf, 10 + par->info->fix.line_length);
		if (ret < 0)
			return ret;
		/* per-line pause inherited from the original driver; presumably
		 * gives the controller time to consume the line — TODO confirm */
		udelay(300);
	}
	return 0;
}
/*
 * RGB565 -> 8-bit conversions. RGB565 layout is RRRRRGGGGGGBBBBB
 * (red bits 15-11, green 10-5, blue 4-0); each macro keeps the top
 * bits of each channel. The green masks were previously written as
 * octal literals (000600 == 0x0180, 000700 == 0x01C0), which selected
 * middle/low green bits instead of the top ones and corrupted colors;
 * they must be the hex masks for green's most significant bits.
 */
#define RGB565toRGB323(c)	((((c) & 0xE000) >> 8) |\
				 (((c) & 0x0600) >> 6) |\
				 (((c) & 0x001C) >> 2))
#define RGB565toRGB332(c)	((((c) & 0xE000) >> 8) |\
				 (((c) & 0x0700) >> 6) |\
				 (((c) & 0x0018) >> 3))
#define RGB565toRGB233(c)	((((c) & 0xC000) >> 8) |\
				 (((c) & 0x0700) >> 5) |\
				 (((c) & 0x001C) >> 2))
/*
 * Like write_vmem() but converts each RGB565 pixel to RGB332 so only one
 * byte per pixel crosses the bus (selected via mode=332 module param).
 * Header layout matches write_vmem(); the payload length is xres bytes
 * instead of line_length.
 */
static int write_vmem_8bit(struct fbtft_par *par, size_t offset, size_t len)
{
	unsigned int start_line, end_line;
	u16 *vmem16 = (u16 *)(par->info->screen_buffer + offset);
	__be16 *pos = par->txbuf.buf + 1;  /* x, y, w, h header fields */
	u8 *buf8 = par->txbuf.buf + 10;    /* 1-byte-per-pixel payload */
	int i, j;
	int ret = 0;
	start_line = offset / par->info->fix.line_length;
	end_line = start_line + (len / par->info->fix.line_length) - 1;
	/* Set command header. pos: x, y, w, h */
	((u8 *)par->txbuf.buf)[0] = CMD_LCD_DRAWIMAGE;
	pos[0] = 0;
	pos[2] = cpu_to_be16(par->info->var.xres);
	pos[3] = cpu_to_be16(1); /* one scanline per command */
	((u8 *)par->txbuf.buf)[9] = COLOR_RGB332;
	for (i = start_line; i <= end_line; i++) {
		pos[1] = cpu_to_be16(i);
		for (j = 0; j < par->info->var.xres; j++) {
			buf8[j] = RGB565toRGB332(*vmem16);
			vmem16++;
		}
		ret = par->fbtftops.write(par,
			par->txbuf.buf, 10 + par->info->var.xres);
		if (ret < 0)
			return ret;
		/* per-line pause from the original driver — TODO confirm need */
		udelay(700);
	}
	return 0;
}
/*
 * Query the controller firmware version. The reply is expected to be
 * 4 ASCII bytes "M.mm"; the result is packed as nibbles (e.g. "1.23"
 * becomes 0x123). Returns 0 if the reply is not in that format.
 */
static unsigned int firmware_version(struct fbtft_par *par)
{
	u8 rxbuf[4] = {0, };
	write_reg(par, CMD_VERSION);
	par->fbtftops.read(par, rxbuf, 4);
	if (rxbuf[1] != '.')
		return 0;
	return (rxbuf[0] - '0') << 8 | (rxbuf[2] - '0') << 4 | (rxbuf[3] - '0');
}
/*
 * Bring the controller up: force it into SPI mode by holding CS and MOSI
 * low across reset (done by temporarily flipping SPI_CS_HIGH), restore
 * the original SPI mode, log the firmware version, and select the 8-bit
 * transfer path if mode=332 was requested.
 * Returns 0 on success or a negative errno from spi_setup().
 */
static int init_display(struct fbtft_par *par)
{
	int ret;
	unsigned int version;
	u8 save_mode;
	/* enable SPI interface by having CS and MOSI low during reset */
	save_mode = par->spi->mode;
	par->spi->mode |= SPI_CS_HIGH;
	ret = spi_setup(par->spi); /* set CS inactive low */
	if (ret) {
		dev_err(par->info->device, "Could not set SPI_CS_HIGH\n");
		return ret;
	}
	write_reg(par, 0x00); /* make sure mode is set */
	mdelay(50);
	par->fbtftops.reset(par);
	mdelay(1000); /* controller boot time after reset */
	par->spi->mode = save_mode;
	ret = spi_setup(par->spi);
	if (ret) {
		dev_err(par->info->device, "Could not restore SPI mode\n");
		return ret;
	}
	write_reg(par, 0x00);
	version = firmware_version(par);
	fbtft_par_dbg(DEBUG_INIT_DISPLAY, par, "Firmware version: %x.%02x\n",
		version >> 8, version & 0xFF);
	/* mode=332 module parameter selects the 1-byte-per-pixel path */
	if (mode == 332)
		par->fbtftops.write_vmem = write_vmem_8bit;
	return 0;
}
/*
 * fbtft hook; every CMD_LCD_DRAWIMAGE transfer carries its own x/y/w/h,
 * so a separate address-window command is unnecessary here.
 */
static void set_addr_win(struct fbtft_par *par, int xs, int ys, int xe, int ye)
{
	/* not used on this controller */
}
/*
 * Translate the fbdev rotate value (counter-clockwise degrees) into the
 * controller's orientation code. The codes look like (360 - rotate) / 10
 * because this controller rotates clockwise — TODO confirm against the
 * Watterott command documentation.
 */
static int set_var(struct fbtft_par *par)
{
	u8 rotate;
	/* this controller rotates clock wise */
	switch (par->info->var.rotate) {
	case 90:
		rotate = 27;
		break;
	case 180:
		rotate = 18;
		break;
	case 270:
		rotate = 9;
		break;
	default:
		rotate = 0;
	}
	write_reg(par, CMD_LCD_ORIENTATION, rotate);
	return 0;
}
/*
 * The reset line is mandatory for this controller (init_display relies on
 * driving it); fail probe early with -EINVAL when it is missing.
 */
static int verify_gpios(struct fbtft_par *par)
{
	if (par->gpio.reset < 0) {
		dev_err(par->info->device, "Missing 'reset' gpio. Aborting.\n");
		return -EINVAL;
	}
	return 0;
}
#ifdef CONFIG_FB_BACKLIGHT
static int backlight_chip_update_status(struct backlight_device *bd)
{
struct fbtft_par *par = bl_get_data(bd);
int brightness = bd->props.brightness;
fbtft_par_dbg(DEBUG_BACKLIGHT, par,
"%s: brightness=%d, power=%d, fb_blank=%d\n", __func__,
bd->props.brightness, bd->props.power,
bd->props.fb_blank);
if (bd->props.power != FB_BLANK_UNBLANK)
brightness = 0;
if (bd->props.fb_blank != FB_BLANK_UNBLANK)
brightness = 0;
write_reg(par, CMD_LCD_LED, brightness);
return 0;
}
static const struct backlight_ops bl_ops = {
.update_status = backlight_chip_update_status,
};
static void register_chip_backlight(struct fbtft_par *par)
{
struct backlight_device *bd;
struct backlight_properties bl_props = { 0, };
bl_props.type = BACKLIGHT_RAW;
bl_props.power = FB_BLANK_POWERDOWN;
bl_props.max_brightness = 100;
bl_props.brightness = DEFAULT_BRIGHTNESS;
bd = backlight_device_register(dev_driver_string(par->info->device),
par->info->device, par, &bl_ops,
&bl_props);
if (IS_ERR(bd)) {
dev_err(par->info->device,
"cannot register backlight device (%ld)\n",
PTR_ERR(bd));
return;
}
par->info->bl_dev = bd;
if (!par->fbtftops.unregister_backlight)
par->fbtftops.unregister_backlight = fbtft_unregister_backlight;
}
#else
#define register_chip_backlight NULL
#endif
static struct fbtft_display display = {
.regwidth = 8,
.buswidth = 8,
.width = WIDTH,
.height = HEIGHT,
.fps = FPS,
.txbuflen = TXBUFLEN,
.fbtftops = {
.write_register = write_reg8_bus8,
.write_vmem = write_vmem,
.init_display = init_display,
.set_addr_win = set_addr_win,
.set_var = set_var,
.verify_gpios = verify_gpios,
.register_backlight = register_chip_backlight,
},
};
FBTFT_REGISTER_DRIVER(DRVNAME, "watterott,openlcd", &display);
MODULE_ALIAS("spi:" DRVNAME);
MODULE_DESCRIPTION("FB driver for the Watterott LCD Controller");
MODULE_AUTHOR("<NAME>");
MODULE_LICENSE("GPL");
|
miraDask/Marathon
|
Client/marathon-client/src/components/issue-card/index.js
|
<reponame>miraDask/Marathon<gh_stars>1-10
import React, { useContext, useState, useEffect } from 'react';
import { IssuesContext } from '../../providers/issues-context.provider';
import { Context } from '../../providers/global-context.provider';
import PriorityIcon from '../priority-icon';
import IssueIcon from '../issue-icon';
import Avatar from '../avatar';
import Tag from '../tag';
const IssueCard = ({ issue, handleDragStart, handleDragEnter, invisible, handleClick }) => {
const { isModalOpen } = useContext(Context);
const { id, title, priority, type, storyPoints } = issue;
const renderAssignee = () =>
issue.assignee.fullName ? (
<Avatar user={issue.assignee} bgColor="green" />
) : (
<Tag text="unassigned" color="gray-500" size="w-24 h-4" />
);
return (
<div
id={id}
draggable
onClick={handleClick}
onDragStart={handleDragStart}
onDragEnter={handleDragEnter}
className={`${invisible
? 'invisible'
: ''} mx-auto cursor-pointer flex p-3 hover:bg-blue-200 bg-white rounded-lg shadow-xl mb-2 justify-between`}
>
<div>
<div className="text-gray-900 text-left">{title}</div>
<div className="mt-2 text-left">{!isModalOpen ? renderAssignee() : null}</div>
</div>
<div>
<IssueIcon type={type} size="h-5 w-5" />
<PriorityIcon priority={priority} size="h-5 w-5" />
<span className="rounded-full h-5 w-5 flex items-center justify-center bg-gray-300 text-black">
{storyPoints}
</span>
</div>
</div>
);
};
export default IssueCard;
|
msusky/org.hl7.fhir.core
|
org.hl7.fhir.dstu2016may/src/main/java/org/hl7/fhir/dstu2016may/model/codesystems/EncounterClass.java
|
package org.hl7.fhir.dstu2016may.model.codesystems;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Sun, May 8, 2016 03:05+1000 for FHIR v1.4.0
import org.hl7.fhir.exceptions.FHIRException;
/**
 * Code system http://hl7.org/fhir/encounter-class (FHIR v1.4.0).
 * NOTE: this file is generated ("Generated on Sun, May 8, 2016 ..."); keep
 * edits limited to documentation so regeneration stays a clean diff.
 */
public enum EncounterClass {
        /**
         * An encounter during which the patient is hospitalized and stays overnight.
         */
        INPATIENT, 
        /**
         * An encounter during which the patient is not hospitalized overnight.
         */
        OUTPATIENT, 
        /**
         * An encounter where the patient visits the practitioner in his/her office, e.g. a G.P. visit.
         */
        AMBULATORY, 
        /**
         * An encounter in the Emergency Care Department.
         */
        EMERGENCY, 
        /**
         * An encounter where the practitioner visits the patient at his/her home.
         */
        HOME, 
        /**
         * An encounter taking place outside the regular environment for giving care.
         */
        FIELD, 
        /**
         * An encounter where the patient needs more prolonged treatment or investigations than outpatients, but who do not need to stay in the hospital overnight.
         */
        DAYTIME, 
        /**
         * An encounter that takes place where the patient and practitioner do not physically meet but use electronic means for contact.
         */
        VIRTUAL, 
        /**
         * Any other encounter type that is not described by one of the other values. Where this is used it is expected that an implementer will include an extension value to define what the actual other type is.
         */
        OTHER, 
        /**
         * added to help the parsers
         */
        NULL;
        /**
         * Maps a code string (e.g. "inpatient") to its constant.
         * Returns null for a null or empty input; throws FHIRException for an
         * unrecognized code (note: "null"/NULL has no code and is never returned).
         */
        public static EncounterClass fromCode(String codeString) throws FHIRException {
          if (codeString == null || "".equals(codeString))
            return null;
        if ("inpatient".equals(codeString))
          return INPATIENT;
        if ("outpatient".equals(codeString))
          return OUTPATIENT;
        if ("ambulatory".equals(codeString))
          return AMBULATORY;
        if ("emergency".equals(codeString))
          return EMERGENCY;
        if ("home".equals(codeString))
          return HOME;
        if ("field".equals(codeString))
          return FIELD;
        if ("daytime".equals(codeString))
          return DAYTIME;
        if ("virtual".equals(codeString))
          return VIRTUAL;
        if ("other".equals(codeString))
          return OTHER;
        throw new FHIRException("Unknown EncounterClass code '"+codeString+"'");
        }
        /** The formal code for this constant; "?" for the parser helper NULL. */
        public String toCode() {
          switch (this) {
            case INPATIENT: return "inpatient";
            case OUTPATIENT: return "outpatient";
            case AMBULATORY: return "ambulatory";
            case EMERGENCY: return "emergency";
            case HOME: return "home";
            case FIELD: return "field";
            case DAYTIME: return "daytime";
            case VIRTUAL: return "virtual";
            case OTHER: return "other";
            default: return "?";
          }
        }
        /** The canonical URI of this code system. */
        public String getSystem() {
          return "http://hl7.org/fhir/encounter-class";
        }
        /** The formal definition text for this code; "?" for NULL. */
        public String getDefinition() {
          switch (this) {
            case INPATIENT: return "An encounter during which the patient is hospitalized and stays overnight.";
            case OUTPATIENT: return "An encounter during which the patient is not hospitalized overnight.";
            case AMBULATORY: return "An encounter where the patient visits the practitioner in his/her office, e.g. a G.P. visit.";
            case EMERGENCY: return "An encounter in the Emergency Care Department.";
            case HOME: return "An encounter where the practitioner visits the patient at his/her home.";
            case FIELD: return "An encounter taking place outside the regular environment for giving care.";
            case DAYTIME: return "An encounter where the patient needs more prolonged treatment or investigations than outpatients, but who do not need to stay in the hospital overnight.";
            case VIRTUAL: return "An encounter that takes place where the patient and practitioner do not physically meet but use electronic means for contact.";
            case OTHER: return "Any other encounter type that is not described by one of the other values. Where this is used it is expected that an implementer will include an extension value to define what the actual other type is.";
            default: return "?";
          }
        }
        /** The human-readable display name for this code; "?" for NULL. */
        public String getDisplay() {
          switch (this) {
            case INPATIENT: return "Inpatient";
            case OUTPATIENT: return "Outpatient";
            case AMBULATORY: return "Ambulatory";
            case EMERGENCY: return "Emergency";
            case HOME: return "Home";
            case FIELD: return "Field";
            case DAYTIME: return "Daytime";
            case VIRTUAL: return "Virtual";
            case OTHER: return "Other";
            default: return "?";
          }
        }
    }
|
qmg-aki/db-scheduler
|
src/test/java/com/github/kagkarlsson/scheduler/example/TasksMain.java
|
package com.github.kagkarlsson.scheduler.example;
import com.github.kagkarlsson.scheduler.HsqlTestDatabaseRule;
import com.github.kagkarlsson.scheduler.Scheduler;
import com.github.kagkarlsson.scheduler.task.helper.OneTimeTask;
import com.github.kagkarlsson.scheduler.task.helper.RecurringTask;
import com.github.kagkarlsson.scheduler.task.helper.Tasks;
import com.github.kagkarlsson.scheduler.task.schedule.FixedDelay;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.io.Serializable;
import java.time.Instant;
/**
 * Stand-alone examples of db-scheduler usage against an in-memory HSQL
 * database: a recurring task, and a one-time (ad-hoc) task carrying typed
 * task data.
 */
public class TasksMain {
    private static final Logger LOG = LoggerFactory.getLogger(TasksMain.class);

    public static void main(String[] args) throws Throwable {
        try {
            // Spin up an in-memory HSQL database to back the scheduler tables.
            final HsqlTestDatabaseRule databaseRule = new HsqlTestDatabaseRule();
            databaseRule.before();
            final DataSource dataSource = databaseRule.getDataSource();

            // recurringTask(dataSource);
            adhocTask(dataSource);
        } catch (Exception e) {
            LOG.error("Error", e);
        }
    }

    /** Example: a task re-executed on a fixed delay, auto-scheduled at startup. */
    private static void recurringTask(DataSource dataSource) {
        RecurringTask<Void> hourlyTask = Tasks.recurring("my-hourly-task", FixedDelay.ofHours(1))
                .execute((inst, ctx) -> System.out.println("Executed!"));

        final Scheduler scheduler = Scheduler
                .create(dataSource)
                .startTasks(hourlyTask)
                .threads(5)
                .build();

        // hourlyTask is automatically scheduled on startup if not already started (i.e. exists in the db)
        scheduler.start();
    }

    /** Example: a one-time task scheduled explicitly, with typed custom data. */
    private static void adhocTask(DataSource dataSource) {
        OneTimeTask<MyTaskData> myAdhocTask = Tasks.oneTime("my-typed-adhoc-task", MyTaskData.class)
                .execute((inst, ctx) -> System.out.println("Executed! Custom data, Id: " + inst.getData().id));

        final Scheduler scheduler = Scheduler
                .create(dataSource, myAdhocTask)
                .threads(5)
                .build();
        scheduler.start();

        // Schedule the task for execution a certain time in the future and optionally provide custom data for the execution
        scheduler.schedule(myAdhocTask.instance("1045", new MyTaskData(1001L)), Instant.now().plusSeconds(5));
    }

    /** Serializable payload persisted with each ad-hoc task instance. */
    public static class MyTaskData implements Serializable {
        public final long id;

        public MyTaskData(long id) {
            this.id = id;
        }
    }
}
|
miozus/gulimall
|
gulimall-ware/src/main/java/cn/miozus/gulimall/ware/dao/WareInfoDao.java
|
<filename>gulimall-ware/src/main/java/cn/miozus/gulimall/ware/dao/WareInfoDao.java
package cn.miozus.gulimall.ware.dao;
import cn.miozus.gulimall.ware.entity.WareInfoEntity;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis-Plus mapper for the warehouse information table
 * ({@link WareInfoEntity}). All CRUD operations are inherited from
 * {@link BaseMapper}; add custom query methods here as needed.
 *
 * @author SuDongpo
 * @email <EMAIL>
 * @date 2021-08-09 14:20:54
 */
@Mapper
public interface WareInfoDao extends BaseMapper<WareInfoEntity> {
	
}
|
kit-transue/software-emancipation-discover
|
libs/stream_message/src/minisax.cxx
|
<reponame>kit-transue/software-emancipation-discover<filename>libs/stream_message/src/minisax.cxx
/*************************************************************************
* Copyright (c) 2015, Synopsys, Inc. *
* All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions are *
* met: *
* *
* 1. Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* *
* 2. Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS *
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT *
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR *
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT *
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, *
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT *
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, *
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY *
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT *
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE *
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
*************************************************************************/
// Implements a subset SAX parser for XML.
// See minisax.h for an overview of the role of this module.
//
// This file contains:
// utility functions
// a concrete AttributeList class
// a concrete Parser class
// an ActualParser class which does the grunge work of parsing
// definition of the getParser function
// implementation of the SAXException class
// implementation of the SAXParseException class
#ifdef _WIN32
#pragma warning (disable:4786)
#endif
int ttt;
#include "minisax.h"
#include "msg.h"
#include <stddef.h>
#ifdef ISO_CPP_HEADERS
#include <sstream>
#include <fstream>
#else
#include <strstream.h>
#include <fstream.h>
#endif
//#include <string.h>
#include "ctype.h"
#include <map>
#include <string>
using namespace std;
using namespace MiniXML;
// ===========================================================================
// ===========================================================================
// Utility Functions
// ===========================================================================
// Abort because an unimplemented part of SAX is used.
//
static void unimplemented() {
msg("MiniSAX is not full SAX.", catastrophe_sev) << eom;
exit(1);
}
// ===========================================================================
// ===========================================================================
// For the startElement callback, the SAX parser must supply an attribute
// list object. ConcreteAttributeList is also set up to be
// fabricated on the fly, as the attribute list is parsed.
//
class ConcreteAttributeList : public AttributeList {
public:
ConcreteAttributeList();
~ConcreteAttributeList();
unsigned int getLength() const;
const XMLCh* getName(const unsigned int i) const;
const XMLCh* getType(const unsigned int i) const;
const XMLCh* getValue(const unsigned int i) const;
const XMLCh* getType(const XMLCh* const name) const;
const XMLCh* getValue(const XMLCh* const name) const;
typedef map<string, string*> attrs_type;
attrs_type attrs;
mutable const XMLCh *temp_XMLs;
void update_temp(const char *s) const;
};
// ===========================================================================
// The constructor starts with no cached temp string; attribute
// name/value pairs are inserted directly into 'attrs' as the start
// tag is parsed.
//
ConcreteAttributeList::ConcreteAttributeList() : temp_XMLs(NULL) {
}
// ===========================================================================
// The destructor must delete the attribute value strings and the temp
// string.
//
ConcreteAttributeList::~ConcreteAttributeList() {
attrs_type::iterator i;
for (i = attrs.begin(); i != attrs.end(); ++i) {
string *val = (*i).second;
delete val;
}
update_temp(NULL);
}
// ===========================================================================
unsigned int ConcreteAttributeList::getLength() const {
return attrs.size();
}
// ===========================================================================
const XMLCh* ConcreteAttributeList::getName(const unsigned int i) const {
attrs_type::const_iterator iter = attrs.begin();
advance(iter, i);
update_temp((*iter).first.c_str());
return temp_XMLs;
}
// ===========================================================================
const XMLCh* ConcreteAttributeList::getType(const unsigned int i) const {
unimplemented();
return NULL;
}
// ===========================================================================
const XMLCh* ConcreteAttributeList::getValue(const unsigned int i) const {
attrs_type::const_iterator iter = attrs.begin();
advance(iter, i);
update_temp((*iter).second->c_str());
return temp_XMLs;
}
// ===========================================================================
const XMLCh* ConcreteAttributeList::getType(const XMLCh* const name) const {
unimplemented();
return NULL;
}
// ===========================================================================
const XMLCh* ConcreteAttributeList::getValue(const XMLCh* const name) const {
const char *s = XMLsToChars(name);
string str = s;
delete [] (char *)s;
attrs_type::const_iterator iter = attrs.find(str);
if (iter != attrs.end()) {
string *valp = (*iter).second;
update_temp(valp != NULL ? valp->c_str() : NULL);
return temp_XMLs;
}
return NULL;
}
// ===========================================================================
// The temp_XMLs object serves to hold the requested string until the next
// such string is requested. Probably this is not long enough for real SAX,
// but it enables our parser to do what it needs to do.
//
void ConcreteAttributeList::update_temp(const char *str) const {
delete [] (XMLCh *)temp_XMLs;
if (str == NULL) {
temp_XMLs = NULL;
}
else {
temp_XMLs = charsToXMLs(str);
}
}
// ===========================================================================
// ===========================================================================
// This class belongs to a partial implementation of SAX. It's in an odd
// position, because the SAX API does not define a concrete class for its
// parser, and yet a parser object must exist in order to use SAX.
// ConcreteParser declares and implements only those methods of Parser
// which we require.
//
// The first bunch of member functions are demanded by SAX.
// The second bunch are called from the ActualParser class,
// as a convenient way to handle parse events.
//
class ConcreteParser : public Parser {
public:
ConcreteParser();
// void setEntityResolver(EntityResolver* const resolver);
// void setDTDHandler(DTDHandler* const handler);
void setDocumentHandler(DocumentHandler* const handler);
void setErrorHandler(ErrorHandler* const handler);
void parse (const char* const systemId, const bool = false);
void parse (const char* const bufPtr, unsigned long bufLen);
DocumentHandler *docHandler;
ErrorHandler *errorHandler;
void fatalError(const char *diagnostic, int line, int col);
void error(const char *diagnostic, int line, int col);
#ifdef ISO_CPP_HEADERS
void noteContent(ostringstream &str);
#else
void noteContent(ostrstream &str);
#endif
void noteStartElement(const string &tag_name, AttributeList &attrs);
void noteEndElement(const string &tag_name);
void noteBeginFile();
void noteEndFile();
};
// ===========================================================================
// ===========================================================================
// ActualParser does the dirty work of tracking the input stream and the
// constructs of XML. It reports interesting events such as element constructs
// and parse errors via the SAXParser object.
//
class ActualParser {
public:
ActualParser(ConcreteParser &);
private:
ConcreteParser &SAXParser;
int curLine;
int curCol;
istream *input;
void accountChar(char);
bool getChar(char &ch);
bool matchChar(char ch);
bool atChar(char ch);
bool matchChar(bool (*test)(char), char &matched);
bool getName(string &name);
bool getQuotedValue(string &val);
void skipWhitespace();
void recoverFromGarbledTag();
void getAttrs(ConcreteAttributeList &attrs);
void getContent(string *containing_tag);
void getElement();
void getFile();
void fatalError(const char *diagnostic);
void error(const char *diagnostic);
public:
void parse(const char *config_file);
void parse(const char *bufPtr, unsigned long bufLen);
};
// ===========================================================================
// Position tracking starts at line 1, column 0; 'input' is assigned by
// parse() before any characters are read.  Initializing these here keeps
// error()/fatalError() from reporting garbage line/column values if they
// are ever reached before parse() has run.
ActualParser::ActualParser(ConcreteParser &par)
  : SAXParser(par), curLine(1), curCol(0), input(NULL)
{
}
// ===========================================================================
void ActualParser::accountChar(char inch) {
if (inch == '\n') {
curLine += 1;
curCol = 0;
}
else {
curCol += 1;
}
}
// ===========================================================================
// Parse a particular character.
bool ActualParser::matchChar(char ch) {
char inch;
input->get(inch);
if (input->eof()) {
return false;
}
bool success = (inch == ch);
if (success) {
accountChar(inch);
}
else {
input->putback(inch);
}
return success;
}
// ===========================================================================
// Test for a particular next character.
bool ActualParser::atChar(char ch) {
char inch;
input->get(inch);
if (input->eof()) {
return false;
}
bool success = (inch == ch);
input->putback(inch);
return success;
}
// ===========================================================================
// Parse a character that meets a test.
bool ActualParser::matchChar(bool (*test)(char), char &matched) {
char inch;
input->get(inch);
if (input->eof()) {
return false;
}
bool success = test(inch);
if (!success) {
input->putback(inch);
}
else {
matched = inch;
accountChar(inch);
}
return success;
}
// ===========================================================================
static bool is_name_char(char ch) {
return isalpha(ch) != 0 || ch == '_';
}
// ===========================================================================
static bool is_whitespace_char(char ch) {
return isspace(ch) != 0;
}
// ===========================================================================
static bool is_any_char(char ch) {
return true;
}
// ===========================================================================
// Parse a character.
bool ActualParser::getChar(char &matched) {
input->get(matched);
if (input->eof()) {
return false;
}
accountChar(matched);
return true;
}
// ===========================================================================
// Parse an alphabetic name. Should be fixed eventually to correspond
// better to the valid tag names in XML.
//
bool ActualParser::getName(string &name) {
char ch;
if (!matchChar(&is_name_char, ch)) {
return false;
}
#ifdef ISO_CPP_HEADERS
ostringstream nm;
#else
ostrstream nm;
#endif
nm << ch;
for (;;) {
if (!matchChar(&is_name_char, ch)) {
break;
}
nm << ch;
}
#ifndef ISO_CPP_HEADERS
nm << ends;
#endif
name = nm.str();
return true;
}
// ===========================================================================
// Parse a quoted string. Should be fixed eventually to correspond
// better to the attribute values in XML.
//
bool ActualParser::getQuotedValue(string &val_out) {
if (!matchChar('"')) {
return false;
}
#ifdef ISO_CPP_HEADERS
ostringstream val;
#else
ostrstream val;
#endif
char ch;
for (;;) {
if (!matchChar(&is_any_char, ch)) {
error("bad quoted attribute value");
return false;
}
if (ch == '"') {
break;
}
val << ch;
}
#ifndef ISO_CPP_HEADERS
val << ends;
#endif
val_out = val.str();
return true;
}
// ===========================================================================
// Skip over whitespace to the next non-whitespace character.
//
void ActualParser::skipWhitespace() {
for(;;) {
char ch;
if (!matchChar(&is_whitespace_char, ch)) {
break;
}
}
}
// ===========================================================================
// skip to > or end of line
//
void ActualParser::recoverFromGarbledTag() {
for (;;) {
char ch;
if (!getChar(ch)) {
break;
}
if (ch == '>' || ch == '\n') {
break;
}
}
}
// ===========================================================================
// Return true when the characters at 'p' spell out the entity 'name'
// followed immediately by ';' — e.g. p == "quot;..." matches name "quot".
// Stops safely at a NUL in 'p' because it compares character by character.
bool entity_match(const char *p, const char *name)
{
    const char *cursor = p;
    for (const char *expected = name; *expected != '\0'; ++expected, ++cursor) {
        if (*cursor != *expected) {
            return false;
        }
    }
    return *cursor == ';';
}
// ===========================================================================
// A skimpy implementation that substitutes for the predefined XML
// entities (&quot; &amp; &lt; &gt;).  It really should be handling lots
// more, and producing a stream of XMLCh.  In fact, its input should be
// XMLCh.
//
#ifdef ISO_CPP_HEADERS
void substitute_entities(ostringstream &strm, const char *bufPtr, unsigned long bufLen)
#else
void substitute_entities(ostrstream &strm, const char *bufPtr, unsigned long bufLen)
#endif
{
    // Use the same unsigned type as bufLen: the previous 'int' index caused
    // a signed/unsigned comparison and would truncate for very large buffers.
    unsigned long i = 0;
    while (i < bufLen) {
	char ch = bufPtr[i];
	if (ch == '&') {
	    // Peek past the '&'; callers pass NUL-terminated data, so
	    // entity_match stops safely at the end of the buffer.
	    const char *p = bufPtr + (i+1);
	    if (entity_match(p, "quot")) {
		strm << '"';
		i += 5;
	    }
	    else if (entity_match(p, "amp")) {
		strm << '&';
		i += 4;
	    }
	    else if (entity_match(p, "lt")) {
		strm << '<';
		i += 3;
	    }
	    else if (entity_match(p, "gt")) {
		strm << '>';
		i += 3;
	    }
	    else {
		// Unknown entity: pass the '&' through unchanged.
		strm << ch;
	    }
	}
	else {
	    strm << ch;
	}
	i += 1;
    }
}
// ===========================================================================
// Parse attributes of a begin tag, up to the > or />.
//
// Parse the attribute list of a start tag.  Consumes name="value" pairs
// (and bare names, stored with a NULL value) until it reaches the '>' or
// '/' that ends the tag, inserting each pair into attrs.attrs.  Any
// unexpected character is consumed and remembered via `garbled`, which
// triggers a single "bad tag" error once the tag end is reached.
void ActualParser::getAttrs(ConcreteAttributeList &attrs) {
    bool garbled = false;
    for (;;) {
        skipWhitespace();
        string attr_name;
        if (getName(attr_name)) {
            skipWhitespace();
            if (matchChar('=')) {
                skipWhitespace();
                string attr_value;
                if (getQuotedValue(attr_value)) {
                }
                else {
                    // Bad value: report it; attr_value stays empty but the
                    // attribute is still inserted below.
#ifdef ISO_CPP_HEADERS
                    ostringstream d;
#else
                    ostrstream d;
#endif
                    d << "bad attribute " << attr_name;
#ifndef ISO_CPP_HEADERS
                    d << ends;
#endif
                    string d_str = d.str();
                    error(d_str.c_str());
                }
                // Expand &quot; &amp; &lt; &gt; before storing the value.
#ifdef ISO_CPP_HEADERS
                ostringstream value_stream;
#else
                ostrstream value_stream;
#endif
                substitute_entities(value_stream, attr_value.c_str(), attr_value.length());
#ifndef ISO_CPP_HEADERS
                value_stream << ends;
#endif
                // Value string is heap-allocated; the map holds the pointer.
                attrs.attrs.insert(pair<const string,string*>(attr_name, new string(value_stream.str())));
            }
            else {
                // Attribute name with no '=': store the name, no value.
                attrs.attrs.insert(pair<const string,string*>(attr_name, NULL));
            }
        }
        else if (atChar('>') || atChar('/')) {
            if (garbled) {
                error("bad tag");
            }
            break;
        }
        else {
            // Unexpected character inside the tag: swallow it and mark
            // the tag garbled.
            char ch;
            garbled = true;
            if (!getChar(ch)) {
                error ("bad tag");
                break;
            }
        }
    }
}
// ===========================================================================
// Parse content of an element, up to the end of file or an
// end-of-element marker.
//
// Parse element content up to the matching end tag (when containing_tag
// is non-NULL) or to end of input (when it is NULL, i.e. at document
// level).  Character data accumulates in `chars` and is flushed to the
// handler via noteContent() at each markup boundary and at exit.
// Comments (<!--...-->), a DOCTYPE declaration, and <?...?> processing
// instructions are skipped; nested elements recurse via getElement().
void ActualParser::getContent(string *containing_tag) {
#ifdef ISO_CPP_HEADERS
    ostringstream chars;
#else
    ostrstream chars;
#endif
    for (;;) {
        char ch;
        if (!getChar(ch)) {
            // End of input: legal only at document level.
            if (containing_tag != NULL) {
                error("unmatching start tag");
            }
            break;
        }
        if (ch == '<') {
            // Markup begins: flush accumulated character data first.
            SAXParser.noteContent(chars);
            if (containing_tag != NULL && matchChar('/')) {
                // End tag: must name the element we are inside.
                skipWhitespace();
                string tag;
                if (getName(tag)) {
                    if (tag != *containing_tag) {
                        error("unmatching end tag");
                    }
                    skipWhitespace();
                    if (!matchChar('>')) {
                        error("bad end tag");
                        recoverFromGarbledTag();
                    }
                }
                else {
                    error("bad end tag");
                }
                break;
            }
            else if (matchChar('!')) {
                // could be a comment
                if (matchChar('-')) {
                    // could be a comment.
                    if (matchChar('-')) {
                        // is a comment. Look for -->
                        int hyphen_count = 0;
                        for (;;) {
                            char ch;    // intentionally shadows outer ch
                            if (!getChar(ch)) {
                                error("bad comment");
                                break;
                            }
                            else if (hyphen_count >= 2 && ch == '>') {
                                break;
                            }
                            else {
                                // Track trailing hyphens so "-->" is seen
                                // even across "--->" style runs.
                                if (ch == '-') {
                                    hyphen_count += 1;
                                }
                                else {
                                    hyphen_count = 0;
                                }
                            }
                        }
                    }
                    else {
                        error ("bad tag");
                        recoverFromGarbledTag();
                    }
                }
                else if (matchChar('D')) {
                    // could be a DOCTYPE tag with DTD definition.
                    char ch;
                    char tagname [] = { 'O', 'C', 'T', 'Y', 'P', 'E' };
                    for(int cntr = 0; cntr < 6; cntr++) {
                        if (!getChar(ch)) {
                            error("bad tag");
                            break;
                        }
                        else {
                            if (tagname[ cntr ] == ch) {
                                continue;
                            }
                            else {
                                // NOTE(review): after this recovery the
                                // code still falls into the '>' scan below,
                                // which may consume further input --
                                // confirm that is intended.
                                error("bad tag");
                                recoverFromGarbledTag();
                                break;
                            }
                        }
                    }
                    // is a DOCTYPE tag Look for >
                    for(;;) {
                        if (!getChar(ch)) {
                            error("bad tag");
                            break;
                        }
                        else if('>' == ch) {
                            break;
                        }
                    }
                }
                else {
                    error ("bad tag");
                    recoverFromGarbledTag();
                }
            }
            else if (matchChar('?')) {
                // is a xml header & encoding tag. Look for ?>
                bool questionmark = false;
                char ch;
                for(;;) {
                    if (!getChar(ch)) {
                        error("bad tag");
                        break;
                    }
                    else if(questionmark && '>' == ch) {
                        break;
                    }
                    else {
                        // Remember whether the previous char was '?'.
                        if ('?' == ch) {
                            questionmark = true;
                        }
                        else {
                            questionmark = false;
                        }
                    }
                }
            }
            else {
                // Ordinary child element.
                getElement();
            }
        }
        else {
            chars << ch;
        }
    }
    // Flush any trailing character data.
    SAXParser.noteContent(chars);
}
// ===========================================================================
// Parse an element and its contents. The initial '<' is past.
//
// Parse one element: tag name, attributes, then either "/>" (empty
// element) or ">" followed by content and an end tag.  The caller has
// already consumed the initial '<'.
void ActualParser::getElement() {
    if (matchChar('/')) {
        // An end tag here has no matching start tag at this level.
        error("unmatching end tag");
        recoverFromGarbledTag();
        return;
    }
    skipWhitespace();
    string tag;
    if (getName(tag)) {
        ConcreteAttributeList attrs;
        getAttrs(attrs);
        char ch;
        bool have_char = getChar(ch);
        // A '/' before '>' marks an empty element ("<tag/>").
        bool is_end = have_char && (ch == '/');
        if (is_end) {
            have_char = getChar(ch);
        }
        if (have_char && ch == '>') {
            SAXParser.noteStartElement(tag, attrs);
            if (!is_end) {
                getContent(&tag);
            }
            // Empty elements still get a matching end-element event.
            SAXParser.noteEndElement(tag);
        }
        else {
            is_end = false;    // NOTE(review): dead store -- is_end is never read again
            error("bad tag");
            recoverFromGarbledTag();
        }
    }
    else {
        error("bad tag");
        recoverFromGarbledTag(); // skip to > or end of line
    }
}
// ===========================================================================
// Parse a file.
//
// Parse an entire document: bracket top-level content with the
// begin-of-file / end-of-file notifications.
void ActualParser::getFile() {
    SAXParser.noteBeginFile();
    getContent(NULL);   // NULL: no enclosing tag at document level
    SAXParser.noteEndFile();
}
// ===========================================================================
// Parse the named file.  On open failure a fatal error is reported at
// line/column 0 and parsing is abandoned.
void ActualParser::parse(const char *config_file) {
    ifstream i;
    i.open(config_file);
    if (i.fail()) {
#ifdef ISO_CPP_HEADERS
        ostringstream d;
#else
        ostrstream d;
#endif
        d << "file " << config_file << " failed to open";
#ifndef ISO_CPP_HEADERS
        d << ends;
#endif
        string d_str = d.str();
        curLine = 0;
        curCol = 0;
        fatalError(d_str.c_str());
        return;
    }
    // `i` is local: the `input` pointer is only valid for the duration
    // of the getFile() call below.
    input = &i;
    curLine = 1;
    curCol = 0;
    getFile();
}
// ===========================================================================
// Parse an in-memory buffer of bufLen bytes.
void ActualParser::parse(const char *bufPtr, unsigned long bufLen) {
#ifdef ISO_CPP_HEADERS
    istringstream i(string(bufPtr, bufLen));
#else
    istrstream i(bufPtr, bufLen);
#endif
    if (i.fail()) {
#ifdef ISO_CPP_HEADERS
        ostringstream d;
#else
        ostrstream d;
#endif
        d << "memory buffer of bytes failed";
#ifndef ISO_CPP_HEADERS
        d << ends;
#endif
        string d_str = d.str();
        curLine = 0;
        curCol = 0;
        fatalError(d_str.c_str());
        return;
    }
    // `i` is local: the `input` pointer is only valid for the duration
    // of the getFile() call below.
    input = &i;
    curLine = 1;
    curCol = 0;
    getFile();
}
// ===========================================================================
// Forward a fatal diagnostic to the SAX-level parser, tagged with the
// current input position.
void ActualParser::fatalError(const char *diagnostic) {
    SAXParser.fatalError(diagnostic, curLine, curCol);
}
// ===========================================================================
// Forward a recoverable diagnostic to the SAX-level parser, tagged with
// the current input position.
void ActualParser::error(const char *diagnostic) {
    SAXParser.error(diagnostic, curLine, curCol);
}
// ===========================================================================
// ===========================================================================
// ConcreteParser member definitions
//
// ===========================================================================
// Construct with no handlers installed; events are dropped until the
// corresponding setter is called.
ConcreteParser::ConcreteParser() : docHandler(NULL), errorHandler(NULL) {}
// Store the document-event handler.  Only the pointer is stored; the
// caller keeps the handler alive -- TODO confirm lifetime expectations.
void ConcreteParser::setDocumentHandler(DocumentHandler* const handler) {
    docHandler = handler;
}
// ===========================================================================
// Store the error-event handler (pointer only, as above).
void ConcreteParser::setErrorHandler(ErrorHandler* const handler) {
    errorHandler = handler;
}
// ===========================================================================
// Convert a narrow diagnostic into a SAXParseException and deliver it to
// the registered ErrorHandler, if any.  The exception copies the message
// (via the SAXException constructor), so the converted buffer is freed
// immediately after construction.
void ConcreteParser::fatalError(const char *diagnostic, int line, int col) {
    const XMLCh *diag = charsToXMLs(diagnostic);
    SAXParseException exception(diag, NULL, NULL, line, col);
    delete [] (XMLCh *)diag;
    if (errorHandler != NULL) {
        errorHandler->fatalError(exception);
    }
}
// ===========================================================================
// Recoverable-error counterpart of fatalError() above.
void ConcreteParser::error(const char *diagnostic, int line, int col) {
    const XMLCh *diag = charsToXMLs(diagnostic);
    SAXParseException exception(diag, NULL, NULL, line, col);
    delete [] (XMLCh *)diag;
    if (errorHandler != NULL) {
        errorHandler->error(exception);
    }
}
// ===========================================================================
// Deliver buffered character data to the DocumentHandler -- after entity
// substitution and conversion to XMLCh -- then reset the stream so it can
// accumulate the next run of characters.
#ifdef ISO_CPP_HEADERS
void ConcreteParser::noteContent(ostringstream &str) {
#else
void ConcreteParser::noteContent(ostrstream &str) {
#endif
#if 0
    // This didn't work: a bug in MS VC6?
    const char *strp = str.str().c_str();
#else
#ifndef ISO_CPP_HEADERS
    str << ends;
#endif
    // Keep the string object alive while we hold a pointer into it.
    string strobj = str.str();
    const char *strp = strobj.c_str();
#endif
#ifdef ISO_CPP_HEADERS
    ostringstream strm;
#else
    ostrstream strm;
#endif
    // Expand &quot; &amp; &lt; &gt; in the buffered text.
    substitute_entities(strm, strp, strlen(strp));
#ifndef ISO_CPP_HEADERS
    strm << ends;
#endif
    if (docHandler != NULL) {
        string tn_str = strm.str();
        const XMLCh *tn = charsToXMLs(tn_str.c_str());
        docHandler->characters(tn, XMLs_len(tn));
        delete [] (XMLCh *)tn;
    }
#ifdef ISO_CPP_HEADERS
#ifndef sun5
    // Reset the ostringstream.
    str.str(string())#else
#else
    // The above ran into a bug on Sun.
    // Hack: need to flush/clear/reinitialize the ostrstream at this point,
    // but flush/clear/init don't do the job.
    {
        str.ostringstream::~ostringstream();
        new (&str) ostringstream();
    }
#endif
#else
    // Hack: need to flush/clear/reinitialize the ostrstream at this point,
    // but flush/clear/init don't do the job.
    {
        str.ostrstream::~ostrstream();
        new (&str) ostrstream();
    }
#endif
}
// ===========================================================================
// Report a start tag (with its attribute list) to the DocumentHandler.
// The tag name is converted to XMLCh for the callback and freed after.
void ConcreteParser::noteStartElement(const string &tag_name,
                                      AttributeList &attrs) {
    if (docHandler != NULL) {
        const XMLCh *tn = charsToXMLs(tag_name.c_str());
        docHandler->startElement(tn, attrs);
        delete [] (XMLCh *)tn;
    }
}
// ===========================================================================
// Report an end tag to the DocumentHandler.
void ConcreteParser::noteEndElement(const string &tag_name) {
    if (docHandler != NULL) {
        const XMLCh *tn = charsToXMLs(tag_name.c_str());
        docHandler->endElement(tn);
        delete [] (XMLCh *)tn;
    }
}
// ===========================================================================
// Signal document start to the DocumentHandler.
void ConcreteParser::noteBeginFile() {
    if (docHandler != NULL) {
        docHandler->startDocument();
    }
}
// ===========================================================================
// Signal document end to the DocumentHandler.
void ConcreteParser::noteEndFile() {
    if (docHandler != NULL) {
        docHandler->endDocument();
    }
}
// ===========================================================================
//
// Parser-interface entry point: parse a file.  The unnamed bool flag is
// accepted for interface compatibility and ignored.
void ConcreteParser::parse (const char* const config_file, const bool) {
    ActualParser parser(*this);
    parser.parse(config_file);
}
// ===========================================================================
// Parser-interface entry point: parse an in-memory buffer.
void ConcreteParser::parse (const char* const bufPtr, unsigned long bufLen) {
    ActualParser parser(*this);
    parser.parse(bufPtr, bufLen);
}
// ===========================================================================
// ===========================================================================
// Factory: returns a heap-allocated ConcreteParser.  Nothing in this
// file retains the pointer, so the caller presumably deletes it --
// TODO confirm against the callers.
Parser *MiniXML::getParser()
{
    return new ConcreteParser();
}
// ===========================================================================
// ===========================================================================
// Default constructor: exception with no message.
SAXException::SAXException()
    : message(NULL)
{
}
// ===========================================================================
// Construct from an XMLCh message; the text is copied and owned.
SAXException::SAXException(const XMLCh* const msg)
    : message(replicate_XMLs(msg))
{
}
// ===========================================================================
// Construct from a narrow-character message, converting to XMLCh.
SAXException::SAXException(const char* const msg)
    : message(charsToXMLs(msg))
{
}
// ===========================================================================
// Release the owned message copy.
SAXException::~SAXException() {
    delete [] (XMLCh *)message;
}
// ===========================================================================
// Borrowed pointer into this exception's message: valid only while the
// exception is alive.
const XMLCh* SAXException::getMessage() const
{
    return message;
}
// ===========================================================================
// Copy constructor: deep-copies the message.
SAXException::SAXException(const SAXException &that)
    : message(replicate_XMLs(that.message))
{
}
// ===========================================================================
// Self-assignment-safe deep copy of the message.
SAXException & SAXException::operator=(const SAXException &that)
{
    if (this != &that) {
        delete [] (XMLCh *)message;
        message = replicate_XMLs(that.message);
    }
    return *this;
}
// ===========================================================================
// ===========================================================================
// Locator-based constructor: not supported by this mini implementation
// (calls unimplemented() after default-initializing the position).
SAXParseException::SAXParseException(const XMLCh* const message, const Locator& locator)
    : SAXException(message),
      publicId(NULL),
      systemId(NULL),
      lineNumber(0),
      columnNumber(0)
{
    unimplemented();
}
// ===========================================================================
// Full constructor: copies the message and both ids; records the
// position as given.
SAXParseException::SAXParseException(const XMLCh* const message
                                     , const XMLCh* const publicId
                                     , const XMLCh* const systemId
                                     , const unsigned int lineNumber
                                     , const unsigned int columnNumber)
    : SAXException(message),
      publicId(replicate_XMLs(publicId)),
      systemId(replicate_XMLs(systemId)),
      lineNumber(lineNumber),
      columnNumber(columnNumber)
{
}
// ===========================================================================
// Copy constructor: deep-copies message and ids.
SAXParseException::SAXParseException(const SAXParseException& that)
    : SAXException(that.getMessage()),
      publicId(replicate_XMLs(that.publicId)),
      systemId(replicate_XMLs(that.systemId)),
      lineNumber(that.lineNumber),
      columnNumber(that.columnNumber)
{
}
// ===========================================================================
// Release the owned id copies (the base releases the message).
SAXParseException::~SAXParseException()
{
    delete [] (XMLCh *)publicId;
    delete [] (XMLCh *)systemId;
}
// ===========================================================================
// Self-assignment-safe copy assignment: deep-copies the message (via the
// base), both ids, and the position.
//
// Fix: assign the base part directly from `that`.  The previous code
// called SAXException::operator=(that.getMessage()), which constructed a
// throwaway temporary through the implicit SAXException(const XMLCh*)
// constructor; assigning from `that` itself copies the same message
// without the extra allocation.
SAXParseException& SAXParseException::operator=(const SAXParseException& that)
{
    if (this != &that) {
        SAXException::operator=(that);
        delete [] (XMLCh *)publicId;
        delete [] (XMLCh *)systemId;
        publicId = replicate_XMLs(that.publicId);
        systemId = replicate_XMLs(that.systemId);
        lineNumber = that.lineNumber;
        columnNumber = that.columnNumber;
    }
    return *this;
}
// ===========================================================================
// Column recorded at construction time.
unsigned int SAXParseException::getColumnNumber() const
{
    return columnNumber;
}
// ===========================================================================
// Line recorded at construction time.
unsigned int SAXParseException::getLineNumber() const
{
    return lineNumber;
}
// ===========================================================================
// Borrowed pointer to the public id (may be NULL); valid only while this
// exception is alive.
const XMLCh* SAXParseException::getPublicId() const
{
    return publicId;
}
// ===========================================================================
// Borrowed pointer to the system id (may be NULL); valid only while this
// exception is alive.
const XMLCh* SAXParseException::getSystemId() const
{
    return systemId;
}
|
andreastoux/qtkanban
|
src/firebase.js
|
<reponame>andreastoux/qtkanban<filename>src/firebase.js
import { initializeApp } from "firebase/app";
import {
doc,
getFirestore,
updateDoc,
setDoc,
addDoc,
collection,
arrayUnion,
serverTimestamp,
deleteDoc,
} from "firebase/firestore";
import { getAuth, GoogleAuthProvider } from "@firebase/auth";
// Firebase project configuration, injected via environment variables so
// no credentials are committed to the repository.
const firebaseConfig = {
  apiKey: process.env.REACT_APP_API_KEY,
  authDomain: process.env.REACT_APP_AUTH_DOMAIN,
  projectId: process.env.REACT_APP_PROJECT_ID,
  storageBucket: process.env.REACT_APP_STORAGE_BUCKET,
  messagingSenderId: process.env.REACT_APP_MESSAGE_ID,
  appId: process.env.REACT_APP_APP_ID,
  measurementId: process.env.REACT_APP_MEASUREMENT_ID,
};

// Initialize Firebase and export the shared service handles.
export const app = initializeApp(firebaseConfig);
// Bind Firestore explicitly to the app initialized above (previously the
// implicit default app was used), matching the getAuth(app) call below.
export const db = getFirestore(app);
export const provider = new GoogleAuthProvider();
export const auth = getAuth(app);
/**
 * POST: create a task document under users/{userId}/tasks and register
 * its generated id on the owning board's `items` array.
 * @param {string} userId - Firestore user document id.
 * @param {string} boardId - Board document id that receives the task.
 * @param {object} updatedDocument - Task payload to store.
 */
export const createNewTask = async (userId, boardId, updatedDocument) => {
  const taskRef = collection(db, "users", `${userId}`, "tasks");
  const newDoc = await addDoc(taskRef, updatedDocument);
  // The board update needs the generated task id, so these two writes
  // are necessarily sequential.  (Stray debug console.log removed.)
  const boardRef = doc(db, "users", `${userId}`, "boards", `${boardId}`);
  await updateDoc(boardRef, { id: boardId, items: arrayUnion(newDoc.id) });
};
// POST: add a new board document under users/{userId}/boards.
export const createNewBoard = async (userId, newDocument) => {
  await addDoc(collection(db, "users", `${userId}`, "boards"), newDocument);
};
// PUT: merge updated fields into users/{userId}/boards/{boardId}.
export const updateBoardItems = async (userId, boardId, updatedDocument) => {
  const ref = doc(db, "users", `${userId}`, "boards", `${boardId}`);
  await updateDoc(ref, updatedDocument);
};
/**
 * PUT: merge updated fields into users/{userId}/tasks/{taskId}.
 * @param {string} userId - Firestore user document id.
 * @param {string} taskId - Task document id to update.
 * @param {object} updatedDocument - Partial fields passed to updateDoc.
 */
export const updateTaskDocument = async (userId, taskId, updatedDocument) => {
  // Renamed from the misleading `boardRef`: this reference points at a
  // *task* document.
  const taskRef = doc(db, "users", `${userId}`, "tasks", `${taskId}`);
  await updateDoc(taskRef, updatedDocument);
};
// Create the per-user profile document on first Google sign-in, keyed by
// the Firebase auth uid.
export const createNewUserDocumentWithGoogle = async (user) => {
  const profile = {
    userName: user.displayName,
    photoURL: user.photoURL,
    registeredOn: serverTimestamp(),
  };
  await setDoc(doc(db, "users", user.uid), profile);
};
// Create the per-user profile document on first email/password sign-up,
// keyed by the Firebase auth uid.  photoUrl is optional.
export const createNewUserDocumentWithEmail = async (userDocument, username, photoUrl = "") => {
  const profile = {
    userName: username,
    photoURL: photoUrl,
    registeredOn: serverTimestamp(),
  };
  await setDoc(doc(db, "users", userDocument.uid), profile);
};
// DELETE users/{userId}/tasks/{taskId}.
export const deleteTask = async (userId, taskId) => {
  const ref = doc(db, "users", `${userId}`, "tasks", `${taskId}`);
  await deleteDoc(ref);
};
// DELETE users/{userId}/boards/{boardId}.
export const deleteBoard = async (userId, boardId) => {
  const ref = doc(db, "users", `${userId}`, "boards", `${boardId}`);
  await deleteDoc(ref);
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.