repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
kevindew/openapi_parser
lib/openapi3_parser/node/info.rb
<gh_stars>10-100 # frozen_string_literal: true require "openapi3_parser/node/object" module Openapi3Parser module Node # @see https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#infoObject class Info < Node::Object # @return [String] def title self["title"] end # @return [String, nil] def description self["description"] end # @return [String, nil] def description_html render_markdown(description) end # @return [String, nil] def terms_of_service self["termsOfService"] end # @return [Contact, nil] def contact self["contact"] end # @return [License, nil] def license self["license"] end # @return [String] def version self["version"] end end end end
xhgrid/youlai-mall
youlai-mall-service-api/yshop-sms-api/src/main/java/com/fly4j/yshop/sms/feign/admin/factory/SmsFeignClientFallbackFactory.java
<filename>youlai-mall-service-api/yshop-sms-api/src/main/java/com/fly4j/yshop/sms/feign/admin/factory/SmsFeignClientFallbackFactory.java package com.fly4j.yshop.sms.feign.admin.factory; import com.fly4j.yshop.sms.feign.admin.SmsFeignClient; import feign.hystrix.FallbackFactory; public class SmsFeignClientFallbackFactory implements FallbackFactory<SmsFeignClient> { @Override public SmsFeignClient create(Throwable throwable) { return new SmsFeignClient() { }; } }
maciekp85/ocajp8-training
enthuware-examples/src/test1/objective02/exercise10/Test10.java
package test1.objective02.exercise10; public class Test10 { public static void main(String[] args) { // boolean b = null; // wrong // boolean b = 1; // wrong boolean b = true | false; // bool b = (10<11); // wrong boolean b1 = true || false; String s = null; if(s != null && s.isEmpty()) { System.out.println("True"); } else { System.out.println("False"); } } }
nistefan/cmssw
DataFormats/L1TMuon/interface/L1MuBMTrackSegPhi.h
//------------------------------------------------- // /** \class L1MuBMTrackSegPhi * * PHI Track Segment * * * * <NAME> CERN EP */ // //-------------------------------------------------- #ifndef L1MUBM_TRACK_SEG_PHI_H #define L1MUBM_TRACK_SEG_PHI_H //--------------- // C++ Headers -- //--------------- #include <iosfwd> #include <vector> //---------------------- // Base Class Headers -- //---------------------- //------------------------------------ // Collaborating Class Declarations -- //------------------------------------ #include "DataFormats/L1TMuon/interface/BMTF/L1MuBMTrackSegLoc.h" // --------------------- // -- Class Interface -- // --------------------- class L1MuBMTrackSegPhi; typedef std::vector<L1MuBMTrackSegPhi> L1MuBMTrackSegPhiCollection; class L1MuBMTrackSegPhi { public: /// quality code of BBMX phi track segments enum TSQuality { Li, Lo, Hi, Ho, LL, HL, HH, Null }; /// default constructor L1MuBMTrackSegPhi(); /// constructor L1MuBMTrackSegPhi(int wheel_id, int sector_id, int station_id, int phi = 0, int phib = 0, TSQuality quality = Null, bool tag = false, int bx = 17, bool etaFlag = false); /// constructor L1MuBMTrackSegPhi(const L1MuBMTrackSegLoc&, int phi = 0, int phib = 0, TSQuality quality = Null, bool tag = false, int bx = 17, bool etaFlag = false); /// copy constructor L1MuBMTrackSegPhi(const L1MuBMTrackSegPhi&); /// destructor virtual ~L1MuBMTrackSegPhi(); /// reset phi track segment void reset(); /// return phi-value in global coordinates [0,2pi] double phiValue() const; /// return phib-value in global coordinates [0,2pi] double phibValue() const; /// return wheel inline int wheel() const { return m_location.wheel(); } /// return sector inline int sector() const { return m_location.sector(); } /// return station inline int station() const { return m_location.station(); } /// return location of phi track segment inline const L1MuBMTrackSegLoc& where() const{ return m_location; } /// return phi inline int phi() const { return m_phi; } /// 
return phib inline int phib() const { return m_phib; } /// return quality code inline int quality() const { return m_quality; } /// return tag (second TS tag) inline int tag() const { return m_tag; } /// return bunch crossing inline int bx() const { return m_bx; } /// return eta flag inline bool etaFlag() const { return m_etaFlag; } /// is it an empty phi track segment? inline bool empty() const { return m_quality == Null; } /// set eta flag inline void setEtaFlag(bool flag) { m_etaFlag = flag; } /// assignment operator L1MuBMTrackSegPhi& operator=(const L1MuBMTrackSegPhi&); /// equal operator bool operator==(const L1MuBMTrackSegPhi&) const; /// unequal operator bool operator!=(const L1MuBMTrackSegPhi&) const; /// overload output stream operator for phi track segment quality friend std::ostream& operator<<(std::ostream&, const TSQuality&); /// overload output stream operator for phi track segments friend std::ostream& operator<<(std::ostream&, const L1MuBMTrackSegPhi&); private: L1MuBMTrackSegLoc m_location; // logical location of TS int m_phi; // 12 bits int m_phib; // 10 bits TSQuality m_quality; // 3 bits bool m_tag; // tag for second TS (of chamber) int m_bx; // bunch crossing identifier bool m_etaFlag; // eta flag (for overlap region) }; #endif
f0r3ns1cat0r/cloud-forensics-utils
tests/providers/gcp/gcp_mocks.py
# -*- coding: utf-8 -*- # Copyright 2020 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """GCP mocks used across tests.""" import re # pylint: disable=line-too-long from libcloudforensics.providers.gcp.internal import build as gcp_build from libcloudforensics.providers.gcp.internal import compute from libcloudforensics.providers.gcp.internal import project as gcp_project from libcloudforensics.providers.gcp.internal import log as gcp_log from libcloudforensics.providers.gcp.internal import monitoring as gcp_monitoring from libcloudforensics.providers.gcp.internal import storage as gcp_storage # pylint: enable=line-too-long FAKE_ANALYSIS_PROJECT = gcp_project.GoogleCloudProject( 'fake-target-project', 'fake-zone') FAKE_ANALYSIS_VM = compute.GoogleComputeInstance( FAKE_ANALYSIS_PROJECT.project_id, 'fake-zone', 'fake-analysis-vm') FAKE_IMAGE = compute.GoogleComputeImage( FAKE_ANALYSIS_PROJECT.project_id, '', 'fake-image') # Source project with the instance that needs forensicating FAKE_SOURCE_PROJECT = gcp_project.GoogleCloudProject( 'fake-source-project', 'fake-zone') FAKE_INSTANCE = compute.GoogleComputeInstance( FAKE_SOURCE_PROJECT.project_id, 'fake-zone', 'fake-instance') FAKE_DISK = compute.GoogleComputeDisk( FAKE_SOURCE_PROJECT.project_id, 'fake-zone', 'fake-disk') FAKE_BOOT_DISK = compute.GoogleComputeDisk( FAKE_SOURCE_PROJECT.project_id, 'fake-zone', 'fake-boot-disk') FAKE_SNAPSHOT = compute.GoogleComputeSnapshot( FAKE_DISK, 'fake-snapshot') 
FAKE_SNAPSHOT_LONG_NAME = compute.GoogleComputeSnapshot( FAKE_DISK, 'this-is-a-kind-of-long-fake-snapshot-name-and-is-definitely-over-63-chars') FAKE_DISK_COPY = compute.GoogleComputeDisk( FAKE_SOURCE_PROJECT.project_id, 'fake-zone', 'fake-disk-copy') FAKE_LOGS = gcp_log.GoogleCloudLog('fake-target-project') FAKE_LOG_LIST = [ 'projects/fake-target-project/logs/GCEGuestAgent', 'projects/fake-target-project/logs/OSConfigAgent' ] FAKE_LOG_ENTRIES = [{ 'logName': 'test_log', 'timestamp': '123456789', 'textPayload': 'insert.compute.create' }, { 'logName': 'test_log', 'timestamp': '123456789', 'textPayload': 'insert.compute.create' }] FAKE_NEXT_PAGE_TOKEN = 'abcdef<PASSWORD>' FAKE_GCS = gcp_storage.GoogleCloudStorage('fake-target-project') FAKE_GCB = gcp_build.GoogleCloudBuild('fake-target-project') FAKE_MONITORING = gcp_monitoring.GoogleCloudMonitoring('fake-target-project') # Mock struct to mimic GCP's API responses MOCK_INSTANCES_AGGREGATED = { # See https://cloud.google.com/compute/docs/reference/rest/v1/instances # /aggregatedList for complete structure 'items': { 0: { 'instances': [{ 'name': FAKE_INSTANCE.name, 'zone': '/' + FAKE_INSTANCE.zone }] } } } # Mock struct to mimic GCP's API responses MOCK_LOGS_LIST = { # See https://cloud.google.com/logging/docs/reference/v2/rest/v2 # /ListLogsResponse for complete structure 'logNames': FAKE_LOG_LIST } MOCK_LOG_ENTRIES = { # See https://cloud.google.com/logging/docs/reference/v2/rest/v2 # /entries/list/ListLogsResponse for complete structure 'entries': FAKE_LOG_ENTRIES } MOCK_DISKS_AGGREGATED = { # See https://cloud.google.com/compute/docs/reference/rest/v1/disks # /aggregatedList for complete structure 'items': { 0: { 'disks': [{ 'name': FAKE_BOOT_DISK.name, 'zone': '/' + FAKE_BOOT_DISK.zone }] }, 1: { 'disks': [{ 'name': FAKE_DISK.name, 'zone': '/' + FAKE_DISK.zone }] } } } MOCK_LIST_INSTANCES = {FAKE_INSTANCE.name: FAKE_INSTANCE} MOCK_LIST_DISKS = { FAKE_DISK.name: FAKE_DISK, FAKE_BOOT_DISK.name: FAKE_BOOT_DISK } 
MOCK_GCE_OPERATION_INSTANCES_LABELS_SUCCESS = { 'items': { 'zone': { 'instances': [{ 'name': FAKE_INSTANCE.name, 'zone': '/' + FAKE_INSTANCE.zone, 'labels': { 'id': '123' } }] } } } MOCK_GCE_OPERATION_DISKS_LABELS_SUCCESS = { 'items': { 'zone': { 'disks': [{ 'name': FAKE_DISK.name, 'labels': { 'id': '123' } }, { 'name': FAKE_BOOT_DISK.name, 'labels': { 'some': 'thing' } }] } } } MOCK_GCE_OPERATION_LABELS_FAILED = { 'items': {}, 'warning': { 'code': 404, 'message': 'Not Found' } } MOCK_GCE_OPERATION_INSTANCES_GET = { # See https://cloud.google.com/compute/docs/reference/rest/v1/instances/get # for complete structure 'name': FAKE_INSTANCE.name, 'disks': [{ 'boot': True, 'source': '/' + FAKE_BOOT_DISK.name, }, { 'boot': False, 'source': '/' + FAKE_DISK.name, 'initializeParams': { 'diskName': FAKE_DISK.name } }] } MOCK_GCS_BUCKETS = { 'kind': 'storage#buckets', 'items': [{ 'kind': 'storage#bucket', 'id': 'fake-bucket', 'selfLink': 'https://www.googleapis.com/storage/v1/b/fake-bucket', 'projectNumber': '123456789', 'name': 'fake-bucket', 'timeCreated': '2020-01-01T01:02:03.456Z', 'updated': '2020-07-09T05:58:11.393Z', 'metageneration': '8', 'iamConfiguration': { 'bucketPolicyOnly': { 'enabled': True, 'lockedTime': '2020-10-04T05:47:28.721Z' }, 'uniformBucketLevelAccess': { 'enabled': True, 'lockedTime': '2020-10-04T05:47:28.721Z' } }, 'location': 'US-EAST1', 'locationType': 'region', 'defaultEventBasedHold': False, 'storageClass': 'STANDARD', 'etag': 'CAg=' }] } MOCK_GCS_OBJECT_METADATA = { 'kind': 'storage#object', 'id': 'fake-bucket/foo/fake.img/12345', 'size': '5555555555', 'md5Hash': 'MzFiYWIzY2M0MTJjNGMzNjUyZDMyNWFkYWMwODA5YTEgIGNvdW50MQo=', } MOCK_GCS_BUCKET_OBJECTS = { 'items': [ MOCK_GCS_OBJECT_METADATA ] } MOCK_GCS_BUCKET_ACLS = { 'kind': 'storage#bucketAccessControls', 'items': [ { 'kind': 'storage#bucketAccessControl', 'id': 'test_bucket_1/project-editors-1', 'bucket': 'test_bucket_1', 'entity': 'project-editors-1', 'role': 'OWNER', }, { 'kind': 
'storage#bucketAccessControl', 'id': 'test_bucket_1/project-owners-1', 'bucket': 'test_bucket_1', 'entity': 'project-owners-1', 'role': 'OWNER', } ] } MOCK_GCS_BUCKET_IAM = { 'bindings': [{ 'role': 'roles/storage.legacyBucketOwner', 'members': ['projectEditor:project1', 'projectOwner:project1'], }] } MOCK_GCB_BUILDS_CREATE = { 'name': 'operations/build/fake-project/12345', 'metadata': { 'build': { 'id': '12345', 'timeout': '12345s', 'projectId': 'fake-project', 'logsBucket': 'gs://fake-uri', "logUrl": "https://fake-url" } } } MOCK_GCB_BUILDS_SUCCESS = { 'done': True, 'response': { 'id': 'fake-id' }, 'metadata': { 'build': { 'id': '12345', 'timeout': '12345s', 'projectId': 'fake-project', 'logsBucket': 'gs://fake-uri', "logUrl": "https://fake-url" } } } MOCK_GCB_BUILDS_FAIL = { 'done': True, 'error': { 'code': 2, 'message': 'Build failed; check build logs for details' }, 'metadata': { 'build': { 'id': '12345', 'timeout': '12345s', 'projectId': 'fake-project', 'logsBucket': 'gs://fake-uri', "logUrl": "https://fake-url" } } } MOCK_STACKDRIVER_METRIC = 6693417 MOCK_COMPUTE_METRIC = 8093 MOCK_LOGGING_METRIC = 1 MOCK_GCM_METRICS_COUNT = { 'timeSeries': [{ 'metric': { 'type': 'serviceruntime.googleapis.com/api/request_count' }, 'resource': { 'type': 'consumed_api', 'labels': { 'project_id': 'fake-target-project', 'service': 'stackdriver.googleapis.com' } }, 'metricKind': 'DELTA', 'valueType': 'INT64', 'points': [{ 'interval': { 'startTime': '2020-05-18T00:00:00Z', 'endTime': '2020-06-17T00:00:00Z' }, 'value': { 'int64Value': MOCK_STACKDRIVER_METRIC } }] }, { 'metric': { 'type': 'serviceruntime.googleapis.com/api/request_count' }, 'resource': { 'type': 'consumed_api', 'labels': { 'service': 'compute.googleapis.com', 'project_id': 'fake-target-project' } }, 'metricKind': 'DELTA', 'valueType': 'INT64', 'points': [{ 'interval': { 'startTime': '2020-05-18T00:00:00Z', 'endTime': '2020-06-17T00:00:00Z' }, 'value': { 'int64Value': MOCK_COMPUTE_METRIC } }] }, { 'metric': { 'type': 
'serviceruntime.googleapis.com/api/request_count' }, 'resource': { 'type': 'consumed_api', 'labels': { 'service': 'logging.googleapis.com', 'project_id': 'fake-target-project' } }, 'metricKind': 'DELTA', 'valueType': 'INT64', 'points': [{ 'interval': { 'startTime': '2020-05-18T00:00:00Z', 'endTime': '2020-06-17T00:00:00Z' }, 'value': { 'int64Value': MOCK_LOGGING_METRIC } }] }], 'unit': '1' } MOCK_GCM_METRICS_BUCKETSIZE = { "timeSeries": [ { "metric": { "labels": { "storage_class": "REGIONAL" }, "type": "storage.googleapis.com/storage/total_bytes" }, "resource": { "type": "gcs_bucket", "labels": { "bucket_name": "test_bucket_1", "project_id": "fake-project", "location": "us-east1" } }, "metricKind": "GAUGE", "valueType": "DOUBLE", "points": [ { "interval": { "startTime": "2021-04-07T00:00:00Z", "endTime": "2021-04-07T00:00:00Z" }, "value": { "doubleValue": 30 } }, { "interval": { "startTime": "2021-04-06T00:05:00Z", "endTime": "2021-04-06T00:05:00Z" }, "value": { "doubleValue": 60 } } ] } ], "unit": "By" } # See: https://cloud.google.com/compute/docs/reference/rest/v1/disks REGEX_DISK_NAME = re.compile('^(?=.{1,63}$)[a-z]([-a-z0-9]*[a-z0-9])?$') STARTUP_SCRIPT = 'scripts/startup.sh'
luigiberducci/dirl
spectrl/examples/fpp_tltl.py
from spectrl.main.learning import QSRewardEnv from spectrl.main.spec_compiler import ev, seq, choose from spectrl.util.io import parse_command_line_options, save_log_info from spectrl.util.rl import print_performance, get_rollout, ObservationWrapper from spectrl.envs.fetch import FetchPickAndPlaceEnv from spectrl.rl.ars import ars, NNParams, ARSParams, NNPolicy from numpy import linalg as LA import numpy as np import os def grip_near_object(err): def predicate(sys_state, res_state): dist = sys_state[:3] - (sys_state[3:6] + np.array([0., 0., 0.065])) dist = np.concatenate([dist, [sys_state[9] + sys_state[10] - 0.1]]) return -LA.norm(dist) + err return predicate def hold_object(err): def predicate(sys_state, res_state): dist = sys_state[:3] - sys_state[3:6] dist2 = np.concatenate([dist, [sys_state[9] + sys_state[10] - 0.045]]) return -LA.norm(dist2) + err return predicate def object_in_air(sys_state, res_state): return sys_state[5] - 0.45 def object_at_goal(err): def predicate(sys_state, res_state): dist = np.concatenate([sys_state[-3:], [sys_state[9] + sys_state[10] - 0.045]]) return -LA.norm(dist) + err return predicate def gripper_reach(goal, err): ''' goal: numpy array of dim (3,) ''' def predicate(sys_state, res_state): return -LA.norm(sys_state[:3] - goal) + err return predicate def object_reach(goal, err): ''' goal: numpy array of dim (3,) ''' def predicate(sys_state, res_state): return -LA.norm(sys_state[3:6] - goal) + err return predicate above_corner1 = np.array([1.15, 1.0, 0.465]) above_corner2 = np.array([1.45, 1.0, 0.465]) corner1 = np.array([1.15, 1.0, 0.425]) corner2 = np.array([1.50, 1.05, 0.425]) # Specifications spec1 = ev(grip_near_object(0.03)) spec2 = seq(spec1, ev(hold_object(0.03))) spec3 = seq(spec2, ev(object_at_goal(0.05))) spec4 = seq(seq(spec2, ev(object_in_air)), ev(object_at_goal(0.05))) spec5 = seq(seq(spec2, ev(object_in_air)), ev(object_reach(above_corner1, 0.05))) spec6 = seq(seq(spec2, ev(object_in_air)), 
choose(seq(ev(object_reach(above_corner1, 0.05)), ev(object_reach(corner1, 0.05))), seq(ev(object_reach(above_corner2, 0.05)), ev(object_reach(corner2, 0.01))))) specs = [spec1, spec2, spec3, spec4, spec5, spec6] # Construct Product MDP and learn policy if __name__ == '__main__': flags = parse_command_line_options() render = flags['render'] folder = flags['folder'] itno = flags['itno'] spec_num = flags['spec_num'] env = ObservationWrapper(FetchPickAndPlaceEnv(), ['observation', 'desired_goal'], relative=(('desired_goal', 0, 3), ('observation', 3, 6)), max_timesteps=150) action_dim = env.action_space.shape[0] action_bound = env.action_space.high params_list = [ARSParams(500, 50, 20, 0.01, 0.5, 0.3, 10), ARSParams(500, 50, 20, 0.01, 0.5, 0.3, 10), ARSParams(500, 50, 20, 0.01, 0.5, 0.3, 10), ARSParams(500, 50, 20, 0.01, 0.5, 0.3, 10), ARSParams(500, 50, 20, 0.01, 0.5, 0.3, 10), ARSParams(800, 50, 20, 0.01, 0.5, 0.3, 10)] print('\n**** Learning Policy for Spec {} ****'.format(spec_num)) # Step 3: construct product MDP env = QSRewardEnv(env, specs[spec_num]) # Step 4: Set hyper parameters params = params_list[spec_num] # Step 5: Learn policy policy = NNPolicy(NNParams(env, 300)) log_info = ars(env, policy, params, cum_reward=True) # Save policy and log information logdir = os.path.join(folder, 'spec{}'.format(spec_num), 'tltl') if not os.path.exists(logdir): os.makedirs(logdir) save_log_info(log_info, itno, logdir) # save_object('policy', policy, itno, logdir) # Print rollout and performance print_performance(env, policy) if render: rollout = get_rollout(env, policy, True)
BantorSchwanzVor/plotscanner-leak
net/minecraft/client/renderer/entity/RenderGiantZombie.java
package net.minecraft.client.renderer.entity; import net.minecraft.client.model.ModelBase; import net.minecraft.client.model.ModelZombie; import net.minecraft.client.renderer.GlStateManager; import net.minecraft.client.renderer.entity.layers.LayerBipedArmor; import net.minecraft.client.renderer.entity.layers.LayerHeldItem; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.entity.monster.EntityGiantZombie; import net.minecraft.util.ResourceLocation; public class RenderGiantZombie extends RenderLiving<EntityGiantZombie> { private static final ResourceLocation ZOMBIE_TEXTURES = new ResourceLocation("textures/entity/zombie/zombie.png"); private final float scale; public RenderGiantZombie(RenderManager p_i47206_1_, float p_i47206_2_) { super(p_i47206_1_, (ModelBase)new ModelZombie(), 0.5F * p_i47206_2_); this.scale = p_i47206_2_; addLayer(new LayerHeldItem(this)); addLayer(new LayerBipedArmor(this) { protected void initArmor() { this.modelLeggings = (ModelBase)new ModelZombie(0.5F, true); this.modelArmor = (ModelBase)new ModelZombie(1.0F, true); } }); } public void transformHeldFull3DItemLayer() { GlStateManager.translate(0.0F, 0.1875F, 0.0F); } protected void preRenderCallback(EntityGiantZombie entitylivingbaseIn, float partialTickTime) { GlStateManager.scale(this.scale, this.scale, this.scale); } protected ResourceLocation getEntityTexture(EntityGiantZombie entity) { return ZOMBIE_TEXTURES; } } /* Location: C:\Users\BSV\AppData\Local\Temp\Rar$DRa6216.20396\Preview\Preview.jar!\net\minecraft\client\renderer\entity\RenderGiantZombie.class * Java compiler version: 8 (52.0) * JD-Core Version: 1.1.3 */
NickUfer/deep-assoc-completion
src/org/klesun/deep_assoc_completion/entry/DeepEnterHandler.java
package org.klesun.deep_assoc_completion.entry; import com.intellij.codeInsight.editorActions.enter.EnterHandlerDelegate; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.editor.CaretModel; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.actionSystem.EditorActionHandler; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiFile; import com.intellij.psi.util.PsiTreeUtil; import com.jetbrains.php.lang.documentation.phpdoc.psi.PhpDocToken; import com.jetbrains.php.lang.documentation.phpdoc.psi.tags.PhpDocTag; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.klesun.lang.Tls; import static org.klesun.lang.Lang.opt; public class DeepEnterHandler implements EnterHandlerDelegate { @Override public Result preprocessEnter(@NotNull PsiFile psiFile, @NotNull Editor editor, @NotNull Ref<Integer> ref, @NotNull Ref<Integer> ref1, @NotNull DataContext dataContext, @Nullable EditorActionHandler editorActionHandler) { CaretModel caret = editor.getCaretModel(); DocWrapper docWr = new DocWrapper(editor.getDocument()); int pos = caret.getOffset(); String ch = docWr.sub(pos - 1, pos); String nextCh = docWr.sub(pos, pos + 1); String prefix = docWr.sub(pos - 100, pos); String postfix = docWr.sub(pos, pos + 100); if (ch.equals("[")) { return opt(PsiTreeUtil.findElementOfClassAtOffset(psiFile, pos - 1, PhpDocTag.class, false)) .flt(tag -> tag.getTagValue().matches("(?s)\\s*=.*")) .fop(tag -> Tls.regex("[\\s\\S]*\\n(\\s*\\*\\s*).*", prefix, 0) .fop(match -> match.gat(0)) .map(baseIndent -> { String insertion = "\n" + baseIndent + " "; docWr.doc.insertString(pos, insertion); int newPos = pos + insertion.length(); caret.moveToOffset(newPos); if (nextCh.equals("]")) { docWr.doc.insertString(newPos, "\n" + baseIndent); } else if (tag.getTagValue().matches("(?s).*\\[\\s*")) { 
docWr.doc.insertString(newPos, "\n" + baseIndent + "]"); } return Result.Stop; })) .def(Result.Continue); } else if (ch.equals(",")) { return opt(PsiTreeUtil.findElementOfClassAtOffset(psiFile, pos - 1, PhpDocToken.class, false)) .flt(token -> Tls.findPrevSibling(token, PhpDocTag.class) .flt(tag -> tag.getTagValue().matches("(?s)\\s*=.*") || tag.getText().matches("(?s).*array\\{.*") ).has()) .fop(token -> Tls.regex("[\\s\\S]*\\n(\\s*\\*\\s*).*", prefix, 0)) .fop(match -> match.gat(0)) .map(baseIndent -> { String insertion = "\n" + baseIndent; docWr.doc.insertString(pos, insertion); int newPos = pos + insertion.length(); caret.moveToOffset(newPos); return Result.Stop; }) .def(Result.Continue); } else if (ch.equals("{")) { return opt(PsiTreeUtil.findElementOfClassAtOffset(psiFile, pos - 1, PhpDocTag.class, false)) .fop(tag -> Tls.regex("[\\s\\S]*\\n(\\s*\\*\\s*).*array\\{", prefix, 0)) .fop(match -> match.gat(0)) .map(baseIndent -> { String insertion = "\n" + baseIndent + " "; docWr.doc.insertString(pos, insertion); int newPos = pos + insertion.length(); caret.moveToOffset(newPos); if (nextCh.equals("}")) { docWr.doc.insertString(newPos, "\n" + baseIndent); } else if (postfix.matches("(?s)\\s*([$\\n]).*")) { docWr.doc.insertString(newPos, "\n" + baseIndent + "}"); } return Result.Stop; }) .def(Result.Continue); } else { return Result.Continue; } } @Override public Result postProcessEnter(@NotNull PsiFile psiFile, @NotNull Editor editor, @NotNull DataContext dataContext) { return Result.Continue; } /** * a convenient wrapper for the Document to deal with * text range bounds and more simple signatures */ static class DocWrapper { final public Document doc; public DocWrapper(Document doc) { this.doc = doc; } /** @param end - exclusive */ public String sub(int start, int end) { start = Math.min(doc.getTextLength(), Math.max(0, start)); end = Math.min(doc.getTextLength(), Math.max(0, end)); return doc.getText(new TextRange(start, end)); } } }
martonantoni/pixie
pixie/PixieObject/PixieObjectAlongPathBlender.h
#pragma once /* class cAlongPathPixieObjectBlender: public cPixieObjectAnimator { public: using cPath = std::vector<std::pair<int, cPixieObject::cPropertyValues>>; struct cRequest { unsigned int mStartTime = gFrameTime; unsigned int mAffectedProperties; cPath mPath; // time, target bool mKeepObjectAlive=true; cRequest() {} cRequest(unsigned int AffectedProperties, cPath path): mAffectedProperties(AffectedProperties), mPath(std::move(path)) {} }; private: cRequest mRequest; cPixieObject::cPropertyValues mStartValues; virtual eAnimateResult Animate(cPixieObject& Object) override; virtual void Activated(cPixieObject& Object) override; public: cAlongPathPixieObjectBlender(const cRequest &Request); virtual ~cAlongPathPixieObjectBlender()=default; static void BlendObject(cPixieObject &Object, cPath path, unsigned int AffectedProperties, bool KeepObjectAlive=true); }; #define BLEND_SPRITE_FUNCTION(FunctionNameExtension,AffectedProperties) \ inline void BlendObject##FunctionNameExtension_AlongPath(cPixieObject &Sprite, cAlongPathPixieObjectBlender::cPath path) \ { \ cAlongPathPixieObjectBlender::BlendObject(Sprite,std::move(path),AffectedProperties); \ } \ inline void BlendObject##FunctionNameExtension##_AlongPath_NoKeepAlive(cPixieObject &Sprite, cAlongPathPixieObjectBlender::cPath path) \ { \ cAlongPathPixieObjectBlender::BlendObject(Sprite,std::move(path),AffectedProperties,false); \ } BLEND_SPRITE_FUNCTION(Position, cPixieObject::Property_Position); BLEND_SPRITE_FUNCTION(PositionOffset, cPixieObject::Property_PositionOffset); BLEND_SPRITE_FUNCTION(Size, cPixieObject::Property_Size); BLEND_SPRITE_FUNCTION(Rect, cPixieObject::Property_Rect); BLEND_SPRITE_FUNCTION(CenterAndHSize, cPixieObject::Property_CenterAndHSize); BLEND_SPRITE_FUNCTION(Alpha, cPixieObject::Property_Alpha); BLEND_SPRITE_FUNCTION(RGBColor, cPixieObject::Property_Color); BLEND_SPRITE_FUNCTION(Center, cPixieObject::Property_Center); */
stjordanis/ViDi
disasm/include/TargetValue.h
#pragma once #include <bearparser/core.h> namespace minidis { enum target_state { TS_NOT_ADDR, TS_VIRTUAL_ONLY, //cannot be converted to raw TS_VALID, COUNT_TS }; class TargetValue { public: TargetValue() : m_targetAddrType(Executable::NOT_ADDR), m_targetOpNum(0), m_isImm(false), m_targetAddr(INVALID_ADDR), m_targetRaw(INVALID_ADDR) {} virtual ~TargetValue() {} Executable::addr_type getAddrType() const {return m_targetAddrType; } offset_t getTargetAddr() const { return m_targetAddr; } offset_t getTargetRaw() const { return m_targetRaw; } //converted size_t getOpNum() const { return m_targetOpNum; } bool isImm() const { return m_isImm; } target_state getState() { if (m_targetAddr == INVALID_ADDR) return TS_NOT_ADDR; if (m_targetRaw == INVALID_ADDR) return TS_VIRTUAL_ONLY; return TS_VALID; } void setValues(size_t targetOpNum, Executable::addr_type targetAddrType, offset_t targetAddr, offset_t targetRaw, bool isImm = false) { m_targetAddrType = targetAddrType; //should be VA or RVA m_targetAddr = targetAddr; m_targetRaw = targetRaw; m_targetOpNum = targetOpNum; m_isImm = isImm; } // values: Executable::addr_type m_targetAddrType; // original type offset_t m_targetAddr; //original addr offset_t m_targetRaw; size_t m_targetOpNum; bool m_isImm; }; //--- }; // namespace minidis
brettonw/Bedrock3
libraries/bag/src/main/java/com/brettonw/bedrock/bag/formats/ObjectFormatReader.java
<reponame>brettonw/Bedrock3<filename>libraries/bag/src/main/java/com/brettonw/bedrock/bag/formats/ObjectFormatReader.java<gh_stars>1-10 package com.brettonw.bedrock.bag.formats; import com.brettonw.bedrock.bag.BagObject; public interface ObjectFormatReader { BagObject readBagObject (); }
lenoch/tagsetbench
create_model.py
#!/usr/bin/env python3 import doctest from subprocess import run import sys import rftagger from tagsetbench import ShellPath, read_args def create_model(): args = { 'training-corpus': ShellPath(), 'rftagger-lexicon': ShellPath(), 'rftagger-possible-unknown-tags': ShellPath(), # 'temporary-corpus': ShellPath(), # mám symlinky v pracovním adresáři 'rftagger-wordclass-automaton': ShellPath( 'RFTagger/wordclass/wordclass.txt'), 'tagger': rftagger.NAME, # DONE: # Marek používal kontext -c 8, výchozí je -c 2 a -c 20 už nepomohl, ale # když si to pěkně zautomatizuju (./configure i make v cyklu, pokaždé # buď získávat výsledky anebo je ukládat mimo pracování adresář anebo # to dávat do různých pracovních adresářů), můžu přijít na nejvhodnější 'rftagger-context-length': 5, 'rftagger-verbose-training-log': False, 'model': ShellPath(), 'training-log': ShellPath(), } args = read_args(sys.argv, args) # TODO: spíš vyhodit výjimku (a doctest dělat na požádání, explicitně) if not args['training-corpus'].name and not args['model'].name: doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE) return if args['tagger'] == rftagger.NAME: cmd = [ 'rft-train', # PATH=/RFTagger/is/here:$PATH args['training-corpus'], # corpus args['rftagger-wordclass-automaton'], args['model'], # parfile # "The n preceding tags are used as context (default 2)." '-c', str(args['rftagger-context-length']), # TODO: args['rftagger-verbose-training-log'] # "The verbose mode is turned on." '-v', # Vypadá to, že to s -vv (very verbose mode) padá. ] if args['rftagger-lexicon'].is_file(): # "Additional lexicon entries are supplied in file f." cmd += ['-l', args['rftagger-lexicon']] # TODO: pro tu srandu a porovnání, co takhle opravdu zkusit rozdíl # v úspěšnosti mezi čistým modelem a modelem rozšířeným o lexikon # opět jen z trénovacího korpusu? Podle mě by to mělo dopadnout # skoro stejně. 
if args['rftagger-possible-unknown-tags'].is_file(): # "The possible POS tags of unknown words are restricted to those # listed in file f." # Tagy otevřených slovních kategorií (omezení pouze na ně) # pomocí -o, ale nejdřív bych tam musel dát i kA. cmd += ['-o', args['rftagger-possible-unknown-tags']] # Slovník, který nejspíš omezí možnost (pozitivního) hádání (aspoň # něco místo k?), protože nemám zatím jiný zdroj, ale taky # (negativního) hádání (k1 místo k5) se zpřístupní pomocí -o # slovnik.vert (tagy a lemmata přehozeně) cmd = [str(arg) for arg in cmd] # TODO: vypisuje na stderr chyby do logu, takže si jich můžu tady # všimnout, na to možná bude lepší vyčítat stderr ve smyčce with args['training-log'].open('w') as stderr: completed_process = run(cmd, stderr=stderr, check=True, universal_newlines=True) print(completed_process) else: raise NotImplementedError('Tagger "{}" not supported.'.format( args['tagger'])) if __name__ == '__main__': create_model()
shanghaolong04/Cpp-CS2040C
Wk-1_chopin.cpp
#include <bits/stdc++.h> using namespace std; int main() { ios_base::sync_with_stdio(false); cin.tie(NULL); int i = 1; string str1, str2; char c, d; c = cin.peek(); while (c != EOF) { cin >> str1 >> str2; cin.get(); if (str1.length() == 1) cout << "Case " << i << ": UNIQUE \n"; else { stringstream(str1) >> c >> d; if (d == '#') { if (c == 'G') c = 'A'; //G becomes A else c++; //next alphabet d = 'b'; } else if (d == 'b') { if (c == 'A') c = 'G'; //A becomes G else c--; //previous alphabet d = '#'; } cout << "Case " << i << ": " << c << d << " " << str2 << '\n'; } i++; c = cin.peek(); } }
jeremykid/FunAlgorithm
leetcode/252_Meeting_Rooms_Easy.py
class Solution: def canAttendMeetings(self, intervals: List[List[int]]) -> bool: if len(intervals) == 0: return True intervals.sort(key=self.sortbybegin) for i in range(len(intervals) - 1): if intervals[i][1] > intervals[i+1][0]: return False return True def sortbybegin(self, element): return element[0]
AndroidSourceAnalysise/ns-api
src/main/java/com/ns/tld/job/TwitterMonthJob.java
/** * project name: ns-api * file name:OrderConfirmJob * package name:com.ns.tld.job * date:2018-04-27 14:53 * author: wq * Copyright (c) CD Technology Co.,Ltd. All rights reserved. */ package com.ns.tld.job; import java.util.Map; import org.quartz.Job; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; import com.jfinal.plugin.activerecord.Db; import com.ns.common.quartzplugin.Scheduled; import com.ns.common.utils.DateUtil; import com.ns.tld.service.TldTwitterService; /** * 推客信息表月度定时任务. * 1. 备份表 * 2. 清空月度销售额 * **/ @Scheduled(cron = "0 5 0 1 * ?") public class TwitterMonthJob implements Job { static TldTwitterService twitterService = TldTwitterService.me; @Override public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException { backUpTwitterTable(); cleanMonthScale(); } private void backUpTwitterTable() { String lastMonthStr = DateUtil.getMonthStr(-1,0,"yyyy_MM"); String bak_table_name = "tld_twitter".concat("_").concat(lastMonthStr); Db.update("create table " + bak_table_name + " as select * from tld_twitter"); } private void cleanMonthScale() { Db.update("update tld_twitter set month_sale = 0 "); } }
coreyjwhite/phetch
src/components/layout/Content.js
<filename>src/components/layout/Content.js /** * @module Content * @category Layout * @description A flex column component be passed to * {@link module:Layout|<Layout>} */ import PropTypes from "prop-types"; import styled from "styled-components"; import camelize from "libs/camelize"; import m from "styles/measures"; const StyledDiv = styled.div.attrs(props => ({ id: `${camelize(props.pageTitle)}PageContentContainer`, className: "contentContainer" }))` display: flex; width: 100%; height: fit-content; flex-flow: row wrap; align-self: flex-start; @media (min-width: ${m.devMd}) { padding: ${m.sp4}; } `; /** * React function component * * @param props * @param {string} props.pageTitle - HTML <title> element * @param {object} props.children - Content components */ export default function Content(props) { return <StyledDiv {...props}>{props.children}</StyledDiv>; } Content.propTypes = { children: PropTypes.oneOfType([PropTypes.element, PropTypes.array]), pageTitle: PropTypes.string };
MasherJames/GetEasyKk
client/src/components/Register/index.js
<reponame>MasherJames/GetEasyKk<filename>client/src/components/Register/index.js import React, { Component } from "react"; import { Link } from "react-router-dom"; import PropTypes from "prop-types"; import { connect } from "react-redux"; import { registerRequestAction } from "../../actions/registerActions"; import InputField from "../common/InputField"; import "./register.scss"; class Register extends Component { constructor(props) { super(props); this.state = { username: "", email: "", password: "", confirmPassword: "", errors: {} }; } handleChange = e => { this.setState({ [e.target.name]: e.target.value }); }; handleSubmit = e => { e.preventDefault(); const newUser = { email: this.state.email, username: this.state.username, password: <PASSWORD>, confirmPassword: <PASSWORD> }; this.props.registerRequestAction(newUser); }; componentDidMount() { if (this.props.isAuthenticated) { this.props.history.push("/dashboard"); } } componentWillReceiveProps(nextProps) { if (nextProps.errors) { this.setState({ errors: nextProps.errors }); } } render() { const { errors } = this.state; return ( <div className="container"> <div className="register"> <h2 className="register-head">Sign Up</h2> <Link to="/login" className="have-account"> Already have an account ? 
</Link> <form onSubmit={this.handleSubmit} className="register-form"> <InputField name="email" placeholder="Email" value={this.state.email} error={errors.email} type="email" onChange={this.handleChange} /> <InputField name="username" placeholder="Username" value={this.state.username} error={errors.username} onChange={this.handleChange} /> <InputField name="password" type="password" placeholder="Password" value={this.state.password} error={errors.password} onChange={this.handleChange} /> <InputField name="confirmPassword" type="password" placeholder="<PASSWORD>Password" value={this.state.confirmPassword} error={errors.confirmPassword} onChange={this.handleChange} /> <button type="submit" className="btn"> Sign up </button> </form> </div> </div> ); } } const mapStateToProps = state => ({ errors: state.registerUser.errors, success: state.registerUser.success, payload: state.registerUser.payload, signingUp: state.registerUser.signingUp, isAuthenticated: state.loginUser.isAuthenticated }); Register.propTypes = { registerRequestAction: PropTypes.func.isRequired, signingUp: PropTypes.bool.isRequired, success: PropTypes.string.isRequired, errors: PropTypes.object.isRequired }; export default connect( mapStateToProps, { registerRequestAction } )(Register);
schurerlab/LINCSDataPortal_2.0
UI-Code/src/app/pages/SigDatasets.js
import React from "react"; import {Grid, Col, Row} from 'react-bootstrap'; import axios from 'axios'; import DatasetSearch from '../components/datasets/DatasetSearch'; import DatasetsTable from '../components/datasets/DatasetsTable'; import Datasets from '../components/datasets/Datasets'; export const SigDatasets = (props) => { return ( <div > <Datasets/> </div> ) }
robin-aws/aws-encryption-sdk-c
verification/cbmc/proofs/aws_cryptosdk_default_cmm_set_alg_id/aws_cryptosdk_default_cmm_set_alg_id_harness.c
<gh_stars>10-100 /* * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not * use * this file except in compliance with the License. A copy of the License is * located at * * http://aws.amazon.com/apache2.0/ * * or in the "license" file accompanying this file. This file is distributed on * an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or * implied. See the License for the specific language governing permissions and * limitations under the License. */ #include <aws/cryptosdk/default_cmm.h> #include <aws/cryptosdk/materials.h> #include <make_common_data_structures.h> void aws_cryptosdk_default_cmm_set_alg_id_harness() { /* Nondet input */ enum aws_cryptosdk_alg_id alg_id; struct aws_cryptosdk_keyring *keyring = malloc(sizeof(*keyring)); const struct aws_cryptosdk_keyring_vt vtable = { .vt_size = sizeof(struct aws_cryptosdk_keyring_vt), .name = ensure_c_str_is_allocated(SIZE_MAX), .destroy = nondet_voidp(), .on_encrypt = nondet_voidp(), .on_decrypt = nondet_voidp() }; ensure_cryptosdk_keyring_has_allocated_members(keyring, &vtable); __CPROVER_assume(aws_cryptosdk_keyring_is_valid(keyring)); /* Instantiate the default (non-caching) implementation of the Crypto MaterialsManager (CMM) */ struct aws_cryptosdk_cmm *cmm = aws_cryptosdk_default_cmm_new(can_fail_allocator(), keyring); /* Assumptions */ __CPROVER_assume(cmm != NULL); /* Operation under verification */ aws_cryptosdk_default_cmm_set_alg_id(cmm, alg_id); /* Post-conditions */ assert(aws_cryptosdk_cmm_base_is_valid(cmm)); }
dgeibi/the-little-cipher
src/createHistory.js
<filename>src/createHistory.js export default pathname => { if (process.env.SSR) { const { createMemoryHistory } = require('history') return createMemoryHistory({ initialEntries: [pathname], }) } const { createBrowserHistory, createHashHistory } = require('history') const supportHistoryAPI = window.history && window.history.pushState return (supportHistoryAPI ? createBrowserHistory : createHashHistory)() }
fishjar/swagger-go
public/ipc.js
const path = require("path"); const fs = require("fs"); const { ipcMain, dialog } = require("electron"); const downloadRepo = require("download-git-repo"); const ejs = require("ejs"); const prettier = require("prettier"); const archiver = require("archiver"); const fse = require("fs-extra"); /** * 监听打开文件 */ ipcMain.on("open-file-dialog", event => { dialog.showOpenDialog( { title: "请选择一个文件", properties: ["openFile"], filters: [ { name: "Swagger Files", extensions: ["yaml", "yml", "json"] }, { name: "All Files", extensions: ["*"] }, ], }, filePaths => { if (filePaths) { event.sender.send("open-file-ok", filePaths); } else { event.sender.send("open-file-err"); } } ); }); /** * 监听打开文件夹 */ ipcMain.on("open-path-dialog", event => { dialog.showOpenDialog( { title: "请选择一个文件", properties: ["openDirectory"], }, filePaths => { if (filePaths) { event.sender.send("open-path-ok", filePaths); } else { event.sender.send("open-path-err"); } } ); }); /** * 监听保存文件 */ ipcMain.on("save-file-dialog", event => { dialog.showSaveDialog( { title: "保存文件", defaultPath: "swagger.yaml", filters: [ { name: "Swagger Files", extensions: ["yaml", "yml", "json"] }, { name: "All Files", extensions: ["*"] }, ], }, filePath => { if (filePath) { event.sender.send("save-file-ok", filePath); } else { event.sender.send("save-file-err"); } } ); }); /** * 监听读取文件 */ ipcMain.on("read-file", (event, filePath) => { fs.readFile(filePath, (err, data) => { if (err) { event.sender.send("read-file-err", err); } else { event.sender.send("read-file-ok", data); } }); }); /** * 监听写入文件 */ ipcMain.on("write-file", (event, filePath, data) => { fs.writeFile(filePath, data, err => { if (err) { event.sender.send("write-file-err", err); } else { event.sender.send("write-file-ok"); } }); }); /** * 监听读取默认数据 */ ipcMain.on("read-default-data", event => { const filePath = path.join(__dirname, "swagger.yaml"); fs.readFile(filePath, "utf8", (err, data) => { if (err) { event.sender.send("read-default-data-err", err); } else { 
event.sender.send("read-default-data-ok", data); } }); }); /** * 监听读取README */ ipcMain.on("read-readme", event => { const filePath = path.join(__dirname, "README.md"); fs.readFile(filePath, "utf8", (err, data) => { if (err) { event.sender.send("read-readme-err", err); } else { event.sender.send("read-readme-ok", data); } }); }); /** * 监听读取缓存数据 */ ipcMain.on("read-cache", event => { const filePath = path.join(__dirname, "cache.yaml"); fs.readFile(filePath, "utf8", (err, data) => { if (err) { event.sender.send("read-cache-err", err); } else { event.sender.send("read-cache-ok", data); } }); }); /** * 监听写入缓存 */ ipcMain.on("write-cache", (event, data) => { const filePath = path.join(__dirname, "cache.yaml"); fs.writeFile(filePath, data, err => { if (err) { event.sender.send("write-cache-err", err); } else { event.sender.send("write-cache-ok"); } }); }); /** * 下载样板文件 */ ipcMain.on( "download-boilerplate", ( event, boilerplateName, repoUrl, repoBranch = "master", targetDir = "download" ) => { const localDir = path.join(__dirname, targetDir, boilerplateName); fse .emptyDir(localDir) .then(() => { downloadRepo(`${repoUrl}#${repoBranch}`, localDir, err => { if (err) { event.sender.send("download-boilerplate-err", err); } else { event.sender.send("download-boilerplate-ok"); } }); }) .catch(err => { console.log(err); event.sender.send( "download-boilerplate-err", new Error("清空文件夹出错") ); }); } ); /** * 生成样板文件 */ ipcMain.on( "generate-boilerplate", ( event, boilerplateName, { definitions, dataFormats, sourceType = "defaultLocal", sourceDir, yamlData, associations = [], } ) => { const modelKeys = Object.keys(definitions).filter( key => definitions[key]["x-isModel"] ); // 默认样板 if (sourceType === "defaultLocal") { sourceDir = path.join(__dirname, "boilerplate", boilerplateName); } // 在线下载的样板 if (sourceType === "defaultOnline" || sourceType === "customOnline") { sourceDir = path.join(__dirname, "download", boilerplateName); } if (!sourceDir) { event.sender.send( 
"generate-boilerplate-err", new Error("缺少样本来源目录") ); return; } const tmpDir = path.join(__dirname, "tmp", boilerplateName); const swaggerConfig = require(path.join( sourceDir, "swagger", "config.json" )); const { globalFiles = [], modelFiles = [], replaceFiles = [], modelReplaceFiles = [], removeFiles = [], boilerplateLanguage, templateEngine, modelFilesCase = "default", } = swaggerConfig; /** * 格式化文件 * @param {*} str */ const strFormat = str => { try { if ( ["js", "javascript", "node", "nodejs"].includes(boilerplateLanguage) ) { str = prettier.format(str, { semi: true, trailingComma: "es5", parser: "babel", }); } } catch (err) { console.log("格式化失败"); console.log(err); } return str; }; /** * 渲染模板 * @param {*} templateFile * @param {*} data */ const renderPromise = (templateFile, data) => { return new Promise((resolve, reject) => { if (templateEngine === "ejs") { ejs.renderFile(templateFile, data, function(err, str) { if (err) { console.log("渲染失败"); console.log(err); reject(err); } else { // console.log(str); resolve(strFormat(str)); } }); } else { throw new Error(`不支持的模板引擎: ${templateEngine}`); } }); }; /** * 删除 -> 渲染 -> 生成 * @param {*} templateFile * @param {*} outFile * @param {*} data */ const doPromise = (templateFile, outFile, data) => { return fse .remove(outFile) .then(() => { console.log("删除成功", outFile); return renderPromise(templateFile, data); }) .then(fileStr => { console.log("渲染成功", outFile); return fse.outputFile(outFile, fileStr); }); }; fse .emptyDir(tmpDir) .then(() => { console.log("已清空文件夹", tmpDir); return fse.copy(sourceDir, tmpDir); }) .then(() => { console.log("拷贝文件夹成功"); return Promise.all( removeFiles.map(item => { const rmFile = path.join(tmpDir, item); console.log(`删除 ${rmFile}`); return fse.remove(rmFile); }) ); }) .then(() => { console.log("删除文件成功"); return Promise.all( replaceFiles.map(item => { const placeFile = path.join(tmpDir, item[0]); const outFile = path.join(tmpDir, item[1]); console.log(`替换文件 ${outFile}`); return 
fse.copy(placeFile, outFile); }) ); }) .then(() => { console.log("拷贝替换文件成功"); return fse.outputFile( path.join(tmpDir, "swagger", "swagger.yaml"), yamlData ); }) .then(() => { console.log("生成swagger文件成功"); return Promise.all( globalFiles.map(item => { const inFile = path.join(tmpDir, item[0]); const outFile = path.join(tmpDir, item[1]); return doPromise(inFile, outFile, { definitions, dataFormats, associations, }); }) ); }) .then(() => { console.log("生成全局文件成功"); const tasks = []; modelKeys.forEach(key => { let pathKey = key; if (modelFilesCase === "lower") { pathKey = pathKey.toLowerCase(); } else if (modelFilesCase === "plural") { pathKey = definitions[key]["x-plural"]; } else if (modelFilesCase === "pluralLower") { pathKey = definitions[key]["x-plural"].toLowerCase(); } modelFiles.forEach(item => { const inFile = path.join(tmpDir, item[0]); const outFile = path.join(tmpDir, item[1].replace("*", pathKey)); tasks.push( doPromise(inFile, outFile, { modelKey: key, model: definitions[key], definitions, dataFormats, associations, }) ); }); modelReplaceFiles.forEach(item => { const placeFile = path.join(tmpDir, item[0]); const outFile = path.join(tmpDir, item[1].replace("*", pathKey)); tasks.push(fse.copy(placeFile, outFile)); }); }); return Promise.all(tasks); }) .then(() => { console.log("生成模型文件成功"); event.sender.send("generate-boilerplate-ok"); }) .catch(err => { console.log(err); event.sender.send("generate-boilerplate-err", err); }); } ); /** * 打包样板文件 */ ipcMain.on("archiver-boilerplate", (event, boilerplateName, outDir) => { const tmpDir = path.join(__dirname, "tmp", boilerplateName); try { var output = fs.createWriteStream( path.join(outDir, `${boilerplateName}.zip`) ); var archive = archiver("zip", { zlib: { level: 9 }, }); output.on("close", function() { console.log(archive.pointer() + " total bytes"); console.log("打包成功"); event.sender.send("archiver-boilerplate-ok"); }); output.on("end", function() { console.log("Data has been drained"); }); 
archive.on("warning", function(err) { console.log("警告", err); event.sender.send("archiver-boilerplate-err", err); if (err.code === "ENOENT") { // } else { // } }); archive.on("error", function(err) { console.log("打包失败"); event.sender.send("archiver-boilerplate-err", err); }); archive.pipe(output); archive.directory(tmpDir, boilerplateName); archive.finalize(); } catch (err) { console.log("打包错误"); console.log(err); event.sender.send("archiver-boilerplate-err", err); } }); /** * 拷贝样板文件 */ ipcMain.on("copy-boilerplate", (event, boilerplateName, outDir) => { const tmpDir = path.join(__dirname, "tmp", boilerplateName); fse.copy(tmpDir, path.join(outDir, boilerplateName), err => { if (err) { console.log("拷贝错误"); event.sender.send("copy-boilerplate-err", err); } else { event.sender.send("copy-boilerplate-ok"); } }); }); /** * 判断文件/文件夹是否存在 */ ipcMain.on("path-exists", (event, pathStrs) => { const outPath = path.join(...pathStrs); fse.pathExists(outPath, (err, exists) => { if (err) { console.log(err); // => null event.sender.send("path-exists-err", err); } else { event.sender.send("path-exists-ok", exists, outPath); } }); }); /** * 清除缓存文件 */ ipcMain.on("clear-cache-path", event => { const tmpDir = path.join(__dirname, "tmp"); const downloadDir = path.join(__dirname, "download"); fse .emptyDir(tmpDir) .then(() => fse.emptyDir(downloadDir)) .then(() => { event.sender.send("clear-cache-path-ok"); }) .catch(err => { console.log("清空临时文件夹错误"); event.sender.send("clear-cache-path-err", err); }); });
cryst-al/novoline
src/net/acs.java
package net; import com.viaversion.viaversion.api.protocol.remapper.PacketRemapper; import com.viaversion.viaversion.api.type.Type; import net.aAX; class acs extends PacketRemapper { final aAX c; acs(aAX var1) { this.c = var1; } public void registerMap() { this.a(Type.STRING); this.a(this.c.c()); } }
ivanvtimofeev/tf-operator
pkg/controller/redis/redis_controller.go
<gh_stars>0 package redis import ( "context" "fmt" "time" "github.com/tungstenfabric/tf-operator/pkg/apis/tf/v1alpha1" "github.com/tungstenfabric/tf-operator/pkg/controller/utils" "github.com/tungstenfabric/tf-operator/pkg/k8s" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/types" "k8s.io/client-go/util/workqueue" "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/controller" "sigs.k8s.io/controller-runtime/pkg/event" "sigs.k8s.io/controller-runtime/pkg/handler" "sigs.k8s.io/controller-runtime/pkg/manager" "sigs.k8s.io/controller-runtime/pkg/reconcile" "sigs.k8s.io/controller-runtime/pkg/source" appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logf "sigs.k8s.io/controller-runtime/pkg/log" ) var log = logf.Log.WithName("controller_redis") var restartTime, _ = time.ParseDuration("3s") var requeueReconcile = reconcile.Result{Requeue: true, RequeueAfter: restartTime} func resourceHandler(myclient client.Client) handler.Funcs { appHandler := handler.Funcs{ CreateFunc: func(e event.CreateEvent, q workqueue.RateLimitingInterface) { listOps := &client.ListOptions{Namespace: e.Meta.GetNamespace()} list := &v1alpha1.RedisList{} err := myclient.List(context.TODO(), list, listOps) if err == nil { for _, app := range list.Items { q.Add(reconcile.Request{NamespacedName: types.NamespacedName{ Name: app.GetName(), Namespace: e.Meta.GetNamespace(), }}) } } }, UpdateFunc: func(e event.UpdateEvent, q workqueue.RateLimitingInterface) { listOps := &client.ListOptions{Namespace: e.MetaNew.GetNamespace()} list := &v1alpha1.RedisList{} err := myclient.List(context.TODO(), list, listOps) if err == nil { for _, app := range list.Items { q.Add(reconcile.Request{NamespacedName: types.NamespacedName{ Name: app.GetName(), Namespace: e.MetaNew.GetNamespace(), }}) } } }, DeleteFunc: func(e event.DeleteEvent, q workqueue.RateLimitingInterface) { listOps := 
&client.ListOptions{Namespace: e.Meta.GetNamespace()} list := &v1alpha1.RedisList{} err := myclient.List(context.TODO(), list, listOps) if err == nil { for _, app := range list.Items { q.Add(reconcile.Request{NamespacedName: types.NamespacedName{ Name: app.GetName(), Namespace: e.Meta.GetNamespace(), }}) } } }, GenericFunc: func(e event.GenericEvent, q workqueue.RateLimitingInterface) { listOps := &client.ListOptions{Namespace: e.Meta.GetNamespace()} list := &v1alpha1.RedisList{} err := myclient.List(context.TODO(), list, listOps) if err == nil { for _, app := range list.Items { q.Add(reconcile.Request{NamespacedName: types.NamespacedName{ Name: app.GetName(), Namespace: e.Meta.GetNamespace(), }}) } } }, } return appHandler } // Add adds Redis controller to the manager. func Add(mgr manager.Manager) error { return add(mgr, newReconciler(mgr)) } func newReconciler(mgr manager.Manager) reconcile.Reconciler { kubernetes := k8s.New(mgr.GetClient(), mgr.GetScheme()) return &ReconcileRedis{Client: mgr.GetClient(), Scheme: mgr.GetScheme(), Manager: mgr, Kubernetes: kubernetes} } // add adds a new Controller to mgr with r as the reconcile.Reconciler. func add(mgr manager.Manager, r reconcile.Reconciler) error { // Create a new controller. c, err := controller.New("redis-controller", mgr, controller.Options{Reconciler: r}) if err != nil { return err } // Watch for changes to primary resource Redis. if err = c.Watch(&source.Kind{Type: &v1alpha1.Redis{}}, &handler.EnqueueRequestForObject{}); err != nil { return err } ownerHandler := &handler.EnqueueRequestForOwner{ IsController: true, OwnerType: &v1alpha1.Redis{}, } if err = c.Watch(&source.Kind{Type: &corev1.Secret{}}, ownerHandler); err != nil { return err } // Watch for changes to PODs. 
serviceMap := map[string]string{"tf_manager": "redis"} srcPod := &source.Kind{Type: &corev1.Pod{}} podHandler := resourceHandler(mgr.GetClient()) predPodIPChange := utils.PodIPChange(serviceMap) if err = c.Watch(srcPod, podHandler, predPodIPChange); err != nil { return err } srcSTS := &source.Kind{Type: &appsv1.StatefulSet{}} stsPred := utils.STSStatusChange(utils.RedisGroupKind()) if err = c.Watch(srcSTS, ownerHandler, stsPred); err != nil { return err } return nil } // blank assignment to verify that ReconcileRedis implements reconcile.Reconciler. var _ reconcile.Reconciler = &ReconcileRedis{} // ReconcileRedis reconciles a Redis object. type ReconcileRedis struct { // This client, initialized using mgr.Client() above, is a split client // that reads objects from the cache and writes to the apiserver. Client client.Client Scheme *runtime.Scheme Manager manager.Manager Kubernetes *k8s.Kubernetes } // Reconcile reconciles Redis func (r *ReconcileRedis) Reconcile(request reconcile.Request) (reconcile.Result, error) { reqLogger := log.WithName("Reconcile").WithName(request.Name) reqLogger.Info("Start") instanceType := "redis" // Check ZIU status f, err := v1alpha1.CanReconcile("Redis", r.Client) if err != nil { log.Error(err, "When check redis ziu status") return reconcile.Result{}, err } if !f { log.Info("redis reconcile blocks by ZIU status") return reconcile.Result{Requeue: true, RequeueAfter: v1alpha1.ZiuRestartTime}, nil } instance := &v1alpha1.Redis{} if err := r.Client.Get(context.TODO(), request.NamespacedName, instance); err != nil && errors.IsNotFound(err) { reqLogger.Error(err, "Failed to get redis obj") return reconcile.Result{}, nil } configMapName := request.Name + "-" + instanceType + "-configmap" configMap, err := instance.CreateConfigMap(configMapName, r.Client, r.Scheme, request) if err != nil { reqLogger.Error(err, "Failed to create configmap") return reconcile.Result{}, err } secretCertificates, err := 
instance.CreateSecret(request.Name+"-secret-certificates", r.Client, r.Scheme, request) if err != nil { reqLogger.Error(err, "Failed to create secret") return reconcile.Result{}, err } statefulSet := GetSTS() if err = instance.PrepareSTS(statefulSet, &instance.Spec.CommonConfiguration, request, r.Scheme); err != nil { reqLogger.Error(err, "Failed to prepare stateful set") return reconcile.Result{}, err } if err = v1alpha1.EnsureServiceAccount(&statefulSet.Spec.Template.Spec, instanceType, instance.Spec.CommonConfiguration.ImagePullSecrets, r.Client, request, r.Scheme, instance); err != nil { return reconcile.Result{}, err } configmapsVolumeName := request.Name + "-" + instanceType + "-volume" instance.AddVolumesToIntendedSTS(statefulSet, map[string]string{ configMapName: configmapsVolumeName, }) v1alpha1.AddCAVolumeToIntendedSTS(statefulSet) instance.AddSecretVolumesToIntendedSTS(statefulSet, map[string]string{secretCertificates.Name: request.Name + "-secret-certificates"}) statefulSet.Spec.Template.Spec.Affinity = &corev1.Affinity{ PodAntiAffinity: &corev1.PodAntiAffinity{ RequiredDuringSchedulingIgnoredDuringExecution: []corev1.PodAffinityTerm{{ LabelSelector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{{ Key: instanceType, Operator: "In", Values: []string{request.Name}, }}, }, TopologyKey: "kubernetes.io/hostname", }}, }, } utils.CleanupContainers(&statefulSet.Spec.Template.Spec, instance.Spec.ServiceConfiguration.Containers) redisPort := *instance.ConfigurationParameters().RedisPort for idx := range statefulSet.Spec.Template.Spec.Containers { container := &statefulSet.Spec.Template.Spec.Containers[idx] instanceContainer := utils.GetContainerFromList(container.Name, instance.Spec.ServiceConfiguration.Containers) if instanceContainer.Command != nil { container.Command = instanceContainer.Command } container.Image = instanceContainer.Image container.VolumeMounts = append(container.VolumeMounts, corev1.VolumeMount{ Name: request.Name 
+ "-" + instanceType + "-volume", MountPath: "/etc/contrailconfigmaps", }) v1alpha1.AddCertsMounts(request.Name, container) v1alpha1.SetLogLevelEnv(instance.Spec.CommonConfiguration.LogLevel, container) if container.Command == nil { command := []string{"bash", fmt.Sprintf("/etc/contrailconfigmaps/run-%s.sh", container.Name)} container.Command = command } switch container.Name { case "redis": readinessProbe := corev1.Probe{ FailureThreshold: 3, PeriodSeconds: 3, Handler: corev1.Handler{ Exec: &corev1.ExecAction{ Command: []string{"sh", "-c", fmt.Sprintf("redis-cli -h 127.0.0.1 -p %d ping", redisPort)}, }, }, } startupProbe := corev1.Probe{ FailureThreshold: 30, PeriodSeconds: 3, Handler: corev1.Handler{ Exec: &corev1.ExecAction{ Command: []string{"sh", "-c", fmt.Sprintf("redis-cli -h 127.0.0.1 -p %d ping", redisPort)}, }, }, } container.ReadinessProbe = &readinessProbe container.StartupProbe = &startupProbe } } v1alpha1.AddCommonVolumes(&statefulSet.Spec.Template.Spec, instance.Spec.CommonConfiguration) v1alpha1.DefaultSecurityContext(&statefulSet.Spec.Template.Spec) if created, err := v1alpha1.CreateServiceSTS(instance, instanceType, statefulSet, r.Client); err != nil || created { if err != nil { reqLogger.Error(err, "Failed to create the stateful set.") return reconcile.Result{}, err } return requeueReconcile, err } if updated, err := v1alpha1.UpdateServiceSTS(instance, instanceType, statefulSet, false, r.Client); err != nil || updated { if err != nil && !v1alpha1.IsOKForRequeque(err) { reqLogger.Error(err, "Failed to update the stateful set.") return reconcile.Result{}, err } return requeueReconcile, nil } podIPList, podIPMap, err := instance.PodIPListAndIPMapFromInstance(instanceType, request, r.Client) if err != nil { reqLogger.Error(err, "Failed to get pod ip list from instance.") return reconcile.Result{}, err } if updated, err := v1alpha1.UpdatePodsAnnotations(podIPList, r.Client); updated || err != nil { if err != nil && !v1alpha1.IsOKForRequeque(err) { 
reqLogger.Error(err, "Failed to update pods annotations.") return reconcile.Result{}, err } return requeueReconcile, nil } if len(podIPMap) > 0 { // TODO: Services can be run on masters only, ensure that pods number is if nodes, err := v1alpha1.GetControllerNodes(r.Client); err != nil || len(podIPList) < len(nodes) { // to avoid redundand sts-es reloading configure only as STS pods are ready reqLogger.Error(err, "Not enough pods are ready to generate configs (pods < nodes)", "pods", len(podIPList), "nodes", len(nodes)) return requeueReconcile, err } if err := v1alpha1.EnsureCertificatesExist(instance, podIPList, instanceType, r.Client, r.Scheme); err != nil { reqLogger.Error(err, "Failed to ensure CertificatesExist") return reconcile.Result{}, err } data, err := instance.InstanceConfiguration(podIPList, r.Client) if err != nil { reqLogger.Error(err, "Failed to get config data.") return reconcile.Result{}, err } if err = v1alpha1.UpdateConfigMap(instance, instanceType, data, r.Client); err != nil { reqLogger.Error(err, "Failed to update config map.") return reconcile.Result{}, err } if updated, err := instance.ManageNodeStatus(podIPMap, r.Client); err != nil || updated { if err != nil && !v1alpha1.IsOKForRequeque(err) { reqLogger.Error(err, "Failed to manage node status") return reconcile.Result{}, err } return requeueReconcile, nil } } falseVal := false if instance.Status.ConfigChanged == nil { instance.Status.ConfigChanged = &falseVal } beforeCheck := *instance.Status.ConfigChanged newConfigMap := &corev1.ConfigMap{} if err = r.Client.Get(context.TODO(), types.NamespacedName{Name: configMapName, Namespace: request.Namespace}, newConfigMap); err != nil { reqLogger.Error(err, "Failed to get the config map.") return reconcile.Result{}, err } *instance.Status.ConfigChanged = !v1alpha1.CmpConfigMaps(configMap, newConfigMap) if *instance.Status.ConfigChanged { reqLogger.Info("Update StatefulSet: ConfigChanged") if _, err := v1alpha1.UpdateServiceSTS(instance, 
instanceType, statefulSet, true, r.Client); err != nil && !v1alpha1.IsOKForRequeque(err) { reqLogger.Error(err, "Update StatefulSet failed") return reconcile.Result{}, err } return requeueReconcile, nil } if beforeCheck != *instance.Status.ConfigChanged { reqLogger.Info("Update Status: ConfigChanged") if err := r.Client.Status().Update(context.TODO(), instance); err != nil && !v1alpha1.IsOKForRequeque(err) { reqLogger.Error(err, "Update Status failed") return reconcile.Result{}, err } return requeueReconcile, nil } instance.Status.Active = new(bool) instance.Status.Degraded = new(bool) if err = instance.SetInstanceActive(r.Client, instance.Status.Active, instance.Status.Degraded, statefulSet, request); err != nil { if v1alpha1.IsOKForRequeque(err) { return requeueReconcile, nil } reqLogger.Error(err, "Failed to set instance active") return reconcile.Result{}, err } reqLogger.Info("Done") return reconcile.Result{}, nil }
valoni/TinyCLR-Ports
Targets/LPC23xx_LPC24xx/LPC24_DA.cpp
<reponame>valoni/TinyCLR-Ports // Copyright Microsoft Corporation // Copyright GHI Electronics, LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "LPC24.h" #define DACR (*(volatile unsigned long *)0xE006C000) #define LPC24_DAC_PRECISION_BITS 10 // Number of Bits in the DAC Convertion #define LPC24_DAC_MAX_VALUE (1<<LPC24_DAC_PRECISION_BITS) /////////////////////////////////////////////////////////////////////////////// #define TOTAL_DAC_CONTROLLERS 1 static TinyCLR_Dac_Controller dacControllers[TOTAL_DAC_CONTROLLERS]; static TinyCLR_Api_Info dacApi[TOTAL_DAC_CONTROLLERS]; static const LPC24_Gpio_Pin dacPins[] = LPC24_DAC_PINS; struct DacState { bool isOpened[SIZEOF_ARRAY(dacPins)]; }; static DacState dacStates[TOTAL_DAC_CONTROLLERS]; const char* dacApiNames[TOTAL_DAC_CONTROLLERS] = { "GHIElectronics.TinyCLR.NativeApis.LPC24.DacController\\0" }; void LPC24_Dac_AddApi(const TinyCLR_Api_Manager* apiManager) { for (int32_t i = 0; i < TOTAL_DAC_CONTROLLERS; i++) { dacControllers[i].ApiInfo = &dacApi[i]; dacControllers[i].Acquire = &LPC24_Dac_Acquire; dacControllers[i].Release = &LPC24_Dac_Release; dacControllers[i].OpenChannel = &LPC24_Dac_OpenChannel; dacControllers[i].CloseChannel = &LPC24_Dac_CloseChannel; dacControllers[i].WriteValue = &LPC24_Dac_WriteValue; dacControllers[i].GetMinValue = &LPC24_Dac_GetMinValue; dacControllers[i].GetMaxValue = &LPC24_Dac_GetMaxValue; dacControllers[i].GetResolutionInBits = &LPC24_Dac_GetResolutionInBits; 
dacControllers[i].GetChannelCount = &LPC24_Dac_GetChannelCount; dacApi[i].Author = "GHI Electronics, LLC"; dacApi[i].Name = dacApiNames[i]; dacApi[i].Type = TinyCLR_Api_Type::DacController; dacApi[i].Version = 0; dacApi[i].Implementation = &dacControllers[i]; dacApi[i].State = &dacStates[i]; apiManager->Add(apiManager, &dacApi[i]); } apiManager->SetDefaultName(apiManager, TinyCLR_Api_Type::DacController, dacApi[0].Name); } TinyCLR_Result LPC24_Dac_Acquire(const TinyCLR_Dac_Controller* self) { if (self == nullptr) return TinyCLR_Result::ArgumentNull; return TinyCLR_Result::Success; } TinyCLR_Result LPC24_Dac_Release(const TinyCLR_Dac_Controller* self) { if (self == nullptr) return TinyCLR_Result::ArgumentNull; return TinyCLR_Result::Success; } TinyCLR_Result LPC24_Dac_OpenChannel(const TinyCLR_Dac_Controller* self, uint32_t channel) { if (channel >= LPC24_Dac_GetChannelCount(self)) return TinyCLR_Result::ArgumentOutOfRange; if (!LPC24_GpioInternal_OpenPin(dacPins[channel].number)) return TinyCLR_Result::SharingViolation; auto state = reinterpret_cast<DacState*>(self->ApiInfo->State); LPC24_GpioInternal_ConfigurePin(dacPins[channel].number, LPC24_Gpio_Direction::Input, dacPins[channel].pinFunction, LPC24_Gpio_PinMode::Inactive); DACR = (0 << 6); // This sets the initial starting voltage at 0 state->isOpened[channel] = true; return TinyCLR_Result::Success; } TinyCLR_Result LPC24_Dac_CloseChannel(const TinyCLR_Dac_Controller* self, uint32_t channel) { if (channel >= LPC24_Dac_GetChannelCount(self)) return TinyCLR_Result::ArgumentOutOfRange; auto state = reinterpret_cast<DacState*>(self->ApiInfo->State); if (state->isOpened[channel]) LPC24_GpioInternal_ClosePin(dacPins[channel].number); state->isOpened[channel] = false; return TinyCLR_Result::Success; } TinyCLR_Result LPC24_Dac_WriteValue(const TinyCLR_Dac_Controller* self, uint32_t channel, int32_t value) { if (channel >= LPC24_Dac_GetChannelCount(self)) return TinyCLR_Result::ArgumentOutOfRange; if (value > 
LPC24_DAC_MAX_VALUE) { value = LPC24_DAC_MAX_VALUE; } if (value < 1) { value = 1; } DACR = ((value - 1) << 6); // Sets voltage level between 0 and 1023. return TinyCLR_Result::Success; } uint32_t LPC24_Dac_GetChannelCount(const TinyCLR_Dac_Controller* self) { return SIZEOF_ARRAY(dacPins); } uint32_t LPC24_Dac_GetResolutionInBits(const TinyCLR_Dac_Controller* self) { return LPC24_DAC_PRECISION_BITS; } int32_t LPC24_Dac_GetMinValue(const TinyCLR_Dac_Controller* self) { return 0; } int32_t LPC24_Dac_GetMaxValue(const TinyCLR_Dac_Controller* self) { return ((1 << LPC24_DAC_PRECISION_BITS) - 1); } void LPC24_Dac_Reset() { for (auto c = 0; c < TOTAL_DAC_CONTROLLERS; c++) { for (auto ch = 0; ch < LPC24_Dac_GetChannelCount(&dacControllers[c]); ch++) { LPC24_Dac_CloseChannel(&dacControllers[c], ch); dacStates[c].isOpened[ch] = false; } } }
bensheldon/panlexicon-rails
app/lib/seeder.rb
# frozen_string_literal: true class Seeder def users User.find_or_initialize_by(email: '<EMAIL>') do |user| user.password = 'password' user.skip_confirmation! user.save end end def search_history(total_days = 30) datetime = total_days.days.ago words = Word.order(Arel.sql("RANDOM()")).limit(total_days * 25) raise "No Words" if words.empty? loop do search_record = SearchRecord.create! created_at: datetime search_record.search_records_words.create! [{ word: words.sample, position: 0 }] datetime += 1.hour break if datetime > Time.current end end end
my-devops-info/cloudrail-knowledge
tests/knowledge/rules/aws/non_context_aware/encryption_enforcement_rules/encrypt_at_rest/test_ensure_cloud_watch_log_groups_encrypted_rule.py
import unittest

from cloudrail.knowledge.context.aws.cloudwatch.cloud_watch_log_group import CloudWatchLogGroup
from cloudrail.knowledge.context.aws.kms.kms_key import KmsKey
from cloudrail.knowledge.context.aws.kms.kms_key_manager import KeyManager
from cloudrail.knowledge.context.aws.aws_environment_context import AwsEnvironmentContext
from cloudrail.knowledge.context.terraform_state import TerraformState
from cloudrail.knowledge.rules.aws.non_context_aware.encryption_enforcement_rules.encrypt_at_rest.ensure_cloud_watch_log_groups_encrypted_rule import \
    EnsureCloudWatchLogGroupsEncryptedRule
from cloudrail.knowledge.rules.base_rule import RuleResultType
from cloudrail.dev_tools.rule_test_utils import create_empty_entity


class TestEnsureCloudWatchLogGroupsEncryptedRule(unittest.TestCase):
    """Unit tests for EnsureCloudWatchLogGroupsEncryptedRule.

    Each test builds a minimal AwsEnvironmentContext containing a single
    CloudWatchLogGroup (marked as a new Terraform resource) and asserts
    the rule's verdict for a different encryption configuration.
    """

    def setUp(self):
        self.rule = EnsureCloudWatchLogGroupsEncryptedRule()

    def test_not_car_cloudwatch_log_group_encrypted_at_rest_using_kms_cmk__kms_encryption_missing__fail(self):
        # Log group with no kms_encryption set at all -> rule must fail.
        # Arrange
        cloud_watch_log_group: CloudWatchLogGroup = create_empty_entity(CloudWatchLogGroup)
        terraform_state = create_empty_entity(TerraformState)
        cloud_watch_log_group.terraform_state = terraform_state
        cloud_watch_log_group.terraform_state.is_new = True
        context = AwsEnvironmentContext(cloud_watch_log_groups=[cloud_watch_log_group])
        # Act
        result = self.rule.run(context, {})
        # Assert
        self.assertEqual(RuleResultType.FAILED, result.status)
        self.assertEqual(1, len(result.issues))

    def test_not_car_cloudwatch_log_group_encrypted_at_rest_using_kms_cmk__kms_key_manager_is_aws__fail(self):
        # KMS encryption is present but the key is AWS-managed, not a
        # customer-managed key (CMK) -> rule must still fail.
        # Arrange
        cloud_watch_log_group: CloudWatchLogGroup = create_empty_entity(CloudWatchLogGroup)
        terraform_state = create_empty_entity(TerraformState)
        cloud_watch_log_group.terraform_state = terraform_state
        cloud_watch_log_group.terraform_state.is_new = True
        cloud_watch_log_group.kms_encryption = 'kms_encryption'
        kms_key: KmsKey = create_empty_entity(KmsKey)
        kms_key.key_manager = KeyManager.AWS
        cloud_watch_log_group.kms_data = kms_key
        context = AwsEnvironmentContext(cloud_watch_log_groups=[cloud_watch_log_group])
        # Act
        result = self.rule.run(context, {})
        # Assert
        self.assertEqual(RuleResultType.FAILED, result.status)
        self.assertEqual(1, len(result.issues))

    def test_not_car_cloudwatch_log_group_encrypted_at_rest_using_kms_cmk_pass(self):
        # KMS encryption with a CUSTOMER-managed key -> rule passes with
        # no issues.
        # Arrange
        cloud_watch_log_group: CloudWatchLogGroup = create_empty_entity(CloudWatchLogGroup)
        terraform_state = create_empty_entity(TerraformState)
        cloud_watch_log_group.terraform_state = terraform_state
        cloud_watch_log_group.terraform_state.is_new = True
        cloud_watch_log_group.kms_encryption = 'kms_encryption'
        kms_key: KmsKey = create_empty_entity(KmsKey)
        kms_key.key_manager = KeyManager.CUSTOMER
        cloud_watch_log_group.kms_data = kms_key
        context = AwsEnvironmentContext(cloud_watch_log_groups=[cloud_watch_log_group])
        # Act
        result = self.rule.run(context, {})
        # Assert
        self.assertEqual(RuleResultType.SUCCESS, result.status)
        self.assertEqual(0, len(result.issues))
teamapp/google-cloud-ruby
google-cloud-pubsub/test/google/cloud/pubsub/project/topics_test.rb
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require "helper" describe Google::Cloud::PubSub::Project, :topics, :mock_pubsub do let(:topics_with_token) do response = Google::Cloud::PubSub::V1::ListTopicsResponse.new topics_hash(3, "next_page_token") paged_enum_struct response end let(:topics_without_token) do response = Google::Cloud::PubSub::V1::ListTopicsResponse.new topics_hash(2) paged_enum_struct response end let(:topics_with_token_2) do response = Google::Cloud::PubSub::V1::ListTopicsResponse.new topics_hash(3, "second_page_token") paged_enum_struct response end let(:labels) { { "foo" => "bar" } } let(:kms_key) { "projects/a/locations/b/keyRings/c/cryptoKeys/d" } let(:persistence_regions) { ["us-west1", "us-west2"] } let(:schema_name) { "my-schema" } let(:message_encoding) { :JSON } let(:async) do { max_bytes: 2_000_000, max_messages: 200, interval: 0.02, threads: { publish: 3, callback: 5 }, flow_control: { message_limit: 4_000, byte_limit: 40_000_000, limit_exceeded_behavior: :block } } end it "creates a topic" do new_topic_name = "new-topic-#{Time.now.to_i}" create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_name) mock = Minitest::Mock.new mock.expect :create_topic, create_res, [name: topic_path(new_topic_name), labels: nil, kms_key_name: nil, message_storage_policy: nil, schema_settings: nil] pubsub.service.mocked_publisher = mock topic = pubsub.create_topic new_topic_name mock.verify _(topic.name).must_equal 
topic_path(new_topic_name) _(topic.labels).must_be :empty? _(topic.labels).must_be :frozen? _(topic.kms_key).must_be :empty? _(topic.persistence_regions).must_be :empty? _(topic.schema_name).must_be :nil? _(topic.message_encoding).must_be :nil? _(topic.message_encoding_json?).must_equal false _(topic.message_encoding_binary?).must_equal false end it "creates a topic with fully-qualified topic path" do new_topic_path = "projects/other-project/topics/new-topic-#{Time.now.to_i}" create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_path) mock = Minitest::Mock.new mock.expect :create_topic, create_res, [name: new_topic_path, labels: nil, kms_key_name: nil, message_storage_policy: nil, schema_settings: nil] pubsub.service.mocked_publisher = mock topic = pubsub.create_topic new_topic_path mock.verify _(topic.name).must_equal new_topic_path end it "creates a topic with new_topic_alias" do new_topic_name = "new-topic-#{Time.now.to_i}" create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_name) mock = Minitest::Mock.new mock.expect :create_topic, create_res, [name: topic_path(new_topic_name), labels: nil, kms_key_name: nil, message_storage_policy: nil, schema_settings: nil] pubsub.service.mocked_publisher = mock topic = pubsub.new_topic new_topic_name mock.verify _(topic.name).must_equal topic_path(new_topic_name) _(topic.labels).must_be :empty? _(topic.labels).must_be :frozen? _(topic.kms_key).must_be :empty? _(topic.persistence_regions).must_be :empty? _(topic.schema_name).must_be :nil? _(topic.message_encoding).must_be :nil? 
_(topic.message_encoding_json?).must_equal false _(topic.message_encoding_binary?).must_equal false end it "creates a topic with labels" do new_topic_name = "new-topic-#{Time.now.to_i}" create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_name, labels: labels) mock = Minitest::Mock.new mock.expect :create_topic, create_res, [name: topic_path(new_topic_name), labels: labels, kms_key_name: nil, message_storage_policy: nil, schema_settings: nil] pubsub.service.mocked_publisher = mock topic = pubsub.create_topic new_topic_name, labels: labels mock.verify _(topic.name).must_equal topic_path(new_topic_name) _(topic.labels).must_equal labels _(topic.labels).must_be :frozen? _(topic.kms_key).must_be :empty? _(topic.persistence_regions).must_be :empty? _(topic.schema_name).must_be :nil? _(topic.message_encoding).must_be :nil? _(topic.message_encoding_json?).must_equal false _(topic.message_encoding_binary?).must_equal false end it "creates a topic with kms_key" do new_topic_name = "new-topic-#{Time.now.to_i}" create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_name, kms_key_name: kms_key) mock = Minitest::Mock.new mock.expect :create_topic, create_res, [name: topic_path(new_topic_name), labels: nil, kms_key_name: kms_key, message_storage_policy: nil, schema_settings: nil] pubsub.service.mocked_publisher = mock topic = pubsub.create_topic new_topic_name, kms_key: kms_key mock.verify _(topic.name).must_equal topic_path(new_topic_name) _(topic.labels).must_be :empty? _(topic.labels).must_be :frozen? _(topic.kms_key).must_equal kms_key _(topic.persistence_regions).must_be :empty? _(topic.schema_name).must_be :nil? _(topic.message_encoding).must_be :nil? 
_(topic.message_encoding_json?).must_equal false _(topic.message_encoding_binary?).must_equal false end it "creates a topic with persistence_regions" do new_topic_name = "new-topic-#{Time.now.to_i}" create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_name, persistence_regions: persistence_regions) mock = Minitest::Mock.new message_storage_policy = Google::Cloud::PubSub::V1::MessageStoragePolicy.new allowed_persistence_regions: persistence_regions mock.expect :create_topic, create_res, [name: topic_path(new_topic_name), labels: nil, kms_key_name: nil, message_storage_policy: message_storage_policy, schema_settings: nil] pubsub.service.mocked_publisher = mock topic = pubsub.create_topic new_topic_name, persistence_regions: persistence_regions mock.verify _(topic.name).must_equal topic_path(new_topic_name) _(topic.labels).must_be :empty? _(topic.labels).must_be :frozen? _(topic.kms_key).must_be :empty? _(topic.persistence_regions).must_equal persistence_regions _(topic.schema_name).must_be :nil? _(topic.message_encoding).must_be :nil? _(topic.message_encoding_json?).must_equal false _(topic.message_encoding_binary?).must_equal false end it "creates a topic with schema_name and message_encoding" do new_topic_name = "new-topic-#{Time.now.to_i}" schema_settings = Google::Cloud::PubSub::V1::SchemaSettings.new schema: schema_path(schema_name), encoding: message_encoding create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_name) create_res.schema_settings = schema_settings mock = Minitest::Mock.new mock.expect :create_topic, create_res, [name: topic_path(new_topic_name), labels: nil, kms_key_name: nil, message_storage_policy: nil, schema_settings: schema_settings] pubsub.service.mocked_publisher = mock topic = pubsub.create_topic new_topic_name, schema_name: schema_name, message_encoding: message_encoding mock.verify _(topic.name).must_equal topic_path(new_topic_name) _(topic.labels).must_be :empty? _(topic.labels).must_be :frozen? 
_(topic.kms_key).must_be :empty? _(topic.schema_name).must_equal schema_path(schema_name) _(topic.message_encoding).must_equal message_encoding end it "creates a topic with async option" do new_topic_name = "new-topic-#{Time.now.to_i}" create_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(new_topic_name) mock = Minitest::Mock.new mock.expect :create_topic, create_res, [name: topic_path(new_topic_name), labels: nil, kms_key_name: nil, message_storage_policy: nil, schema_settings: nil] pubsub.service.mocked_publisher = mock topic = pubsub.create_topic new_topic_name, async: async topic.enable_message_ordering! # Create the AsyncPublisher mock.verify _(topic.async_publisher.topic_name).must_equal topic_path(new_topic_name) _(topic.async_publisher.max_bytes).must_equal async[:max_bytes] _(topic.async_publisher.max_messages).must_equal async[:max_messages] _(topic.async_publisher.interval).must_equal async[:interval] _(topic.async_publisher.publish_threads).must_equal async[:threads][:publish] _(topic.async_publisher.callback_threads).must_equal async[:threads][:callback] _(topic.async_publisher.flow_control).must_equal async[:flow_control] end it "raises when creating a topic with schema_name but without message_encoding" do err = expect do topic = pubsub.create_topic "new-topic", schema_name: schema_name end.must_raise ArgumentError _(err.message).must_equal "Schema settings must include both schema_name and message_encoding." end it "raises when creating a topic without schema_name but with message_encoding" do err = expect do topic = pubsub.create_topic "new-topic", message_encoding: message_encoding end.must_raise ArgumentError _(err.message).must_equal "Schema settings must include both schema_name and message_encoding." 
end it "gets a topic" do topic_name = "found-topic" get_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(topic_name) mock = Minitest::Mock.new mock.expect :get_topic, get_res, [topic: topic_path(topic_name)] pubsub.service.mocked_publisher = mock topic = pubsub.topic topic_name mock.verify _(topic.name).must_equal topic_path(topic_name) _(topic).wont_be :reference? _(topic).must_be :resource? end it "gets a topic with fully-qualified topic path" do topic_full_path = "projects/other-project/topics/found-topic" get_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(topic_full_path) mock = Minitest::Mock.new mock.expect :get_topic, get_res, [topic: topic_path(topic_full_path)] pubsub.service.mocked_publisher = mock topic = pubsub.topic topic_full_path mock.verify _(topic.name).must_equal topic_full_path end it "gets a topic with get_topic alias" do topic_name = "found-topic" get_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(topic_name) mock = Minitest::Mock.new mock.expect :get_topic, get_res, [topic: topic_path(topic_name)] pubsub.service.mocked_publisher = mock topic = pubsub.get_topic topic_name mock.verify _(topic.name).must_equal topic_path(topic_name) _(topic).wont_be :reference? _(topic).must_be :resource? end it "gets a topic with find_topic alias" do topic_name = "found-topic" get_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(topic_name) mock = Minitest::Mock.new mock.expect :get_topic, get_res, [topic: topic_path(topic_name)] pubsub.service.mocked_publisher = mock topic = pubsub.find_topic topic_name mock.verify _(topic.name).must_equal topic_path(topic_name) _(topic).wont_be :reference? _(topic).must_be :resource? end it "returns nil when getting an non-existent topic" do not_found_topic_name = "not-found-topic" stub = Object.new def stub.get_topic *args raise Google::Cloud::NotFoundError.new("not found") end pubsub.service.mocked_publisher = stub topic = pubsub.find_topic not_found_topic_name _(topic).must_be :nil? 
end it "gets a topic with skip_lookup option" do topic_name = "found-topic" # No HTTP mock needed, since the lookup is not made topic = pubsub.find_topic topic_name, skip_lookup: true _(topic.name).must_equal topic_path(topic_name) _(topic).must_be :reference? _(topic).wont_be :resource? end it "gets a topic with project option" do topic_name = "found-topic" topic_full_path = "projects/custom/topics/found-topic" get_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(topic_full_path) mock = Minitest::Mock.new mock.expect :get_topic, get_res, [topic: topic_full_path] pubsub.service.mocked_publisher = mock topic = pubsub.find_topic topic_name, project: "custom" _(topic.name).must_equal topic_full_path _(topic).wont_be :reference? _(topic).must_be :resource? end it "gets a topic with skip_lookup and project options" do topic_name = "found-topic" # No HTTP mock needed, since the lookup is not made topic = pubsub.find_topic topic_name, skip_lookup: true, project: "custom" _(topic.name).must_equal "projects/custom/topics/found-topic" _(topic).must_be :reference? _(topic).wont_be :resource? end it "gets a topic with async option" do topic_name = "found-topic" get_res = Google::Cloud::PubSub::V1::Topic.new topic_hash(topic_name) mock = Minitest::Mock.new mock.expect :get_topic, get_res, [topic: topic_path(topic_name)] pubsub.service.mocked_publisher = mock topic = pubsub.topic topic_name, async: async topic.enable_message_ordering! 
# Create the AsyncPublisher mock.verify _(topic.async_publisher.topic_name).must_equal topic_path(topic_name) _(topic.async_publisher.max_bytes).must_equal async[:max_bytes] _(topic.async_publisher.max_messages).must_equal async[:max_messages] _(topic.async_publisher.interval).must_equal async[:interval] _(topic.async_publisher.publish_threads).must_equal async[:threads][:publish] _(topic.async_publisher.callback_threads).must_equal async[:threads][:callback] _(topic.async_publisher.flow_control).must_equal async[:flow_control] end it "lists topics" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] pubsub.service.mocked_publisher = mock topics = pubsub.topics mock.verify _(topics.size).must_equal 3 end it "lists topics with find_topics alias" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] pubsub.service.mocked_publisher = mock topics = pubsub.find_topics mock.verify _(topics.size).must_equal 3 end it "lists topics with list_topics alias" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] pubsub.service.mocked_publisher = mock topics = pubsub.list_topics mock.verify _(topics.size).must_equal 3 end it "paginates topics" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] mock.expect :list_topics, topics_without_token, [project: "projects/#{project}", page_size: nil, page_token: "next_page_token"] pubsub.service.mocked_publisher = mock first_topics = pubsub.topics second_topics = pubsub.topics token: first_topics.token mock.verify _(first_topics.size).must_equal 3 token = first_topics.token _(token).wont_be :nil? 
_(token).must_equal "next_page_token" _(second_topics.size).must_equal 2 _(second_topics.token).must_be :nil? end it "paginates topics with max set" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: 3, page_token: nil] pubsub.service.mocked_publisher = mock topics = pubsub.topics max: 3 mock.verify _(topics.size).must_equal 3 token = topics.token _(token).wont_be :nil? _(token).must_equal "next_page_token" end it "paginates topics with next? and next" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] mock.expect :list_topics, topics_without_token, [project: "projects/#{project}", page_size: nil, page_token: "next_<PASSWORD>token"] pubsub.service.mocked_publisher = mock first_topics = pubsub.topics second_topics = first_topics.next mock.verify _(first_topics.size).must_equal 3 _(first_topics.next?).must_equal true _(second_topics.size).must_equal 2 _(second_topics.next?).must_equal false end it "paginates topics with next? 
and next and max set" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: 3, page_token: nil] mock.expect :list_topics, topics_without_token, [project: "projects/#{project}", page_size: 3, page_token: "next_<PASSWORD>token"] pubsub.service.mocked_publisher = mock first_topics = pubsub.topics max: 3 second_topics = first_topics.next mock.verify _(first_topics.size).must_equal 3 _(first_topics.next?).must_equal true _(second_topics.size).must_equal 2 _(second_topics.next?).must_equal false end it "paginates topics with all" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] mock.expect :list_topics, topics_without_token, [project: "projects/#{project}", page_size: nil, page_token: "next_<PASSWORD>"] pubsub.service.mocked_publisher = mock topics = pubsub.topics.all.to_a mock.verify _(topics.size).must_equal 5 end it "paginates topics with all and max set" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: 3, page_token: nil] mock.expect :list_topics, topics_without_token, [project: "projects/#{project}", page_size: 3, page_token: "<PASSWORD>"] pubsub.service.mocked_publisher = mock topics = pubsub.topics(max: 3).all.to_a mock.verify _(topics.size).must_equal 5 end it "iterates topics with all using Enumerator" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] mock.expect :list_topics, topics_with_token_2, [project: "projects/#{project}", page_size: nil, page_token: "<PASSWORD>"] pubsub.service.mocked_publisher = mock topics = pubsub.topics.all.take(5) mock.verify _(topics.size).must_equal 5 end it "iterates topics with all and request_limit set" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: 
nil, page_token: nil] mock.expect :list_topics, topics_with_token_2, [project: "projects/#{project}", page_size: nil, page_token: "<PASSWORD>"] pubsub.service.mocked_publisher = mock topics = pubsub.topics.all(request_limit: 1).to_a mock.verify _(topics.size).must_equal 6 end it "paginates topics without max set" do mock = Minitest::Mock.new mock.expect :list_topics, topics_with_token, [project: "projects/#{project}", page_size: nil, page_token: nil] pubsub.service.mocked_publisher = mock topics = pubsub.topics mock.verify _(topics.size).must_equal 3 token = topics.token _(token).wont_be :nil? _(token).must_equal "next_page_token" end end
karathen/Karathen-Android
Karathen-Android/app/src/main/java/com/raistone/wallet/sealwallet/utils/SealUtils.java
package com.raistone.wallet.sealwallet.utils; import android.annotation.SuppressLint; import android.content.ClipData; import android.content.ClipboardManager; import android.content.Context; import android.content.pm.ActivityInfo; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.telephony.TelephonyManager; import com.raistone.wallet.sealwallet.R; import java.util.List; public class SealUtils { public static void clipData(Context context, String str) { ClipboardManager cm = (ClipboardManager) context.getSystemService(Context.CLIPBOARD_SERVICE); ClipData mClipData = ClipData.newPlainText("Label", str); if (null != cm) { cm.setPrimaryClip(mClipData); ToastHelper.showToast(context.getResources().getString(R.string.copy_success_string)); } } public static boolean isApplicationAviliable(Context context, String appPackageName) { try { PackageManager packageManager = context.getPackageManager(); List<PackageInfo> pinfo = packageManager.getInstalledPackages(0); if (pinfo != null) { for (int i = 0; i < pinfo.size(); i++) { String pn = pinfo.get(i).packageName; if (appPackageName.equals(pn)) { return true; } } } return false; } catch (Exception ignored) { return false; } } public static boolean isNetworkConnected(Context context) { try { if (context != null) { @SuppressWarnings("static-access") ConnectivityManager cm = (ConnectivityManager) context .getSystemService(context.CONNECTIVITY_SERVICE); NetworkInfo info = cm.getActiveNetworkInfo(); return info != null && info.isConnected(); } else { return false; //no internet } } catch (Exception e) { e.printStackTrace(); return false; } } public static String getDeviceId(Context context) { TelephonyManager telephonyManager = (TelephonyManager) context.getSystemService(context.TELEPHONY_SERVICE); @SuppressLint("MissingPermission") String imei = telephonyManager.getDeviceId(); return imei; } }
pgriffel/pacioli
src/mvm/ast/expression/LitKey.java
package mvm.ast.expression; import java.io.PrintWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import mvm.AbstractPrintable; import mvm.Environment; import mvm.MVMException; import mvm.values.PacioliValue; import mvm.values.matrix.IndexSet; import mvm.values.matrix.Key; import mvm.values.matrix.MatrixDimension; public class LitKey extends AbstractPrintable implements Expression { private List<String> entities; private List<String> items; public LitKey(List<String> entities, List<String> items) { this.entities = entities; this.items = items; } @Override public PacioliValue eval(Environment environment) throws MVMException { List<IndexSet> sets = new ArrayList<IndexSet>(); HashMap<String, IndexSet> indexSets = environment.getMachine().indexSets; for (String entity : entities) { if (indexSets.containsKey(entity)) { sets.add(indexSets.get(entity)); } else { throw new MVMException("Index set '%s' unnown", entity); } } return new Key(items, new MatrixDimension(sets)); } @Override public void printText(PrintWriter out) { } }
regedarek/kw-app
app/components/blog/dashboard.rb
<reponame>regedarek/kw-app module Blog class Dashboard def fetch { latest_degree: Scrappers::ToprRecord.last.topr_degree.url(:dashboard), total_meters: routes.sum(:length), last_route: { name: routes.last.name, date: routes.last.climbing_date }, max_meters_person: { name: Db::User.find_by(kw_id: best_person.kw_id).display_name, meters: best_person.total_mountain_routes_length } }.to_json end private def latest_degree today_infos = Scrappers::ToprRecord.where(time: Date.today).order(time: :desc) if today_infos.any? today_infos.first.topr_degree.url(:dashboard) else 'brak zagrożenia' end end def routes Db::Activities::MountainRoute.where(route_type: 'ski').order(:climbing_date) end def best_person Activities::SkiRepository.new.fetch_season.order('total_mountain_routes_length DESC').first end end end
parkerwray/smuthi-1
tests/unit_tests/initial_field_tests/test_initial_field_gauss.py
import numpy as np import smuthi.initial_field as init import smuthi.layers import smuthi.particles import smuthi.fields ld = 532 A = 1 beta = 0 alpha = 0.2 * np.pi pol = 1 rS = [100, -200, 300] laysys = smuthi.layers.LayerSystem(thicknesses=[0, 0], refractive_indices=[1, 1]) particle = smuthi.particles.Sphere(position=rS, l_max=3, m_max=3) particle_list = [particle] al_ar = np.linspace(0, 2*np.pi, 1000) kp_ar = np.linspace(0, 0.99999, 1000) * smuthi.fields.angular_frequency(ld) bw = 4000 ref = [-100, 100, 200] gauss_beam = init.GaussianBeam(vacuum_wavelength=ld, polar_angle=beta, azimuthal_angle=alpha, polarization=pol, amplitude=A, reference_point=ref, k_parallel_array=kp_ar, beam_waist=bw) particle.initial_field = gauss_beam.spherical_wave_expansion(particle, laysys) def test_SWE_coefficients_against_prototype(): aI = particle.initial_field.coefficients ai0 = 0.4040275 - 1.6689055j ai9 = -0.0115840 + 0.0107125j ai20 = -3.1855342e-04 - 7.7089434e-04j print(abs((aI[0] - ai0) / ai0), abs((aI[9] - ai9) / ai9), abs((aI[20] - ai20) / ai20)) assert abs((aI[0] - ai0) / ai0) < 1e-5 assert abs((aI[9] - ai9) / ai9) < 1e-5 assert abs((aI[20] - ai20) / ai20) < 1e-5 def test_focus_field(): E = gauss_beam.electric_field(np.array([ref[0]]), np.array([ref[1]]), np.array([ref[2]]), laysys) print(abs(np.sqrt(E[0]**2 + E[1]**2 + E[2]**2) - A)) assert abs(np.sqrt(E[0]**2 + E[1]**2 + E[2]**2) - A) < 1e-3 if __name__ == '__main__': test_SWE_coefficients_against_prototype() test_focus_field()
AlirezaMojtabavi/Python_Practice
Elementary/2- functions and loops/Ex_1_prime.py
<reponame>AlirezaMojtabavi/Python_Practice<gh_stars>0 number = int(input()) count = 0 for i in range(1,number): if number%i == 0: count+=1 elif number%i != 0: pass if count==1: print('prime') else: print('not prime')
bingchunjin/1806_SDK
linux-4.14.90-dev/linux-4.14.90/arch/arm/mach-omap2/omap-wakeupgen.h
/* * OMAP WakeupGen header file * * Copyright (C) 2011 Texas Instruments, Inc. * <NAME> <<EMAIL>> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. */ #ifndef OMAP_ARCH_WAKEUPGEN_H #define OMAP_ARCH_WAKEUPGEN_H /* OMAP4 and OMAP5 has same base address */ #define OMAP_WKUPGEN_BASE 0x48281000 #define OMAP_WKG_CONTROL_0 0x00 #define OMAP_WKG_ENB_A_0 0x10 #define OMAP_WKG_ENB_B_0 0x14 #define OMAP_WKG_ENB_C_0 0x18 #define OMAP_WKG_ENB_D_0 0x1c #define OMAP_WKG_ENB_E_0 0x20 #define OMAP_WKG_ENB_A_1 0x410 #define OMAP_WKG_ENB_B_1 0x414 #define OMAP_WKG_ENB_C_1 0x418 #define OMAP_WKG_ENB_D_1 0x41c #define OMAP_WKG_ENB_E_1 0x420 #define OMAP_AUX_CORE_BOOT_0 0x800 #define OMAP_AUX_CORE_BOOT_1 0x804 #define OMAP_AMBA_IF_MODE 0x80c #define OMAP_PTMSYNCREQ_MASK 0xc00 #define OMAP_PTMSYNCREQ_EN 0xc04 #define OMAP_TIMESTAMPCYCLELO 0xc08 #define OMAP_TIMESTAMPCYCLEHI 0xc0c extern void __iomem *omap_get_wakeupgen_base(void); extern int omap_secure_apis_support(void); #endif
sho25/activemq
activemq-http/src/main/java/org/apache/activemq/transport/http/HttpEmbeddedTunnelServlet.java
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1 begin_comment comment|/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ end_comment begin_package package|package name|org operator|. name|apache operator|. name|activemq operator|. name|transport operator|. name|http package|; end_package begin_import import|import name|java operator|. name|net operator|. name|URI import|; end_import begin_import import|import name|javax operator|. name|servlet operator|. name|ServletException import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|activemq operator|. name|broker operator|. name|BrokerService import|; end_import begin_import import|import name|org operator|. name|apache operator|. name|activemq operator|. name|transport operator|. name|TransportAcceptListener import|; end_import begin_comment comment|/** * This servlet embeds an ActiveMQ broker inside a servlet engine which is ideal * for deploying ActiveMQ inside a WAR and using this servlet as a HTTP tunnel. 
* * */ end_comment begin_class specifier|public class|class name|HttpEmbeddedTunnelServlet extends|extends name|HttpTunnelServlet block|{ specifier|private specifier|static specifier|final name|long name|serialVersionUID init|= operator|- literal|3705734740251302361L decl_stmt|; specifier|protected name|BrokerService name|broker decl_stmt|; specifier|protected name|HttpTransportServer name|transportConnector decl_stmt|; specifier|public specifier|synchronized name|void name|init parameter_list|() throws|throws name|ServletException block|{ comment|// lets initialize the ActiveMQ broker try|try block|{ if|if condition|( name|broker operator|== literal|null condition|) block|{ name|broker operator|= name|createBroker argument_list|() expr_stmt|; comment|// Add the servlet connector name|String name|url init|= name|getConnectorURL argument_list|() decl_stmt|; name|HttpTransportFactory name|factory init|= operator|new name|HttpTransportFactory argument_list|() decl_stmt|; name|transportConnector operator|= operator|( name|HttpTransportServer operator|) name|factory operator|. name|doBind argument_list|( operator|new name|URI argument_list|( name|url argument_list|) argument_list|) expr_stmt|; name|broker operator|. name|addConnector argument_list|( name|transportConnector argument_list|) expr_stmt|; name|String name|brokerURL init|= name|getServletContext argument_list|() operator|. name|getInitParameter argument_list|( literal|"org.apache.activemq.brokerURL" argument_list|) decl_stmt|; if|if condition|( name|brokerURL operator|!= literal|null condition|) block|{ name|log argument_list|( literal|"Listening for internal communication on: " operator|+ name|brokerURL argument_list|) expr_stmt|; block|} block|} name|broker operator|. 
name|start argument_list|() expr_stmt|; block|} catch|catch parameter_list|( name|Exception name|e parameter_list|) block|{ throw|throw operator|new name|ServletException argument_list|( literal|"Failed to start embedded broker: " operator|+ name|e argument_list|, name|e argument_list|) throw|; block|} comment|// now lets register the listener name|TransportAcceptListener name|listener init|= name|transportConnector operator|. name|getAcceptListener argument_list|() decl_stmt|; name|getServletContext argument_list|() operator|. name|setAttribute argument_list|( literal|"transportChannelListener" argument_list|, name|listener argument_list|) expr_stmt|; name|super operator|. name|init argument_list|() expr_stmt|; block|} comment|/** * Factory method to create a new broker * * @throws Exception */ specifier|protected name|BrokerService name|createBroker parameter_list|() throws|throws name|Exception block|{ name|BrokerService name|answer init|= operator|new name|BrokerService argument_list|() decl_stmt|; return|return name|answer return|; block|} specifier|protected name|String name|getConnectorURL parameter_list|() block|{ return|return literal|"http://localhost/" operator|+ name|getServletContext argument_list|() operator|. name|getServletContextName argument_list|() return|; block|} block|} end_class end_unit
CSCSI/Triana
triana-types/src/main/java/triana/types/util/FlatArray.java
/* * The University of Wales, Cardiff Triana Project Software License (Based * on the Apache Software License Version 1.1) * * Copyright (c) 2007 University of Wales, Cardiff. All rights reserved. * * Redistribution and use of the software in source and binary forms, with * or without modification, are permitted provided that the following * conditions are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * 3. The end-user documentation included with the redistribution, if any, * must include the following acknowledgment: "This product includes * software developed by the University of Wales, Cardiff for the Triana * Project (http://www.trianacode.org)." Alternately, this * acknowledgment may appear in the software itself, if and wherever * such third-party acknowledgments normally appear. * * 4. The names "Triana" and "University of Wales, Cardiff" must not be * used to endorse or promote products derived from this software * without prior written permission. For written permission, please * contact <EMAIL>. * * 5. Products derived from this software may not be called "Triana," nor * may Triana appear in their name, without prior written permission of * the University of Wales, Cardiff. * * 6. This software may not be sold, used or incorporated into any product * for sale to third parties. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN * NO EVENT SHALL UNIVERSITY OF WALES, CARDIFF OR ITS CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. * * ------------------------------------------------------------------------ * * This software consists of voluntary contributions made by many * individuals on behalf of the Triana Project. For more information on the * Triana Project, please see. http://www.trianacode.org. * * This license is based on the BSD license as adopted by the Apache * Foundation and is governed by the laws of England and Wales. * */ package triana.types.util; import java.io.Serializable; import java.lang.reflect.Array; import java.util.ArrayList; /** * <i>FlatArray</i> holds the contents of a multi-dimensional array that has been flattened, i.e. whose elements have * been mapped onto a one-dimensional array. It contains the one-dimensional array, an integer array holding the lengths * of the original array dimensions, and the name of the type of the component of the flattened array. It provides a * method to reconstruct the original array from the flattened array and lengths. Arrays can have elements of any type. * </p><p> Arrays can be partially flattened, so that the highest few dimensions are mapped onto a single dimension but * the remaining dimensions are kept as array elements of flattened array. </p><p> <i>FlatArray</i> also contains a * large number of class methods as utilities for the manipulation of arrays using the flattening technique. These are * extensively used in <i>GraphType</i>. 
</p><p> * * @author <NAME> * @version $Revision: 4048 $ * @see triana.types.GraphType */ public class FlatArray extends Object implements Serializable { /* * Remembers the input object that is to be flattened */ private Object inputObject = null; /* * Holds the flattened array. */ private Object flatArray; /* * The lengths of the dimensions of the original array */ private int[] lengths; /** * The total length of the flattened array, equal to the total number of elements of the original array. */ private int totalLength = 0; /* * The name of the Type of the components of the flattened array */ private String componentName; /** * Constructs an empty <i>FlatArray</i>. */ public FlatArray() { } /* * Constructors. */ /** * Constructs a <i>FlatArray</i> from the given array. All dimensions higher than one are flattened. If the input * object is a 1-dimensional array then it is not processed, and array <i>lengths</i> has only one element. If the * input object is not an array, then <i>FlatArray</i> creates a 1-dimensional array with the object as the single * element and sets the single element of <i>lengths</i> to 1. 
* * @param o The array to be fully flattened */ public FlatArray(Object o) { inputObject = o; Class componentClass; if (o.getClass().isArray()) { lengths = findArrayDimensionLengths(o); int totalDims = lengths.length; // System.out.println("Number of array dims = " + String.valueOf(totalDims) ); if (totalDims == 1) { flatArray = o; componentName = o.getClass().getComponentType().getName(); totalLength = lengths[0]; ; } else { int totalNumbers = lengths[0]; for (int i = 1; i < totalDims; i++) { totalNumbers *= lengths[i]; } totalLength = totalNumbers; componentName = findArrayComponentName(o, totalDims - 1); // System.out.println("Component name = " + componentName ); try { if (componentName.equals("boolean")) { componentClass = Boolean.TYPE; } else if (componentName.equals("char")) { componentClass = Character.TYPE; } else if (componentName.equals("byte")) { componentClass = Byte.TYPE; } else if (componentName.equals("short")) { componentClass = Short.TYPE; } else if (componentName.equals("int")) { componentClass = Integer.TYPE; } else if (componentName.equals("long")) { componentClass = Long.TYPE; } else if (componentName.equals("float")) { componentClass = Float.TYPE; } else if (componentName.equals("double")) { componentClass = Double.TYPE; } else if (componentName.equals("void")) { componentClass = Void.TYPE; } else { componentClass = Class.forName(componentName); } flatArray = Array.newInstance(componentClass, totalNumbers); recurseArrayUnpack(o, flatArray, lengths, 0, totalDims - 1, 0); } catch (ClassNotFoundException ex) { System.out.println( "Class with name " + componentName + " does not exist. Construct an empty FlatArray."); } } } else { flatArray = new Object[1]; Array.set(flatArray, 0, o); lengths = new int[1]; lengths[0] = 1; } } /** * Constructs a <i>FlatArray</i> from the given array by flattening all the dimensions up to argument <i>depth</i>. * The array <i>lengths</i> contains the lengths of only the dimensions to be flattened. 
If the input object is a * 1-dimensional array then it is not processed, and <i>lengths</i> has only one element. If the input object has * fewer dimensions than depth, then it is fully flattened. If the input object is not an array, then * <i>FlatArray</i> creates a 1-dimensional array with the object as the single element and sets the single element * of <i>lengths</i> to 1. * * @param o The array to be flattened * @param depth The last dimension to be flattened, counting from 0 */ public FlatArray(Object o, int depth) { inputObject = o; Class componentClass; if (o.getClass().isArray()) { lengths = findArrayDimensionLengths(o, depth); int totalDims = lengths.length; if (totalDims == 1) { flatArray = o; componentName = o.getClass().getComponentType().getName(); totalLength = lengths[0]; } else { int totalNumbers = lengths[0]; for (int i = 1; i < totalDims; i++) { totalNumbers *= lengths[i]; } totalLength = totalNumbers; componentName = findArrayComponentName(o, totalDims - 1); try { if (componentName.equals("boolean")) { componentClass = Boolean.TYPE; } else if (componentName.equals("char")) { componentClass = Character.TYPE; } else if (componentName.equals("byte")) { componentClass = Byte.TYPE; } else if (componentName.equals("short")) { componentClass = Short.TYPE; } else if (componentName.equals("int")) { componentClass = Integer.TYPE; } else if (componentName.equals("long")) { componentClass = Long.TYPE; } else if (componentName.equals("float")) { componentClass = Float.TYPE; } else if (componentName.equals("double")) { componentClass = Double.TYPE; } else if (componentName.equals("void")) { componentClass = Void.TYPE; } else { componentClass = Class.forName(componentName); } flatArray = Array.newInstance(componentClass, totalNumbers); recurseArrayUnpack(o, flatArray, lengths, 0, totalDims - 1, 0); } catch (ClassNotFoundException ex) { System.out.println( "Class with name " + componentName + " does not exist. 
Construct an empty FlatArray."); } } } else { flatArray = new Object[1]; Array.set(flatArray, 0, o); lengths = new int[1]; lengths[0] = 1; } } /** * Constructs a <i>FlatArray</i> from the given flattened array and the given array of integers. The user must check * that the integer array is consistent with the flattened array, in that the product of its elements equals the * length of the array. * * @param o The flattened array * @param l The lengths of the dimensions of the corresponding unflattened array */ public FlatArray(Object o, int[] l) { setFlatArray(o); setLengths(l); setComponentName(o.getClass().getComponentType().getName()); } /* * Class methods */ /** * Class method finds the number of dimensions of a multi-dimensional array. If the object argument is not an array, * the method returns 0. * * @param o The multi-dimensional array to be examined * @return The number of dimensions of the given array */ public static int findNumberOfDimensions(Object o) { if (!o.getClass().isArray()) { return 0; } String argName = o.getClass().getName(); return argName.lastIndexOf("[") - argName.indexOf("[") + 1; } /** * Class method finds the size of each dimension of a multi-dimensional array. It returns an int[] that contains the * sizes of the dimensions. The argument can be any Object, but if the argument is not an array, then the method * returns <i>null</i>. * <p/> * Note that the method only looks at the length of the first row in each dimension, so it will not give sensible * information unless the multi-dimensional array is "rectangular", ie unless all the rows in a given dimension have * the same length. 
* * @param o The input (multi-dimensional) array * @return The lengths of the dimensions */ public static int[] findArrayDimensionLengths(Object o) { int dims = findNumberOfDimensions(o); if (dims == 0) { return null; } int[] l = new int[dims]; Object o1 = o; for (int level = 0; level < dims; level++) { l[level] = Array.getLength(o1); if (level < dims - 1) { o1 = ((Object[]) o1)[0]; } } return l; } /** * Class method finds the size of each dimension of a multi-dimensional array down to a certain depth. It returns an * int[] that contains the sizes of the dimensions. The argument can be any Object, but if the argument is not an * array, then the method returns <i>null</i>. If the given depth is actually larger than the number of dimensions * of the array, depth is reset to the number of dimensions. * <p/> * Note that the method only looks at the length of the first row in each dimension, so it will not give sensible * information unless the multi-dimensional array is "rectangular", ie unless all the rows in a given dimension have * the same length. * * @param o The input (multi-dimensional) array * @param depth The number of dimensions whose size is desired * @return The lengths of the dimensions down to depth */ public static int[] findArrayDimensionLengths(Object o, int depth) { int dims = findNumberOfDimensions(o); if (dims == 0) { return null; } if (depth > dims) { depth = dims; } int[] l = new int[depth]; Object o1 = o; for (int level = 0; level < depth; level++) { l[level] = Array.getLength(o1); if (level < depth - 1) { o1 = ((Object[]) o1)[0]; } } return l; } /** * Class method to find the name of the type of the component of an array down to a certain depth. Since Java * multi-dimensional arrays are stored as arrays of arrays, one can ask for the component type at any level in the * hierarchy. In Triana data for dependent variables, the data arrays may have more dimensions than the number of * independent variables. 
If so, the extra dimensions are regarded as the elementary components of the data at the * points in independent variable space. Thus, a vector field in two dimensions is represented by a two-dimensional * array of vectors, and so is stored as a three-dimensional array. The present method allows the user to specify * the depth in this hierarchical array in which the class of the remaining levels is to be found. The parameter * <i>depth</i> is the number of dimensions that should be stripped away before asking for the component type. Thus, * <i>depth</i> = 0 returns the type of the elements of the highest dimension of the array. To discover the * component type in the Triana sense, use <i>depth</i> = <i>independentVariables</i> - 1. If the array has fewer * dimensions than <i>depth</i> - 1, the method returns null. If the argument is not an array, the method returns * its type name. * * @param o The data array to be examined * @param depth The depth at which one wants the component type * @return The component type name */ public static String findArrayComponentName(Object o, int depth) { Class argClass = o.getClass(); String argName = argClass.getName(); // System.out.println( "FlatArray findArrayComponentName: object name = " + argName ); if (!argClass.isArray()) { return argName; } int dims = argName.lastIndexOf("[") - argName.indexOf("[") + 1; // System.out.println( "FlatArray findArrayComponentName: dims and depth are " + String.valueOf(dims) + " " + String.valueOf(depth) ); if (depth >= dims) { return null; } try { if (depth == dims - 1) { // System.out.println( "FlatArray findArrayComponentName: returned name for depth = dims - 1 is " + Class.forName( argName.substring( depth ) ).getComponentType().getName() ); return Class.forName(argName.substring(depth)).getComponentType().getName(); } } catch (ClassNotFoundException ex) { System.out.println("Class with name " + argName.substring(depth) + " does not exist. 
Cannot find array component name for the given object."); } // System.out.println( "FlatArray findArrayComponentName: returned name for depth = " + String.valueOf(depth) + " is " + argName.substring(depth + 1 ) ); return argName.substring(depth + 1); } /** * Class method that creates a new array with the same dimensionality and component types as the given arbitrary * multi-dimensional array. Arrays with primitive components are returned with with default initialization; arrays * with other object components are returned with null initializations. * <p/> * The argument can be any Object, but if the argument is not an array, then the method returns <i>null</i>. * <p/> * Note that the method only looks at the length of the first row of the input array in each dimension, so it will * not give a correct imitation of the input array unless the multi-dimensional array is "rectangular", ie unless * all the rows in a given dimension have the same length. * * @param o The input (multi-dimensional) array * @return An imitation of the input, <i>i.e.</i> an empty array of the same type */ public static Object multiArrayImitate(Object o) { Class oClass = o.getClass(); if (!oClass.isArray()) { return null; } Class componentClass; Object o1; int[] ln = findArrayDimensionLengths(o); String n = oClass.getName(); if (n.endsWith("[B")) { componentClass = Byte.TYPE; } else if (n.endsWith("[D")) { componentClass = Double.TYPE; } else if (n.endsWith("[F")) { componentClass = Float.TYPE; } else if (n.endsWith("[I")) { componentClass = Integer.TYPE; } else if (n.endsWith("[J")) { componentClass = Long.TYPE; } else if (n.endsWith("[S")) { componentClass = Short.TYPE; } else if (n.endsWith("[C")) { componentClass = Character.TYPE; } else if (n.endsWith("[Z")) { componentClass = Boolean.TYPE; } else { int index = n.indexOf("L"); try { componentClass = Class.forName(n.substring(index)); } catch (ClassNotFoundException ex) { System.out.println("Could not find class with component name " + 
n.substring(index) + ". Return null from FlatArray.multiArrayImitate()."); return null; } } o1 = Array.newInstance(componentClass, ln); return o1; } /** * Class method that copies an arbitrary multi-dimensional array to another array of the same dimensions. Copying of * arrays that hold primitive data types is by value. If they are reference types, they are copied by reference. * <p/> * The method uses the private method <i>recurseCopy</i> to move through the array dimensions. The argument can be * any Object, but if the argument is not an array, then the method returns <i>null</i>, and if the array holds * reference objects at its lowest level instead of primitive data types, then an empty array of the correct size is * returned. * <p/> * Note that the method only looks at the length of the first row in each dimension, so it will not give sensible * information unless the multi-dimensional array is "rectangular", ie unless all the rows in a given dimension have * the same length. * * @param o The input (multi-dimensional) array * @return A copy of the input by value */ public static Object multiArrayCopy(Object o) { Object o1 = multiArrayImitate(o); recurseCopy(o, o1); return o1; } /** * Private recursive method that copies from an arbitrary multi-dimensional array (first argument) to another of the * same dimensionality (second argument). * * @param o The array to be copied * @param o1 The copy */ private static void recurseCopy(Object o, Object o1) { if (o == null) { return; } int j; int ln = Array.getLength(o); Class oClass = o.getClass(); Class component = oClass.getComponentType(); if (component.isArray()) { for (j = 0; j < ln; j++) { recurseCopy(((Object[]) o)[j], ((Object[]) o1)[j]); } } else { System.arraycopy(o, 0, o1, 0, ln); } return; } /** * Private recursive method to take apart a multi-dimensional rectangular array of any Objects in order to pack them * into a one-dimensional array. 
The arrays must have compatible component types, and the receiving array must have * enough elements. Starting from dimension fromDim (which counts from 0) to dimension toDim, all elements are * written to a 1D array. If the elements at dimension toDim are themselves arrays they are written to the new 1D * array as references. In this way it is possible to flatten only some of the dimensions of o. * * @param o The multi-dimensional array being flattened * @param out The output array receiving the data * @param dims The integer array of dimension lengths of o * @param fromDim The starting dimension for this recursion * @param toDim The last dimension, whose elements are just copied. * @param writePosition The current writing position in the the array out * @return The new writing position in the array out */ private static int recurseArrayUnpack(Object o, Object out, int[] dims, int fromDim, int toDim, int writePosition) { int k; int currentLength = dims[fromDim]; if (fromDim < toDim) { for (k = 0; k < currentLength; k++) { writePosition = recurseArrayUnpack(((Object[]) o)[k], out, dims, fromDim + 1, toDim, writePosition); } } else { System.arraycopy(o, 0, out, writePosition, currentLength); writePosition += currentLength; } return writePosition; } /** * Private recursive method to reconstruct a multi-dimensional rectangular array of objects from values that have * been unpacked into a one-dimensional array. 
* * @param o The multi-dimensional array that receives the data * @param in The 1D array that contains the data * @param dims The array containing the dimension lengths of o * @param fromDim The current dimension in this recursive method * @param toDim The final dimensions of o that will be written to * @param readPosition The current reading position in the the array in * @return The new reading position in the array in */ private static int recurseArrayPack(Object in, Object o, int[] dims, int fromDim, int toDim, int readPosition) { /* System.out.println("RecurseArrayPack at level fromDim = " + String.valueOf( fromDim ) + " and toDim = " + String.valueOf( toDim ) ); System.out.println( "Input multi-array name " + o.getClass().getName() ); System.out.println( "Input flat array name " + in.getClass().getName() ); System.out.println( "Flat array length = " + String.valueOf( Array.getLength( in ) ) ); System.out.println( "On entering, readPosition = " + String.valueOf( readPosition ) ); */ int k; int currentLength = dims[fromDim]; // System.out.println("currentLength = " + String.valueOf( currentLength ) ); Class component; if (fromDim < toDim) { for (k = 0; k < currentLength; k++) { component = o.getClass().getComponentType(); readPosition = recurseArrayPack(in, ((Object[]) o)[k], dims, fromDim + 1, toDim, readPosition); // System.out.println("if-clause readPosition = " + String.valueOf(readPosition) ); } } else { System.arraycopy(in, readPosition, o, 0, currentLength); readPosition += currentLength; // System.out.println("else-clause readPosition = " + String.valueOf(readPosition) ); } return readPosition; } /** * Class method that returns <i>true</i> if the given multi-dimensional array is an array of primitive Java data * types at its lowest level. It returns <i>false</i> if the elements of the array at its lowest level are reference * types. * * @param o The array being inspected * @return True if the elements at the lowest level are primitive. 
*/ public static boolean isPrimitiveArray(Object o) { Class oClass = o.getClass(); if (!oClass.isArray()) { return false; } String n = oClass.getName(); boolean a; if (n.endsWith("[B")) { a = true; } else if (n.endsWith("[D")) { a = true; } else if (n.endsWith("[F")) { a = true; } else if (n.endsWith("[I")) { a = true; } else if (n.endsWith("[J")) { a = true; } else if (n.endsWith("[S")) { a = true; } else if (n.endsWith("[C")) { a = true; } else if (n.endsWith("[Z")) { a = true; } else { a = false; } return a; } /** * Class method that returns <i>true</i> if the given multi-dimensional array contains these primitive Java data * types at its lowest level: byte, short, int, long, float, or double. It returns <i>false</i> if the elements of * the array at its lowest level are boolean, char, or reference types. * * @param o The array being tested * @return True if the elements at the lowest level are arithmetic primitive types */ public static boolean isArithmeticArray(Object o) { Class oClass = o.getClass(); if (!oClass.isArray()) { return false; } String n = oClass.getName(); boolean a; if (n.endsWith("[B")) { a = true; } else if (n.endsWith("[D")) { a = true; } else if (n.endsWith("[F")) { a = true; } else if (n.endsWith("[I")) { a = true; } else if (n.endsWith("[J")) { a = true; } else if (n.endsWith("[S")) { a = true; } else { a = false; } return a; } /** * Class method that converts a given input array to an array of doubles. The input array can have any * dimensionality, but must contain primitive arithmetic types. If its elements are boolean, char, or reference * types then the method returns <i>null</i>. If the input object is not an array, the method also returns * <i>null</i>. If the input object is an array of doubles, then it is passed directly to output. 
* * @param o The input array * @return The array converted to an array of doubles */ public static Object toDoubleArray(Object o) { if (o == null) { return null; } Class oClass = o.getClass(); if (!oClass.isArray()) { return null; } String n = oClass.getName(); if (n.endsWith("[D")) { return o; } Object ra = null; double[] r; int j, len; if (findNumberOfDimensions(o) == 1) { len = Array.getLength(o); r = new double[len]; if (n.endsWith("[B")) { byte[] in = (byte[]) o; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 1"); } else if (n.endsWith("[F")) { float[] in = (float[]) o; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 2"); } else if (n.endsWith("[I")) { int[] in = (int[]) o; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 3"); } else if (n.endsWith("[J")) { long[] in = (long[]) o; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 4"); } else if (n.endsWith("[S")) { short[] in = (short[]) o; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 5"); } ra = r; } else { FlatArray flr = new FlatArray(o); len = flr.getFlatLength(); r = new double[len]; Object ar = flr.getFlatArray(); if (n.endsWith("[B")) { byte[] in = (byte[]) ar; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 1b"); } else if (n.endsWith("[F")) { float[] in = (float[]) ar; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 2b"); } else if (n.endsWith("[I")) { int[] in = (int[]) ar; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 3b"); } else if (n.endsWith("[J")) { long[] in = (long[]) ar; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 4b"); } else if (n.endsWith("[S")) { short[] in = (short[]) ar; for (j = 0; j < len; j++) { r[j] = (double) in[j]; } System.out.println("test 5b"); } flr.setFlatArray(r); ra = flr.restoreArray(true); } return 
r; } /** * Class method that converts a given input array to an array of floats. The input array can have any * dimensionality, but must contain primitive arithmetic types. If its elements are boolean, char, or reference * types then the method returns <i>null</i>. If the input object is not an array, the method also returns * <i>null</i>. If the input object is an array of floats, then it is passed directly to output. * * @param o The input array * @return The array converted to an array of floats */ public static Object toFloatArray(Object o) { if (o == null) { return null; } Class oClass = o.getClass(); if (!oClass.isArray()) { return null; } String n = oClass.getName(); if (n.endsWith("[F")) { return o; } Object ra = null; float[] r; int j, len; if (findNumberOfDimensions(o) == 1) { len = Array.getLength(o); r = new float[len]; if (n.endsWith("[B")) { byte[] in = (byte[]) o; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[D")) { double[] in = (double[]) o; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[I")) { int[] in = (int[]) o; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[J")) { long[] in = (long[]) o; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) o; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } ra = r; } else { FlatArray flr = new FlatArray(o); len = flr.getFlatLength(); r = new float[len]; Object ar = flr.getFlatArray(); if (n.endsWith("[B")) { byte[] in = (byte[]) ar; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[D")) { double[] in = (double[]) ar; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[I")) { int[] in = (int[]) ar; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[J")) { long[] in = (long[]) ar; for (j = 0; j < len; j++) { r[j] = (float) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) ar; for (j 
= 0; j < len; j++) { r[j] = (float) in[j]; } } flr.setFlatArray(r); ra = flr.restoreArray(true); } return r; } /** * Class method that converts a given input array to an array of ints. The input array can have any dimensionality, * but must contain primitive arithmetic types. If its elements are boolean, char, or reference types then the * method returns <i>null</i>. If the input object is not an array, the method also returns <i>null</i>. If the * input object is an array of ints, then it is passed directly to output. Floats and doubles are converted to ints * by rounding. * * @param o The input array * @return The array converted to an array of ints */ public static Object toIntArray(Object o) { if (o == null) { return null; } Class oClass = o.getClass(); if (!oClass.isArray()) { return null; } String n = oClass.getName(); if (n.endsWith("[I")) { return o; } Object ra = null; int[] r; int j, len; if (findNumberOfDimensions(o) == 1) { len = Array.getLength(o); r = new int[len]; if (n.endsWith("[B")) { byte[] in = (byte[]) o; for (j = 0; j < len; j++) { r[j] = (int) in[j]; } } else if (n.endsWith("[D")) { double[] in = (double[]) o; for (j = 0; j < len; j++) { r[j] = (int) Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) o; for (j = 0; j < len; j++) { r[j] = Math.round(in[j]); } } else if (n.endsWith("[J")) { long[] in = (long[]) o; for (j = 0; j < len; j++) { r[j] = (int) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) o; for (j = 0; j < len; j++) { r[j] = (int) in[j]; } } ra = r; } else { FlatArray flr = new FlatArray(o); len = flr.getFlatLength(); r = new int[len]; Object ar = flr.getFlatArray(); if (n.endsWith("[B")) { byte[] in = (byte[]) ar; for (j = 0; j < len; j++) { r[j] = (int) in[j]; } } else if (n.endsWith("[D")) { double[] in = (double[]) ar; for (j = 0; j < len; j++) { r[j] = (int) Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) ar; for (j = 0; j < len; j++) { r[j] = 
Math.round(in[j]); } } else if (n.endsWith("[J")) { long[] in = (long[]) ar; for (j = 0; j < len; j++) { r[j] = (int) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) ar; for (j = 0; j < len; j++) { r[j] = (int) in[j]; } } flr.setFlatArray(r); ra = flr.restoreArray(true); } return r; } /** * Class method that converts a given input array to an array of longs. The input array can have any dimensionality, * but must contain primitive arithmetic types. If its elements are boolean, char, or reference types then the * method returns <i>null</i>. If the input object is not an array, the method also returns <i>null</i>. If the * input object is an array of longs, then it is passed directly to output. Floats and doubles are converted to * longs by rounding. * * @param o The input array * @return The array converted to an array of longs */ public static Object toLongArray(Object o) { if (o == null) { return null; } Class oClass = o.getClass(); if (!oClass.isArray()) { return null; } String n = oClass.getName(); if (n.endsWith("[J")) { return o; } Object ra = null; long[] r; int j, len; if (findNumberOfDimensions(o) == 1) { len = Array.getLength(o); r = new long[len]; if (n.endsWith("[B")) { byte[] in = (byte[]) o; for (j = 0; j < len; j++) { r[j] = (long) in[j]; } } else if (n.endsWith("[D")) { double[] in = (double[]) o; for (j = 0; j < len; j++) { r[j] = Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) o; for (j = 0; j < len; j++) { r[j] = (long) Math.round(in[j]); } } else if (n.endsWith("[I")) { int[] in = (int[]) o; for (j = 0; j < len; j++) { r[j] = (long) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) o; for (j = 0; j < len; j++) { r[j] = (long) in[j]; } } ra = r; } else { FlatArray flr = new FlatArray(o); len = flr.getFlatLength(); r = new long[len]; Object ar = flr.getFlatArray(); if (n.endsWith("[B")) { byte[] in = (byte[]) ar; for (j = 0; j < len; j++) { r[j] = (long) in[j]; } } else if (n.endsWith("[D")) { 
double[] in = (double[]) ar; for (j = 0; j < len; j++) { r[j] = Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) ar; for (j = 0; j < len; j++) { r[j] = (long) Math.round(in[j]); } } else if (n.endsWith("[I")) { int[] in = (int[]) ar; for (j = 0; j < len; j++) { r[j] = (long) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) ar; for (j = 0; j < len; j++) { r[j] = (long) in[j]; } } flr.setFlatArray(r); ra = flr.restoreArray(true); } return r; } /** * Class method that converts a given input array to an array of shorts. The input array can have any * dimensionality, but must contain primitive arithmetic types. If its elements are boolean, char, or reference * types then the method returns <i>null</i>. If the input object is not an array, the method also returns * <i>null</i>. If the input object is an array of doubles, then it is passed directly to output. Floats and doubles * are converted first to ints by rounding and then to shorts by casting. * * @param o The input array * @return The array converted to an array of shorts */ public static Object toShortArray(Object o) { if (o == null) { return null; } Class oClass = o.getClass(); if (!oClass.isArray()) { return null; } String n = oClass.getName(); if (n.endsWith("[S")) { return o; } Object ra = null; int j, len; short[] r; if (findNumberOfDimensions(o) == 1) { len = Array.getLength(o); r = new short[len]; if (n.endsWith("[B")) { byte[] in = (byte[]) o; for (j = 0; j < len; j++) { r[j] = (short) in[j]; } } else if (n.endsWith("[D")) { double[] in = (double[]) o; for (j = 0; j < len; j++) { r[j] = (short) Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) o; for (j = 0; j < len; j++) { r[j] = (short) Math.round(in[j]); } } else if (n.endsWith("[I")) { int[] in = (int[]) o; for (j = 0; j < len; j++) { r[j] = (short) in[j]; } } else if (n.endsWith("[J")) { long[] in = (long[]) o; for (j = 0; j < len; j++) { r[j] = (short) in[j]; } } ra = r; } else { 
FlatArray flr = new FlatArray(o); len = flr.getFlatLength(); r = new short[len]; Object ar = flr.getFlatArray(); if (n.endsWith("[B")) { byte[] in = (byte[]) ar; for (j = 0; j < len; j++) { r[j] = (short) in[j]; } } else if (n.endsWith("[D")) { double[] in = (double[]) ar; for (j = 0; j < len; j++) { r[j] = (short) Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) ar; for (j = 0; j < len; j++) { r[j] = (short) Math.round(in[j]); } } else if (n.endsWith("[I")) { int[] in = (int[]) ar; for (j = 0; j < len; j++) { r[j] = (short) in[j]; } } else if (n.endsWith("[J")) { long[] in = (long[]) ar; for (j = 0; j < len; j++) { r[j] = (short) in[j]; } } flr.setFlatArray(r); ra = flr.restoreArray(true); } return r; } /** * Class method that converts a given input array to an array of bytes. The input array can have any dimensionality, * but must contain primitive arithmetic types. If its elements are boolean, char, or reference types then the * method returns <i>null</i>. If the input object is not an array, the method also returns <i>null</i>. If the * input object is an array of bytes, then it is passed directly to output. Floats and doubles are converted first * to ints by rounding and then to bytes by casting. 
* * @param o The input array * @return The array converted to an array of bytes */ public static Object toByteArray(Object o) { if (o == null) { return null; } Class oClass = o.getClass(); if (!oClass.isArray()) { return null; } String n = oClass.getName(); if (n.endsWith("[B")) { return o; } Object ra = null; byte[] r; int j, len; if (findNumberOfDimensions(o) == 1) { len = Array.getLength(o); r = new byte[len]; if (n.endsWith("[D")) { double[] in = (double[]) o; for (j = 0; j < len; j++) { r[j] = (byte) Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) o; for (j = 0; j < len; j++) { r[j] = (byte) Math.round(in[j]); } } else if (n.endsWith("[I")) { int[] in = (int[]) o; for (j = 0; j < len; j++) { r[j] = (byte) in[j]; } } else if (n.endsWith("[J")) { long[] in = (long[]) o; for (j = 0; j < len; j++) { r[j] = (byte) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) o; for (j = 0; j < len; j++) { r[j] = (byte) in[j]; } } ra = r; } else { FlatArray flr = new FlatArray(o); len = flr.getFlatLength(); r = new byte[len]; Object ar = flr.getFlatArray(); if (n.endsWith("[D")) { double[] in = (double[]) ar; for (j = 0; j < len; j++) { r[j] = (byte) Math.round(in[j]); } } else if (n.endsWith("[F")) { float[] in = (float[]) ar; for (j = 0; j < len; j++) { r[j] = (byte) Math.round(in[j]); } } else if (n.endsWith("[I")) { int[] in = (int[]) ar; for (j = 0; j < len; j++) { r[j] = (byte) in[j]; } } else if (n.endsWith("[J")) { long[] in = (long[]) ar; for (j = 0; j < len; j++) { r[j] = (byte) in[j]; } } else if (n.endsWith("[S")) { short[] in = (short[]) ar; for (j = 0; j < len; j++) { r[j] = (byte) in[j]; } } flr.setFlatArray(r); ra = flr.restoreArray(true); } return r; } /** * Class method that converts the given input array to an array whose elements are of the type given by the Class in * the second argument. The input array (first argument) can have any dimensionality, but must contain primitive * arithmetic types. 
If its elements are boolean, char, or reference types, then the method returns <i>null</i>. If * the input object is not an array, the method also returns <i>null</i>. The second argument is a Class. If it is * the Class of a primitive data type, then the elements of the first argument will be converted to that type, * provided the type is arithmetic (<i>i.e.</i> not boolean or char); if the given class is boolean or char, the * method returns <i>null</i>. If the given Class is that of an array, then the output type will be the type of the * <i>elements</i> of the array; again, if these elements are of type boolean or char, the method returns null. If * the given Class is not a primitive type or an array, then the method returns null. If the given object is already * of the same Class as the given Class, then the object is simply passed to the output. </p><p> Note that Java * provides Class constants for primitives, denoted by <i>Integer.Type, Double.Type, etc,</i>. These values are also * returned by applying the suffix .class to a primitive variable type name, <i>e.g.</i> <i>int.class</i> has the * value <i>Integer.Type</i>. For any Object <i>Q</i>, including arrays, the class is given by the method * <i>Q.getClass()</i>. Normally, the present method would be used to to convert the elements of <i>o</i> to the * type of the elements of object <i>q</i>, by invoking <i>convertArrayType( o, q.getClass() )</i>. 
* * @param o The input array * @param outClass The desired type of the output elements * @return The array converted to an array of the desired type */ public static Object convertArrayElements(Object o, Class outClass) { if (o == null) { return null; } if (outClass == null) { return o; } Class inClass = o.getClass(); if (!inClass.isArray()) { return null; } if (outClass.isInstance(o)) { return o; } if (outClass.isPrimitive()) { if (outClass == Double.TYPE) { return toDoubleArray(o); } if (outClass == Float.TYPE) { return toFloatArray(o); } if (outClass == Integer.TYPE) { return toIntArray(o); } if (outClass == Short.TYPE) { return toShortArray(o); } if (outClass == Long.TYPE) { return toLongArray(o); } if (outClass == Byte.TYPE) { return toByteArray(o); } else { return null; } } if (!outClass.isArray()) { return null; } String n = outClass.getName(); if (n.endsWith("[B")) { return toByteArray(o); } if (n.endsWith("[D")) { return toDoubleArray(o); } if (n.endsWith("[F")) { return toFloatArray(o); } if (n.endsWith("[I")) { return toIntArray(o); } if (n.endsWith("[J")) { return toLongArray(o); } if (n.endsWith("[S")) { return toShortArray(o); } else { return null; } } /** * Class method that adds the given complex number (supplied as two doubles in the last two arguments) to the given * complex data array (supplied in the first two arguments, containing any primitive arithmetic types) and returns a * boolean <i>true</i> value to indicate success. Arithmetic is done in-place, <i>i.e.</i> the supplied arrays are * simply modified. </p><p> If the data are not arithmetic data then the method returns <i>false</i>. If the input * data are real (second argument <i>null</i>) but the imaginary part of the increment variable (fourth argument) is * non-zero, then the method returns <i>false</i>. 
* * @param or The real part of the array data, which must contain doubles * @param oi The imaginary part of the array data, which must contain doubles; it may be <i>null</i> * @param sr The real part of the increment number * @param si The imaginary part of the increment number (must be zero if the second argument is <i>null</i>) * @return True if the method succeeds */ public static boolean incrementArray(Object or, Object oi, double sr, double si) { int k, len; FlatArray flr, fli; Object ar, ai; double[] dataReal, dataImag; if (!isArithmeticArray(or)) { return false; } boolean isComplexData = (oi != null); boolean isComplexIncrement = (si != 0); if (!isComplexData && isComplexIncrement) { return false; } if (findNumberOfDimensions(or) == 1) { if (or instanceof double[]) { dataReal = (double[]) or; } else { System.out.println( "FlatArray.incrementArray: input array does not hold doubles. Convert to doubles before supplying argument."); return false; } len = dataReal.length; for (k = 0; k < len; k++) { dataReal[k] += sr; } if (isComplexData) { if (oi instanceof double[]) { dataImag = (double[]) oi; } else { System.out.println( "FlatArray.incrementArray: input array does not hold doubles. Convert to doubles before supplying argument."); return false; } for (k = 0; k < len; k++) { dataImag[k] += si; } } } else { flr = new FlatArray(or); ar = flr.getFlatArray(); len = Array.getLength(ar); if (ar instanceof double[]) { dataReal = (double[]) ar; } else { System.out.println( "FlatArray.incrementArray: input array does not hold doubles. Convert to doubles before supplying argument."); return false; } for (k = 0; k < len; k++) { dataReal[k] += sr; } flr.restoreArray(); if (isComplexData) { fli = new FlatArray(oi); ai = fli.getFlatArray(); if (ai instanceof double[]) { dataImag = (double[]) ai; } else { System.out.println( "FlatArray.incrementArray: input array does not hold doubles. 
Convert to doubles before supplying argument."); return false; } for (k = 0; k < len; k++) { dataImag[k] = si; } fli.restoreArray(); } } return true; } /** * Class method that adds the given complex number (supplied as two doubles in the last two arguments) to the given * complex data array (supplied in the first two arguments, containing any primitive arithmetic types) and returns * an ArrayList holding the real and imaginary parts of the results as new arrays of doubles of the same * dimensionality as the given arrays. </p><p> If the data are not arithmetic data then the method returns a null * ArrayList. If the input data are real (second argument <i>null</i>) and the imaginary part of the increment * variable (fourth argument) is zero, then the second component of the returned ArrayList will be <i>null</i>. If * the input data are real but the imaginary part of the increment variable is non-zero, then the output data will * be complex. * * @param or The real part of the array data * @param oi The imaginary part of the array data (<i>null</i> if data are real) * @param sr The real part of the increment number * @param si The imaginary part of the increment number (zero if increment is real) * @return containing the real and imaginary parts of the incremented array (second element <i>null</i> if result is * real) */ public static ArrayList incrementCopyOfArray(Object or, Object oi, double sr, double si) { ArrayList answer = null; Object nr = multiArrayCopy(or); Object ni = null; if (oi != null) { ni = multiArrayCopy(oi); } else if (si != 0) { ni = multiArrayImitate(or); } boolean result = incrementArray(nr, ni, sr, si); if (result) { answer = new ArrayList(2); answer.add(nr); answer.add(ni); } return answer; } /** * Class method that adds the given real number to the given data array and returns a boolean <i>true</i> value to * indicate success. Arithmetic is done in-place, <i>i.e.</i> the supplied array is simply modified. 
</p><p> If the * data are not arithmetic data then the method returns <i>false</i>. * * @param o The array data, which must contain doubles * @param s The increment number * @return True if the method succeeds */ public static boolean incrementArray(Object o, double s) { return incrementArray(o, null, s, 0.0); } /** * Class method that multiplies the given complex data array (supplied in the first two arguments, containing any * primitive arithmetic types) by the given complex number (supplied as two doubles in the last two arguments) and * returns a boolean <i>true</i> value to indicate success. Arithmetic is done in-place, <i>i.e.</i> the supplied * arrays are simply modified. </p><p> If the data are not arithmetic data then the method returns <i>false</i>. If * the input data are real (second argument <i>null</i>) but the imaginary part of the scale variable (fourth * argument) is non-zero, then the method returns <i>false</i>. * * @param or The real part of the array data * @param oi The imaginary part of the array data (<i>null</i> if data are real) * @param sr The real part of the scaling number * @param si The imaginary part of the scaling number (zero if scale is real) * @return True if the method succeeds */ public static boolean scaleArray(Object or, Object oi, double sr, double si) { int k, len; FlatArray flr = null; FlatArray fli = null; Object ar, ai; double[] dataReal = null; double[] dataImag = null; ; if (!isArithmeticArray(or)) { return false; } boolean isComplexData = (oi != null); boolean isComplexScale = (si != 0); if (!isComplexData && isComplexScale) { return false; } if (findNumberOfDimensions(or) == 1) { if (or instanceof double[]) { dataReal = (double[]) or; } else { System.out.println( "FlatArray.scaleArray: input array does not hold doubles. 
Convert to doubles before supplying argument."); return false; } len = dataReal.length; dataImag = null; if (isComplexData) { if (oi instanceof double[]) { dataImag = (double[]) oi; } else { System.out.println( "FlatArray.scaleArray: input array does not hold doubles. Convert to doubles before supplying argument."); return false; } } else if (isComplexScale) { dataImag = new double[len]; } if (!isComplexScale) { for (k = 0; k < len; k++) { dataReal[k] *= sr; } if (isComplexData) { for (k = 0; k < len; k++) { dataImag[k] = sr * dataImag[k]; } } } else { double scratch; for (k = 0; k < len; k++) { scratch = dataReal[k]; dataReal[k] *= sr; dataReal[k] -= si * dataImag[k]; dataImag[k] *= sr; dataImag[k] += si * scratch; } } } else { flr = new FlatArray(or); ar = flr.getFlatArray(); len = Array.getLength(ar); if (ar instanceof double[]) { dataReal = (double[]) ar; } else { System.out.println( "FlatArray.scaleArray: input array does not hold doubles. Convert to doubles before supplying argument."); return false; } if (isComplexData) { fli = new FlatArray(oi); ai = fli.getFlatArray(); if (ai instanceof double[]) { dataImag = (double[]) ai; } else { System.out.println( "FlatArray.scaleArray: input array does not hold doubles. 
Convert to doubles before supplying argument."); return false; } } if (!isComplexScale) { for (k = 0; k < len; k++) { dataReal[k] *= sr; } flr.restoreArray(); if (isComplexData) { for (k = 0; k < len; k++) { dataImag[k] *= sr; } fli.restoreArray(); } } else { double scratch; for (k = 0; k < len; k++) { scratch = dataReal[k]; dataReal[k] *= sr; dataReal[k] -= si * dataImag[k]; dataImag[k] *= sr; dataImag[k] += si * scratch; } flr.restoreArray(); fli.restoreArray(); } } return true; } /** * Class method that multiplies the given complex data array (supplied in the first two arguments, containing any * primitive arithmetic types) by the given complex number (supplied as two doubles in the last two arguments) and * returns an ArrayList holding the real and imaginary parts of the results as new arrays of doubles of the same * dimensionality as the given arrays. </p><p> If the data are not arithmetic data then the method returns a null * ArrayList. If the input data are real (second argument <i>null</i>) and the imaginary part of the scale variable * (fourth argument) is zero, then the second component of the returned ArrayList will be <i>null</i>. If the input * data are real but the imaginary part of the scale variable is non-zero, then the output data will be complex. 
* * @param or The real part of the array data * @param oi The imaginary part of the array data (<i>null</i> if data are real) * @param sr The real part of the scaling number * @param si The imaginary part of the scaling number (zero if scale is real) * @return containing the real and imaginary parts of the rescaled array (second element <i>null</i> if result is * real) */ public static ArrayList scaleCopyOfArray(Object or, Object oi, double sr, double si) { ArrayList answer = null; Object nr = multiArrayCopy(or); Object ni = null; if (oi != null) { ni = multiArrayCopy(oi); } else if (si != 0) { ni = multiArrayImitate(or); } boolean result = scaleArray(nr, ni, sr, si); if (result) { answer = new ArrayList(2); answer.add(nr); answer.add(ni); } return answer; } /** * Class method that multiplies the given real data array by the given real number and returns a boolean * <i>true</i> value to indicate success. Arithmetic is done in-place, <i>i.e.</i> the supplied array is simply * modified. </p><p> If the data are not arithmetic data then the method returns <i>false</i>. * * @param o The array data, which must contain doubles * @param s The increment number * @return True if the method succeeds */ public static boolean scaleArray(Object o, double s) { return scaleArray(o, null, s, 0); } /** * Class method maxArray returns a double containing the maximum value of all the elements of the given array, if * its elements are of an arithmetic data type, as determined by method <i>isArithmeticArray</i>. If the array * contains primitive types other than doubles, the returned value is converted to a double. If the data are not * arithmetic, then <i>Double.NaN</i> is returned. 
* * @param in The array to be tested * @return The maximum value of the elements of the given array */ public static double maxArray(Object in) { int len, k; FlatArray fl; Object o; double d; int i; long l; double m; String name; if (isArithmeticArray(in)) { fl = new FlatArray(in); name = fl.getComponentName(); o = fl.getFlatArray(); len = Array.getLength(o); if ((Byte.TYPE).getName().equals(name)) { byte[] ia = (byte[]) o; i = (int) ia[0]; for (k = 1; k < len; k++) { i = Math.max(i, (int) ia[k]); } m = (double) i; } else if ((Double.TYPE).getName().equals(name)) { double[] da = (double[]) o; d = da[0]; for (k = 1; k < len; k++) { d = Math.max(d, da[k]); } m = d; } else if ((Float.TYPE).getName().equals(name)) { float[] fa = (float[]) o; d = (double) fa[0]; for (k = 1; k < len; k++) { d = Math.max(d, (double) fa[k]); } m = d; } else if ((Integer.TYPE).getName().equals(name)) { int[] ia = (int[]) o; i = ia[0]; for (k = 1; k < len; k++) { i = Math.max(i, ia[k]); } m = (double) i; } else if ((Long.TYPE).getName().equals(name)) { long[] la = (long[]) o; l = la[0]; for (k = 1; k < len; k++) { l = Math.max(l, la[k]); } m = (double) l; } else if ((Short.TYPE).getName().equals(name)) { short[] sa = (short[]) o; i = (int) sa[0]; for (k = 1; k < len; k++) { i = Math.max(i, (int) sa[k]); } m = (double) i; } else { m = Double.NaN; } } else { m = Double.NaN; } return m; } /** * Class method that returns a double containing the minimum value of all the elements of the given array, if its * elements are of an arithmetic data type, as determined by method <i>isArithmeticArray</i>. If the array contains * primitive types other than doubles, the returned value is converted to a double. If the data are not arithmetic, * then <i>Double.NaN</i> is returned. 
* * @param in The array to be tested * @return double The minimum value of the elements of the given array */ public static double minArray(Object in) { int len, k; FlatArray fl; Object o; double d; int i; long l; double m; String name; if (isArithmeticArray(in)) { fl = new FlatArray(in); name = fl.getComponentName(); o = fl.getFlatArray(); len = Array.getLength(o); if ((Byte.TYPE).getName().equals(name)) { byte[] ia = (byte[]) o; i = (int) ia[0]; for (k = 1; k < len; k++) { i = Math.min(i, (int) ia[k]); } m = (double) i; } else if ((Double.TYPE).getName().equals(name)) { double[] da = (double[]) o; d = da[0]; for (k = 1; k < len; k++) { d = Math.min(d, da[k]); } m = d; } else if ((Float.TYPE).getName().equals(name)) { float[] fa = (float[]) o; d = (double) fa[0]; for (k = 1; k < len; k++) { d = Math.min(d, (double) fa[k]); } m = d; } else if ((Integer.TYPE).getName().equals(name)) { int[] ia = (int[]) o; i = ia[0]; for (k = 1; k < len; k++) { i = Math.min(i, ia[k]); } m = (double) i; } else if ((Long.TYPE).getName().equals(name)) { long[] la = (long[]) o; l = la[0]; for (k = 1; k < len; k++) { l = Math.min(l, la[k]); } m = (double) l; } else if ((Short.TYPE).getName().equals(name)) { short[] sa = (short[]) o; i = (int) sa[0]; for (k = 1; k < len; k++) { i = Math.min(i, (int) sa[k]); } m = (double) i; } else { m = Double.NaN; } } else { m = Double.NaN; } return m; } /** * Class method that tests to see if the given arrays are of a similar type: both Objects are indeed arrays, they * have the same dimensionality, and their sizes in the different dimensions are the same. Finally it tests that * their types are the same. 
* * @param a1 The first array to be compared * @param a2 The second array to be compared * @return <i>True</i> if the arrays are similar */ public static boolean similarArrays(Object a1, Object a2) { if (a1 == null || a2 == null) { return false; } if ((!a1.getClass().isArray()) || (!a2.getClass().isArray())) { return false; } if (!a1.getClass().getName().equals(a2.getClass().getName())) { return false; } int[] d1 = findArrayDimensionLengths(a1); int[] d2 = findArrayDimensionLengths(a2); ; for (int k = 0; k < d1.length; k++) { if (d1[k] != d2[k]) { return false; } } return true; } /** * Class method that tests the elements of the two given arrays. If the arrays have the same dimensionality and * size, if the elements are primitive Java data types, and if all the elements of one array equal those of the * other array at the same position, then the method returns <i>true</i>. Otherwise it returns <i>false</i>. If the * given objects are not arrays, or are arrays of non-primitive objects, then the method also returns <i>false</i>. 
* * @param a1 The first array to be compared * @param a2 The second array * @return <i>True</i> if the two arrays are equal element-by-element */ public static boolean equalArrays(Object a1, Object a2) { if (!similarArrays(a1, a2)) { return false; } if ((!isPrimitiveArray(a1)) || (!isPrimitiveArray(a2))) { return false; } int k, len; FlatArray fl1, fl2; Object o1, o2; String name1, name2; fl1 = new FlatArray(a1); name1 = fl1.getComponentName(); o1 = fl1.getFlatArray(); fl2 = new FlatArray(a2); name2 = fl2.getComponentName(); o2 = fl2.getFlatArray(); len = Array.getLength(o1); if (!name1.equals(name2)) { return false; } if ((Byte.TYPE).getName().equals(name1)) { byte[] b1 = (byte[]) o1; byte[] b2 = (byte[]) o2; for (k = 0; k < len; k++) { if (b1[k] != b2[k]) { return false; } } o1 = b1; o2 = b2; } else if ((Double.TYPE).getName().equals(name1)) { double[] d1 = (double[]) o1; double[] d2 = (double[]) o2; for (k = 0; k < len; k++) { if (d1[k] != d2[k]) { return false; } } o1 = d1; o2 = d2; } else if ((Float.TYPE).getName().equals(name1)) { float[] f1 = (float[]) o1; float[] f2 = (float[]) o2; for (k = 0; k < len; k++) { if (f1[k] != f2[k]) { return false; } } o1 = f1; o2 = f2; } else if ((Integer.TYPE).getName().equals(name1)) { int[] i1 = (int[]) o1; int[] i2 = (int[]) o2; for (k = 0; k < len; k++) { if (i1[k] != i2[k]) { return false; } } o1 = i1; o2 = i2; } else if ((Long.TYPE).getName().equals(name1)) { long[] l1 = (long[]) o1; long[] l2 = (long[]) o2; for (k = 0; k < len; k++) { if (l1[k] != l2[k]) { return false; } } o1 = l1; o2 = l2; } else if ((Short.TYPE).getName().equals(name1)) { short[] s1 = (short[]) o1; short[] s2 = (short[]) o2; for (k = 0; k < len; k++) { if (s1[k] != s2[k]) { return false; } } o1 = s1; o2 = s2; } return true; } /** * Class method that sets the elements of the given array to zero. If the given object is not an array, or is an * array of non-arithmetic elements, then the method returns without doing anything. 
* * @param a The array to be initialized */ public static void initializeArray(Object a) { if (!a.getClass().isArray()) { return; } if (!isArithmeticArray(a)) { return; } int k; FlatArray fl = new FlatArray(a); String name = fl.getComponentName(); Object o = fl.getFlatArray(); int len = Array.getLength(o); if ((Byte.TYPE).getName().equals(name)) { byte[] b = (byte[]) o; for (k = 0; k < len; k++) { b[k] = (byte) 0; } o = b; } else if ((Double.TYPE).getName().equals(name)) { double[] d = (double[]) o; for (k = 0; k < len; k++) { d[k] = 0.0; } o = d; } else if ((Float.TYPE).getName().equals(name)) { float[] f = (float[]) o; for (k = 0; k < len; k++) { f[k] = (float) 0.0; } o = f; } else if ((Integer.TYPE).getName().equals(name)) { int[] i = (int[]) o; for (k = 0; k < len; k++) { i[k] = 0; } o = i; } else if ((Long.TYPE).getName().equals(name)) { long[] l = (long[]) o; for (k = 0; k < len; k++) { l[k] = (long) 0; } o = l; } else if ((Short.TYPE).getName().equals(name)) { short[] s = (short[]) o; for (k = 0; k < len; k++) { s[k] = (short) 0; } o = s; } fl.restoreArray(); } /** * Class method that adds the elements of the two given arrays. If the arrays have the same dimensionality and size, * and if the elements are arithmetic Java data types, then the method adds the two arrays and returns an Object of * the same type. The arithmetic is done in-place in the first argument, so the returned value is simply a reference * to the first argument. If a new returned array is desired, then the first argument should be a copy of the input * array, using method <i>multiArrayCopy</i>. </p><p> If the input arrays are not the same dimensionality and size, * or if at least one of them does not contain arithmetic data types, then this method returns <i>null</i>. If the * given objects are not arrays, or are arrays of non-primitive objects, then the method also returns <i>null</i>. 
* * @param a1 The first array to be added * @param a2 The second array to be added * @return The sum of the two arrays element-by-element */ public static Object addArrays(Object a1, Object a2) { if (!similarArrays(a1, a2)) { return null; } if ((!isArithmeticArray(a1)) || (!isArithmeticArray(a2))) { return null; } int k; FlatArray fl1 = new FlatArray(a1); String name = fl1.getComponentName(); Object o1 = fl1.getFlatArray(); FlatArray fl2 = new FlatArray(a2); Object o2 = fl2.getFlatArray(); int len = Array.getLength(o1); Class component = o1.getClass().getComponentType(); if ((Byte.TYPE).getName().equals(name)) { byte[] b1 = (byte[]) o1; byte[] b2 = (byte[]) o2; for (k = 0; k < len; k++) { b1[k] += b2[k]; } } else if ((Double.TYPE).getName().equals(name)) { System.out.println("FlatArray add: adding doubles."); double[] d1 = (double[]) o1; double[] d2 = (double[]) o2; for (k = 0; k < len; k++) { d1[k] += d2[k]; } } else if ((Float.TYPE).getName().equals(name)) { float[] f1 = (float[]) o1; float[] f2 = (float[]) o2; for (k = 0; k < len; k++) { f1[k] += f2[k]; } } else if ((Integer.TYPE).getName().equals(name)) { int[] i1 = (int[]) o1; int[] i2 = (int[]) o2; for (k = 0; k < len; k++) { i1[k] += i2[k]; } } else if ((Long.TYPE).getName().equals(name)) { long[] l1 = (long[]) o1; long[] l2 = (long[]) o2; for (k = 0; k < len; k++) { l1[k] += l2[k]; } } else if ((Short.TYPE).getName().equals(name)) { short[] s1 = (short[]) o1; short[] s2 = (short[]) o2; for (k = 0; k < len; k++) { s1[k] += s2[k]; } } System.out.println("FlatArray add: name of returned array is " + a1.getClass().getName()); fl1.restoreArray(); return a1; } /** * Class method that subtracts the elements of the second given array from those of the first. If the arrays have * the same dimensionality and size, and if the elements are arithmetic Java data types, then the method subtracts * the second array from the first and returns an Object of the same type. 
The arithmetic is done in-place in the * first argument, so the returned value is simply a reference to the first argument. If a new returned array is * desired, then the first argument should be a copy of the input array, using method <i>multiArrayCopy</i>. </p><p> * If the input arrays are not the same dimensionality and size, or if at least one of them does not contain * arithmetic data types, then this method returns <i>null</i>. If the given objects are not arrays, or are arrays * of non-primitive objects, then the method also returns <i>null</i>. * * @param a1 The array to be subtracted from * @param a2 The array to be subtracted * @return The result of subtracting the second array from the first element-by-element */ public static Object subtractArrays(Object a1, Object a2) { if (!similarArrays(a1, a2)) { return null; } if ((!isArithmeticArray(a1)) || (!isArithmeticArray(a2))) { return null; } int k; FlatArray fl1 = new FlatArray(a1); String name = fl1.getComponentName(); Object o1 = fl1.getFlatArray(); FlatArray fl2 = new FlatArray(a2); Object o2 = fl2.getFlatArray(); int len = Array.getLength(o1); Class component = o1.getClass().getComponentType(); if ((Byte.TYPE).getName().equals(name)) { byte[] b1 = (byte[]) o1; byte[] b2 = (byte[]) o2; for (k = 0; k < len; k++) { b1[k] -= b2[k]; } } else if ((Double.TYPE).getName().equals(name)) { double[] d1 = (double[]) o1; double[] d2 = (double[]) o2; for (k = 0; k < len; k++) { d1[k] -= d2[k]; } } else if ((Float.TYPE).getName().equals(name)) { float[] f1 = (float[]) o1; float[] f2 = (float[]) o2; for (k = 0; k < len; k++) { f1[k] -= f2[k]; } } else if ((Integer.TYPE).getName().equals(name)) { int[] i1 = (int[]) o1; int[] i2 = (int[]) o2; for (k = 0; k < len; k++) { i1[k] -= i2[k]; } } else if ((Long.TYPE).getName().equals(name)) { long[] l1 = (long[]) o1; long[] l2 = (long[]) o2; for (k = 0; k < len; k++) { l1[k] -= l2[k]; } } else if ((Short.TYPE).getName().equals(name)) { short[] s1 = (short[]) o1; short[] s2 = 
(short[]) o2; for (k = 0; k < len; k++) { s1[k] -= s2[k]; } } fl1.restoreArray(); return a1; } /** * Class method that multiplies the individual elements of two complex arrays, whose real and imaginary parts are * the four given arrays. Corresponding elements are multiplied. Thus, this is <i>not</i> a version of matrix * multiplication. If the arrays have the same dimensionality and size, and if the elements are arithmetic Java data * types, then the method multiplies the arrays and returns an ArrayList containing two arrays of the same type, the * real and imaginary parts of the result. The arithmetic is done in-place in the first 2 arguments, so the returned * values are simply references to these arguments. If new returned arrays are desired, then the first 2 arguments * should be copies of the input arrays, using method <i>multiArrayCopy</i>. </p><p> If the input arrays are not all * the same dimensionality and size, or if at least one of them does not contain arithmetic data types, the this * method returns <i>null</i>. If the given objects are not arrays, or are arrays of non-primitive objects, then the * method also returns <i>null</i>. 
* * @param a1r The real part of the first array to be multiplied * @param a1i The imaginary part of the first array to be multiplied * @param a2r The real part of the second array to be multiplied * @param a2i The imaginary part of the second array to be multiplied * @return The product of the first 2 arrays by the second two, using element-by-element complex multiplication */ public static ArrayList multiplyArrays(Object a1r, Object a1i, Object a2r, Object a2i) { if (!similarArrays(a1r, a2r)) { return null; } if ((!isArithmeticArray(a1r)) || (!isArithmeticArray(a2r))) { return null; } if (a1i != null) { if (!similarArrays(a1r, a1i)) { return null; } if (!isArithmeticArray(a1i)) { return null; } } if (a2i != null) { if (!similarArrays(a1r, a2i)) { return null; } if (!isArithmeticArray(a2i)) { return null; } } boolean complex = ((a1i != null) || (a2i != null)); ArrayList answer = new ArrayList(2); FlatArray fl1r = new FlatArray(a1r); String name = fl1r.getComponentName(); Object o1r = fl1r.getFlatArray(); FlatArray fl2r = new FlatArray(a2r); Object o2r = fl2r.getFlatArray(); FlatArray fl1i = null; FlatArray fl2i = null; Object o1i = null; Object o2i = null; Class component = o1r.getClass().getComponentType(); int len = Array.getLength(o1r); if (a1i != null) { fl1i = new FlatArray(a1i); o1i = fl1i.getFlatArray(); } else if (complex) { o1i = Array.newInstance(component, len); initializeArray(o1i); } if (a2i != null) { fl2i = new FlatArray(a2i); o2i = fl2i.getFlatArray(); } else if (complex) { o2i = Array.newInstance(component, len); initializeArray(o2i); } int k; if ((Byte.TYPE).getName().equals(name)) { byte[] b1r = (byte[]) o1r; byte[] b1i = (byte[]) o1i; byte[] b2r = (byte[]) o2r; byte[] b2i = (byte[]) o2i; if (complex) { byte scratch; for (k = 0; k < len; k++) { scratch = b1r[k]; b1r[k] = (byte) (b1r[k] * b2r[k] - b1i[k] * b2i[k]); b1i[k] = (byte) (scratch * b2i[k] + b2r[k] * b1i[k]); } } else { for (k = 0; k < len; k++) { b1r[k] *= b2r[k]; } } } else if 
((Double.TYPE).getName().equals(name)) { double[] d1r = (double[]) o1r; double[] d1i = (double[]) o1i; double[] d2r = (double[]) o2r; double[] d2i = (double[]) o2i; if (complex) { double scratch; for (k = 0; k < len; k++) { scratch = d1r[k]; d1r[k] = d1r[k] * d2r[k] - d1i[k] * d2i[k]; d1i[k] = scratch * d2i[k] + d2r[k] * d1i[k]; } } else { for (k = 0; k < len; k++) { d1r[k] *= d2r[k]; } } } else if ((Float.TYPE).getName().equals(name)) { float[] f1r = (float[]) o1r; float[] f1i = (float[]) o1i; float[] f2r = (float[]) o2r; float[] f2i = (float[]) o2i; if (complex) { float scratch; for (k = 0; k < len; k++) { scratch = f1r[k]; f1r[k] = (f1r[k] * f2r[k] - f1i[k] * f2i[k]); f1i[k] = (scratch * f2i[k] + f2r[k] * f1i[k]); } } else { for (k = 0; k < len; k++) { f1r[k] *= f2r[k]; } } } else if ((Integer.TYPE).getName().equals(name)) { int[] i1r = (int[]) o1r; int[] i1i = (int[]) o1i; int[] i2r = (int[]) o2r; int[] i2i = (int[]) o2i; if (complex) { int scratch; for (k = 0; k < len; k++) { scratch = i1r[k]; i1r[k] = i1r[k] * i2r[k] - i1i[k] * i2i[k]; i1i[k] = scratch * i2i[k] + i2r[k] * i1i[k]; } } else { for (k = 0; k < len; k++) { i1r[k] *= i2r[k]; } } } else if ((Long.TYPE).getName().equals(name)) { long[] l1r = (long[]) o1r; long[] l1i = (long[]) o1i; long[] l2r = (long[]) o2r; long[] l2i = (long[]) o2i; if (complex) { long scratch; for (k = 0; k < len; k++) { scratch = l1r[k]; l1r[k] = (l1r[k] * l2r[k] - l1i[k] * l2i[k]); l1i[k] = (scratch * l2i[k] + l2r[k] * l1i[k]); } } else { for (k = 0; k < len; k++) { l1r[k] *= l2r[k]; } } } else if ((Short.TYPE).getName().equals(name)) { short[] s1r = (short[]) o1r; short[] s1i = (short[]) o1i; short[] s2r = (short[]) o2r; short[] s2i = (short[]) o2i; if (complex) { short scratch; for (k = 0; k < len; k++) { scratch = s1r[k]; s1r[k] = (short) (s1r[k] * s2r[k] - s1i[k] * s2i[k]); s1i[k] = (short) (scratch * s2i[k] + s2r[k] * s1i[k]); } } else { for (k = 0; k < len; k++) { s1r[k] *= s2r[k]; } } } fl1r.restoreArray(); 
answer.add(a1r); if (complex) { fl1i.restoreArray(); answer.add(a1i); } else { answer.add(null); } return answer; } /** * Class method that multiplies the individual elements of the two given arrays. Corresponding elements are * multiplied. Thus, this is <i>not</i> a version of matrix multiplication. If the arrays have the same * dimensionality and size, and if the elements are arithmetic Java data types, then the method multiplies the two * arrays and returns the result, an Object that is an array of the same type. The arithmetic is done in-place in * the first argument, so the returned value is simply a reference to the first argument. If a new returned array * is desired, then the first argument should be a copy of the input array, using method <i>multiArrayCopy</i>. * </p><p> If the input arrays are not all the same dimensionality and size, or if at least one of them does not * contain arithmetic data types, the this method returns <i>null</i>. If the given objects are not arrays, or are * arrays of non-primitive objects, then the method also returns <i>null</i>. * * @param a1 The first array to be multiplied * @param a2 The second array to be multiplied * @return The product of the two arrays element-by-element */ public static Object multiplyArrays(Object a1, Object a2) { ArrayList answer = multiplyArrays(a1, null, a2, null); return answer.get(0); } /** * Class method that divides the individual elements of two complex arrays, whose real and imaginary parts are the * four given arrays. Corresponding elements of the first set of arrays are divided by the elements of the second * set. Integer data types use integer division. If remaindered division is required, convert all arguments to a * floating type first, using for example the method <i>toDoubleArray</i>. 
If the arrays have the same * dimensionality and size, and if the elements are arithmetic Java data types, then the method divides the arrays * and returns an ArrayList containing two arrays of the same type, the real and imaginary parts of the result. The * arithmetic is done in-place in the first 2 arguments, so the returned values are simply references to these * arguments. If new returned arrays are desired, then the first 2 arguments should be copies of the input arrays, * using method <i>multiArrayCopy</i>. </p><p> If the input arrays are not all the same dimensionality and size, or * if at least one of them does not contain arithmetic data types, the this method returns <i>null</i>. If the given * objects are not arrays, or are arrays of non-primitive objects, then the method also returns <i>null</i>. * * @param a1r The real part of the numerator array * @param a1i The imaginary part of the numeratorarray * @param a2r The real part of the denominator array * @param a2i The imaginary part of the denominator array * @return The quotient of the first 2 arrays by the second two, using element-by-element complex division */ public static ArrayList divideArrays(Object a1r, Object a1i, Object a2r, Object a2i) { if (!similarArrays(a1r, a2r)) { return null; } if ((!isArithmeticArray(a1r)) || (!isArithmeticArray(a2r))) { return null; } if (a1i != null) { if (!similarArrays(a1r, a1i)) { return null; } if (!isArithmeticArray(a1i)) { return null; } } if (a2i != null) { if (!similarArrays(a1r, a2i)) { return null; } if (!isArithmeticArray(a2i)) { return null; } } boolean complex = ((a1i != null) || (a2i != null)); ArrayList answer = new ArrayList(2); FlatArray fl1r = new FlatArray(a1r); String name = fl1r.getComponentName(); Object o1r = fl1r.getFlatArray(); FlatArray fl2r = new FlatArray(a2r); Object o2r = fl2r.getFlatArray(); FlatArray fl1i = null; FlatArray fl2i = null; Object o1i = null; Object o2i = null; Class component = o1r.getClass().getComponentType(); int len = 
Array.getLength(o1r); if (a1i != null) { fl1i = new FlatArray(a1i); o1i = fl1i.getFlatArray(); } else if (complex) { o1i = Array.newInstance(component, len); initializeArray(o1i); } if (a2i != null) { fl2i = new FlatArray(a2i); o2i = fl2i.getFlatArray(); } else if (complex) { o2i = Array.newInstance(component, len); initializeArray(o2i); } int k; if ((Byte.TYPE).getName().equals(name)) { byte[] b1r = (byte[]) o1r; byte[] b1i = (byte[]) o1i; byte[] b2r = (byte[]) o2r; byte[] b2i = (byte[]) o2i; if (complex) { byte denom, scratch; for (k = 0; k < len; k++) { scratch = b1r[k]; denom = (byte) (b2r[k] * b2r[k] + b2i[k] * b2i[k]); b1r[k] = (byte) ((b1r[k] * b2r[k] + b1i[k] * b2i[k]) / denom); b1i[k] = (byte) ((scratch * b2i[k] - b2r[k] * b1i[k]) / denom); } } else { for (k = 0; k < len; k++) { b1r[k] /= b2r[k]; } } } else if ((Double.TYPE).getName().equals(name)) { double[] d1r = (double[]) o1r; double[] d1i = (double[]) o1i; double[] d2r = (double[]) o2r; double[] d2i = (double[]) o2i; if (complex) { double denom, scratch; for (k = 0; k < len; k++) { scratch = d1r[k]; denom = d2r[k] * d2r[k] + d2i[k] * d2i[k]; d1r[k] = (d1r[k] * d2r[k] + d1i[k] * d2i[k]) / denom; d1i[k] = (scratch * d2i[k] - d2r[k] * d1i[k]) / denom; } } else { for (k = 0; k < len; k++) { d1r[k] /= d2r[k]; } } } else if ((Float.TYPE).getName().equals(name)) { float[] f1r = (float[]) o1r; float[] f1i = (float[]) o1i; float[] f2r = (float[]) o2r; float[] f2i = (float[]) o2i; if (complex) { float denom, scratch; for (k = 0; k < len; k++) { scratch = f1r[k]; denom = (f2r[k] * f2r[k] + f2i[k] * f2i[k]); f1r[k] = ((f1r[k] * f2r[k] + f1i[k] * f2i[k]) / denom); f1i[k] = ((scratch * f2i[k] - f2r[k] * f1i[k]) / denom); } } else { for (k = 0; k < len; k++) { f1r[k] /= f2r[k]; } } } else if ((Integer.TYPE).getName().equals(name)) { int[] i1r = (int[]) o1r; int[] i1i = (int[]) o1i; int[] i2r = (int[]) o2r; int[] i2i = (int[]) o2i; if (complex) { int denom, scratch; for (k = 0; k < len; k++) { scratch = i1r[k]; denom 
= i2r[k] * i2r[k] + i2i[k] * i2i[k]; i1r[k] = (i1r[k] * i2r[k] + i1i[k] * i2i[k]) / denom; i1i[k] = (scratch * i2i[k] - i2r[k] * i1i[k]) / denom; } } else { for (k = 0; k < len; k++) { i1r[k] /= i2r[k]; } } } else if ((Long.TYPE).getName().equals(name)) { long[] l1r = (long[]) o1r; long[] l1i = (long[]) o1i; long[] l2r = (long[]) o2r; long[] l2i = (long[]) o2i; if (complex) { long denom, scratch; for (k = 0; k < len; k++) { scratch = l1r[k]; denom = (l2r[k] * l2r[k] + l2i[k] * l2i[k]); l1r[k] = ((l1r[k] * l2r[k] + l1i[k] * l2i[k]) / denom); l1i[k] = ((scratch * l2i[k] - l2r[k] * l1i[k]) / denom); } } else { for (k = 0; k < len; k++) { l1r[k] /= l2r[k]; } } } else if ((Short.TYPE).getName().equals(name)) { short[] s1r = (short[]) o1r; short[] s1i = (short[]) o1i; short[] s2r = (short[]) o2r; short[] s2i = (short[]) o2i; if (complex) { short denom, scratch; for (k = 0; k < len; k++) { scratch = s1r[k]; denom = (short) (s2r[k] * s2r[k] + s2i[k] * s2i[k]); s1r[k] = (short) ((s1r[k] * s2r[k] + s1i[k] * s2i[k]) / denom); s1i[k] = (short) ((scratch * s2i[k] - s2r[k] * s1i[k]) / denom); } } else { for (k = 0; k < len; k++) { s1r[k] /= s2r[k]; } } } fl1r.restoreArray(); answer.add(a1r); if (complex) { fl1i.restoreArray(); answer.add(a1i); } else { answer.add(null); } return answer; } /** * Class method that divides the individual elements of two arrays. Corresponding elements of the first array are * divided by the elements of the second. Integer data types use integer division. If remaindered division is * required, convert both arguments to a floating type first, using for example the method <i>toDoubleArray</i>. If * the arrays have the same dimensionality and size, and if the elements are arithmetic Java data types, then the * method divides the arrays and returns the result as an Object that is an array of the same type. The arithmetic * is done in-place in the first 2 arguments, so the returned value is simply a reference to the first argument. 
If * a new returned array is desired, then the first argument should be a copy of the input array, using method * <i>multiArrayCopy</i>. </p><p> If the input arrays are not the same dimensionality and size, or if at least one * of them does not contain arithmetic data types, the this method returns <i>null</i>. If the given objects are not * arrays, or are arrays of non-primitive objects, then the method also returns <i>null</i>. * * @param a1 The numerator array * @param a2 The denominator array * @return The quotient of the two arrays element-by-element */ public static Object divideArrays(Object a1, Object a2) { ArrayList answer = divideArrays(a1, null, a2, null); return answer.get(0); } /* * Class methods to assist manipulating spectra. In this version of * FlatArray these do not deal with multi-dimensional arrays, so * they are not really related to the other utilities here. But * in future releases they will be upgraded to use the facilities * of FlatArray to deal with multi-dimensional spectra. The * upgrade should consist of new methods over-loading the same * name with different arguments. */ /** * Class method that converts a full-bandwidth spectrum to a narrow-band spectrum. It returns a double[] array * containing only the elements of the original array that are required by the Triana data model for spectra. The * input array can be either the real or the imaginary part of the spectrum. The bandwidth is determined by the * integer indices of the edges of the band in the input array; these are the indices of the positive-frequency * elements in the full-bandwidth spectrum. The method assumes that the data obeys the Triana spectral data model, * but it does not check this. 
* * @param fullArray The input full-bandwidth spectrum * @param oneSide <i>True</i> if the input spectrum in one-sided * @param low The index of the lower edge of the returned band * @param high The index of the higher edge of the returned band * @return The corresponding narrow-band spectrum * @see triana.types.Spectrum * @see triana.types.ComplexSpectrum */ public static double[] convertToNarrowBand(double[] fullArray, boolean oneSide, int low, int high) { double[] narrowArray = fullArray; // fool the compiler about initalization int lenFull = fullArray.length; int bandwidth = high - low + 1; int topwidth = bandwidth; int starttop; boolean even = (lenFull % 2 == 0); if (oneSide) { // one-sided input array narrowArray = new double[bandwidth]; System.arraycopy(fullArray, low, narrowArray, 0, bandwidth); } else { // two-sided input array if (even) { // input array has even number of elements if (low == 0) { // output band contains zero freq topwidth = bandwidth - 1; starttop = lenFull - topwidth; } else if (high == lenFull / 2) { //output band contains top freq topwidth = bandwidth - 1; starttop = high + 1; } else { // output band does not include either end of spectrum topwidth = bandwidth; starttop = lenFull - high; } } else { // input array has odd number of elements if (low == 0) { // output band contains zero freq topwidth = bandwidth - 1; starttop = lenFull - topwidth; } else if (high == (lenFull - 1) / 2) { //output band contains top freq topwidth = bandwidth; starttop = high + 1; } else { // output band does not include either end of spectrum topwidth = bandwidth; starttop = lenFull - high; } } narrowArray = new double[bandwidth + topwidth]; System.arraycopy(fullArray, low, narrowArray, 0, bandwidth); System.arraycopy(fullArray, starttop, narrowArray, bandwidth, topwidth); } return narrowArray; } /** * Class method that takes the given array, assumed to be a two-sided spectrum (either the real or imaginary part) * and returns the associated one-sided 
spectrum. It uses the information in the given integer full (the number of * elements in the full two-sided spectrum, which is not the same as the number of elements of the given array if it * is narrow-band); the given boolean narrowband (the flag that is true if the given array is a narrow-band slice of * a full spectrum); and the given boolean <i>containsZeroFrequency</i> (<i>true</i> if the given array contains the * zero-frequency element of the full spectrum). * <p/> * The storage order is the order given by the Triana storage convention. The method extracts the one-sided spectrum * essentially by copying the positive-frequency elements of the two-sided spectrum. It does not check whether the * negative-frequency elements are appropriately related to the positive-frequency ones. * * @param twoSide The input array containing a two-sided spectrum * @param full The number of elements in the full two-sided spectrum * @param narrowband <i>True</i> if the input array is a narrow-band spectrum * @param containsZeroFrequency <i>True</i> if the zero-frequency element is present in the input array * @return The associated one-sided spectrum * @see triana.types.Spectrum * @see triana.types.ComplexSpectrum */ public static double[] convertToOneSided(double[] twoSide, int full, boolean narrowband, boolean containsZeroFrequency) { boolean fullEven = (full % 2 == 0); int len2 = twoSide.length; boolean inputEven = (len2 % 2 == 0); int len1; double[] oneSide = twoSide; // fool compiler on intialization if (fullEven) { // full data set had an even number of elements if (!narrowband) { // complete spectrum in input data set len1 = len2 / 2 + 1; } else { // input spectrum is narrow-band if (containsZeroFrequency) { // zero freq is in band len1 = (len2 + 1) / 2; } else { // zero freq missing from band if (inputEven) { // the highest freq also missing len1 = len2 / 2; } else { // the highest freq is present in band len1 = (len2 + 1) / 2; } } } } else { // full data set had an odd 
number of elements if (!narrowband) { // complete spectrum in input data set len1 = (len2 + 1) / 2; } else { // narrow-band input data set if (containsZeroFrequency) { // zero frequency is in the band len1 = (len2 + 1) / 2; } else { // zero frequency missing from band len1 = len2 / 2; } } } // System.out.println("FlatArray convertToOneSided: input array of type " + twoSide.getClass().getName() + " with " + String.valueOf(full) + " elements will be reduced to length " + String.valueOf(len1) ); oneSide = new double[len1]; System.arraycopy(twoSide, 0, oneSide, 0, len1); return oneSide; } /** * Class method that converts takes the given spectral data array, which can be one-sided and/or narrow-band, and * which is assumed to be stored according to the Triana spectral data model, and returns the associated two-sided * full-bandwidth data array. The conversion from one-sided to two-sided assumes that the data from which the * spectrum was obtained are real, so that the spectrum at negative frequencies is the complex-conjugate of that at * positive frequencies. In this case it reads the boolean parameter realpart to determine if it is converting the * real part or the imaginary part of the spectrum. The conversion from narrow-band to full-band pads the remaining * parts of the spectrum with zeros; it uses the int parameter low to determine the lowest edge of the given band. * </p><p> The nature of the input array is given by the flags onesided and narrowband. The int argument full is the * number of elements in the returned array. </p><p> The method does not check that the input data obeys the Triana * spectral data model. The user is responsible for ensuring this. 
 *
 * @param input      The input array
 * @param full       The number of data points in the returned array
 * @param onesided   <i>True</i> if the input data are one-sided
 * @param realpart   <i>True</i> if the input data are the real part of a spectrum, <i>false</i> if imaginary part
 * @param narrowband <i>True</i> if the input data are narrow-band
 * @param low        The lower bound on the frequency spectrum
 * @return The data as a two-sided full-bandwidth spectrum
 */
public static double[] convertToFullSpectrum(double[] input, int full, boolean onesided, boolean realpart,
        boolean narrowband, int low) {
    if (!(onesided || narrowband)) {
        return input;
    } // no action needed: input is already two-sided and full-bandwidth
    int high = low;
    int inputLength = input.length;
    double[] fullArray = new double[full];
    boolean evenFull = (full % 2 == 0);
    if (!narrowband) {
        // Full-bandwidth one-sided input: copy the positive frequencies,
        // then mirror the (conjugated) input into the negative-frequency
        // half. NOTE(review): reverseArray mutates "input" in place and it
        // is not restored afterwards — confirm callers tolerate this.
        System.arraycopy(input, 0, fullArray, 0, inputLength);
        reverseArray(input, realpart);
        if (evenFull) { // full spectrum has even no. elems
            System.arraycopy(input, 1, fullArray, inputLength, inputLength - 2);
        } else { // full spectrum has odd no. elems
            System.arraycopy(input, 0, fullArray, inputLength, inputLength - 1);
        }
    } else { // data is narrow-band; pad the rest of the spectrum with zeros
        int bandwidth;
        if (onesided) { // data is one-sided and narrowband
            bandwidth = inputLength;
            high = low + bandwidth - 1;
            System.arraycopy(input, 0, fullArray, low, bandwidth);
            reverseArray(input, realpart);
            // The placement of the mirrored block depends on whether the band
            // touches the zero-frequency or top-frequency end of the spectrum.
            if (evenFull) { // two-sided has even no. elems
                if (low == 0) { // bandwidth begins with zero freq
                    System.arraycopy(input, 0, fullArray, full - inputLength + 1, inputLength - 1);
                } else if (high == full / 2) { // bandwidth goes up to top
                    System.arraycopy(input, 1, fullArray, full / 2 + 1, inputLength - 1);
                } else { // band does not reach bottom or top of spectrum
                    System.arraycopy(input, 0, fullArray, full - high, inputLength);
                }
            } else { // two-sided has odd no. elems
                if (low == 0) { // bandwidth begins with zero freq
                    System.arraycopy(input, 0, fullArray, full - inputLength + 1, inputLength - 1);
                } else if (high == (full - 1) / 2) { // band goes to top
                    System.arraycopy(input, 0, fullArray, low + inputLength, inputLength);
                } else { // band does not reach either top or bottom
                    System.arraycopy(input, 0, fullArray, full - high, inputLength);
                }
            }
        } else { // data is two-sided but narrowband
            boolean evenInput = ((inputLength / 2) * 2 == inputLength);
            if (evenInput) { // two-sided data are symmetrical
                bandwidth = inputLength / 2;
                high = low + bandwidth - 1;
                System.arraycopy(input, 0, fullArray, low, bandwidth);
                System.arraycopy(input, bandwidth, fullArray, full - high, bandwidth);
            } else { // first part of input has one extra val (zero freq or top)
                bandwidth = (inputLength + 1) / 2;
                high = low + bandwidth - 1;
                System.arraycopy(input, 0, fullArray, low, bandwidth);
                if (low == 0) { // input contains zero freq
                    System.arraycopy(input, bandwidth, fullArray, full - high, bandwidth - 1);
                } else { // input contains highest freq
                    System.arraycopy(input, bandwidth, fullArray, full - high + 1, bandwidth - 1);
                }
            }
        }
    }
    return fullArray;
}

/**
 * Class method to test whether a spectral data set has the symmetry property that will result in its inverse
 * transform being pure real or pure imaginary. If, for a full data set (not one-sided spectra, which will
 * automatically transform to real), x[k] = ComplexConjugate(x[N-k]), then the transform will be real. If x[k] =
 * -ComplexConjugate(x[N-k]) then the transform will be imaginary. This test should only be applied to the full data
 * set that will be transformed.
 * <p/>
 * The return value is an int. It has value +1 if the transform will be real, -1 if imaginary, 0 if there is no
 * special symmetry.
 *
 * @param xr The real part of the input data set (can be null)
 * @param xi The imaginary part of the input data set (can be null)
 * @return Takes value 1, 0, or -1 for symmetry, no symmetry, antisymmetry
 */
public static int testConjugateSymmetry(double[] xr, double[] xi) {
    // Assume both symmetries until a counter-example element is found; the
    // scan stops as soon as both are ruled out.
    boolean symmetric = true;
    boolean antisymmetric = true;
    int k, len, lk;
    if (xi == null) {
        if (xr == null) {
            return 0;
        } else {
            // Purely real data: real part must be even (symmetric) or odd
            // (antisymmetric, requiring xr[0] == 0).
            if (xr[0] != 0) {
                antisymmetric = false;
            }
            k = 1;
            len = xr.length;
            while ((k < len / 2) && (symmetric || antisymmetric)) {
                lk = len - k;
                if ((symmetric) && (xr[k] != xr[lk])) {
                    symmetric = false;
                }
                if ((antisymmetric) && (xr[k] != -xr[lk])) {
                    antisymmetric = false;
                }
                k++;
            }
        }
    } else {
        if (xr == null) {
            // Purely imaginary data: imaginary part must be odd for a real
            // transform (requiring xi[0] == 0), even for an imaginary one.
            if (xi[0] != 0) {
                symmetric = false;
            }
            k = 1;
            len = xi.length;
            while ((k < len / 2) && (symmetric || antisymmetric)) {
                lk = len - k;
                if ((symmetric) && (xi[k] != -xi[lk])) {
                    symmetric = false;
                }
                if ((antisymmetric) && (xi[k] != xi[lk])) {
                    antisymmetric = false;
                }
                k++;
            }
        } else {
            // General complex data: check both parts against the conjugate
            // (or negated-conjugate) of the mirrored element.
            if (xr[0] != 0) {
                antisymmetric = false;
            }
            if (xi[0] != 0) {
                symmetric = false;
            }
            k = 1;
            len = xr.length;
            while ((k < len / 2) && (symmetric || antisymmetric)) {
                lk = len - k;
                if ((symmetric) && ((xr[k] != xr[lk]) || (xi[k] != -xi[lk]))) {
                    symmetric = false;
                }
                if ((antisymmetric) && ((xr[k] != -xr[lk]) || (xi[k] != xi[lk]))) {
                    antisymmetric = false;
                }
                k++;
            }
        }
    }
    if (symmetric) {
        return 1;
    }
    if (antisymmetric) {
        return -1;
    }
    return 0;
}

/**
 * Utility method that takes an input double[] array and reverses the order of its elements. If the argument flag
 * <i>plus</i> is <i>false</i>, then it also multiplies the elements of the array by -1. The reversal is done
 * in place.
 */
private static void reverseArray(double[] a, boolean plus) {
    int len = a.length;
    int k, last, kconj;
    double scratch;
    boolean even = (len % 2 == 0);
    last = len - 1;
    if (plus) {
        // Plain in-place reversal by swapping ends toward the middle.
        for (k = 0; k < len / 2; k++) {
            kconj = last - k;
            scratch = a[k];
            a[k] = a[kconj];
            a[kconj] = scratch;
        }
    } else {
        // Reverse and negate; the untouched middle element of an odd-length
        // array must be negated separately.
        for (k = 0; k < len / 2; k++) {
            kconj = last - k;
            scratch = a[k];
            a[k] = -a[kconj];
            a[kconj] = -scratch;
        }
        if (!even) {
            a[len / 2] *= -1;
        }
    }
}

/*
 * Instance methods.
 */

/**
 * Returns the array that holds the flattened data.
 *
 * @return Object The flattened array
 */
public Object getFlatArray() {
    return flatArray;
}

/**
 * Returns the int[] that contains the lengths of the dimensions of the original array.
 *
 * @return int[] The original dimension lengths
 */
public int[] getLengths() {
    return lengths;
}

/**
 * Returns the length of the flattened array, which is the total number of elements in the original array.
 *
 * @return int The total flattened length
 */
public int getFlatLength() {
    return totalLength;
}

/**
 * Returns the name of the type of the components of the flattened array.
 *
 * @return String The name of the components
 */
public String getComponentName() {
    return componentName;
}

/**
 * Sets the Array that holds the flattened data.
* * @param newArray The new flattened array */ public void setFlatArray(Object newArray) { if (newArray.getClass().isArray()) { flatArray = newArray; if (Array.getLength(newArray) != totalLength) { inputObject = null; } } else { flatArray = new Object[1]; Array.set(flatArray, 0, newArray); lengths = new int[1]; lengths[0] = 1; } componentName = flatArray.getClass().getComponentType().getName(); } /** * Sets the int[] that holds the lengths of the original dimensions. * * @param newLengths The new lengths array */ public void setLengths(int[] newLengths) { lengths = newLengths; int totalDims = lengths.length; if (totalDims == 1) { totalLength = lengths[0]; } else { int totalNumbers = lengths[0]; for (int i = 1; i < totalDims; i++) { totalNumbers *= lengths[i]; } totalLength = totalNumbers; } } /** * Sets the name of the type of the component of the flat array. * * @param newComponentName The new type name */ public void setComponentName(String newComponentName) { componentName = newComponentName; } /** * Returns an array of the dimensionality given by <i>lengths</i> and containing the data held in <i>flatArray</i>. * It will restore to the original array that was used to create <i>flatArray</i> if parameter <i>newArray</i> is * <i>false</i>. If the parameter is <i>true</i>, it will create a new array; if the elements of the old array are * primitive types, they will be copied to the new array; if they are reference types the new array will contain * references to them. 
* * @param copy True if the output should be a new object, false if the old object should be restored * @return The new multidimensional array */ public Object restoreArray(boolean copy) { Object o; Class componentClass; int flattenedDims = lengths.length; if (flattenedDims == 1) { o = flatArray; } else if ((copy) || (inputObject == null)) { try { if (componentName.equals("boolean")) { componentClass = Boolean.TYPE; } else if (componentName.equals("char")) { componentClass = Character.TYPE; } else if (componentName.equals("byte")) { componentClass = Byte.TYPE; } else if (componentName.equals("short")) { componentClass = Short.TYPE; } else if (componentName.equals("int")) { componentClass = Integer.TYPE; } else if (componentName.equals("long")) { componentClass = Long.TYPE; } else if (componentName.equals("float")) { componentClass = Float.TYPE; } else if (componentName.equals("double")) { componentClass = Double.TYPE; } else if (componentName.equals("void")) { componentClass = Void.TYPE; } else { componentClass = Class.forName(componentName); } o = Array.newInstance(componentClass, lengths); recurseArrayPack(flatArray, o, lengths, 0, flattenedDims - 1, 0); } catch (ClassNotFoundException ex) { System.out.println("Class with name " + componentName + " does not exist. Return null from FlatArray.restoreArray()."); o = null; } } else { o = inputObject; recurseArrayPack(flatArray, o, lengths, 0, flattenedDims - 1, 0); } return o; } /** * Creates an array of the dimensionality given by lengths and containing the data held in <i>flatArray,/i>. It will * restore to the original array that was used to create <i>flatArray</i>. */ public Object restoreArray() { return restoreArray(false); } }
BartoszMilewski/CodeCoop
co-op/Common/VerificationReport.h
<reponame>BartoszMilewski/CodeCoop #if !defined (VERIFICATIONREPORT_H) #define VERIFICATIONREPORT_H //------------------------------------ // (c) Reliable Software, 1999 - 2008 //------------------------------------ #include "GlobalId.h" #include <MultiMap.h> class VerificationReport { public: enum ItemKind { MissingFolder, // Folder missing from disk AbsentFolder, // Folder missing from the project (in state none when its contents is controlled) MissingCheckedout, MissingNew, MissingReadOnlyAttribute, PreservedLocalEdits, IllegalName, DirtyUncontrolled, SyncAreaOrphan, Corrupted }; private: typedef std::multimap<ItemKind, GlobalId> ItemMap; class IsEqualGid : public std::unary_function<ItemMap::const_iterator, bool> { public: IsEqualGid (GlobalId gid) : _gid (gid) {} bool operator () (std::pair<ItemKind, GlobalId> pair) const { return pair.second == _gid; } private: GlobalId _gid; }; public: typedef MmCountRangeSequencer<std::multimap<ItemKind, GlobalId> > Sequencer; void Remember (VerificationReport::ItemKind itemKind, GlobalId gid) { _report.insert (std::make_pair(itemKind, gid)); } bool IsEmpty () const { return _report.empty (); } bool IsPresent (VerificationReport::ItemKind itemKind, GlobalId gid) const { typedef ItemMap::const_iterator Iterator; std::pair<Iterator, Iterator> range = _report.equal_range (itemKind); if (range.first == range.second) return false; else return std::find_if (range.first, range.second, IsEqualGid (gid)) != range.second; } Sequencer GetSequencer (VerificationReport::ItemKind itemKind) const { return Sequencer (_report.equal_range (itemKind)); } private: ItemMap _report; }; #endif
ayompedb/culper
src/models/foreignBenefit.js
<filename>src/models/foreignBenefit.js import { foreignBenefitFrequencyTypeOptions, foreignBenefitTypeOptions, ONE_TIME, FUTURE, CONTINUING, OTHER, } from 'constants/enums/foreignActivityOptions' import foreignBenefitType from 'models/foreignBenefitType' const foreignBenefit = { InterestTypes: { presence: true, array: { validator: { presence: true }, length: { minimum: 1 }, }, }, BenefitType: { presence: true, hasValue: { validator: { inclusion: foreignBenefitTypeOptions }, }, }, OtherBenefitType: (value, attributes) => { const { BenefitType } = attributes if (BenefitType && BenefitType.value === 'Other') { return { presence: true, hasValue: true } } return {} }, BenefitFrequency: { presence: true, hasValue: { validator: { inclusion: foreignBenefitFrequencyTypeOptions }, }, }, OneTimeBenefit: (value, attributes) => { const { BenefitFrequency } = attributes if (BenefitFrequency && BenefitFrequency.value === ONE_TIME) { return { presence: true, model: { validator: foreignBenefitType, benefitType: ONE_TIME, }, } } return {} }, FutureBenefit: (value, attributes) => { const { BenefitFrequency } = attributes if (BenefitFrequency && BenefitFrequency.value === FUTURE) { return { presence: true, model: { validator: foreignBenefitType, benefitType: FUTURE, }, } } return {} }, ContinuingBenefit: (value, attributes) => { const { BenefitFrequency } = attributes if (BenefitFrequency && BenefitFrequency.value === CONTINUING) { return { presence: true, model: { validator: foreignBenefitType, benefitType: CONTINUING, }, } } return {} }, OtherBenefit: (value, attributes) => { const { BenefitFrequency } = attributes if (BenefitFrequency && BenefitFrequency.value === OTHER) { return { presence: true, model: { validator: foreignBenefitType, benefitType: OTHER, }, } } return {} }, } export default foreignBenefit
rajasegar/ember-voca
addon/helpers/voca-count-words.js
import { helper } from '@ember/component/helper'; import { countWords } from 'voca'; export default helper(function vocaCountWords(params/*, hash */ ) { return countWords(params[0]); });
paullewallencom/javascript-978-1-7843-9264-2
_/2642OS_05_Codes/start.js
var orderSystemWith = require('mycafe-core'), express = require('express'), routes = require('./index'), port = process.argv[2] || 9090, app = express(); function inMemoryDAO(data) { return { byId: function (id, cb) { setImmediate(function () { cb(null, data[id]); }); }, update: function (entity, cb) { setImmediate(function () { data[entity.id] = entity.data; cb(null); }); } } } var orders = {}, messages = {}, orderSystem = orderSystemWith({ order: inMemoryDAO(orders), message: inMemoryDAO(messages) }); orders['1'] = [ { beverage: { beverage: 'Frapuccino', price: 4, id: 'b1' }, quantity: 2 }, { beverage: { beverage: 'Mocaccino', price: 2.3, id: 'b2' }, quantity: 1 }, { beverage: { beverage: 'Expresso', price: 1.5, id: 'b3' }, quantity: 1 } ]; messages['1'] = [ { key: "error.beverage.notExists" }, { key: "error.quantity", params: [-1]} ]; app .use('/orders', routes.order(orderSystem)) .listen(port, function (err) { if (err) return console.log('Error starting the server', err); console.log('Server running on port:', port); });
ashariati/gtsam-3.2.1
timing/timeLago.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: <NAME>, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file timeLago.cpp
 * @brief Time LAGO initialization + refinement against plain Gauss-Newton
 *        optimization on a 2D pose graph
 * @author <NAME>
 * @date Dec 3, 2010
 */

#include <gtsam/slam/dataset.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/lago.h>
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/linear/Sampler.h>
#include <gtsam/base/timing.h>

#include <iostream>

using namespace std;
using namespace gtsam;

int main(int argc, char *argv[]) {

  size_t trials = 1;

  // read graph
  Values::shared_ptr solution;
  NonlinearFactorGraph::shared_ptr g;
  string inputFile = findExampleDataFile("w10000");
  SharedDiagonal model = noiseModel::Diagonal::Sigmas((Vector(3) << 0.05, 0.05, 5.0 * M_PI / 180.0));
  boost::tie(g, solution) = load2D(inputFile, model);

  // add noise to create initial estimate
  // (perturb each ground-truth pose with a fixed-seed sampler so runs are
  // repeatable)
  Values initial;
  Sampler sampler(42u);
  Values::ConstFiltered<Pose2> poses = solution->filter<Pose2>();
  SharedDiagonal noise = noiseModel::Diagonal::Sigmas((Vector(3) << 0.5, 0.5, 15.0 * M_PI / 180.0));
  BOOST_FOREACH(const Values::ConstFiltered<Pose2>::KeyValuePair& it, poses)
    initial.insert(it.key, it.value.retract(sampler.sampleNewModel(noise)));

  // Add prior on the pose having index (key) = 0
  noiseModel::Diagonal::shared_ptr priorModel = //
      noiseModel::Diagonal::Sigmas(Vector3(1e-6, 1e-6, 1e-8));
  g->add(PriorFactor<Pose2>(0, Pose2(), priorModel));

  // LAGO
  for (size_t i = 0; i < trials; i++) {
    {
      // Timed: LAGO initialization followed by Gauss-Newton refinement.
      gttic_(lago);

      gttic_(init);
      Values lagoInitial = lago::initialize(*g);
      gttoc_(init);

      gttic_(refine);
      GaussNewtonOptimizer optimizer(*g, lagoInitial);
      Values result = optimizer.optimize();
      gttoc_(refine);
    }

    {
      // Timed: plain Gauss-Newton from the noisy initial estimate, for
      // comparison against the LAGO-initialized run above.
      gttic_(optimize);
      GaussNewtonOptimizer optimizer(*g, initial);
      Values result = optimizer.optimize();
    }

    tictoc_finishedIteration_();
  }

  tictoc_print_();

  return 0;
}
Yinqingwen/Dva
server/dvaui/admin.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.contrib import admin from .models import StoredDVAPQL @admin.register(StoredDVAPQL) class StoredDVAPQLAdmin(admin.ModelAdmin): pass
Hussam-Turjman/FLDServer
fldserver/base/win/hstring_compare.cc
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "fldserver/base/win/hstring_compare.h" #include "fldserver/base/native_library.h" #include "fldserver/base/win/windows_version.h" #include <winstring.h> namespace base { namespace win { HRESULT HStringCompare(HSTRING string1, HSTRING string2, INT32* result) { using CompareStringFunc = decltype(&::WindowsCompareStringOrdinal); static const auto compare_string_func = []() -> CompareStringFunc { if (GetVersion() < Version::WIN8) return nullptr; NativeLibraryLoadError load_error; NativeLibrary combase_module = PinSystemLibrary(FILE_PATH_LITERAL("combase.dll"), &load_error); if (load_error.code) return nullptr; return reinterpret_cast<CompareStringFunc>( GetFunctionPointerFromNativeLibrary(combase_module, "WindowsCompareStringOrdinal")); }(); if (!compare_string_func) return E_FAIL; return compare_string_func(string1, string2, result); } } // namespace win } // namespace base
DBatOWL/tutorials
spring-core-5/src/main/java/com/baeldung/springbean/naming/component/CustomComponent.java
package com.baeldung.springbean.naming.component; import org.springframework.stereotype.Component; @Component("myBean") public class CustomComponent { }
makuto/7drl2018
src/Globals.hpp
<gh_stars>0
#pragma once

// Global.hpp is included by all files automatically

// NOTE(review): CLAMP expands to a brace-less if/else, so using it as the
// body of an outer if without braces creates a dangling-else hazard; MIN and
// MAX evaluate their arguments twice, so avoid side-effecting arguments.
#define CLAMP(value, min, max) \
	if (value > max)           \
		value = max;           \
	else if (value < min)      \
		value = min;

#define MIN(a, b) (a <= b ? a : b)
#define MAX(a, b) (a >= b ? a : b)

//
// Colors (all values are R, G, B, A byte components)
//

// Interface
#define WIN_BACKGROUND_COLOR 34, 34, 34, 255
#define LOG_COLOR_NORMAL 255, 255, 255, 255
#define LOG_COLOR_IMPORTANT 249, 212, 35, 255
#define LOG_COLOR_DEAD 232, 30, 34, 255
#define LOOK_CURSOR_COLOR 252, 145, 57, 255
#define STATUS_COLOR_UNIMPORTANT 255, 255, 255, 255
#define STATUS_COLOR_NORMAL 252, 145, 57, 255
#define STATUS_COLOR_IMPORTANT 232, 30, 34, 255

// Tiles
#define WALL_TILE_COLOR_NORMAL 255, 255, 255, 255
#define TREE_TILE_COLOR_NORMAL 184, 247, 34, 255
#define GRASS_TILE_COLOR_NORMAL 164, 227, 14, 255
#define FLOOR_TILE_COLOR_NORMAL 46, 64, 75, 255
#define HELL_TILE_COLOR_NORMAL 232, 30, 34, 255
#define ABILITY_TILE_COLOR_NORMAL 252, 145, 57, 255

// Entities
#define PLAYER_COLOR_NORMAL 252, 145, 57, 255
#define ENEMY_COLOR_NORMAL 232, 30, 34, 255
#define STAIRS_COLOR_NORMAL 252, 145, 57, 255

// FX
#define FX_LIGHTNING 41, 94, 243, 150
#define FX_PHASE_DOOR 249, 212, 35, 150
#define FX_FIREBOMB 232, 30, 34, 150

//
// Tile/Entity Types (the display glyph doubles as the type id)
//
#define TYPE_NONE ' '

#define PLAYER_TYPE '@'
#define SKELETON_TYPE 's'
#define SUMMONER_TYPE 'c'
#define GOBLIN_TYPE 'g'
#define BANDIT_TYPE 'h'
#define DRAGON_TYPE 'b'
#define FIRE_DRAGON_TYPE 'd'
#define WIZARD_TYPE 'w'
#define CONTROL_WIZARD_TYPE 'm'
#define CORPSE_TYPE '%'
#define ABILITY_TYPE '!'
#define STAIRS_DOWN_TYPE '>'

// NOTE(review): WALL_TYPE and TREE_TYPE share the glyph '#' — code that
// switches on type cannot distinguish them by this value alone.
#define WALL_TYPE '#'
#define TREE_TYPE '#'
#define FLOOR_TYPE '.'

#define LOOK_MODE_CURSOR "_"

//
// Combat constants
//
#define PLAYER_STARTING_MAX_HEALTH 100
#define PLAYER_STARTING_RESTORE_HEALTH 5
#define PLAYER_DEFAULT_RESTORE_RATE_HEALTH 2

#define PLAYER_STARTING_MAX_STAMINA 100
#define PLAYER_STARTING_RESTORE_STAMINA 5
#define PLAYER_DEFAULT_RESTORE_RATE_STAMINA 2

#define PLAYER_STARTING_MAX_STRENGTH 10

#define ENABLE_ONLY_HEAL_FULL_STAMINA false
#define ENABLE_OVEREXERTION false

// If the player rests, the number of turns to restore goes down by this number
#define PLAYER_RESTING_BONUS 2
#define PLAYER_MELEE_ATTACKING_BONUS 0

#define PLAYER_NUM_ABILITY_SLOTS 3

// Every level, the training stat will increase its max by this * level
#define STAT_INCREASE_LEVEL_MULTIPLIER 1.5f

// The absolute max a single summoner can spawn
#define MAX_SINGLE_SUMMONS 100
#define SUMMONING_RADIUS 5
#define SUMMONER_SPAWN_RATE_TURNS 25
// Each level, spawn rate turns cooldown decreases by this * level (0.5 = every other level lower
// spawn rate by 1)
#define SUMMONER_SPAWN_RATE_LEVEL_MULTIPLIER 0.5f

// This makes enemies slower to spawn
// TurnCounter % (SpawnRate + spawn rate modifier)
#define LIGHTNINGWIZARD_SPAWN_RATE_MODIFIER 35
#define CONTROLWIZARD_SPAWN_RATE_MODIFIER 55
#define FIREDRAGON_SPAWN_RATE_MODIFIER 50

#define RANGED_ENEMY_MAX_DIST_MANHATTAN 27

#define LEVELENEMY_PLAYER_DETECT_MANHATTAN_RADIUS 20

// Each death, chance of ability dropping = this * level
#define DEATH_ABILITY_DROP_LEVEL_MULTIPLIER 1.5f

// Dragons don't shoot fire too often
#define DRAGON_FIRE_RATE 10

// turncounter % chance < rate = random walk
#define BANDIT_CONFUSION_CHANCE 40
#define BANDIT_CONFUSION_RATE 20

//
// Abilities
//
#define PHASE_DOOR_SQUARE_RADIUS 20
#define PHASE_TARGET_ON_PLAYER_RADIUS 15
#define PHASE_TARGET_ON_ENEMY_RADIUS 15

#define FIREBOMB_RADIUS 2

//
// Levels
//
#define LEVEL_NUM_FOREST 3
#define LEVEL_NUM_BARREN 6
// lol have fun
#define LEVEL_NUM_HELLSCAPE 10000

#define NUM_LEVELS_TO_WIN 10

// Number of level enemies spawned = this * level
#define LEVELENEMY_SPAWN_NUM_MULTIPLIER_FOREST 7.f
#define LEVELENEMY_SPAWN_NUM_MULTIPLIER_BARREN 7.f
#define LEVELENEMY_SPAWN_NUM_MULTIPLIER_HELLSCAPE 3.f

// Size of one axis e.g. total tiles = FOREST_SIZE * FOREST_SIZE
#define FOREST_SIZE 80
#define BARREN_SIZE 100
#define HELLSCAPE_SIZE 80

//
// Only display once in log strings
//
#define WALL_BUMP "You bump into a wall"
#define LOOK_MODE_EXIT "(Last turn)"
#define CANCELLED_ABILITY_ACTIVATE "(Cancelled activate)"
#define ABILITY_ON_COOLDOWN "Ability on cooldown"
#define TRAINING_CHANGED "Now training "

//
// Engine tuning
//
#define MAX_PLACEMENT_ATTEMPTS 100

// How far target mode will snap to enemy
#define MAX_PLAYER_TARGET_DIST 30.f
Lzw2016/clever-data
hinny-core/src/main/java/org/clever/hinny/core/ExcelUtils.java
package org.clever.hinny.core; import com.alibaba.excel.context.AnalysisContext; import com.alibaba.excel.converters.Converter; import com.alibaba.excel.converters.ConverterKeyBuild; import com.alibaba.excel.enums.CellDataTypeEnum; import com.alibaba.excel.enums.CellExtraTypeEnum; import com.alibaba.excel.event.AnalysisEventListener; import com.alibaba.excel.exception.ExcelDataConvertException; import com.alibaba.excel.metadata.Cell; import com.alibaba.excel.metadata.CellData; import com.alibaba.excel.metadata.GlobalConfiguration; import com.alibaba.excel.metadata.Head; import com.alibaba.excel.metadata.property.*; import com.alibaba.excel.read.builder.ExcelReaderBuilder; import com.alibaba.excel.read.metadata.holder.ReadHolder; import com.alibaba.excel.read.metadata.property.ExcelReadHeadProperty; import com.alibaba.excel.support.ExcelTypeEnum; import com.alibaba.excel.write.builder.ExcelWriterBuilder; import com.alibaba.excel.write.handler.AbstractCellWriteHandler; import com.alibaba.excel.write.handler.AbstractRowWriteHandler; import com.alibaba.excel.write.merge.OnceAbsoluteMergeStrategy; import com.alibaba.excel.write.metadata.holder.WriteSheetHolder; import com.alibaba.excel.write.metadata.holder.WriteTableHolder; import com.alibaba.excel.write.metadata.style.WriteCellStyle; import com.alibaba.excel.write.metadata.style.WriteFont; import com.alibaba.excel.write.style.AbstractVerticalCellStyleStrategy; import com.alibaba.excel.write.style.column.AbstractHeadColumnWidthStyleStrategy; import com.alibaba.excel.write.style.row.SimpleRowHeightStyleStrategy; import lombok.Data; import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.poi.ss.usermodel.*; import org.apache.poi.ss.util.CellRangeAddress; import org.clever.common.utils.codec.DigestUtils; import org.clever.common.utils.codec.EncodeDecodeUtils; import 
org.clever.common.utils.excel.ExcelDataReader; import org.clever.common.utils.excel.ExcelDataWriter; import org.clever.common.utils.excel.ExcelReaderExceptionHand; import org.clever.common.utils.excel.ExcelRowReader; import org.clever.common.utils.excel.dto.ExcelData; import org.clever.common.utils.excel.dto.ExcelRow; import org.clever.common.utils.tuples.TupleTow; import org.springframework.util.Assert; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.InputStream; import java.io.OutputStream; import java.io.Serializable; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.*; import java.util.stream.Collectors; /** * 作者:lizw <br/> * 创建时间:2020/07/28 22:33 <br/> */ public class ExcelUtils { public static final ExcelUtils Instance = new ExcelUtils(); private ExcelUtils() { } @SuppressWarnings("rawtypes") @SneakyThrows public ExcelDataReader<Map> createReader(ExcelDataReaderConfig config) { Assert.notNull(config, "参数config不能为null"); ExcelDataReader<Map> excelDataReader; if (config.getRequest() != null) { excelDataReader = new ExcelDataReader<>( config.getRequest(), Map.class, config.limitRows, config.enableExcelData, false, config.excelRowReader, config.excelReaderExceptionHand); } else { excelDataReader = new ExcelDataReader<>( config.filename, config.inputStream, Map.class, config.limitRows, config.enableExcelData, false, config.excelRowReader, config.excelReaderExceptionHand); } excelDataReader.setEnableValidation(false); ExcelReaderBuilder builder = excelDataReader.read(); builder.autoCloseStream(config.autoCloseStream); if (config.extraRead != null) { for (CellExtraTypeEnum typeEnum : config.extraRead) { if (typeEnum != null) { builder.extraRead(typeEnum); } } } builder.ignoreEmptyRow(config.ignoreEmptyRow); builder.mandatoryUseInputStream(config.mandatoryUseInputStream); if (config.password != null) { builder.password(config.password); } if 
(StringUtils.isNotBlank(config.sheetName)) { builder.sheet(config.sheetName); } if (config.sheetNo != null) { builder.sheet(config.sheetNo); } if (config.headRowNumber != null) { builder.headRowNumber(config.headRowNumber); } else { builder.headRowNumber(config.getHeadRowCount()); } builder.useScientificFormat(config.useScientificFormat); builder.use1904windowing(config.use1904windowing); if (config.locale != null) { builder.locale(config.locale); } builder.autoTrim(config.autoTrim); builder.customObject(config.customObject); // 自定义解析逻辑 builder.useDefaultListener(false); builder.registerReadListener(new ExcelDateReadListener(config, excelDataReader)); return excelDataReader; } public ExcelDataWriter createWriter(ExcelDataWriterConfig config) { Assert.notNull(config, "参数config不能为null"); ExcelDataWriter excelDataWriter; if (config.request != null && config.response != null) { excelDataWriter = new ExcelDataWriter(config.request, config.response, config.fileName, null); } else { excelDataWriter = new ExcelDataWriter(config.outputStream, null); if (StringUtils.isNotBlank(config.fileName)) { excelDataWriter.write().file(config.fileName); } } ExcelWriterBuilder builder = excelDataWriter.write(); List<List<String>> heads = config.getHeads(); if (heads.isEmpty() || heads.get(0).isEmpty()) { builder.needHead(false); } else { builder.head(heads); } builder.autoCloseStream(config.autoCloseStream); builder.inMemory(config.inMemory); if (StringUtils.isNotBlank(config.template)) { builder.withTemplate(config.template); } if (config.templateInputStream != null) { builder.withTemplate(config.templateInputStream); } builder.automaticMergeHead(config.automaticMergeHead); if (!config.excludeColumnFiledNames.isEmpty()) { builder.excludeColumnFiledNames(config.excludeColumnFiledNames); } if (!config.excludeColumnIndexes.isEmpty()) { builder.excludeColumnIndexes(config.excludeColumnIndexes); } if (!config.includeColumnFiledNames.isEmpty()) { 
builder.includeColumnFiledNames(config.includeColumnFiledNames); } if (!config.includeColumnIndexes.isEmpty()) { builder.includeColumnIndexes(config.includeColumnIndexes); } builder.needHead(config.needHead); builder.relativeHeadRowIndex(config.relativeHeadRowIndex); builder.useDefaultStyle(config.useDefaultStyle); builder.excelType(config.excelType); if (config.password != null) { builder.password(config.password); } if (config.sheetNo != null) { builder.sheet(config.sheetNo); } if (StringUtils.isNotBlank(config.sheetName)) { builder.sheet(config.sheetName); } builder.use1904windowing(config.use1904windowing); if (config.locale != null) { builder.locale(config.locale); } builder.autoTrim(config.autoTrim); // 根据列配置加入各种 WriteHandler 如:AbstractHeadColumnWidthStyleStrategy、AbstractVerticalCellStyleStrategy。参考 AbstractWriteHolder builder.registerWriteHandler(new FillHeadStrategy(config)); boolean hasColumnWidth = false; boolean hasStyle = false; for (Map.Entry<String, ExcelWriterHeadConfig> entry : config.columns.entrySet()) { ExcelWriterHeadConfig headConfig = entry.getValue(); if (headConfig.columnWidth.columnWidth != null) { hasColumnWidth = true; } if (!hasStyle && (headConfig.headStyle.isSetValue() || headConfig.headFontStyle.isSetValue() || headConfig.contentStyle.isSetValue() || headConfig.contentFontStyle.isSetValue())) { hasStyle = true; } // 应用合并单元格配置 if (headConfig.contentLoopMerge.isSetValue()) { builder.registerWriteHandler(new LoopMergeStrategy(headConfig.contentLoopMerge.eachRow, headConfig.contentLoopMerge.columnExtend, headConfig)); } } // 应用列宽配置 if (hasColumnWidth) { builder.registerWriteHandler(new ColumnWidthStyleStrategy()); } // 应用样式配置 if (hasStyle || config.styleConfig.headStyle.isSetValue() || config.styleConfig.headFontStyle.isSetValue() || config.styleConfig.contentStyle.isSetValue() || config.styleConfig.contentFontStyle.isSetValue()) { builder.registerWriteHandler(new StyleStrategy()); } // 应用行高配置 RowHeightProperty headRowHeightProperty = 
config.styleConfig.headRowHeight.getRowHeightProperty(); RowHeightProperty contentRowHeightProperty = config.styleConfig.contentRowHeight.getRowHeightProperty(); Short headRowHeight = headRowHeightProperty.getHeight(); Short contentRowHeight = contentRowHeightProperty.getHeight(); if (headRowHeight != null || contentRowHeight != null) { builder.registerWriteHandler(new SimpleRowHeightStyleStrategy(headRowHeight, contentRowHeight)); } // 应用OnceAbsoluteMerge配置 if (config.styleConfig.onceAbsoluteMerge.isSetValue()) { OnceAbsoluteMergeProperty onceAbsoluteMergeProperty = config.styleConfig.onceAbsoluteMerge.getOnceAbsoluteMergeProperty(); builder.registerWriteHandler(new OnceAbsoluteMergeStrategy(onceAbsoluteMergeProperty)); } return excelDataWriter; } // 配置类 //---------------------------------------------------------------------------------------------------------------------------------------------- @Data public static class ExcelDataReaderConfig implements Serializable { /** * Excel文件上传的请求对象 */ private HttpServletRequest request; /** * 上传的Excel文件名称 */ private String filename; /** * 上传的文件数据流 */ private InputStream inputStream; /** * 读取Excel文件最大行数 */ private int limitRows = org.clever.common.utils.excel.ExcelDataReader.LIMIT_ROWS; /** * 是否缓存读取的数据结果到内存中(默认启用) */ private boolean enableExcelData = true; /** * 是否启用数据校验(默认启用) */ private boolean enableValidation = true; /** * 处理读取Excel异常 */ private ExcelReaderExceptionHand excelReaderExceptionHand; /** * 处理Excel数据行 */ @SuppressWarnings("rawtypes") private ExcelRowReader<Map> excelRowReader; // ---------------------------------------------------------------------- /** * 是否自动关闭输入流 */ private boolean autoCloseStream = false; /** * 读取扩展信息配置 */ private CellExtraTypeEnum[] extraRead = new CellExtraTypeEnum[]{}; /** * 是否忽略空行 */ private boolean ignoreEmptyRow = false; /** * 强制使用输入流,如果为false,则将“inputStream”传输到临时文件以提高效率 */ private boolean mandatoryUseInputStream = false; /** * Excel文件密码 */ private String password; /** * 
Excel页签编号(从0开始) */ private Integer sheetNo; /** * Excel页签名称(xlsx格式才支持) */ private String sheetName; /** * 表头行数 */ private Integer headRowNumber; /** * 使用科学格式 */ private boolean useScientificFormat = false; /** * 如果日期使用1904窗口,则为True;如果使用1900日期窗口,则为false */ private boolean use1904windowing = false; /** * Locale对象表示特定的地理、政治或文化区域。设置日期和数字格式时使用此参数 */ private Locale locale = Locale.SIMPLIFIED_CHINESE; /** * 自动删除空格字符 */ private boolean autoTrim = true; /** * 设置一个自定义对象,可以在侦听器中读取此对象(AnalysisContext.getCustom()) */ private Object customObject; /** * Excel列配置(表头) {@code Map<Entity.propertyName, ExcelReaderHeadConfig>} */ private final LinkedHashMap<String, ExcelReaderHeadConfig> columns = new LinkedHashMap<>(); /** * 返回表头行数 */ public int getHeadRowCount() { int headRowCount = 0; for (Map.Entry<String, ExcelReaderHeadConfig> entry : columns.entrySet()) { ExcelReaderHeadConfig headConfig = entry.getValue(); if (headConfig != null && headConfig.excelProperty.column.size() > headRowCount) { headRowCount = headConfig.excelProperty.column.size(); } } return headRowCount; } } @Data public static class ExcelDataWriterConfig implements Serializable { /** * Excel导出请求对象 */ private HttpServletRequest request; /** * Excel导出响应对象 */ private HttpServletResponse response; /** * Excel导出文件名 */ private String fileName; /** * Excel文件对应输出流 */ private OutputStream outputStream; /** * 是否自动关闭输入流 */ private boolean autoCloseStream = false; /** * 在内存中编写excel。默认为false,则创建缓存文件并最终写入excel。仅在内存模式下支持Comment和RichTextString */ private boolean inMemory = false; /** * Excel模板文件路径 */ private String template; /** * Excel模板文件输入流 */ private InputStream templateInputStream; /** * 写入Excel时出现异常是否仍然继续导出 */ private boolean writeExcelOnException = false; /** * 是否自动合并表头 */ private boolean automaticMergeHead = true; /** * 忽略自定义列 */ private final List<String> excludeColumnFiledNames = new ArrayList<>(); /** * 忽略自定义列 */ private final List<Integer> excludeColumnIndexes = new ArrayList<>(); /** * 只输出自定义列 */ private final 
List<String> includeColumnFiledNames = new ArrayList<>(); /** * 只输出自定义列 */ private final List<Integer> includeColumnIndexes = new ArrayList<>(); /** * 是否输出表头 */ private boolean needHead = true; /** * 输出第一行的位置 */ private int relativeHeadRowIndex = 0; /** * 是否使用默认样式 */ private boolean useDefaultStyle = true; /** * Excel类型 */ private ExcelTypeEnum excelType = ExcelTypeEnum.XLSX; /** * Excel文件密码 */ private String password; /** * Excel页签编号(从0开始) */ private Integer sheetNo; /** * Excel页签名称(xlsx格式才支持) */ private String sheetName; /** * 如果日期使用1904窗口,则为True;如果使用1900日期窗口,则为false */ private boolean use1904windowing = false; /** * Locale对象表示特定的地理、政治或文化区域。设置日期和数字格式时使用此参数 */ private Locale locale = Locale.SIMPLIFIED_CHINESE; /** * 自动删除空格字符 */ private boolean autoTrim = true; /** * Excel表头 {@code Map<Entity.propertyName, ExcelWriterHeadConfig>} */ private final LinkedHashMap<String, ExcelWriterHeadConfig> columns = new LinkedHashMap<>(); /** * 全局样式配置 */ private final WriterStyleConfig styleConfig = new WriterStyleConfig(); public List<TupleTow<String, ExcelWriterHeadConfig>> getHeadConfigs() { List<TupleTow<String, ExcelWriterHeadConfig>> list = new ArrayList<>(columns.size()); columns.forEach((propertyName, headConfig) -> list.add(TupleTow.creat(propertyName, headConfig))); list.sort((o1, o2) -> { int idx1 = o1.getValue2().excelProperty.index == null ? -1 : o1.getValue2().excelProperty.index; int idx2 = o2.getValue2().excelProperty.index == null ? 
-1 : o2.getValue2().excelProperty.index; return Integer.compare(idx1, idx2); }); Integer indexMax = null; if (!list.isEmpty()) { indexMax = list.get(list.size() - 1).getValue2().excelProperty.index; } if (indexMax == null) { indexMax = 0; } if (indexMax < list.size()) { indexMax = list.size(); } // indexMax += 1; // 构造表头 List<TupleTow<String, ExcelWriterHeadConfig>> headConfigs = new ArrayList<>(indexMax); for (int i = 0; i < indexMax; i++) { headConfigs.add(null); } List<TupleTow<String, ExcelWriterHeadConfig>> tmp = new ArrayList<>(list.size()); // 先设置index有值的Head for (TupleTow<String, ExcelWriterHeadConfig> tupleTow : list) { String propertyName = tupleTow.getValue1(); ExcelWriterHeadConfig headConfig = tupleTow.getValue2(); if (headConfig.excelProperty.index != null && headConfig.excelProperty.index >= 0) { headConfigs.set(headConfig.excelProperty.index, TupleTow.creat(propertyName, headConfig)); } else { tmp.add(TupleTow.creat(propertyName, headConfig)); } } // 再设置其它Head for (TupleTow<String, ExcelWriterHeadConfig> tupleTow : tmp) { String propertyName = tupleTow.getValue1(); ExcelWriterHeadConfig headConfig = tupleTow.getValue2(); for (int i = 0; i < headConfigs.size(); i++) { if (headConfigs.get(i) == null) { headConfigs.set(i, TupleTow.creat(propertyName, headConfig)); break; } } } // 最后填充heads for (int i = 0; i < headConfigs.size(); i++) { if (headConfigs.get(i) == null) { TupleTow<String, ExcelWriterHeadConfig> tupleTow = TupleTow.creat(null, new ExcelWriterHeadConfig("")); headConfigs.set(i, tupleTow); break; } } return headConfigs; } public List<List<String>> getHeads() { List<TupleTow<String, ExcelWriterHeadConfig>> headConfigs = getHeadConfigs(); return headConfigs.stream().filter(Objects::nonNull).map(tupleTow -> tupleTow.getValue2().excelProperty.column).collect(Collectors.toList()); } } @Data public static class ExcelProperty implements Serializable { /** * 列名称 */ private final List<String> column = new ArrayList<>(); /** * 是否忽略当前列 */ private 
Boolean ignore; /** * 列的索引在列的索引上读写,如果等于-1,则按Java类排序。优先级:index>默认排序 */ private Integer index = -1; } @Data public static class DateTimeFormat implements Serializable { /** * 时间格式化的格式定义 */ private String dateFormat; /** * 如果日期使用1904窗口,则为True;如果使用1900日期窗口,则为false */ private Boolean use1904windowing; /** * 是否设置过值 */ public boolean isSetValue() { return dateFormat != null || use1904windowing != null; } public DateTimeFormatProperty getDateTimeFormatProperty() { return new DateTimeFormatProperty(dateFormat, use1904windowing); } } @Data public static class NumberFormat implements Serializable { /** * 数字格式化 */ private String numberFormat; /** * 四舍五入模式 */ private RoundingMode roundingMode; /** * 是否设置过值 */ public boolean isSetValue() { return numberFormat != null || roundingMode != null; } public NumberFormatProperty getNumberFormatProperty() { return new NumberFormatProperty(numberFormat, roundingMode); } } @Data public static class ColumnWidth implements Serializable { /** * 列宽 */ private Integer columnWidth; } @Data public static class ExcelFontStyle implements Serializable { /** * 字体的名称(如: Arial) */ private String fontName; /** * 以熟悉的测量单位表示的高度- points */ private Short fontHeightInPoints; /** * 是否使用斜体 */ private Boolean italic; /** * 是否在文本中使用删除线水平线 */ private Boolean strikeout; /** * 字体的颜色 */ private Short color; /** * 设置normal、super或subscript */ private Short typeOffset; /** * 要使用的文本下划线 */ private Byte underline; /** * 设置要使用的字符集 */ private Integer charset; /** * 粗体 */ private Boolean bold; /** * 是否设置过值 */ public boolean isSetValue() { return fontName != null || fontHeightInPoints != null || italic != null || strikeout != null || color != null || typeOffset != null || underline != null || charset != null || bold != null; } public FontProperty getFontProperty() { FontProperty fontProperty = new FontProperty(); fontProperty.setFontName(fontName); fontProperty.setFontHeightInPoints(fontHeightInPoints); fontProperty.setItalic(italic); fontProperty.setStrikeout(strikeout); 
fontProperty.setColor(color); fontProperty.setTypeOffset(typeOffset); fontProperty.setUnderline(underline); fontProperty.setCharset(charset); fontProperty.setBold(bold); return fontProperty; } public FontProperty getFontProperty(ExcelFontStyle second) { FontProperty fontProperty = new FontProperty(); fontProperty.setFontName(fontName == null ? second.fontName : fontName); fontProperty.setFontHeightInPoints(fontHeightInPoints == null ? second.fontHeightInPoints : fontHeightInPoints); fontProperty.setItalic(italic == null ? second.italic : italic); fontProperty.setStrikeout(strikeout == null ? second.strikeout : strikeout); fontProperty.setColor(color == null ? second.color : color); fontProperty.setTypeOffset(typeOffset == null ? second.typeOffset : typeOffset); fontProperty.setUnderline(underline == null ? second.underline : underline); fontProperty.setCharset(charset == null ? second.charset : charset); fontProperty.setBold(bold == null ? second.bold : bold); return fontProperty; } } @EqualsAndHashCode(callSuper = true) @Data public static class ContentFontStyle extends ExcelFontStyle { } @Data public static class ContentLoopMerge implements Serializable { /** * 行 */ private Integer eachRow; /** * 列 */ private Integer columnExtend; /** * 是否设置过值 */ public boolean isSetValue() { return eachRow != null || columnExtend != null; } // public LoopMergeProperty getLoopMergeProperty() { // return new LoopMergeProperty(eachRow, columnExtend); // } } @Data public static class ContentRowHeight implements Serializable { /** * 行高 */ private Short rowHeight; public RowHeightProperty getRowHeightProperty() { return new RowHeightProperty(rowHeight); } } @Data public static class ExcelCellStyle implements Serializable { /** * 设置数据格式(必须是有效格式)。内置格式在内置信息中定义 {@link BuiltinFormats}. 
*/ private Short dataFormat; /** * 将单元格使用此样式设置为隐藏 */ private Boolean hidden; /** * 将单元格使用此样式设置为锁定 */ private Boolean locked; /** * 打开或关闭样式的“Quote Prefix”或“123 Prefix”, * 用于告诉Excel,看起来像数字或公式的内容不应被视为打开。 * 打开此选项有点(但不是完全打开,请参见IgnoredErrorType)类似于在Excel中为单元格值添加前缀 * {@link IgnoredErrorType}) */ private Boolean quotePrefix; /** * 设置单元格的水平对齐方式 */ private HorizontalAlignment horizontalAlignment; /** * 设置是否应该换行。将此标志设置为true可以通过在多行上显示所有内容来使其在一个单元格中可见 */ private Boolean wrapped; /** * 设置单元格的垂直对齐方式 */ private VerticalAlignment verticalAlignment; /** * 设置单元格中文本的旋转度<br /> * 注意:HSSF使用-90至90度的值,而XSSF使用0至180度的值。 * 此方法的实现将在这两个值范围之间进行映射, * 但是,相应的getter返回此CellStyle所应用的当前Excel文件格式类型所要求的范围内的值。 */ private Short rotation; /** * 设置空格数以缩进单元格中的文本 */ private Short indent; /** * 设置要用于单元格左边框的边框类型 */ private BorderStyle borderLeft; /** * 设置用于单元格右边框的边框类型 */ private BorderStyle borderRight; /** * 设置要用于单元格顶部边框的边框类型 */ private BorderStyle borderTop; /** * 设置用于单元格底部边框的边框类型 */ private BorderStyle borderBottom; /** * 设置用于左边框的颜色 * * @see IndexedColors */ private Short leftBorderColor; /** * 设置用于右边框的颜色 * * @see IndexedColors */ private Short rightBorderColor; /** * 设置要用于顶部边框的颜色 * * @see IndexedColors */ private Short topBorderColor; /** * 设置用于底边框的颜色 * * @see IndexedColors */ private Short bottomBorderColor; /** * 设置为1会使单元格充满前景色...不知道其他值 * * @see FillPatternType#SOLID_FOREGROUND */ private FillPatternType fillPatternType; /** * 设置背景填充颜色 * * @see IndexedColors */ private Short fillBackgroundColor; /** * 设置前景色填充颜色<br /> * 注意:确保将前景色设置为背景颜色之前 * * @see IndexedColors */ private Short fillForegroundColor; /** * 控制如果文本太长,是否应自动调整单元格的大小以缩小以适合 */ private Boolean shrinkToFit; /** * 是否设置过值 */ public boolean isSetValue() { return dataFormat != null || hidden != null || locked != null || quotePrefix != null || horizontalAlignment != null || wrapped != null || verticalAlignment != null || rotation != null || indent != null || borderLeft != null || borderRight != null || borderTop != null || borderBottom != null || 
leftBorderColor != null || rightBorderColor != null || topBorderColor != null || bottomBorderColor != null || fillPatternType != null || fillBackgroundColor != null || fillForegroundColor != null || shrinkToFit != null; } public StyleProperty getStyleProperty() { StyleProperty styleProperty = new StyleProperty(); styleProperty.setDataFormat(dataFormat); // styleProperty.setWriteFont(); styleProperty.setHidden(hidden); styleProperty.setLocked(locked); styleProperty.setQuotePrefix(quotePrefix); styleProperty.setHorizontalAlignment(horizontalAlignment); styleProperty.setWrapped(wrapped); styleProperty.setVerticalAlignment(verticalAlignment); styleProperty.setRotation(rotation); styleProperty.setIndent(indent); styleProperty.setBorderLeft(borderLeft); styleProperty.setBorderRight(borderRight); styleProperty.setBorderTop(borderTop); styleProperty.setBorderBottom(borderBottom); styleProperty.setLeftBorderColor(leftBorderColor); styleProperty.setRightBorderColor(rightBorderColor); styleProperty.setTopBorderColor(topBorderColor); styleProperty.setBottomBorderColor(bottomBorderColor); styleProperty.setFillPatternType(fillPatternType); styleProperty.setFillBackgroundColor(fillBackgroundColor); styleProperty.setFillForegroundColor(fillForegroundColor); styleProperty.setShrinkToFit(shrinkToFit); return styleProperty; } public StyleProperty getStyleProperty(ExcelCellStyle second) { StyleProperty styleProperty = new StyleProperty(); styleProperty.setDataFormat(dataFormat == null ? second.dataFormat : dataFormat); // styleProperty.setWriteFont(); styleProperty.setHidden(hidden == null ? second.hidden : hidden); styleProperty.setLocked(locked == null ? second.locked : locked); styleProperty.setQuotePrefix(quotePrefix == null ? second.quotePrefix : quotePrefix); styleProperty.setHorizontalAlignment(horizontalAlignment == null ? second.horizontalAlignment : horizontalAlignment); styleProperty.setWrapped(wrapped == null ? 
second.wrapped : wrapped); styleProperty.setVerticalAlignment(verticalAlignment == null ? second.verticalAlignment : verticalAlignment); styleProperty.setRotation(rotation == null ? second.rotation : rotation); styleProperty.setIndent(indent == null ? second.indent : indent); styleProperty.setBorderLeft(borderLeft == null ? second.borderLeft : borderLeft); styleProperty.setBorderRight(borderRight == null ? second.borderRight : borderRight); styleProperty.setBorderTop(borderTop == null ? second.borderTop : borderTop); styleProperty.setBorderBottom(borderBottom == null ? second.borderBottom : borderBottom); styleProperty.setLeftBorderColor(leftBorderColor == null ? second.leftBorderColor : leftBorderColor); styleProperty.setRightBorderColor(rightBorderColor == null ? second.rightBorderColor : rightBorderColor); styleProperty.setTopBorderColor(topBorderColor == null ? second.topBorderColor : topBorderColor); styleProperty.setBottomBorderColor(bottomBorderColor == null ? second.bottomBorderColor : bottomBorderColor); styleProperty.setFillPatternType(fillPatternType == null ? second.fillPatternType : fillPatternType); styleProperty.setFillBackgroundColor(fillBackgroundColor == null ? second.fillBackgroundColor : fillBackgroundColor); styleProperty.setFillForegroundColor(fillForegroundColor == null ? second.fillForegroundColor : fillForegroundColor); styleProperty.setShrinkToFit(shrinkToFit == null ? 
second.shrinkToFit : shrinkToFit); return styleProperty; } } @EqualsAndHashCode(callSuper = true) @Data public static class ContentStyle extends ExcelCellStyle { /** * 是否设置过值 */ @Override public boolean isSetValue() { return super.isSetValue(); } @Override public StyleProperty getStyleProperty() { return super.getStyleProperty(); } public StyleProperty getStyleProperty(ContentStyle second) { return super.getStyleProperty(second); } } @EqualsAndHashCode(callSuper = true) @Data public static class HeadFontStyle extends ExcelFontStyle { /** * 是否设置过值 */ @Override public boolean isSetValue() { return super.isSetValue(); } @Override public FontProperty getFontProperty() { return super.getFontProperty(); } public FontProperty getFontProperty(HeadFontStyle second) { return super.getFontProperty(second); } } @Data public static class HeadRowHeight implements Serializable { /** * Head行高 */ private Short headRowHeight; public RowHeightProperty getRowHeightProperty() { return new RowHeightProperty(headRowHeight); } } @EqualsAndHashCode(callSuper = true) @Data public static class HeadStyle extends ExcelCellStyle { /** * 是否设置过值 */ @Override public boolean isSetValue() { return super.isSetValue(); } @Override public StyleProperty getStyleProperty() { return super.getStyleProperty(); } public StyleProperty getStyleProperty(HeadStyle second) { return super.getStyleProperty(second); } } @Data public static class OnceAbsoluteMerge implements Serializable { /** * 第一行 */ private Integer firstRowIndex; /** * 最后一行 */ private Integer lastRowIndex; /** * 第一列 */ private Integer firstColumnIndex; /** * 最后一列 */ private Integer lastColumnIndex; /** * 是否设置过值 */ public boolean isSetValue() { return firstRowIndex != null || lastRowIndex != null || firstColumnIndex != null || lastColumnIndex != null; } public OnceAbsoluteMergeProperty getOnceAbsoluteMergeProperty() { int firstRowIndex = this.firstRowIndex == null ? -1 : this.firstRowIndex; int lastRowIndex = this.lastRowIndex == null ? 
-1 : this.lastRowIndex; int firstColumnIndex = this.firstColumnIndex == null ? -1 : this.firstColumnIndex; int lastColumnIndex = this.lastColumnIndex == null ? -1 : this.lastColumnIndex; return new OnceAbsoluteMergeProperty(firstRowIndex, lastRowIndex, firstColumnIndex, lastColumnIndex); } } @NoArgsConstructor @Data public static class ExcelReaderHeadConfig implements Serializable { /** * 列的数据类型 */ private Class<?> dataType; private final ExcelProperty excelProperty = new ExcelProperty(); private final DateTimeFormat dateTimeFormat = new DateTimeFormat(); private final NumberFormat numberFormat = new NumberFormat(); public ExcelReaderHeadConfig(Class<?> dataType, String... names) { this.dataType = dataType; if (names != null) { this.excelProperty.column.addAll(Arrays.asList(names)); } } } @NoArgsConstructor @Data public static class ExcelWriterHeadConfig implements Serializable { private final ExcelProperty excelProperty = new ExcelProperty(); private final DateTimeFormat dateTimeFormat = new DateTimeFormat(); private final NumberFormat numberFormat = new NumberFormat(); private final ColumnWidth columnWidth = new ColumnWidth(); private final HeadFontStyle headFontStyle = new HeadFontStyle(); private final HeadStyle headStyle = new HeadStyle(); private final ContentFontStyle contentFontStyle = new ContentFontStyle(); private final ContentStyle contentStyle = new ContentStyle(); private final ContentLoopMerge contentLoopMerge = new ContentLoopMerge(); public ExcelWriterHeadConfig(String... 
names) { if (names != null) { this.excelProperty.column.addAll(Arrays.asList(names)); } } } @Data public static class WriterStyleConfig implements Serializable { private final HeadRowHeight headRowHeight = new HeadRowHeight(); private final ContentRowHeight contentRowHeight = new ContentRowHeight(); private final HeadFontStyle headFontStyle = new HeadFontStyle(); private final HeadStyle headStyle = new HeadStyle(); private final ContentFontStyle contentFontStyle = new ContentFontStyle(); private final ContentStyle contentStyle = new ContentStyle(); private final OnceAbsoluteMerge onceAbsoluteMerge = new OnceAbsoluteMerge(); } // 自定义读取、写入操作 //---------------------------------------------------------------------------------------------------------------------------------------------- @SuppressWarnings("rawtypes") @Slf4j private static class ExcelDateReadListener extends AnalysisEventListener<Map<Integer, CellData<?>>> { private final ExcelDataReaderConfig config; private final ExcelDataReader<Map> excelDataReader; private final Map<Integer, List<String>> headsMap = new HashMap<>(); /** * {@code Map<index, TupleTow<ExcelReaderHeadConfig, Entity.propertyName>>} */ private Map<Integer, TupleTow<String, ExcelReaderHeadConfig>> columns; public ExcelDateReadListener(ExcelDataReaderConfig config, ExcelDataReader<Map> excelDataReader) { Assert.notNull(config, "参数config不能为null"); Assert.notNull(excelDataReader, "参数excelDataReader不能为null"); this.config = config; this.excelDataReader = excelDataReader; } private ExcelData<Map> getExcelData(AnalysisContext context) { final Integer sheetNo = context.readSheetHolder().getSheetNo(); final String sheetName = context.readSheetHolder().getSheetName(); String key = String.format("%s-%s", sheetNo, sheetName); return excelDataReader.getExcelSheetMap().computeIfAbsent(key, sheetKey -> new ExcelData<>(Map.class, sheetName, sheetNo)); } private Class<?> getCellDataType(CellData<?> cellData) { if (cellData.getType() == null) { return 
Void.class; } switch (cellData.getType()) { case NUMBER: return BigDecimal.class; case BOOLEAN: return Boolean.class; case DIRECT_STRING: case STRING: case ERROR: return String.class; case IMAGE: return Byte[].class; default: return Void.class; } } // 解析表头配置 public void parseHeadMap(AnalysisContext context) { columns = new HashMap<>(); LinkedHashMap<String, ExcelReaderHeadConfig> columnsConfig = config.columns; Set<String> propertyNameParsed = new HashSet<>(columnsConfig.size()); for (Map.Entry<Integer, List<String>> entry : headsMap.entrySet()) { int index = entry.getKey(); List<String> heads = entry.getValue(); String headsStr = StringUtils.join(heads, "|"); String propertyName = null; ExcelReaderHeadConfig headConfig = null; for (Map.Entry<String, ExcelReaderHeadConfig> configEntry : columnsConfig.entrySet()) { String propertyNameTmp = configEntry.getKey(); ExcelReaderHeadConfig headConfigTmp = configEntry.getValue(); // 根据index匹配 if (Objects.equals(index, headConfigTmp.excelProperty.index)) { propertyName = propertyNameTmp; headConfig = headConfigTmp; break; } // 根据column(表头列名)匹配 if (propertyNameParsed.contains(propertyNameTmp)) { continue; } String columnStr = StringUtils.join(headConfigTmp.excelProperty.column, "|"); if (headsStr.endsWith(columnStr) || columnStr.endsWith(headsStr)) { propertyNameParsed.add(propertyNameTmp); propertyName = propertyNameTmp; headConfig = headConfigTmp; headConfig.excelProperty.index = index; break; } } if (propertyName == null) { continue; } columns.put(index, TupleTow.creat(propertyName, headConfig)); // 格式化配置 boolean useDateTimeFormat = headConfig.dateTimeFormat.isSetValue(); boolean useNumberFormat = headConfig.numberFormat.isSetValue(); ExcelReadHeadProperty excelReadHeadProperty = context.currentReadHolder().excelReadHeadProperty(); if ((useDateTimeFormat || useNumberFormat) && excelReadHeadProperty != null && excelReadHeadProperty.getContentPropertyMap() != null) { ExcelContentProperty property = 
excelReadHeadProperty.getContentPropertyMap().computeIfAbsent(index, idx -> new ExcelContentProperty()); if (useDateTimeFormat) { property.setDateTimeFormatProperty(headConfig.dateTimeFormat.getDateTimeFormatProperty()); } if (useNumberFormat) { property.setNumberFormatProperty(headConfig.numberFormat.getNumberFormatProperty()); } } } Assert.notEmpty(columns, "无法解析Excel表头,请查看配置是否正确"); } @Override public void invokeHeadMap(Map<Integer, String> headMap, AnalysisContext context) { ExcelData<Map> excelData = getExcelData(context); if (excelData.getStartTime() == null) { excelData.setStartTime(System.currentTimeMillis()); } for (Map.Entry<Integer, String> entry : headMap.entrySet()) { Integer index = entry.getKey(); String head = entry.getValue(); List<String> list = headsMap.computeIfAbsent(index, idx -> new ArrayList<>()); list.add(head); } } @SuppressWarnings("unchecked") @Override public void invoke(Map<Integer, CellData<?>> data, AnalysisContext context) { // 第一次需要解析表头 if (columns == null) { parseHeadMap(context); } ExcelData<Map> excelData = getExcelData(context); if (excelData.getStartTime() == null) { excelData.setStartTime(System.currentTimeMillis()); } int index = context.readRowHolder().getRowIndex() + 1; ExcelRow<Map> excelRow = new ExcelRow<>(new HashMap(data.size()), index); // 数据签名-防重机制 Map<Integer, Cell> map = context.readRowHolder().getCellMap(); StringBuilder sb = new StringBuilder(map.size() * 32); for (Map.Entry<Integer, Cell> entry : map.entrySet()) { sb.append(entry.getKey()).append("=").append(entry.getValue().toString()).append("|"); } excelRow.setDataSignature(EncodeDecodeUtils.encodeHex(DigestUtils.sha1(sb.toString().getBytes()))); // 读取数据需要类型转换 ReadHolder currentReadHolder = context.currentReadHolder(); ExcelReadHeadProperty excelReadHeadProperty = context.currentReadHolder().excelReadHeadProperty(); Map<Integer, ExcelContentProperty> contentPropertyMap = excelReadHeadProperty.getContentPropertyMap(); for (Map.Entry<Integer, CellData<?>> entry 
: data.entrySet()) { Integer idx = entry.getKey(); CellData<?> cellData = entry.getValue(); TupleTow<String, ExcelReaderHeadConfig> tupleTow = columns.get(idx); if (tupleTow == null) { continue; } String propertyName = tupleTow.getValue1(); ExcelReaderHeadConfig headConfig = tupleTow.getValue2(); // 忽略当前字段(propertyName) if (Objects.equals(headConfig.excelProperty.ignore, Boolean.TRUE)) { continue; } // 获取字段类型 if (headConfig.dataType == null) { headConfig.dataType = getCellDataType(cellData); } // 获取字段值 Object value; if (Objects.equals(Void.class, headConfig.dataType)) { value = null; } else { // 格式化操作 dateFormat numberFormat ExcelContentProperty excelContentProperty = contentPropertyMap.get(idx); value = ConverterUtils.convertToJavaObject( cellData, headConfig.dataType, excelContentProperty, currentReadHolder.converterMap(), currentReadHolder.globalConfiguration(), context.readRowHolder().getRowIndex(), idx); } // 写入字段值 excelRow.getData().put(propertyName, value); } boolean success = true; final boolean enableExcelData = config.isEnableExcelData(); if (enableExcelData) { success = excelData.addRow(excelRow); } if (!success) { log.info("Excel数据导入数据重复,filename={} | data={}", config.getFilename(), data); } // 数据校验 //final boolean enableValidation = config.isEnableValidation(); //if (enableValidation && !excelRow.hasError()) { // // TODO 数据校验 //} // 自定义读取行处理逻辑 final ExcelRowReader<Map> excelRowReader = config.getExcelRowReader(); if (!excelRow.hasError() && excelRowReader != null) { try { excelRowReader.readRow(excelRow.getData(), excelRow, context); } catch (Throwable e) { excelRow.addErrorInRow(e.getMessage()); } } } @Override public void doAfterAllAnalysed(AnalysisContext context) { ExcelData<Map> excelData = getExcelData(context); if (excelData.getEndTime() == null) { excelData.setEndTime(System.currentTimeMillis()); } if (excelData.getEndTime() != null && excelData.getStartTime() != null) { log.info("Excel Sheet读取完成,sheet={} | 耗时:{}ms", excelData.getSheetName(), 
excelData.getEndTime() - excelData.getStartTime()); } ExcelRowReader<Map> excelRowReader = config.getExcelRowReader(); if (excelRowReader != null) { excelRowReader.readEnd(context); } // 清空表头解析数据 columns = null; headsMap.clear(); } @Override public void onException(Exception exception, AnalysisContext context) throws Exception { ExcelReaderExceptionHand excelReaderExceptionHand = config.getExcelReaderExceptionHand(); if (excelReaderExceptionHand != null) { excelReaderExceptionHand.exceptionHand(exception, context); } else { // 默认的异常处理 throw exception; } } @Override public boolean hasNext(AnalysisContext context) { // 未配置列 - 提前退出 if (context.readSheetHolder().getHeadRowNumber() > 0 && columns != null && columns.isEmpty()) { log.warn("未匹配到列配置"); return false; } final ExcelData<Map> excelData = getExcelData(context); // 是否重复读取 if (excelData.getEndTime() != null && excelData.getStartTime() != null) { log.info("Excel Sheet已经读取完成,当前跳过,sheet={}", excelData.getSheetName()); return false; } // 数据是否超出限制 LIMIT_ROWS final int limitRows = config.getLimitRows(); final int rowNum = context.readRowHolder().getRowIndex() + 1; final int dataRowNum = rowNum - context.currentReadHolder().excelReadHeadProperty().getHeadRowNumber(); if (limitRows > 0 && dataRowNum > limitRows) { log.info("Excel数据行超出限制:dataRowNum={} | limitRows={}", dataRowNum, limitRows); excelData.setInterruptByRowNum(rowNum); // 设置已经读取完成 doAfterAllAnalysed(context); return false; } return true; } } public static class ConverterUtils { private ConverterUtils() { } @SuppressWarnings("rawtypes") public static Object convertToJavaObject( CellData<?> cellData, Class<?> clazz, ExcelContentProperty contentProperty, Map<String, Converter> converterMap, GlobalConfiguration globalConfiguration, Integer rowIndex, Integer columnIndex) { if (clazz == null) { clazz = String.class; } if (Objects.equals(cellData.getType(), CellDataTypeEnum.EMPTY)) { if (Objects.equals(String.class, clazz)) { return StringUtils.EMPTY; } else { return 
null; } } Converter<?> converter = null; if (contentProperty != null) { converter = contentProperty.getConverter(); } if (converter == null) { converter = converterMap.get(ConverterKeyBuild.buildKey(clazz, cellData.getType())); } if (converter == null) { throw new ExcelDataConvertException(rowIndex, columnIndex, cellData, contentProperty, "Converter not found, convert " + cellData.getType() + " to " + clazz.getName()); } try { return converter.convertToJavaData(cellData, contentProperty, globalConfiguration); } catch (Exception e) { throw new ExcelDataConvertException(rowIndex, columnIndex, cellData, contentProperty, "Convert data " + cellData + " to " + clazz + " error ", e); } } } @Slf4j private static class FillHeadStrategy extends AbstractCellWriteHandler { private final ExcelDataWriterConfig config; // 保存已填充的表头index Map<index, filled> private final Map<Integer, Boolean> filledMap = new HashMap<>(); // 保存已处理的属性 private final Set<String> propertyNameParsed = new HashSet<>(); public FillHeadStrategy(ExcelDataWriterConfig config) { Assert.notNull(config, "参数config不能为null"); this.config = config; } @Override public void beforeCellCreate( WriteSheetHolder writeSheetHolder, WriteTableHolder writeTableHolder, Row row, Head head, Integer columnIndex, Integer relativeRowIndex, Boolean isHead) { boolean filled = filledMap.computeIfAbsent(columnIndex, idx -> false); if (filled) { return; } filledMap.put(columnIndex, true); if (config.columns.isEmpty() || head == null || head.getHeadNameList() == null) { return; } // Excel表头 Map<Entity.propertyName, ExcelWriterHeadConfig> ExcelWriterHeadConfig headConfig = null; String headsStr = StringUtils.join(head.getHeadNameList(), "|"); for (Map.Entry<String, ExcelWriterHeadConfig> entry : config.columns.entrySet()) { String propertyNameTmp = entry.getKey(); ExcelWriterHeadConfig headConfigTmp = entry.getValue(); // 根据index匹配 if (Objects.equals(headConfigTmp.excelProperty.index, columnIndex)) { 
propertyNameParsed.add(propertyNameTmp); headConfig = headConfigTmp; break; } // 根据column(表头列名)匹配 if (propertyNameParsed.contains(propertyNameTmp)) { continue; } String columnStr = StringUtils.join(headConfigTmp.excelProperty.column, "|"); if (headsStr.endsWith(columnStr) || columnStr.endsWith(headsStr)) { propertyNameParsed.add(propertyNameTmp); headConfig = headConfigTmp; headConfig.excelProperty.index = columnIndex; break; } } if (headConfig == null) { return; } if (headConfig.columnWidth.columnWidth != null) { head.setColumnWidthProperty(new ColumnWidthProperty(headConfig.columnWidth.columnWidth)); } // 合并配置 head.setHeadStyleProperty(headConfig.headStyle.getStyleProperty(config.styleConfig.headStyle)); head.setHeadFontProperty(headConfig.headFontStyle.getFontProperty(config.styleConfig.headFontStyle)); head.setContentStyleProperty(headConfig.contentStyle.getStyleProperty(config.styleConfig.contentStyle)); head.setContentFontProperty(headConfig.contentFontStyle.getFontProperty(config.styleConfig.contentFontStyle)); // 格式化配置 boolean useDateTimeFormat = headConfig.dateTimeFormat.isSetValue(); boolean useNumberFormat = headConfig.numberFormat.isSetValue(); if ((useDateTimeFormat || useNumberFormat) && writeSheetHolder.getExcelWriteHeadProperty() != null && writeSheetHolder.getExcelWriteHeadProperty().getContentPropertyMap() != null) { ExcelContentProperty property = writeSheetHolder.getExcelWriteHeadProperty().getContentPropertyMap().computeIfAbsent(columnIndex, idx -> new ExcelContentProperty()); if (useDateTimeFormat) { property.setDateTimeFormatProperty(headConfig.dateTimeFormat.getDateTimeFormatProperty()); } if (useNumberFormat) { property.setNumberFormatProperty(headConfig.numberFormat.getNumberFormatProperty()); } } } } private static class ColumnWidthStyleStrategy extends AbstractHeadColumnWidthStyleStrategy { @Override protected Integer columnWidth(Head head, Integer columnIndex) { if (head == null) { return null; } if (head.getColumnWidthProperty() != 
null) { return head.getColumnWidthProperty().getWidth(); } return null; } } private static class StyleStrategy extends AbstractVerticalCellStyleStrategy { @Override protected WriteCellStyle headCellStyle(Head head) { return build(head.getHeadStyleProperty(), head.getHeadFontProperty()); } @Override protected WriteCellStyle contentCellStyle(Head head) { return build(head.getContentStyleProperty(), head.getContentFontProperty()); } } private static class LoopMergeStrategy extends AbstractRowWriteHandler { /** * 每一行 */ private final int eachRow; /** * 延伸栏 */ private final int columnExtend; /** * 当前列数 */ private Integer columnIndex; private final ExcelWriterHeadConfig headConfig; public LoopMergeStrategy(int eachRow, int columnExtend, ExcelWriterHeadConfig headConfig) { this.eachRow = eachRow; this.columnExtend = columnExtend; this.headConfig = headConfig; this.columnIndex = headConfig.excelProperty.index; } @Override public void afterRowDispose(WriteSheetHolder writeSheetHolder, WriteTableHolder writeTableHolder, Row row, Integer relativeRowIndex, Boolean isHead) { if (isHead) { return; } if (this.columnIndex == null || this.columnIndex < 0) { columnIndex = headConfig.excelProperty.index; if (columnIndex == null) { return; } } if (relativeRowIndex % eachRow == 0) { CellRangeAddress cellRangeAddress = new CellRangeAddress( row.getRowNum(), row.getRowNum() + eachRow - 1, columnIndex, columnIndex + columnExtend - 1 ); writeSheetHolder.getSheet().addMergedRegionUnsafe(cellRangeAddress); } } } private static WriteCellStyle build(StyleProperty styleProperty, FontProperty fontProperty) { if (styleProperty == null && fontProperty == null) { return null; } WriteCellStyle writeCellStyle = new WriteCellStyle(); if (styleProperty != null) { if (styleProperty.getDataFormat() != null && styleProperty.getDataFormat() >= 0) { writeCellStyle.setDataFormat(styleProperty.getDataFormat()); } writeCellStyle.setHidden(styleProperty.getHidden()); 
writeCellStyle.setLocked(styleProperty.getLocked()); writeCellStyle.setQuotePrefix(styleProperty.getQuotePrefix()); writeCellStyle.setHorizontalAlignment(styleProperty.getHorizontalAlignment()); writeCellStyle.setWrapped(styleProperty.getWrapped()); writeCellStyle.setVerticalAlignment(styleProperty.getVerticalAlignment()); if (styleProperty.getRotation() != null && styleProperty.getRotation() >= 0) { writeCellStyle.setRotation(styleProperty.getRotation()); } if (styleProperty.getIndent() != null && styleProperty.getIndent() >= 0) { writeCellStyle.setIndent(styleProperty.getIndent()); } writeCellStyle.setBorderLeft(styleProperty.getBorderLeft()); writeCellStyle.setBorderRight(styleProperty.getBorderRight()); writeCellStyle.setBorderTop(styleProperty.getBorderTop()); writeCellStyle.setBorderBottom(styleProperty.getBorderBottom()); if (styleProperty.getLeftBorderColor() != null && styleProperty.getLeftBorderColor() >= 0) { writeCellStyle.setLeftBorderColor(styleProperty.getLeftBorderColor()); } if (styleProperty.getRightBorderColor() != null && styleProperty.getRightBorderColor() >= 0) { writeCellStyle.setRightBorderColor(styleProperty.getRightBorderColor()); } if (styleProperty.getTopBorderColor() != null && styleProperty.getTopBorderColor() >= 0) { writeCellStyle.setTopBorderColor(styleProperty.getTopBorderColor()); } if (styleProperty.getBottomBorderColor() != null && styleProperty.getBottomBorderColor() >= 0) { writeCellStyle.setBottomBorderColor(styleProperty.getBottomBorderColor()); } writeCellStyle.setFillPatternType(styleProperty.getFillPatternType()); if (styleProperty.getFillBackgroundColor() != null && styleProperty.getFillBackgroundColor() >= 0) { writeCellStyle.setFillBackgroundColor(styleProperty.getFillBackgroundColor()); } if (styleProperty.getFillForegroundColor() != null && styleProperty.getFillForegroundColor() >= 0) { writeCellStyle.setFillForegroundColor(styleProperty.getFillForegroundColor()); } 
writeCellStyle.setShrinkToFit(styleProperty.getShrinkToFit()); } if (fontProperty != null) { WriteFont writeFont = new WriteFont(); writeCellStyle.setWriteFont(writeFont); if (!com.alibaba.excel.util.StringUtils.isEmpty(fontProperty.getFontName())) { writeFont.setFontName(fontProperty.getFontName()); } writeFont.setFontHeightInPoints(fontProperty.getFontHeightInPoints()); writeFont.setItalic(fontProperty.getItalic()); writeFont.setStrikeout(fontProperty.getStrikeout()); if (fontProperty.getColor() != null && fontProperty.getColor() >= 0) { writeFont.setColor(fontProperty.getColor()); } if (fontProperty.getTypeOffset() != null && fontProperty.getTypeOffset() >= 0) { writeFont.setTypeOffset(fontProperty.getTypeOffset()); } if (fontProperty.getUnderline() != null && fontProperty.getUnderline() >= 0) { writeFont.setUnderline(fontProperty.getUnderline()); } if (fontProperty.getCharset() != null && fontProperty.getCharset() >= 0) { writeFont.setCharset(fontProperty.getCharset()); } writeFont.setBold(fontProperty.getBold()); } return writeCellStyle; } }
SENA-CEET/1262154-G1G2-Trimestre-2
java/poo/EjemplosClases/src/co/edu/sena/clases/sobrecargametodos/Calculadora.java
<reponame>SENA-CEET/1262154-G1G2-Trimestre-2<gh_stars>0 /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package co.edu.sena.clases.sobrecargametodos; /** * Esta clase es para dearrollar las operaciones atemaicas de una calculadora * * @author Enrique * @since 1998 * @version 1 */ public class Calculadora { /** * Este metodo suma dos números donde el primero es double y el segundo es * entero * * @param a es un numero double * @param b es un numero entero * @return este metodo retorna la suma de a mas b * */ public static double suma(double a, int b) throws ArithmeticException { return a + b; } /** * * @param a * @param b * @return * @ */ public static int suma(int a, int b) { return a + b; } public static short suma(short a, short b) { return (short) (a + b); } public static long suma(long a, long b) { return a + b; } public static float suma(float a, float b) { return a + b; } public static char suma(char a, char b) { return (char) (a + b); } public static byte suma(byte a, byte b) { return (byte) (a + b); } /** * Este metodo hace la división de dos numeros enteros * * @param a primer número entero * @param b segundo número entero * @return retorna la division como un String */ public static String division(int a, int b) { if (b == 0) { return "division por cero"; } else { Integer div = a / b; return div.toString(); } } public static String conversionDecimalBinario(int numero) { int div = numero; int residuo; String resultado = ""; String resultadoFinal=""; while (div != 0) { residuo = div % 2; div = (int) div / 2; resultado += residuo; } for (int i = resultado.length()-1; i >=0 ; i--) { resultadoFinal=resultadoFinal+ resultado.charAt(i); } return resultadoFinal; } }
cafhach/SMTK
smtk/session/polygon/Session.h
//=========================================================================
// Copyright (c) Kitware, Inc.
// All rights reserved.
// See LICENSE.txt for details.
//
// This software is distributed WITHOUT ANY WARRANTY; without even
// the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the above copyright notice for more information.
//=========================================================================
#ifndef __smtk_session_polygon_Session_h
#define __smtk_session_polygon_Session_h

#include "smtk/session/polygon/Exports.h"
#include "smtk/session/polygon/PointerDefs.h"
#include "smtk/session/polygon/internal/Entity.h"

#include "smtk/model/Session.h"

namespace smtk
{
namespace model
{
class Model;
}
namespace session
{
namespace polygon
{
namespace internal
{
class pmodel;
class vertex;
} // namespace internal

/**\brief Methods that handle translation between polygon and SMTK instances.
  *
  * While the TDUUID class keeps a map from SMTK UUIDs to polygon ToolDataUser
  * pointers, this is not enough to handle everything SMTK provides:
  * there is no way to track cell-use or shell entities since they do
  * not inherit ToolDataUser instances. Also, some engines (e.g., facet)
  * do not appear to store some entity types (e.g., RefGroup).
  *
  * Also, simply loading a polygon file does not translate the entire model
  * into SMTK; instead, it assigns UUIDs to entities if they do not already
  * exist. This class (Session) provides a method for requesting the
  * entity, arrangement, and/or tessellation information for a UUID be
  * mapped into SMTK from polygon.
  */
class SMTKPOLYGONSESSION_EXPORT Session : public smtk::model::Session
{
public:
  smtkTypeMacro(Session);
  smtkSuperclassMacro(smtk::model::Session);
  smtkSharedFromThisMacro(smtk::model::Session);
  smtkCreateMacro(smtk::model::Session);
  typedef smtk::model::SessionInfoBits SessionInfoBits;

  ~Session() override;

  // Non-copyable: the session owns the UUID -> internal-entity storage map.
  Session(const Session&) = delete;
  Session& operator=(const Session&) = delete;

  /// Bitmask of the kinds of model information this session can transcribe.
  SessionInfoBits allSupportedInformation() const override;

  /// Remove entries from \a container while recording them in \a modified /
  /// \a expunged so callers can report what changed.
  /// NOTE(review): exact semantics are defined in the implementation file,
  /// which is not visible here.
  template<typename T, typename U, typename V>
  void consistentInternalDelete(T& container, U& modified, V& expunged, bool logDebug);

  /// File extension used when writing models of this session's native type.
  std::string defaultFileExtension(const smtk::model::Model&) const override;

  // Iterator access to the UUID -> internal-entity map (m_storage).
  internal::EntityIdToPtr::iterator beginStorage();
  internal::EntityIdToPtr::iterator endStorage();
  internal::EntityIdToPtr::const_iterator beginStorage() const;
  internal::EntityIdToPtr::const_iterator endStorage() const;

  /// Insert (or overwrite) the internal-entity record for \a uid.
  void addStorage(
    const smtk::common::UUID& uid,
    smtk::session::polygon::internal::entity::Ptr storage);

  /// Erase the internal-entity record for \a uid; returns whether it existed.
  bool removeStorage(const smtk::common::UUID& uid);

protected:
  friend class Neighborhood;
  friend class Operation;
  friend class Resource;
  friend class internal::pmodel;
  friend class SessionIOJSON;

  Session();

  smtk::model::SessionInfoBits transcribeInternal(
    const smtk::model::EntityRef& entity,
    SessionInfoBits requestedInfo,
    int depth = -1) override;

  bool removeFaceReferences(const smtk::model::Face& f);
  bool removeEdgeReferences(const smtk::model::Edge& e);
  bool removeVertReferences(const smtk::model::Vertex& v);

  /// Look up the storage for \a uid and downcast it to T.
  /// Returns a null T::Ptr when \a uid is absent (the function-local static
  /// is a default-constructed shared pointer reused across all calls).
  template<typename T>
  typename T::Ptr findStorage(const smtk::common::UUID& uid)
  {
    internal::EntityIdToPtr::iterator it = m_storage.find(uid);
    if (it != m_storage.end())
      return smtk::dynamic_pointer_cast<T>(it->second);
    static typename T::Ptr blank;
    return blank;
  }

  /// Like findStorage(), but creates and inserts a fresh T via T::create()
  /// when \a uid has no record yet.
  template<typename T>
  T findOrAddStorage(const smtk::common::UUID& uid)
  {
    internal::EntityIdToPtr::iterator it = m_storage.find(uid);
    if (it != m_storage.end())
      return smtk::dynamic_pointer_cast<T>(it->second);

    T blank = T::create();
    it =
      m_storage
        .insert(internal::EntityIdToPtr::value_type(
          uid, smtk::dynamic_pointer_cast<internal::entity>(blank)))
        .first;
    return smtk::dynamic_pointer_cast<T>(it->second);
  }

  smtk::model::SessionIOPtr createIODelegate(const std::string& format) override;

  internal::EntityIdToPtr::iterator findStorageIterator(const smtk::common::UUID& uid);

  /// Map from SMTK UUID to this session's internal entity records.
  internal::EntityIdToPtr m_storage;
  /// Counter used to number newly created models.
  int m_nextModelNumber{ 0 };
};

} // namespace polygon
} //namespace session
} // namespace smtk

#endif // __smtk_session_polygon_Session_h
danicarrion/carto-python
carto/do_token.py
"""
Module for working with Data Observatory tokens

.. module:: carto.DoToken
   :platform: Unix, Windows
   :synopsis: Module for working with Data Observatory tokens

.. moduleauthor:: <NAME> <<EMAIL>>

"""

from pyrestcli.fields import CharField

from .paginators import CartoPaginator
from .resources import WarnResource, Manager

# Version segment and endpoint template of the (non-public) DO token API.
API_VERSION = "v4"
API_ENDPOINT = "api/{api_version}/do/token"


class DoToken(WarnResource):
    """
    Represents a Data Observatory token in CARTO.

    The only exposed field is ``access_token``, which also acts as the
    resource's identifying field (see ``Meta.name_field``).

    .. warning:: Non-public API. It may change with no previous notice
    """
    # The token string itself, mapped as a plain character field.
    access_token = CharField()

    class Meta:
        # Endpoint resolved once at class-definition time for API v4.
        collection_endpoint = API_ENDPOINT.format(api_version=API_VERSION)

        # Identify the resource by the token string rather than a numeric id.
        name_field = "access_token"


class DoTokenManager(Manager):
    """
    Manager for the DoToken class.

    .. warning:: Non-public API. It may change with no previous notice
    """
    resource_class = DoToken
    # The API returns the token object directly, not wrapped in a collection
    # attribute, hence None.
    json_collection_attribute = None
    paginator_class = CartoPaginator

    def get(self):
        # The endpoint exposes a single token resource under the fixed id
        # 'token', so no resource-id argument is accepted here.
        return super(DoTokenManager, self).get('token')
berkerY/rdmo
rdmo/projects/migrations/0041_value_external_id.py
# Generated by Django 2.2.13 on 2020-10-02 10:36
#
# Adds the optional ``external_id`` CharField (max 256 chars, blank allowed)
# to the ``Value`` model ("External id for this value.").

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('projects', '0040_issueresource'),
    ]

    operations = [
        migrations.AddField(
            model_name='value',
            name='external_id',
            field=models.CharField(blank=True, help_text='External id for this value.', max_length=256, verbose_name='External id'),
        ),
    ]
lzbrooks/ci_output_parser
tests/test_log_file_parsers/test_pre_commit_log_file_parser.py
import pytest

from ci_output_parser.log_file_parsers.pre_commit_log_file_parser import (
    PreCommitLogLogFileParser,
    PreCommitDockerLogFileParser,
)


def _assert_regex_parser_matches(regex_parser, expected):
    """Assert a regex sub-parser's start/stop/parse patterns match ``expected``."""
    assert regex_parser.start_regex == expected.get("start_regex")
    assert regex_parser.stop_regex == expected.get("stop_regex")
    assert regex_parser.parse_regex == expected.get("parse_regex")


def test_pre_commit_log_file_parser_init_with_default_values(default_pre_commit_log_file_parser,
                                                             valid_log_file_parser_fields,
                                                             default_regex_log_file_parser_parameters):
    parser = default_pre_commit_log_file_parser
    fields = valid_log_file_parser_fields

    # Scalar fields come straight from the fixture's expected values.
    assert parser.log_file_path == fields.get("log_file_path")
    assert parser.parser_name == fields.get("parser_name")
    assert not parser.formatted_lines
    assert parser.text_output_file_name == fields.get("text_output_file_name")
    assert parser.json_output_file_name == fields.get("json_output_file_name")
    assert not parser.log_lines
    assert not parser.lint_lines
    assert not parser.lint_errors

    # All four regex sub-parsers share the same default parameters.
    for sub_parser in (parser.log_file_parser, parser.lint_parser,
                      parser.lint_error_parser, parser.clean_lint_error_parser):
        _assert_regex_parser_matches(sub_parser, default_regex_log_file_parser_parameters)


def test_get_lint_error_lines_with_valid_line(default_pre_commit_log_file_parser, valid_lint_lines):
    parser = default_pre_commit_log_file_parser
    parser.lint_lines = [valid_lint_lines.get("pre_commit_lint")]
    parser.get_lint_error_lines()
    assert valid_lint_lines.get("pre_commit_lint") == parser.lint_errors[0]


def test_get_lint_error_lines_with_invalid_line_first_group(default_pre_commit_log_file_parser,
                                                            invalid_lint_lines):
    parser = default_pre_commit_log_file_parser
    parser.lint_lines = [invalid_lint_lines.get("pre_commit_lint")[0]]
    parser.get_lint_error_lines()
    assert not parser.lint_errors


def test_get_lint_error_lines_with_invalid_line_second_group(default_pre_commit_log_file_parser,
                                                             invalid_lint_lines):
    parser = default_pre_commit_log_file_parser
    parser.lint_lines = [invalid_lint_lines.get("pre_commit_lint")[1]]
    parser.get_lint_error_lines()
    assert not parser.lint_errors


def test_get_lint_error_lines_with_empty_lines(default_pre_commit_log_file_parser):
    # No lint_lines set at all: nothing should be collected.
    default_pre_commit_log_file_parser.get_lint_error_lines()
    assert not default_pre_commit_log_file_parser.lint_errors


def test_clean_lint_error_lines_with_valid_line(default_pre_commit_log_file_parser, valid_lint_lines):
    parser = default_pre_commit_log_file_parser
    parser.lint_errors = [valid_lint_lines.get("pre_commit_lint")]
    parser.clean_lint_error_lines()
    # A valid line survives cleaning unchanged.
    assert valid_lint_lines.get("pre_commit_lint") == parser.lint_errors[0]


def test_clean_lint_error_lines_with_invalid_line(default_pre_commit_log_file_parser,
                                                  invalid_lint_lines):
    parser = default_pre_commit_log_file_parser
    parser.lint_errors = [invalid_lint_lines.get("pre_commit_lint")[2]]
    parser.clean_lint_error_lines()
    assert not parser.lint_errors


def test_clean_lint_error_lines_with_empty_lines(default_pre_commit_log_file_parser):
    default_pre_commit_log_file_parser.clean_lint_error_lines()
    assert not default_pre_commit_log_file_parser.lint_errors


def test_pre_commit_log_log_file_parser_init_with_valid_values(pre_commit_valid_file_path,
                                                               pre_commit_log_log_file_parser_parameters):
    parser = PreCommitLogLogFileParser(pre_commit_valid_file_path.get("pre_commit_lint_valid_path"))
    expected = pre_commit_log_log_file_parser_parameters.get("log_line_parser")
    _assert_regex_parser_matches(parser.lint_parser, expected)


def test_pre_commit_log_log_file_parser_with_valid_file(pre_commit_valid_file_path, valid_lint_lines,
                                                        mock_file_write_functions):
    parser = PreCommitLogLogFileParser(pre_commit_valid_file_path.get("pre_commit_lint_valid_path"))
    parser.log_parser()
    assert len(parser.formatted_lines) == 15
    assert valid_lint_lines.get("pre_commit_lint") in parser.formatted_lines


def test_pre_commit_log_log_file_parser_with_clean_file(pre_commit_valid_file_path, valid_lint_lines,
                                                        mock_file_write_functions):
    parser = PreCommitLogLogFileParser(pre_commit_valid_file_path.get("pre_commit_lint_clean_valid_path"))
    parser.log_parser()
    assert not parser.formatted_lines


def test_pre_commit_log_log_file_parser_with_invalid_file(invalid_file_path):
    # Construction succeeds; only parsing the missing file raises.
    parser = PreCommitLogLogFileParser(invalid_file_path)
    with pytest.raises(FileNotFoundError):
        parser.log_parser()


def test_pre_commit_log_log_file_parser_with_empty_file(empty_file_path, valid_lint_lines,
                                                        mock_file_write_functions):
    parser = PreCommitLogLogFileParser(empty_file_path)
    parser.log_parser()
    assert not parser.formatted_lines


def test_pre_commit_docker_log_file_parser_init_with_valid_values(pre_commit_valid_file_path,
                                                                  pre_commit_docker_log_file_parser_parameters):
    parser = PreCommitDockerLogFileParser(pre_commit_valid_file_path.get("pre_commit_lint_valid_path"))
    _assert_regex_parser_matches(parser.log_file_parser,
                                 pre_commit_docker_log_file_parser_parameters.get("file_parser"))
    _assert_regex_parser_matches(parser.lint_parser,
                                 pre_commit_docker_log_file_parser_parameters.get("log_line_parser"))


def test_pre_commit_docker_log_file_parser_with_valid_file(docker_valid_file_path, valid_lint_lines,
                                                           mock_file_write_functions):
    parser = PreCommitDockerLogFileParser(docker_valid_file_path.get("docker_valid_path"))
    parser.log_parser()
    assert len(parser.formatted_lines) == 26
    assert valid_lint_lines.get("pre_commit_lint") in parser.formatted_lines


def test_pre_commit_docker_log_file_parser_with_clean_log_file(pre_commit_valid_file_path,
                                                               valid_lint_lines,
                                                               mock_file_write_functions):
    parser = PreCommitDockerLogFileParser(pre_commit_valid_file_path.get("pre_commit_lint_clean_valid_path"))
    parser.log_parser()
    assert not parser.formatted_lines


def test_pre_commit_docker_log_file_parser_with_invalid_file(invalid_file_path):
    # NOTE: unlike the log-variant test above, this one keeps construction
    # inside pytest.raises, mirroring the original test's expectation.
    with pytest.raises(FileNotFoundError):
        parser = PreCommitDockerLogFileParser(invalid_file_path)
        parser.log_parser()


def test_pre_commit_docker_log_file_parser_with_empty_file(empty_file_path, valid_lint_lines,
                                                           mock_file_write_functions):
    parser = PreCommitDockerLogFileParser(empty_file_path)
    parser.log_parser()
    assert not parser.formatted_lines
lihongli528628/yishu
medtree/medtree/Groups/Find/Controller/WriteFeedViewController.h
//
//  WriteFeedViewController.h
//  medtree
//
//  Created by tangshimi on 8/11/15.
//  Copyright (c) 2015 sam. All rights reserved.
//

#import "MedTreeBaseController.h"

// Screen for writing/publishing a feed entry (per the class name; behavior
// lives in the .m file, not visible here).
@interface WriteFeedViewController : MedTreeBaseController

// Title for this controller's navigation bar.
@property (nonatomic, copy) NSString *navigationTitle;

// Callback invoked after a feed is published successfully (no arguments).
@property (nonatomic, copy) dispatch_block_t publishFeedSuccessBlock;

@end
tenderlove/aarch64
lib/aarch64/instructions/csdb.rb
module AArch64
  module Instructions
    # CSDB -- A64
    # Consumption of Speculative Data Barrier
    # CSDB
    class CSDB
      # Fixed 32-bit instruction word; CSDB takes no operands, so the
      # encoding is a constant.
      ENCODING = 0b1101010100_0_00_011_0010_0010_100_11111

      # @return [Integer] the 32-bit encoding of the CSDB instruction
      def encode
        ENCODING
      end
    end
  end
end
itcr-uni-luebeck/Synthea-Gecco
src/main/groovy/syntheagecco/openehr/sdk/model/generated/geccovirologischerbefundcomposition/definition/ProAnalytCluster.java
package syntheagecco.openehr.sdk.model.generated.geccovirologischerbefundcomposition.definition;

import com.nedap.archie.rm.archetyped.FeederAudit;
import com.nedap.archie.rm.datastructures.Cluster;
import java.lang.String;
import java.util.List;
import javax.annotation.processing.Generated;
import org.ehrbase.client.annotations.Archetype;
import org.ehrbase.client.annotations.Choice;
import org.ehrbase.client.annotations.Entity;
import org.ehrbase.client.annotations.Path;
import org.ehrbase.client.classgenerator.interfaces.LocatableEntity;
import org.ehrbase.client.classgenerator.shareddefinition.NullFlavour;

/**
 * Generated openEHR SDK binding for the "Pro Analyt" (per-analyte) cluster of
 * the GECCO virological-findings composition. Fields are mapped to archetype
 * paths via {@code @Path}; do not hand-edit the mapping.
 */
@Entity
@Archetype("openEHR-EHR-CLUSTER.laboratory_test_analyte.v1")
@Generated(
    value = "org.ehrbase.client.classgenerator.ClassGenerator",
    date = "2021-09-01T01:48:29.134665300+02:00",
    comments = "https://github.com/ehrbase/openEHR_SDK Version: 1.5.0"
)
public class ProAnalytCluster implements LocatableEntity {
  /**
   * Path: GECCO_Virologischer Befund/Befund/Jedes Ereignis/Labortest-Panel/Pro Analyt/Virusnachweistest
   * Description: The name of the analyte that was examined.
   * Comment: The value of this element is normally supplied (often via a
   * specialization, a template, or at application runtime) to reflect the
   * actual analyte, e.g. 'serum sodium', 'haemoglobin'. Coding with an
   * external terminology such as LOINC, NPU, SNOMED CT or a local laboratory
   * terminology is strongly recommended.
   */
  @Path("/items[at0024 and name/value='Virusnachweistest']/value|defining_code")
  private VirusnachweistestDefiningCode virusnachweistestDefiningCode;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Event Series/Jedes Ereignis/Tree/Labortest-Panel/Pro Analyt/Virusnachweistest/null_flavour
   */
  @Path("/items[at0024 and name/value='Virusnachweistest']/null_flavour|defining_code")
  private NullFlavour virusnachweistestNullFlavourDefiningCode;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Jedes Ereignis/Labortest-Panel/Pro Analyt/Nachweis
   * Description: (Measured) value of the analyte result.
   * Comment: E.g. "7.3 mmol/l", "elevated". The generic "Any" data type must
   * be constrained to a suitable concrete type (via specialization, template,
   * or at runtime) to represent the actual result. The "Quantity" type
   * carries reference-model attributes such as normal/abnormal flags,
   * reference ranges and approximations; see
   * https://specifications.openehr.org/releases/RM/latest/data_types.html#_dv_quantity_class
   * for details.
   */
  @Path("/items[at0001 and name/value='Nachweis']/value|defining_code")
  private NachweisDefiningCode nachweisDefiningCode;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Event Series/Jedes Ereignis/Tree/Labortest-Panel/Pro Analyt/Nachweis/null_flavour
   */
  @Path("/items[at0001 and name/value='Nachweis']/null_flavour|defining_code")
  private NullFlavour nachweisNullFlavourDefiningCode;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Jedes Ereignis/Labortest-Panel/Pro Analyt/Analyseergebnis-Detail
   * Description: Further details about a single result.
   */
  @Path("/items[at0014]")
  private List<Cluster> analyseergebnisDetail;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Event Series/Jedes Ereignis/Tree/Labortest-Panel/Pro Analyt/Testmethode/null_flavour
   */
  @Path("/items[at0028]/null_flavour|defining_code")
  private NullFlavour testmethodeNullFlavourDefiningCode;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Jedes Ereignis/Labortest-Panel/Pro Analyt/Ergebnis-Status
   * Description: Status of the analyte result value.
   * Comment: The values were chosen to match those of the HL7 FHIR
   * DiagnosticReport, which itself derives from HL7v2 practice. Other local
   * codes/terms can be used via free-text choice. This element allows
   * multiple occurrences to support use cases where more than one kind of
   * status must be implemented.
   */
  @Path("/items[at0005]/value|value")
  private String ergebnisStatusValue;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Event Series/Jedes Ereignis/Tree/Labortest-Panel/Pro Analyt/Ergebnis-Status/null_flavour
   */
  @Path("/items[at0005]/null_flavour|defining_code")
  private NullFlavour ergebnisStatusNullFlavourDefiningCode;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Jedes Ereignis/Labortest-Panel/Pro Analyt/feeder_audit
   */
  @Path("/feeder_audit")
  private FeederAudit feederAudit;

  /**
   * Path: GECCO_Virologischer Befund/Befund/Jedes Ereignis/Labortest-Panel/Pro Analyt/Testmethode
   * Description: Description of the method used to perform the test for this
   * analyte only.
   */
  @Path("/items[at0028]/value")
  @Choice
  private ProAnalytTestmethodeChoice testmethode;

  public void setVirusnachweistestDefiningCode(
      VirusnachweistestDefiningCode virusnachweistestDefiningCode) {
    this.virusnachweistestDefiningCode = virusnachweistestDefiningCode;
  }

  public VirusnachweistestDefiningCode getVirusnachweistestDefiningCode() {
    return this.virusnachweistestDefiningCode;
  }

  public void setVirusnachweistestNullFlavourDefiningCode(
      NullFlavour virusnachweistestNullFlavourDefiningCode) {
    this.virusnachweistestNullFlavourDefiningCode = virusnachweistestNullFlavourDefiningCode;
  }

  public NullFlavour getVirusnachweistestNullFlavourDefiningCode() {
    return this.virusnachweistestNullFlavourDefiningCode;
  }

  public void setNachweisDefiningCode(NachweisDefiningCode nachweisDefiningCode) {
    this.nachweisDefiningCode = nachweisDefiningCode;
  }

  public NachweisDefiningCode getNachweisDefiningCode() {
    return this.nachweisDefiningCode;
  }

  public void setNachweisNullFlavourDefiningCode(NullFlavour nachweisNullFlavourDefiningCode) {
    this.nachweisNullFlavourDefiningCode = nachweisNullFlavourDefiningCode;
  }

  public NullFlavour getNachweisNullFlavourDefiningCode() {
    return this.nachweisNullFlavourDefiningCode;
  }

  public void setAnalyseergebnisDetail(List<Cluster> analyseergebnisDetail) {
    this.analyseergebnisDetail = analyseergebnisDetail;
  }

  public List<Cluster> getAnalyseergebnisDetail() {
    return this.analyseergebnisDetail;
  }

  public void setTestmethodeNullFlavourDefiningCode(
      NullFlavour testmethodeNullFlavourDefiningCode) {
    this.testmethodeNullFlavourDefiningCode = testmethodeNullFlavourDefiningCode;
  }

  public NullFlavour getTestmethodeNullFlavourDefiningCode() {
    return this.testmethodeNullFlavourDefiningCode;
  }

  public void setErgebnisStatusValue(String ergebnisStatusValue) {
    this.ergebnisStatusValue = ergebnisStatusValue;
  }

  public String getErgebnisStatusValue() {
    return this.ergebnisStatusValue;
  }

  public void setErgebnisStatusNullFlavourDefiningCode(
      NullFlavour ergebnisStatusNullFlavourDefiningCode) {
    this.ergebnisStatusNullFlavourDefiningCode = ergebnisStatusNullFlavourDefiningCode;
  }

  public NullFlavour getErgebnisStatusNullFlavourDefiningCode() {
    return this.ergebnisStatusNullFlavourDefiningCode;
  }

  public void setFeederAudit(FeederAudit feederAudit) {
    this.feederAudit = feederAudit;
  }

  public FeederAudit getFeederAudit() {
    return this.feederAudit;
  }

  public void setTestmethode(ProAnalytTestmethodeChoice testmethode) {
    this.testmethode = testmethode;
  }

  public ProAnalytTestmethodeChoice getTestmethode() {
    return this.testmethode;
  }
}
Peiiii/wk
wk/extra/node/node.py
import wk


class NodeMetaClass(type):
    """Metaclass that merges dict-valued class attributes down the MRO.

    For each attribute named below ('_attrs', 'environment') that a class
    defines, the class's own dict is merged on top of every base class's
    dict, so subclasses extend rather than replace their parents' defaults.
    """

    def __new__(cls, clsname, bases, attrs):
        # Dead debug code from the original (`if name == None`) removed; the
        # condition could never be true and `debug` was never used.
        for merged_name in ('_attrs', 'environment'):
            if merged_name in attrs:
                own = attrs[merged_name]
                assert isinstance(own, dict)
                merged = {}
                for base in bases:
                    if hasattr(base, merged_name):
                        merged.update(**getattr(base, merged_name))
                merged.update(**own)
                attrs[merged_name] = merged
        return type.__new__(cls, clsname, bases, attrs)


class Node(metaclass=NodeMetaClass):
    """An HTML-like element: a tag, an attribute dict, and child nodes."""

    tag = 'Node'
    self_closing = False
    _attrs = {}        # class-level attribute defaults (merged by metaclass)
    _children = []     # class-level default children
    environment = {}   # default Jinja2 render context (merged by metaclass)

    def __init__(self, **kwargs):
        def preprocess(kwargs):
            # Allow '_class' as an alias for the reserved word 'class'.
            cls_attr = '_class'
            if cls_attr in kwargs.keys():
                kwargs['class'] = kwargs.pop(cls_attr)
            return kwargs

        self.attrs = {}
        self.attrs.update(**self._attrs)
        self.attrs = preprocess(self.attrs)
        # BUG FIX: copy the class default instead of aliasing it. Previously
        # `self.children = self._children` shared the class-level list, so
        # compile() mutating self.children in place corrupted the default for
        # every other instance of the same class.
        self.children = list(self._children)
        kwargs = preprocess(kwargs)
        children_name = 'children'
        if children_name in kwargs.keys():
            self.children = kwargs.pop(children_name)
        self.attrs.update(**kwargs)

    def to_string(self, indent=0, indent_step=2):
        """Serialize this node (recursively via to_string) as markup text."""
        tag_and_attrs_string = ' '.join(
            [self.tag] + ['%s="%s"' % (name, value) for name, value in self.attrs.items()])
        if self.self_closing:
            return '<{tag_and_attrs}>'.format(tag_and_attrs=tag_and_attrs_string)
        else:
            if len(self.children) == 1:
                # Single text-like child is rendered inline on one line.
                child = self.children[0]
                if isinstance(child, (str,)):
                    children_string = str(child)
                    return '<{tag_and_attrs}>{children_string}</{tag}>'.format(
                        tag_and_attrs=tag_and_attrs_string,
                        children_string=children_string, tag=self.tag)
                elif isinstance(child, Var) and child.attrs['type'] == 'text':
                    children_string = child.to_string(indent=indent, indent_step=indent_step)
                    return '<{tag_and_attrs}>{children_string}</{tag}>'.format(
                        tag_and_attrs=tag_and_attrs_string,
                        children_string=children_string, tag=self.tag)
            children_string = '\n{indent}'.format(indent=' ' * (indent + indent_step)).join(
                [child.to_string(indent=indent + indent_step, indent_step=indent_step)
                 if isinstance(child, (Node,))
                 else ' ' * (indent + indent_step) + str(child) + '\n'
                 for child in self.children])
            if children_string:
                return '<{tag_and_attrs}>\n{next_indent}' \
                       '{children_string}' \
                       '\n{indent}</{tag}>'.format(next_indent=' ' * (indent + indent_step),
                                                   indent=' ' * indent,
                                                   tag_and_attrs=tag_and_attrs_string,
                                                   children_string=children_string,
                                                   tag=self.tag)
            else:
                return '<{tag_and_attrs}></{tag}>'.format(
                    tag_and_attrs=tag_and_attrs_string, tag=self.tag)

    def to_structure(self, indent=0, indent_step=2):
        """Serialize recursively via to_structure.

        Intentionally NOT merged with to_string: subclasses (Text, Var)
        override only to_string, so this method shows the raw node structure
        (e.g. a Var still appears as a <var ...> tag).
        """
        tag_and_attrs_string = ' '.join(
            [self.tag] + ['%s="%s"' % (name, value) for name, value in self.attrs.items()])
        if self.self_closing:
            return '<{tag_and_attrs}>'.format(tag_and_attrs=tag_and_attrs_string)
        else:
            if len(self.children) == 1:
                child = self.children[0]
                if isinstance(child, (str,)):
                    children_string = str(child)
                    return '<{tag_and_attrs}>{children_string}</{tag}>'.format(
                        tag_and_attrs=tag_and_attrs_string,
                        children_string=children_string, tag=self.tag)
                elif isinstance(child, Var) and child.attrs['type'] == 'text':
                    children_string = child.to_structure(indent=indent, indent_step=indent_step)
                    return '<{tag_and_attrs}>{children_string}</{tag}>'.format(
                        tag_and_attrs=tag_and_attrs_string,
                        children_string=children_string, tag=self.tag)
            children_string = '\n{indent}'.format(indent=' ' * (indent + indent_step)).join(
                [child.to_structure(indent=indent + indent_step, indent_step=indent_step)
                 if isinstance(child, (Node,))
                 else ' ' * (indent + indent_step) + str(child) + '\n'
                 for child in self.children])
            if children_string:
                return '<{tag_and_attrs}>\n{next_indent}' \
                       '{children_string}' \
                       '\n{indent}</{tag}>'.format(next_indent=' ' * (indent + indent_step),
                                                   indent=' ' * indent,
                                                   tag_and_attrs=tag_and_attrs_string,
                                                   children_string=children_string,
                                                   tag=self.tag)
            else:
                return '<{tag_and_attrs}></{tag}>'.format(
                    tag_and_attrs=tag_and_attrs_string, tag=self.tag)

    def __str__(self):
        return self.to_string()

    def __repr__(self):
        return self.to_structure()

    def __len__(self):
        return len(self.children)

    def __call__(self, children: list = []):
        """Set this node's children; a single node/str is wrapped in a list."""
        if not isinstance(children, (list,)):
            assert isinstance(children, (Node, str, Var))
            children = [children]
        self.children = children
        return self

    def to_file(self, filepath):
        """Render this node and write the result to ``filepath`` (UTF-8)."""
        with open(filepath, 'w', encoding='utf-8') as f:
            f.write(self.render())

    def compile(self, **kwargs):
        """Replace Var children by the nodes supplied as keyword arguments.

        A replacement may be a single node or a list/tuple, whose elements are
        spliced in place of the Var. Non-matching Vars and ordinary nodes are
        compiled recursively.

        NOTE(review): the iteration count is fixed at the original length of
        self.children before any splicing; when a Var expands to several nodes
        the trailing children may not be visited. Preserved from the original
        implementation — confirm before changing.
        """
        if not len(self.children):
            return self
        index = 0
        for _ in range(len(self.children)):
            child = self.children[index]
            if isinstance(child, str):
                index += 1
                continue
            if isinstance(child, Var):
                name = child.attrs['name']
                if name in kwargs.keys():
                    self.children.pop(index)
                    new_nodes = kwargs[name]
                    if not isinstance(new_nodes, (tuple, list)):
                        new_nodes = [new_nodes]
                    for new_node in new_nodes:
                        self.children.insert(index, new_node)
                        index += 1
                else:
                    self.children[index] = child.compile(**kwargs)
                    index += 1
            else:
                self.children[index] = child.compile(**kwargs)
                index += 1
        return self

    def render(self, **kwargs):
        """Render via Jinja2; kwargs override the class-level environment."""
        context = {}
        context.update(**self.environment)
        context.update(**kwargs)
        # Local import keeps jinja2 optional for users who never render.
        # (Unused `Template` import of the original removed.)
        from jinja2 import Environment
        template = Environment().from_string(self.to_string())
        return template.render(**context)


class Text(Node):
    """A leaf node holding a single string; serialized without tags."""

    tag = 'text'

    def to_string(self, indent=0, indent_step=2):
        return self.children[0]

    def __call__(self, children: list = []):
        if not isinstance(children, (list,)):
            assert isinstance(children, (str,))
            children = [children]
        self.children = children
        return self


class Var(Node):
    """A named placeholder to be substituted by Node.compile()."""

    tag = 'var'
    _attrs = dict(type='node')

    def __init__(self, name, **kwargs):
        super().__init__(name=name, **kwargs)

    def to_string(self, indent=0, indent_step=2):
        # An uncompiled Var renders only its children (or nothing at all),
        # never a <var> tag — use to_structure() to see the placeholder.
        if len(self.children) == 1:
            child = self.children[0]
            if isinstance(child, (str,)):
                return str(child)
            elif isinstance(child, Var) and child.attrs['type'] == 'text':
                return child.to_string(indent=indent, indent_step=indent_step)
        children_string = '\n{indent}'.format(indent=' ' * (indent)).join(
            [child.to_string(indent=indent, indent_step=indent_step)
             if isinstance(child, (Node,))
             else ' ' * (indent) + str(child) + '\n'
             for child in self.children])
        if children_string:
            return children_string
        else:
            return ''


# --- Concrete HTML tag nodes ------------------------------------------------

class Html(Node):
    tag = 'html'


class Head(Node):
    tag = 'head'


class Body(Node):
    tag = 'body'


class Header(Node):
    tag = 'header'


class Footer(Node):
    tag = 'footer'


class Link(Node):
    tag = 'link'
    self_closing = True


class Meta(Node):
    tag = 'meta'
    self_closing = True


class Title(Node):
    tag = 'title'


class Script(Node):
    tag = 'script'


class Style(Node):
    tag = 'style'


class Nav(Node):
    tag = 'nav'


class Div(Node):
    tag = 'div'


class Span(Node):
    tag = 'span'


class H1(Node):
    tag = 'h1'


class H2(Node):
    tag = 'h2'


class H3(Node):
    tag = 'h3'


class H4(Node):
    tag = 'h4'


class H5(Node):
    tag = 'h5'


class H6(Node):
    tag = 'h6'


class P(Node):
    tag = 'p'


class Table(Node):
    tag = 'table'


class Caption(Node):
    tag = 'caption'


class Thead(Node):
    tag = 'thead'


# NOTE(review): lowercase name kept for backward compatibility with callers.
class tbody(Node):
    tag = 'tbody'


class Tr(Node):
    tag = 'tr'


class Td(Node):
    tag = 'td'


class Th(Node):
    tag = 'th'


class Ul(Node):
    tag = 'ul'


class Ol(Node):
    tag = 'ol'


class Li(Node):
    tag = 'li'


class Form(Node):
    tag = 'form'


class Textarea(Node):
    tag = 'textarea'


class Input(Node):
    tag = 'input'
    self_closing = True


class Label(Node):
    tag = 'label'


class Select(Node):
    tag = 'select'


class A(Node):
    tag = 'a'


class B(Node):
    tag = 'b'


class Strong(Node):
    tag = 'strong'


class I(Node):
    tag = 'i'


class Em(Node):
    tag = 'em'


class Strike(Node):
    tag = 'strike'


class Del(Node):
    tag = 'del'


class Hr(Node):
    tag = 'hr'
    self_closing = True


class Br(Node):
    tag = 'br'
    self_closing = True


class U(Node):
    tag = 'u'


# NOTE(review): Img is not marked self_closing in the original although <img>
# is a void element in HTML; behavior preserved — confirm before changing.
class Img(Node):
    tag = 'img'


class Sub(Node):
    tag = 'sub'


class Sup(Node):
    tag = 'sup'


class Big(Node):
    tag = 'big'


class Small(Node):
    tag = 'small'


class Button(Node):
    tag = 'button'


def smart_update_dict(dic1={}, dic2={}):
    """Recursively merge dic2 into dic1.

    When both sides hold a dict under the same key, merge those dicts
    recursively instead of replacing; otherwise dic2's value wins.
    Mutates and returns ``dic1``.
    """
    for k, v in dic2.items():
        if not k in dic1.keys():
            dic1[k] = v
        else:
            if isinstance(dic1[k], dict) and isinstance(dic2[k], dict):
                smart_update_dict(dic1[k], dic2[k])
            else:
                dic1[k] = v
    return dic1
gauravsbagul/ditto-clone
src/utilities/index.js
// Barrel module: re-exports every utility so callers can import from this
// directory instead of the individual files.
export * from './emojis'
export * from './isIphoneX'
export * from './misc'
MobyFS/fhq-server
fhq-server/src/cmd/cmd_handlers_classbook.h
#ifndef CMD_HADNLERS_CLASSBOOK_H
#define CMD_HADNLERS_CLASSBOOK_H
// NOTE(review): the include guard spells "HADNLERS" ("handlers" misspelled);
// renaming it would change code tokens, so it is only flagged here.

#include <cmd_handlers.h>
#include <QFile>

// TODO: redesign to a c++ file

/*!
 * Handler: adds a classbook record.
 */
class CmdClassbookAddRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookAddRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookAddRecordHandler)

/*!
 * Handler: deletes a classbook record.
 */
class CmdClassbookDeleteRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookDeleteRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookDeleteRecordHandler)

/*!
 * Handler: exports classbook records to a file (HTML or Markdown,
 * per the private render helpers below).
 */
class CmdClassbookExportHandler : public CmdHandlerBase {
    public:
        CmdClassbookExportHandler();
        virtual void handle(ModelRequest *pRequest);

    private:
        // Render the query result into the given file as HTML.
        void createHtml(QFile *file, const std::string &sLang, QSqlQuery query);
        // Render the query result into the given file as Markdown.
        void createMD(QFile *file, const std::string &sLang, QSqlQuery query);
};
REGISTRY_CMD(CmdClassbookExportHandler)

/*!
 * Handler: returns info for a classbook record.
 */
class CmdClassbookInfoHandler : public CmdHandlerBase {
    public:
        CmdClassbookInfoHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookInfoHandler)

/*!
 * Handler: returns the list of classbook records.
 */
class CmdClassbookListHandler : public CmdHandlerBase {
    public:
        CmdClassbookListHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookListHandler)

/*!
 * Handler: updates a classbook record.
 */
class CmdClassbookUpdateRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookUpdateRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookUpdateRecordHandler)

/*!
 * Handler: adds a classbook localization record.
 */
class CmdClassbookLocalizationAddRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookLocalizationAddRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookLocalizationAddRecordHandler)

/*!
 * Handler: deletes a classbook localization record.
 */
class CmdClassbookLocalizationDeleteRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookLocalizationDeleteRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookLocalizationDeleteRecordHandler)

/*!
 * Handler: returns info for a classbook localization record.
 */
class CmdClassbookLocalizationInfoHandler : public CmdHandlerBase {
    public:
        CmdClassbookLocalizationInfoHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookLocalizationInfoHandler)

/*!
 * Handler: updates a classbook localization record.
 */
class CmdClassbookLocalizationUpdateRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookLocalizationUpdateRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookLocalizationUpdateRecordHandler)

// Duplicate declaration kept commented out by the original author:
// class CmdClassbookLocalizationUpdateRecordHandler : public CmdHandlerBase {
// public:
//     CmdClassbookLocalizationUpdateRecordHandler();
//     virtual void handle(ModelRequest *pRequest);
// };
// REGISTRY_CMD(CmdClassbookLocalizationUpdateRecordHandler)

/*!
 * Handler: adds a classbook proposal record.
 */
class CmdClassbookProposalAddRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookProposalAddRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookProposalAddRecordHandler)

/*!
 * Handler: deletes a classbook proposal record.
 */
class CmdClassbookProposalDeleteRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookProposalDeleteRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookProposalDeleteRecordHandler)

/*!
 * Handler: returns info for a classbook proposal record.
 */
class CmdClassbookProposalInfoHandler : public CmdHandlerBase {
    public:
        CmdClassbookProposalInfoHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookProposalInfoHandler)

/*!
 * Handler: returns the list of classbook proposal records.
 */
class CmdClassbookProposalListHandler : public CmdHandlerBase {
    public:
        CmdClassbookProposalListHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookProposalListHandler)

/*!
 * Handler: prepares the merge of a classbook proposal record.
 */
class CmdClassbookProposalPrepareMergeRecordHandler : public CmdHandlerBase {
    public:
        CmdClassbookProposalPrepareMergeRecordHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookProposalPrepareMergeRecordHandler)

/*!
 * Handler: approves a classbook proposal record.
 */
class CmdClassbookProposalApproveHandler : public CmdHandlerBase {
    public:
        CmdClassbookProposalApproveHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookProposalApproveHandler)

/*!
 * Handler: updates a classbook proposal record.
 */
class CmdClassbookProposalUpdateHandler : public CmdHandlerBase {
    public:
        CmdClassbookProposalUpdateHandler();
        virtual void handle(ModelRequest *pRequest);
};
REGISTRY_CMD(CmdClassbookProposalUpdateHandler)

/*!
 * Handler: returns classbook content.
 * NOTE(review): the original comment marks this as a possible duplicate
 * handler, and it is the only handler in this file without a
 * REGISTRY_CMD registration -- verify whether it is still used.
 */
class CmdClassbookHandler : public CmdHandlerBase {
    public:
        CmdClassbookHandler();
        virtual void handle(ModelRequest *pRequest);
};

#endif // CMD_HADNLERS_CLASSBOOK_H
mahmuttaskiran/Opcon
app/src/main/java/com/opcon/ui/views/DialogView.java
package com.opcon.ui.views;

import android.content.Context;
import android.content.res.Resources;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.text.Html;
import android.text.Spanned;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;

import com.opcon.ui.utils.AvatarLoader;
import com.opcon.R;
import com.opcon.components.Dialog;
import com.opcon.components.Message;
import com.opcon.database.MessageProvider;
import com.vanniktech.emoji.EmojiTextView;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

import agency.tango.android.avatarview.views.AvatarView;

/**
 * List-row view for a conversation (dialog): shows the dialog title and last
 * message, the avatar, the last-message timestamp, the unseen-message counter
 * and an acknowledgement-state icon for outgoing messages.
 *
 * Created by <NAME> on 28/11/2016.
 */
public class DialogView extends RelativeLayout {

    // Title + last-message preview (emoji-capable).
    private EmojiTextView mName;
    // Timestamp of the last message.
    private TextView mDate;
    // Ack-state icon for the last outgoing message (sent/received/seen/waiting).
    private ImageView mAckState;
    public AvatarView mAvatar;
    // Badge showing the number of unseen messages.
    private TextView mNonSeenLength;
    private int mNonSeenMessageLength;

    // Colors resolved from resources in the constructor; static, so the last
    // constructed instance wins (same values for all rows in practice).
    private static int DIALOG_TITLE_COLOR;
    private static int DIALOG_CONTENT_COLOR;

    /** Inflates R.layout.dialog_view into this view and binds the child views. */
    public DialogView(Context context, AttributeSet attrs) {
        super(context, attrs);
        LayoutInflater inflater = LayoutInflater.from(context);
        View root = inflater.inflate(R.layout.dialog_view, this, true);
        this.mNonSeenLength = (TextView) root.findViewById(R.id.dialog_non_seen_length);
        this.mName = (EmojiTextView) root.findViewById(R.id.dialog_text);
        this.mDate = (TextView) root.findViewById(R.id.dialog_date);
        this.mAckState = (ImageView) root.findViewById(R.id.dialog_ack_state);
        this.mAvatar = (AvatarView) root.findViewById(R.id.dialog_avatar);
        DIALOG_CONTENT_COLOR = root.getResources().getColor(R.color.dialogContentColor);
        DIALOG_TITLE_COLOR = root.getResources().getColor(R.color.dialogTitleColor);
    }

    /**
     * Populates the row from the given dialog: title/preview text, avatar,
     * ack-state icon (only for outgoing last messages), timestamp (hidden for
     * assistant dialogs) and the unseen counter.
     */
    public void forDialog(Dialog dialog) {
        // Prefer an explicit preview; otherwise derive it from the last notifier/message.
        String dialogLastStr = dialog.content != null ?
                dialog.content: MessageProvider.DialogUtils.getDialogLastStr(getContext(), dialog.lastNotifier, dialog.lastMessage);
        if (dialog.lastMessage != null && dialog.lastMessage.isWaiting()) {
            this.mName.setText(Html.fromHtml(String.format("<font color='" + DIALOG_TITLE_COLOR + "'>%s</font><br><font color='black'>%s</font>", dialog.name, getContext().getString(R.string.there_are_waiting_message))));
        } else {
            if (TextUtils.isEmpty(dialogLastStr)) {
                this.mName.setText(Html.fromHtml(String.format("<font color='" + DIALOG_TITLE_COLOR + "'>%s</font>", dialog.name)));
            } else {
                this.mName.setText(Html.fromHtml(String.format("<font color='" + DIALOG_TITLE_COLOR + "'>%s</font><br><font color='" + DIALOG_CONTENT_COLOR + "'>%s</font>", dialog.name, dialogLastStr)));
            }
        }
        AvatarLoader.load(mAvatar, dialog.avatarPath, dialog.name);
        mAckState.setVisibility(GONE);
        // Ack icon only applies to messages we sent ourselves.
        if (dialog.lastMessage != null && dialog.lastMessage.isSenderAmI()) {
            boolean b = setupAckStateFor(dialog.lastMessage, mAckState);
            if (b){
                mAckState.setVisibility(VISIBLE);
            }
        }
        if (dialog.getLastTime() > 1 && !dialog.isAssistant()) {
            mDate.setVisibility(VISIBLE);
            mDate.setText(SimpleDateFormat.getTimeInstance(DateFormat.SHORT).format(new Date(dialog.getLastTime())));
        } else {
            mDate.setVisibility(GONE);
        }
        mNonSeenMessageLength = dialog.nonSeenMessageLength;
        decideNonSeenVisibility();
    }

    /** Shows the unseen-counter badge only when there are unseen messages. */
    private void decideNonSeenVisibility() {
        if (mNonSeenMessageLength > 0) {
            mNonSeenLength.setVisibility(VISIBLE);
            mNonSeenLength.setText(String.valueOf(mNonSeenMessageLength));
        } else {
            mNonSeenLength.setVisibility(GONE);
        }
    }

    /** Sets the (already formatted) title text directly. */
    public void setName(Spanned name) {
        this.mName.setText(name);
    }

    // Translated from the original Turkish comment:
    // Assuming the imageView carries no visual element by default, messages
    // delivered to the server are marked with an empty blue outline; messages
    // not yet delivered to the server show no visual element; messages
    // delivered to the recipient are shown with a blue icon and blue border;
    // seen messages are shown with a blue fill and a white icon.

    /** Tints the given drawable with the given color (MULTIPLY filter). */
    public static void color (Drawable ic, int color) {
        ic.setColorFilter(color, PorterDuff.Mode.MULTIPLY);
    }

    /**
     * Configures the ack-state icon/background for the given message.
     *
     * @return true if an icon was applied (caller should make the view
     *         visible); false if the state has no visual representation
     *         (e.g. an image message still uploading), in which case the
     *         view's drawables are cleared.
     */
    public static boolean setupAckStateFor(Message msg, ImageView imageView) {
        Resources res = imageView.getContext().getResources();
        Drawable bg = null, ic = null;
        if (msg.isWaiting()) {
            bg = res.getDrawable(R.drawable.white_bg_blue_border);
            ic = res.getDrawable(R.drawable.ic_question);
            color(ic, res.getColor(R.color.colorPrimary));
        } else if (msg.isImageMessage() && !msg.getBoolean(Message.Picture.DONE)) {
            // Image message not fully transferred yet: no ack visual.
            ic = null;
            bg = null;
        } else {
            if (msg.isSeen()) {
                ic = res.getDrawable(R.drawable.ic_check_18_white);
                bg = imageView.getContext().getResources().getDrawable(R.drawable.blue_circle);
            } else if (msg.isReceived()) {
                ic = res.getDrawable(R.drawable.ic_check_primary);
                bg = imageView.getContext().getResources().getDrawable(R.drawable.white_bg_blue_border);
            } else if (msg.isSent()) {
                ic = res.getDrawable(R.drawable.ic_check_primary);
                bg = imageView.getContext().getResources().getDrawable(R.drawable.circle_white);
            }
        }
        if (ic != null) {
            imageView.setImageDrawable(ic);
            imageView.setBackgroundDrawable(bg);
            return true;
        } else {
            imageView.setBackgroundDrawable(null);
            imageView.setImageDrawable(null);
            return false;
        }
    }
}
bireports/nextreports-server
src/ro/nextreports/server/report/next/NextUtil.java
<gh_stars>10-100
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ro.nextreports.server.report.next;

import java.io.ByteArrayInputStream;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import org.jcrom.JcrDataProviderImpl;
import org.jcrom.JcrFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ro.nextreports.engine.Report;
import ro.nextreports.engine.band.BandElement;
import ro.nextreports.engine.band.ColumnBandElement;
import ro.nextreports.engine.chart.Chart;
import ro.nextreports.engine.util.LoadReportException;
import ro.nextreports.engine.util.NextChartUtil;
import ro.nextreports.engine.util.ParameterUtil;
import ro.nextreports.engine.util.ReportUtil;
import ro.nextreports.server.domain.ChartContent;
import ro.nextreports.server.domain.Entity;
import ro.nextreports.server.domain.NextContent;
import ro.nextreports.server.domain.QueryRuntime;
import ro.nextreports.server.domain.Settings;
import ro.nextreports.server.service.StorageService;
import ro.nextreports.server.util.ConnectionUtil;

/**
 * Utility methods for converting server-side report/chart entities into
 * NextReports engine objects, managing their image/template files, and
 * building default query runtimes.
 *
 * @author <NAME>
 */
public class NextUtil {

    private static final Logger LOG = LoggerFactory.getLogger(NextUtil.class);

    /** Returns the raw bytes of the report's .next master file. */
    public static byte[] getNextReportBytes(Settings settings, ro.nextreports.server.domain.Report report) {
        NextContent reportContent = (NextContent) report.getContent();
        return getNextReportBytes(settings, reportContent);
    }

    /** Loads the engine {@link Report} for a server report entity. */
    public static Report getNextReport(Settings settings, ro.nextreports.server.domain.Report report) {
        NextContent reportContent = (NextContent) report.getContent();
        return getNextReport(settings, reportContent);
    }

    /** Loads the engine {@link Report} backing a server chart entity. */
    public static Report getNextReport(Settings settings, ro.nextreports.server.domain.Chart chart) {
        ChartContent reportContent = (ChartContent) chart.getContent();
        return getChart(reportContent).getReport();
    }

    /**
     * Loads the engine {@link Report} from a {@link NextContent}: copies the
     * report's images and template to the reports home (best effort -- copy
     * failures are logged and do not abort loading), then deserializes the
     * master file.
     *
     * @return the loaded report, or {@code null} if deserialization fails
     */
    public static Report getNextReport(Settings settings, NextContent reportContent) {
        try {
            copyImages(settings, reportContent.getImageFiles());
        } catch (Exception e) {
            e.printStackTrace();
            LOG.error(e.getMessage(), e);
        }
        try {
            copyTemplate(settings, reportContent.getTemplateFile());
        } catch (Exception e) {
            e.printStackTrace();
            LOG.error(e.getMessage(), e);
        }
        byte[] bytes = reportContent.getNextFile().getDataProvider().getBytes();
        try {
            return ReportUtil.loadReport(new ByteArrayInputStream(bytes));
        } catch (LoadReportException e) {
            LOG.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * @return true if the report's layout uses a macro-enabled Excel template
     *         (file name ending in ".xlsm")
     */
    public static boolean hasMacroTemplate(Settings settings, ro.nextreports.server.domain.Report report) {
        Report rep = getNextReport(settings, report);
        String templateName = rep.getLayout().getTemplateName();
        return ((templateName != null) && templateName.endsWith(".xlsm"));
    }

    /** Returns the raw bytes of the content's .next master file. */
    public static byte[] getNextReportBytes(Settings settings, NextContent reportContent) {
        return reportContent.getNextFile().getDataProvider().getBytes();
    }

    /** Loads the engine {@link Report} backing a server chart entity. */
    public static Report getNextReport(ro.nextreports.server.domain.Chart chart) {
        ChartContent reportContent = chart.getContent();
        return getChart(reportContent).getReport();
    }

    /** Loads the engine {@link Chart} for a server chart entity. */
    public static Chart getNextChart(ro.nextreports.server.domain.Chart chart) {
        ChartContent reportContent = chart.getContent();
        return getChart(reportContent);
    }

    /**
     * Dispatches on the entity's concrete type (report or chart).
     *
     * @return the engine report, or {@code null} for other entity types
     */
    public static Report getNextReport(Settings settings, Entity entity) {
        if (entity instanceof ro.nextreports.server.domain.Report) {
            return getNextReport(settings, (ro.nextreports.server.domain.Report) entity);
        } else if (entity instanceof ro.nextreports.server.domain.Chart) {
            return getNextReport(settings, (ro.nextreports.server.domain.Chart) entity);
        }
        return null;
    }

    /** Deserializes the engine {@link Chart} from the chart content's bytes. */
    public static Chart getChart(ChartContent chartContent) {
        byte[] bytes = chartContent.getChartFile().getDataProvider().getBytes();
        return NextChartUtil.loadChart(new ByteArrayInputStream(bytes));
    }

    /** Copies the report's image files into the configured reports home. */
    public static void copyImages(Settings settings, List<JcrFile> images) throws Exception {
        ro.nextreports.server.report.util.ReportUtil.copyImages(settings.getReportsHome(), images);
    }

    /** Copies the report's template file into the configured reports home. */
    public static void copyTemplate(Settings settings, JcrFile template) throws Exception {
        ro.nextreports.server.report.util.ReportUtil.copyTemplate(settings.getReportsHome(), template);
    }

    /**
     * Renames the report's image and template files so their names are unique
     * (inserts IMAGE_DELIM + a random UUID before the extension) and rewrites
     * every occurrence of the old names inside the master file content.
     * Modifies the given report in place and returns it.
     */
    public static ro.nextreports.server.domain.Report renameImagesAsUnique(ro.nextreports.server.domain.Report report) {
        NextContent reportContent = (NextContent) report.getContent();
        try {
            String masterContent = new String(reportContent.getNextFile().getDataProvider().getBytes(), "UTF-8");
            for (JcrFile imageFile : reportContent.getImageFiles()) {
                String oldName = imageFile.getName();
                int index = oldName.lastIndexOf(ro.nextreports.server.report.util.ReportUtil.EXTENSION_SEPARATOR);
                String newName = oldName.substring(0, index) + ro.nextreports.server.report.util.ReportUtil.IMAGE_DELIM +
                        UUID.randomUUID().toString() + oldName.substring(index);
                masterContent = masterContent.replaceAll(oldName, newName);
                imageFile.setName(newName);
            }
            JcrFile templateFile = reportContent.getTemplateFile();
            if (templateFile != null) {
                String oldName = templateFile.getName();
                int index = oldName.lastIndexOf(ro.nextreports.server.report.util.ReportUtil.EXTENSION_SEPARATOR);
                String newName = oldName.substring(0, index) + ro.nextreports.server.report.util.ReportUtil.IMAGE_DELIM +
                        UUID.randomUUID().toString() + oldName.substring(index);
                masterContent = masterContent.replaceAll(oldName, newName);
                templateFile.setName(newName);
            }
            reportContent.getNextFile().setDataProvider(new JcrDataProviderImpl(masterContent.getBytes("UTF-8")));
        } catch (UnsupportedEncodingException e) {
            LOG.error("Error inside renameImagesAsUnique: " + e.getMessage(), e);
            e.printStackTrace();
        }
        return report;
    }

    /**
     * Reverses {@link #renameImagesAsUnique}: strips the IMAGE_DELIM + UUID
     * suffix from image and template file names (names are stored inside the
     * master file) and rewrites the master content accordingly.
     * Modifies the given report in place and returns it.
     */
    public static ro.nextreports.server.domain.Report restoreImagesName(ro.nextreports.server.domain.Report report) {
        NextContent reportContent = (NextContent) report.getContent();
        JcrFile masterFile = reportContent.getNextFile();
        try {
            String masterContent = new String(masterFile.getDataProvider().getBytes(), "UTF-8");
            if (reportContent.getImageFiles() != null) {
                for (JcrFile imageFile : reportContent.getImageFiles()) {
                    String oldName = imageFile.getName();
                    int startIndex = oldName.indexOf(ro.nextreports.server.report.util.ReportUtil.IMAGE_DELIM);
                    int extIndex = oldName.lastIndexOf(ro.nextreports.server.report.util.ReportUtil.EXTENSION_SEPARATOR);
                    String newName;
                    if (startIndex < 0) {
                        // No delimiter: the name was never made unique.
                        newName = oldName;
                    } else {
                        newName = oldName.substring(0, startIndex) + oldName.substring(extIndex);
                    }
                    masterContent = masterContent.replaceAll(oldName, newName);
                    imageFile.setName(newName);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Image " + ": " + oldName + " > " + newName);
                        // LOG.debug("master = " + master);
                    }
                }
            }
            if (reportContent.getTemplateFile() != null) {
                String oldName = reportContent.getTemplateFile().getName();
                int startIndex = oldName.indexOf(ro.nextreports.server.report.util.ReportUtil.IMAGE_DELIM);
                int extIndex = oldName.lastIndexOf(ro.nextreports.server.report.util.ReportUtil.EXTENSION_SEPARATOR);
                String newName;
                if (startIndex < 0) {
                    newName = oldName;
                } else {
                    newName = oldName.substring(0, startIndex) + oldName.substring(extIndex);
                }
                masterContent = masterContent.replaceAll(oldName, newName);
                reportContent.getTemplateFile().setName(newName);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Template " + ": " + oldName + " > " + newName);
                    // LOG.debug("master = " + master);
                }
            }
            masterFile.setDataProvider(new JcrDataProviderImpl(masterContent.getBytes("UTF-8")));
        } catch (UnsupportedEncodingException e) {
            // NOTE(review): message says "renameImagesAsUnique" but this is
            // restoreImagesName -- looks like a copy-paste; kept as-is because
            // it is a runtime string.
            LOG.error("Error inside renameImagesAsUnique: " + e.getMessage(), e);
            e.printStackTrace();
        }
        return report;
    }

    /**
     * Builds a {@link QueryRuntime} pre-populated with the report's
     * non-hidden default parameter values, resolved against a fresh
     * connection to the report's data source. Errors are logged and an
     * empty runtime is returned; the connection is always closed.
     */
    public static QueryRuntime createQueryRuntime(StorageService storageService, ro.nextreports.server.domain.Report report) {
        QueryRuntime queryRuntime = new QueryRuntime();
        Connection connection = null;
        try {
            connection = ConnectionUtil.createConnection(storageService, report.getDataSource());
            Map<String, Object> map = new HashMap<String, Object>();
            ParameterUtil.initNotHiddenDefaultParameterValues(connection, NextUtil.getNextReport(storageService.getSettings(), report), map);
            queryRuntime.setParametersValues(map);
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            e.printStackTrace();
        } finally {
            ConnectionUtil.closeConnection(connection);
        }
        return queryRuntime;
    }

    /** @return true if the report layout has a non-empty header band */
    public static boolean reportHasHeader(Report report) {
        return (report.getLayout().getHeaderBand().getRowCount() > 0);
    }

    /**
     * Returns the format pattern of the detail-band element in the given
     * column (row 0), or {@code null} if that element is not a column element.
     */
    public static String getDetailColumnPattern(Report report, int column) {
        BandElement be = report.getLayout().getDetailBand().getElementAt(0, column);
        if (be instanceof ColumnBandElement) {
            return ((ColumnBandElement)be).getPattern();
        }
        return null;
    }
}
bgerxx/woodpecker
integrationtest/vm/multihosts/migrate/test_migrate_vm_with_iso.py
<reponame>bgerxx/woodpecker<gh_stars>0
'''
New Integration test for testing vm migration between hosts when an ISO
is attached.

@author: ChenyuanXu
'''

import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.volume_operations as vol_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.image_operations as img_ops
import zstackwoodpecker.zstack_test.zstack_test_image as test_image
import zstackwoodpecker.test_state as test_state
import apibinding.inventory as inventory

import os

# Module-level handles so error_cleanup() can reach the resources
# created by test().
vm = None
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()

def test():
    '''Create a VM, attach an ISO, migrate the VM to another host,
    then detach the ISO and tear everything down.'''
    global vm
    vm = test_stub.create_vr_vm('migrate_vm', 'imageName_s', 'l3VlanNetwork2')
    vm.check()
    vm_inv = vm.get_vm()
    vm_uuid = vm_inv.uuid

    # Register an ISO image on the backup storage; the ISO location comes
    # from the testIsoUrl environment variable.
    test_util.test_dsc('Add ISO Image')
    #cond = res_ops.gen_query_conditions('name', '=', 'sftp')
    bs_uuid = res_ops.query_resource(res_ops.BACKUP_STORAGE)[0].uuid
    img_option = test_util.ImageOption()
    img_option.set_name('iso')
    img_option.set_backup_storage_uuid_list([bs_uuid])
    testIsoUrl = os.environ.get('testIsoUrl')
    img_option.set_url(testIsoUrl)
    image_inv = img_ops.add_iso_template(img_option)
    image = test_image.ZstackTestImage()
    image.set_image(image_inv)
    image.set_creation_option(img_option)
    test_obj_dict.add_image(image)

    # Attach the freshly added ISO (looked up by its name) to the VM.
    test_util.test_dsc('Attach ISO to VM')
    cond = res_ops.gen_query_conditions('name', '=', 'iso')
    iso_uuid = res_ops.query_resource(res_ops.IMAGE, cond)[0].uuid
    img_ops.attach_iso(iso_uuid, vm_uuid)

    # Migrate with the ISO attached and verify the VM is still healthy.
    test_util.test_dsc('Migrate VM')
    test_stub.migrate_vm_to_random_host(vm)
    vm.check()

    # Cleanup: detach ISO, remove the image, destroy the VM.
    img_ops.detach_iso(vm_uuid)
    image.delete()
    image.expunge()
    test_obj_dict.rm_image(image)
    vm.destroy()

    test_util.test_pass('Migrate VM Test Success When Attach ISO')

#Will be called only if exception happens in test().
def error_cleanup():
    test_lib.lib_error_cleanup(test_obj_dict)
mubashshirjamal/code
webpack.config.js
// Webpack build configuration: bundles the register/login JSX entry points
// plus a shared vendors chunk into dist/js/mod.
var path = require("path");
var webpack = require("webpack");
var ExtractTextPlugin = require("extract-text-webpack-plugin");

// NOTE(review): nodePath is defined but not referenced below -- verify
// whether it is still needed.
var nodePath = path.resolve(__dirname, "node_modules");
// Output directory for the generated bundles.
var distPath = path.resolve(__dirname, "dist/js/mod");
// Location of the JSX sources.
var jsPath = path.resolve(__dirname, "hub/static/jsx");

module.exports = {
    // One bundle per page, plus a shared vendors bundle (jquery).
    entry: {
        register : path.join(jsPath, 'register.jsx'),
        login : path.join(jsPath, 'login.jsx'),
        vendors : ['jquery']
    },
    output: {
        path: distPath,
        filename: "[name].js",
        sourceMapFilename: "[file].map",
        publicPath: distPath
    },
    resolve: {
        extensions: ["", ".js", ".jsx", ".json", ".coffee", ".css", ".scss"]
    },
    module: {
        loaders: [
            // Transpile JSX via Babel (stage-0 proposals enabled).
            {
                test: /\.jsx$/,
                exclude: /node_modules/,
                loader: "babel-loader",
                query: {stage: 0}
            },
            { test: /\.html$/, loader: "file?name=[name].[ext]" },
            { test: /\.(woff2|woff|ttf|eot)$/, loader: "file?name=fonts/[name].[ext]" },
            // Extract compiled SCSS into standalone CSS files (see plugin below).
            {
                test: /\.scss$/,
                loader: ExtractTextPlugin.extract(
                    'css-loader?sourceMap!sass-loader?sourceMap=true&sourceMapContents=true'
                )
            },
            { test: /\.(png|jpg|svg|ico)$/, loader: "file-loader?name=[path][name].[ext]" }
        ]
    },
    plugins: [
        // Write extracted CSS next to the JS bundles.
        new ExtractTextPlugin("css/[name].css"),
        // Move modules shared by all entries into vendors.js.
        new webpack.optimize.CommonsChunkPlugin("vendors", "vendors.js", Infinity),
        // Expose jquery under the usual global aliases.
        new webpack.ProvidePlugin({
            $: "jquery",
            jQuery: "jquery",
            "window.jQuery": "jquery",
            "root.jQuery": "jquery"
        })
    ],
    devtool: "source-map"
}
praisetompane/3_programming
1_languages/c/src/the-c-programming-language/0. tutorial/exercises/1.16 character-arrays-book.c
/*
Context: Program that determines the longest text line from a text stream.
Objective (K&R exercise 1.16): revise the main routine of the longest-line
  program so it correctly prints the length of arbitrarily long input lines
  and as much as possible of the text.
Fixes applied in this revision:
  - getLine() previously ran its "drain the rest of the line" loop for EVERY
    line, so after a short line (newline already consumed and stored) it
    consumed characters belonging to the NEXT line, inflating lengths and
    skipping input. The drain now runs only when the line was truncated.
  - c was read uninitialized when lim <= 1 (the for loop body never runs);
    it is now initialized to EOF.
  - For truncated lines the terminating newline is now counted, matching
    the stored-line case.
*/
#include <stdio.h>

#define MAXLINE 1000

int getLine(char line[], int maxline);
void copy(char to[], char from[]);

/* Read lines from stdin, remember the longest, and print its (possibly
   truncated) text and its full length. */
int main()
{
    int len;            /* current line length */
    int max;            /* maximum length seen so far */
    char line[MAXLINE];     /* current input line (truncated to MAXLINE-1) */
    char longest[MAXLINE];  /* longest line saved here */

    max = 0;
    while ((len = getLine(line, MAXLINE))) {
        if (len > max) {
            max = len;
            copy(longest, line);
        }
    }
    if (max > 0)
        printf("As much of the longest string: %s\n", longest);
    printf("Longest string length: %d\n", max);
    return 0;
}

/* Read a line into s (at most lim-1 chars plus '\0') and return the TRUE
   length of the line, including its terminating newline, even when the
   stored copy had to be truncated. Returns 0 at end of input. */
int getLine(char s[], int lim)
{
    int c, i;

    c = EOF;  /* defined value in case the loop body never executes (lim <= 1) */
    for (i = 0; i < lim - 1 && (c = getchar()) != EOF && c != '\n'; ++i)
        s[i] = c;
    if (c == '\n') {
        s[i] = c;
        ++i;
    }
    s[i] = '\0';
    /* Only when the line did not fit: count (and discard) the remainder,
       so we never consume characters of the next line for short lines. */
    if (c != '\n' && c != EOF) {
        while ((c = getchar()) != EOF && c != '\n')
            ++i;
        if (c == '\n')
            ++i;  /* count the newline, consistent with the stored case */
    }
    return i;
}

/* Copy the '\0'-terminated string from[] into to[]; caller guarantees
   to[] is large enough. */
void copy(char to[], char from[])
{
    int i;

    i = 0;
    while ((to[i] = from[i]) != '\0')
        ++i;
}

/*
Performance
  N = length of text stream
  Time  = O(N)  -- each character of the stream is inspected once
  Space = O(MAXLINE) => O(1) -- at most 1000 characters stored
*/
redaktor/nlp
build/textAMD/main.js
<reponame>redaktor/nlp<filename>build/textAMD/main.js<gh_stars>1-10
// AMD entry point: defines the module as a plain re-export of the nlp
// module from './text/nlp/index', unchanged.
define(["require", "exports", './text/nlp/index'], function (require, exports, nlp) {
    return nlp;
});
Krozark/SFML-book
04_Physics/Gravitris/include/SFML-Book/Configuration.hpp
<filename>04_Physics/Gravitris/include/SFML-Book/Configuration.hpp
#ifndef BOOK_CONFIGURATION_HPP
#define BOOK_CONFIGURATION_HPP

#include <SFML/Graphics.hpp> //Texture
#include <SFML/Audio.hpp> //SoundBuffer
#include <SFML-Book/ResourceManager.hpp> //ResourceManager
#include <SFML-Book/ActionMap.hpp> //ActionMap

//#define BOOK_DEBUG

namespace book
{
    class Player;

    /*
     * Static-only holder for the game's global resources (fonts, sounds,
     * musics) and the player input bindings. Call initialize() once at
     * startup to populate all managers; the class cannot be instantiated.
     */
    class Configuration
    {
        public:
            // Static-only class: no construction, copy, or assignment.
            Configuration() = delete;
            Configuration(const Configuration&) = delete;
            Configuration& operator=(const Configuration&) = delete;

            // Font resource ids and their manager.
            enum Fonts : int {Gui};
            static ResourceManager<sf::Font,int> fonts;

            // Logical player actions bound to concrete inputs in
            // initPlayerInputs().
            enum PlayerInputs : int {
                TurnLeft,
                TurnRight,
                MoveLeft,
                MoveRight,
                HardDrop,
            };
            static ActionMap<int> playerInputs;

            // Sound-effect resource ids and their manager.
            enum Sounds : int {
                Spawn,
                Explosion,
                LevelUp,
            };
            static ResourceManager<sf::SoundBuffer,int> sounds;

            // Music resource ids and their manager.
            enum Musics : int {
                Theme
            };
            static ResourceManager<sf::Music,int> musics;

            // Populates all managers and input bindings; call once at startup.
            static void initialize();

        private:
            // Per-category loaders invoked by initialize().
            // NOTE(review): initTextures() is declared but no Textures
            // enum/manager exists in this header -- verify it is still used.
            static void initTextures();
            static void initFonts();
            static void initSounds();
            static void initMusics();
            static void initPlayerInputs();
    };
}
#endif
jamhgit/pesc-transcript-jar
pesccoltrn/src/main/java/org/pesc/core/coremain/v1_14/AcademicProgramType.java
<gh_stars>1-10
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.02.25 at 04:54:23 PM PST
//

package org.pesc.core.coremain.v1_14;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;


/**
 * JAXB binding for the PESC CoreMain "AcademicProgramType" complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="AcademicProgramType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;group ref="{urn:org:pesc:core:CoreMain:v1.14.0}AcademicProgramCodeGroup" minOccurs="0"/>
 *         &lt;element name="AcademicProgramType" type="{urn:org:pesc:core:CoreMain:v1.14.0}AcademicProgramTypeType" minOccurs="0"/>
 *         &lt;element name="AcademicProgramName" type="{urn:org:pesc:core:CoreMain:v1.14.0}AcademicProgramNameType" minOccurs="0"/>
 *         &lt;element name="NoteMessage" type="{urn:org:pesc:core:CoreMain:v1.14.0}NoteMessageType" maxOccurs="unbounded" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AcademicProgramType", propOrder = {
    "programSecondarySchoolCode",
    "programCIPCode",
    "programHEGISCode",
    "programCSISCode",
    "programUSISCode",
    "programESISCode",
    "programLocalCode",
    "academicProgramType",
    "academicProgramName",
    "noteMessage"
})
public class AcademicProgramType {

    // Backing fields for the JAXB-bound properties; all optional per the
    // schema (minOccurs="0"). Field names mirror the XML element names.
    @XmlElement(name = "ProgramSecondarySchoolCode")
    @XmlSchemaType(name = "string")
    protected ProgramSecondarySchoolCodeType programSecondarySchoolCode;
    // Program code strings from the AcademicProgramCodeGroup schema group
    // (CIP, HEGIS, CSIS, USIS, ESIS and a local code).
    @XmlElement(name = "ProgramCIPCode")
    protected String programCIPCode;
    @XmlElement(name = "ProgramHEGISCode")
    protected String programHEGISCode;
    @XmlElement(name = "ProgramCSISCode")
    protected String programCSISCode;
    @XmlElement(name = "ProgramUSISCode")
    protected String programUSISCode;
    @XmlElement(name = "ProgramESISCode")
    protected String programESISCode;
    @XmlElement(name = "ProgramLocalCode")
    protected String programLocalCode;
    @XmlElement(name = "AcademicProgramType")
    @XmlSchemaType(name = "string")
    protected AcademicProgramTypeType academicProgramType;
    @XmlElement(name = "AcademicProgramName")
    protected String academicProgramName;
    // Repeating NoteMessage elements; lazily created, see getNoteMessage().
    @XmlElement(name = "NoteMessage")
    protected List<String> noteMessage;

    /** @return the ProgramSecondarySchoolCode value, or {@code null} if unset */
    public ProgramSecondarySchoolCodeType getProgramSecondarySchoolCode() {
        return programSecondarySchoolCode;
    }

    /** @param value the ProgramSecondarySchoolCode value to set (may be {@code null}) */
    public void setProgramSecondarySchoolCode(ProgramSecondarySchoolCodeType value) {
        this.programSecondarySchoolCode = value;
    }

    /** @return the ProgramCIPCode value, or {@code null} if unset */
    public String getProgramCIPCode() {
        return programCIPCode;
    }

    /** @param value the ProgramCIPCode value to set (may be {@code null}) */
    public void setProgramCIPCode(String value) {
        this.programCIPCode = value;
    }

    /** @return the ProgramHEGISCode value, or {@code null} if unset */
    public String getProgramHEGISCode() {
        return programHEGISCode;
    }

    /** @param value the ProgramHEGISCode value to set (may be {@code null}) */
    public void setProgramHEGISCode(String value) {
        this.programHEGISCode = value;
    }

    /** @return the ProgramCSISCode value, or {@code null} if unset */
    public String getProgramCSISCode() {
        return programCSISCode;
    }

    /** @param value the ProgramCSISCode value to set (may be {@code null}) */
    public void setProgramCSISCode(String value) {
        this.programCSISCode = value;
    }

    /** @return the ProgramUSISCode value, or {@code null} if unset */
    public String getProgramUSISCode() {
        return programUSISCode;
    }

    /** @param value the ProgramUSISCode value to set (may be {@code null}) */
    public void setProgramUSISCode(String value) {
        this.programUSISCode = value;
    }

    /** @return the ProgramESISCode value, or {@code null} if unset */
    public String getProgramESISCode() {
        return programESISCode;
    }

    /** @param value the ProgramESISCode value to set (may be {@code null}) */
    public void setProgramESISCode(String value) {
        this.programESISCode = value;
    }

    /** @return the ProgramLocalCode value, or {@code null} if unset */
    public String getProgramLocalCode() {
        return programLocalCode;
    }

    /** @param value the ProgramLocalCode value to set (may be {@code null}) */
    public void setProgramLocalCode(String value) {
        this.programLocalCode = value;
    }

    /** @return the AcademicProgramType value, or {@code null} if unset */
    public AcademicProgramTypeType getAcademicProgramType() {
        return academicProgramType;
    }

    /** @param value the AcademicProgramType value to set (may be {@code null}) */
    public void setAcademicProgramType(AcademicProgramTypeType value) {
        this.academicProgramType = value;
    }

    /** @return the AcademicProgramName value, or {@code null} if unset */
    public String getAcademicProgramName() {
        return academicProgramName;
    }

    /** @param value the AcademicProgramName value to set (may be {@code null}) */
    public void setAcademicProgramName(String value) {
        this.academicProgramName = value;
    }

    /**
     * Gets the value of the noteMessage property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the noteMessage property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getNoteMessage().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     */
    public List<String> getNoteMessage() {
        if (noteMessage == null) {
            noteMessage = new ArrayList<String>();
        }
        return this.noteMessage;
    }

}
chrismattmann/oodt
workflow/src/main/java/org/apache/oodt/cas/workflow/util/ScriptFile.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.oodt.cas.workflow.util;

//JDK imports
import java.util.List;
import java.util.Vector;
import java.util.Iterator;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;

/**
 * @author mattmann
 * @version $Revision$
 *
 * <p>
 * A script file represents a set of commands (which are just
 * <code>String</code>s) to be run through a command shell.
 * </p>
 *
 */
public class ScriptFile {

    /** Interpreter used in the shebang line, e.g. "/bin/sh". */
    private String commandShell = null;

    /** Shell commands, one per line, in execution order. Raw List kept for interface compatibility. */
    private List commands = null;

    /**
     * Creates a script file that uses <code>/bin/sh</code> and has no commands.
     */
    public ScriptFile() {
        commandShell = "/bin/sh";
        commands = new Vector();
    }

    /**
     * Creates an empty script file for the given shell.
     *
     * @param shell the command interpreter for the shebang line.
     */
    public ScriptFile(String shell) {
        commandShell = shell;
        commands = new Vector();
    }

    /**
     * Creates a script file with the given shell and command list.
     *
     * @param shell the command interpreter for the shebang line.
     * @param cmds  the commands to run, one per line.
     */
    public ScriptFile(String shell, List cmds) {
        commandShell = shell;
        commands = cmds;
    }

    /**
     * @return Returns the commands.
     */
    public List getCommands() {
        return commands;
    }

    /**
     * @param commands
     *            The commands to set.
     */
    public void setCommands(List commands) {
        this.commands = commands;
    }

    /**
     * @return Returns the commandShell.
     */
    public String getCommandShell() {
        return commandShell;
    }

    /**
     * @param commandShell
     *            The commandShell to set.
     */
    public void setCommandShell(String commandShell) {
        this.commandShell = commandShell;
    }

    /**
     * Renders the script as text: a shebang line followed by one command per
     * line, each line newline-terminated.
     *
     * @return the full script file content.
     */
    public String toString() {
        // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
        StringBuilder script = new StringBuilder();
        script.append("#!").append(commandShell).append("\n");

        for (Iterator i = commands.iterator(); i.hasNext();) {
            String cmd = (String) i.next();
            script.append(cmd).append("\n");
        }

        return script.toString();
    }

    /**
     * Writes the script content to the given path.
     *
     * @param filePath destination file path; an existing file is overwritten.
     * @throws Exception if the file cannot be opened for writing.
     */
    public void writeScriptFile(String filePath) throws Exception {
        PrintWriter pw = null;

        try {
            pw = new PrintWriter(new OutputStreamWriter(new FileOutputStream(
                    new File(filePath))));
            pw.println(toString());
        } catch (IOException e) {
            // Propagate a descriptive exception instead of printing the stack
            // trace here AND rethrowing (which double-reported the failure).
            throw new Exception("Error writing script file!: " + e.getMessage());
        } finally {
            // Guard against NPE: pw is still null if the constructor threw.
            if (pw != null) {
                pw.close();
            }
        }
    }

}
tormath1/jclouds
providers/google-compute-engine/src/main/java/org/jclouds/googlecomputeengine/domain/Quota.java
<reponame>tormath1/jclouds<filename>providers/google-compute-engine/src/main/java/org/jclouds/googlecomputeengine/domain/Quota.java /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.googlecomputeengine.domain; import org.jclouds.javax.annotation.Nullable; import org.jclouds.json.SerializedNames; import com.google.auto.value.AutoValue; /** Quotas assigned to a given project or region. */ @AutoValue public abstract class Quota { @Nullable public abstract String metric(); // Nullable?! really?! public abstract double usage(); public abstract double limit(); @SerializedNames({ "metric", "usage", "limit" }) public static Quota create(String metric, double usage, double limit) { return new AutoValue_Quota(metric, usage, limit); } Quota() { } }
MiroslavGannoha/fantasy-exchange
src/app/components/panel/examples/LoginForm.js
<gh_stars>0 import * as React from 'react'; import PropTypes from 'prop-types' import EPanel from '../index' const propTypes = { className: PropTypes.string } class LoginForm extends React.Component { render() { const Title = () => ( <div className="h6 text-center w-100 mb-2">Sign In / Login</div> ) return ( <EPanel titleNode={<Title/>} className={this.props.className}> <form> <div className="form-group"> <input type="email" className="form-control" placeholder="Email"/> </div> <div className="form-group"> <input type="text" className="form-control" placeholder="Password"/> </div> <div className="form-group d-flex justify-content-between align-items-center"> <a href="" className="text-muted btn btn-link" onClick={(e) => e.preventDefault()}>Need Help?</a> <button type="submit" className="btn btn-primary">Submit</button> </div> <div className="form-group text-center m-0"> <div className="btn-group"> <button type="button" className="btn btn-outline-secondary"> <i className="fa fa-fw fa-twitter"></i> </button> <button type="button" className="btn btn-outline-secondary"> <i className="fa fa-fw fa-facebook"></i> </button> <button type="button" className="btn btn-outline-secondary"> <i className="fa fa-fw fa-github"></i> </button> </div> </div> </form> </EPanel> ) } } LoginForm.propTypes = propTypes export default LoginForm
azmigproject/OpenAs2App
Server/src/main/java/org/openas2/processor/ProcessorException.java
<reponame>azmigproject/OpenAs2App package org.openas2.processor; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.openas2.OpenAS2Exception; public class ProcessorException extends OpenAS2Exception { /** * */ private static final long serialVersionUID = 1L; private Processor processor; private List<Exception> causes; public ProcessorException(Processor processor) { super(); this.processor = processor; } public List<Exception> getCauses() { if (causes == null) { causes = new ArrayList<Exception>(); } return causes; } public Processor getProcessor() { return processor; } public void setCauses(List<Exception> list) { causes = list; } public void setProcessor(Processor processor) { this.processor = processor; } public String getMessage() { StringWriter strWriter = new StringWriter(); PrintWriter writer = new PrintWriter(strWriter); writer.print(super.getMessage()); Iterator<?> causesIt = getCauses().iterator(); while (causesIt.hasNext()) { Exception e = (Exception) causesIt.next(); writer.println(); e.printStackTrace(writer); } writer.flush(); return strWriter.toString(); } }
StudyForCoding/BEAKJOON
16_DynamicProgramming1/Step13/gamjapark.py
import sys N = int(sys.stdin.readline()) arr1=[] for i in range(N): arr1.append(list(map(int, sys.stdin.readline().split()))) result = [1 for _ in range(N)] arr1.sort(key = lambda x: x[0]) for i in range(N): for j in range(i): if arr1[i][1] > arr1[j][1]: result[i] = max(result[i], result[j] + 1) print(N - max(result))
czosel/caluma
caluma/workflow/migrations/0005_auto_20181228_1243.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2018-12-28 12:43
# Adds group/user assignment fields to Task and WorkItem (with GIN indexes for
# array containment lookups), and normalizes created_by_group / meta / child_case
# field definitions across the workflow models.
from __future__ import unicode_literals

import django.contrib.postgres.fields
import django.contrib.postgres.fields.jsonb
import django.contrib.postgres.indexes
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [("workflow", "0004_auto_20181218_1352")]

    operations = [
        # New assignment fields.
        migrations.AddField(
            model_name="task",
            name="address_groups",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="workitem",
            name="addressed_groups",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=150),
                default=list,
                help_text="Offer work item to be processed by a group of users, such are not committed to process it though.",
                size=None,
            ),
        ),
        migrations.AddField(
            model_name="workitem",
            name="assigned_users",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=150),
                default=list,
                help_text="Users responsible to undertake given work item.",
                size=None,
            ),
        ),
        # created_by_group becomes an indexed, optional CharField on every model.
        migrations.AlterField(
            model_name="case",
            name="created_by_group",
            field=models.CharField(
                blank=True, db_index=True, max_length=150, null=True
            ),
        ),
        migrations.AlterField(
            model_name="case",
            name="meta",
            field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
        ),
        migrations.AlterField(
            model_name="flow",
            name="created_by_group",
            field=models.CharField(
                blank=True, db_index=True, max_length=150, null=True
            ),
        ),
        migrations.AlterField(
            model_name="task",
            name="created_by_group",
            field=models.CharField(
                blank=True, db_index=True, max_length=150, null=True
            ),
        ),
        migrations.AlterField(
            model_name="task",
            name="meta",
            field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
        ),
        migrations.AlterField(
            model_name="taskflow",
            name="created_by_group",
            field=models.CharField(
                blank=True, db_index=True, max_length=150, null=True
            ),
        ),
        migrations.AlterField(
            model_name="workflow",
            name="created_by_group",
            field=models.CharField(
                blank=True, db_index=True, max_length=150, null=True
            ),
        ),
        migrations.AlterField(
            model_name="workflow",
            name="meta",
            field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
        ),
        # child_case now survives deletion of the sub-workflow case (SET_NULL).
        migrations.AlterField(
            model_name="workitem",
            name="child_case",
            field=models.OneToOneField(
                blank=True,
                help_text="Defines case of a sub-workflow",
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="parent_work_item",
                to="workflow.Case",
            ),
        ),
        migrations.AlterField(
            model_name="workitem",
            name="created_by_group",
            field=models.CharField(
                blank=True, db_index=True, max_length=150, null=True
            ),
        ),
        migrations.AlterField(
            model_name="workitem",
            name="meta",
            field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
        ),
        # GIN indexes speed up array-containment queries on the new fields.
        migrations.AddIndex(
            model_name="workitem",
            index=django.contrib.postgres.indexes.GinIndex(
                fields=["addressed_groups"], name="workflow_wo_address_679262_gin"
            ),
        ),
        migrations.AddIndex(
            model_name="workitem",
            index=django.contrib.postgres.indexes.GinIndex(
                fields=["assigned_users"], name="workflow_wo_assigne_76d859_gin"
            ),
        ),
    ]
vincealdrin/Tutu
server/db.js
<reponame>vincealdrin/Tutu const r = require('rethinkdb'); const bcrypt = require('bcrypt-nodejs'); const { DB_NAME, DB_HOST, DB_PORT, SUPERADMIN_USERNAME, SUPERADMIN_PASS, SUPERADMIN_ROLE, SUPERADMIN_NAME, } = process.env; module.exports = async (cb) => { try { const conn = await r.connect({ db: DB_NAME, host: DB_HOST, port: DB_PORT, }); const tables = [ 'sources', 'articles', 'users', 'pendingSources', 'locations', 'provinces', 'crawlerLogs', 'usersFeed', 'visitors', 'errorArticles', 'pendingSourceVotes', 'sourceRevotes', ]; r.dbCreate(DB_NAME).run(conn, async (err) => { if (err) console.log(`${DB_NAME} database has already been created`); console.log(`${DB_NAME} database created`); tables.forEach(async (name) => { try { await r.db(DB_NAME).tableCreate(name).run(conn); console.log(`${name} table created`); } catch (e) { console.log(`${name} table has already been created`); } }); bcrypt.hash(SUPERADMIN_PASS, null, null, async (hashErr, hashedPassword) => { if (hashErr) console.log(hashErr); try { await r.table('users').wait().run(conn); await r.table('users').insert({ id: await r.uuid(SUPERADMIN_USERNAME).run(conn), password: <PASSWORD>, username: SUPERADMIN_USERNAME, name: SUPERADMIN_NAME, role: SUPERADMIN_ROLE, }).run(conn); } catch (e) { console.log(e); } }); try { await r.table('articles').wait(); await r.table('articles').indexCreate( 'positions', r.row('locations')('location')('position'), { geo: true, multi: true } ).run(conn); console.log('positions index created on articles table'); } catch (e) { console.log('positions index already exists on articles table'); } try { await r.table('articles').wait(); await r.table('articles').indexCreate('locationPublishDate', (article) => [ article('locations')('location')('position'), article('publishDate'), ], { geo: true, multi: true }).run(conn); console.log('locationPublishDate index created on articles table'); } catch (e) { console.log('locationPublishDate index already exists on articles table'); } try { await 
r.table('articles').wait(); await r.table('articles').indexCreate('popularityScore', r.row('popularity')('totalScore')).run(conn); console.log('popularityScore index created on articles table'); } catch (e) { console.log('popularityScore index already exists on articles table'); } try { await r.table('articles').wait(); await r.table('articles').indexCreate('timestamp', r.row('timestamp')).run(conn); console.log('timestamp index created on articles table'); } catch (e) { console.log('timestamp index already exists on articles table'); } try { await r.table('articles').wait(); await r.table('articles').indexCreate('publishDate', r.row('publishDate')).run(conn); console.log('publishDate index created on articles table'); } catch (e) { console.log('publishDate index already exists on articles table'); } try { await r.table('crawlerLogs').wait(); await r.table('crawlerLogs') .indexCreate('status', (article) => article('timestamp').date()) .run(conn); console.log('status index created on crawlerLogs table'); } catch (e) { console.log('status index already exists on crawlerLogs table'); } // try { // await r.table('pendingSourceVotes').wait(); // await r.table('pendingSourceVotes') // .indexCreate('vote', [r.row('pendingSourceId'), r.row('userId')]) // .run(conn); // console.log('vote index created on pendingSourceVotes table'); // } catch (e) { // console.log('vote index already exists on pendingSourceVotes table'); // } cb(conn); }); } catch (e) { console.log(e); } };
Drakandes/Portfolio_NicolasPaulBonneau
Code en C++ de Mage War Online/IncludeAllTalents.h
<gh_stars>0 #pragma once #include "TalentAfterEffect.h" #include "TalentBeastly.h" #include "TalentBoosted.h" #include "TalentDenseShield.h" #include "TalentDisablingAttack.h" #include "TalentEnlightened.h" #include "TalentEnraged.h" #include "TalentBlockade.h" #include "TalentHappyMeal.h" #include "TalentHardyMan.h" #include "TalentHasty.h" #include "TalentHealthCare.h" #include "TalentImmuneSystem.h" #include "TalentIronTip.h" #include "TalentNaturalBornKiller.h" #include "TalentOvercomeTheBarriers.h" #include "TalentPeaceLoving.h" #include "TalentQuickAttack.h" #include "TalentRampage.h" #include "TalentReinforcedShield.h" #include "TalentRestless.h" #include "TalentSharpShooter.h" #include "TalentStoneMan.h" #include "TalentTranquilizerAttack.h" #include "TalentSwifty.h" #include "TalentUnleashedPower.h" #include "TalentUnstoppable.h"
kodePhile/unify-framework
unify-core/src/main/java/com/tcdng/unify/core/database/sql/AbstractSqlDataSource.java
/*
 * Copyright 2018-2020 The Code Department.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.tcdng.unify.core.database.sql;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.tcdng.unify.core.UnifyCoreErrorConstants;
import com.tcdng.unify.core.UnifyException;
import com.tcdng.unify.core.annotation.Configurable;
import com.tcdng.unify.core.data.AbstractPool;
import com.tcdng.unify.core.database.AbstractDataSource;
import com.tcdng.unify.core.database.NativeQuery;
import com.tcdng.unify.core.security.Authentication;
import com.tcdng.unify.core.util.SqlUtils;
import com.tcdng.unify.core.util.StringUtils;

/**
 * Abstract SQL data source. Manages a JDBC connection pool and exposes
 * metadata (schemas, tables, columns) and native-query test helpers.
 *
 * @author <NAME>
 * @since 1.0
 */
public abstract class AbstractSqlDataSource extends AbstractDataSource implements SqlDataSource {

    @Configurable
    private String driver;

    @Configurable
    private String connectionUrl;

    @Configurable(resolve = false)
    private Authentication passwordAuthentication;

    @Configurable
    private String appSchema;

    @Configurable
    private String username;

    @Configurable(hidden = true)
    private String password;

    @Configurable("2000")
    private long getConnectionTimeout;

    @Configurable("32")
    private int maxConnections;

    @Configurable("1")
    private int minConnections;

    @Configurable("false")
    private boolean shutdownOnTerminate;

    private SqlConnectionPool sqlConnectionPool;

    @Override
    public String getAppSchema() throws UnifyException {
        // Fall back to the dialect's default schema when none is configured.
        if (StringUtils.isBlank(appSchema)) {
            return getDialect().getDefaultSchema();
        }

        return appSchema;
    }

    @Override
    public List<String> getSchemaList() throws UnifyException {
        Connection connection = getConnection();
        ResultSet rs = null;
        try {
            List<String> schemaList = new ArrayList<String>();
            rs = connection.getMetaData().getSchemas();
            while (rs.next()) {
                schemaList.add(rs.getString("TABLE_SCHEM"));
            }

            return schemaList;
        } catch (SQLException e) {
            throwOperationErrorException(e);
        } finally {
            SqlUtils.close(rs);
            restoreConnection(connection);
        }

        return Collections.emptyList();
    }

    @Override
    public Map<String, SqlTableInfo> getTableMap(String schemaName, SqlTableType sqlTableType) throws UnifyException {
        if (schemaName != null) {
            Map<String, SqlTableInfo> map = new LinkedHashMap<String, SqlTableInfo>();
            for (SqlTableInfo sqlTableInfo : getTableList(schemaName, sqlTableType)) {
                map.put(sqlTableInfo.getTableName(), sqlTableInfo);
            }

            return map;
        }

        return Collections.emptyMap();
    }

    @Override
    public List<SqlTableInfo> getTableList(String schemaName, SqlTableType sqlTableType) throws UnifyException {
        if (schemaName != null) {
            Connection connection = getConnection();
            ResultSet rs = null;
            try {
                List<SqlTableInfo> tableInfoList = new ArrayList<SqlTableInfo>();
                // Null sqlTableType means both tables and views.
                String[] tableType = new String[] { SqlTableType.TABLE.code(), SqlTableType.VIEW.code() };
                if (sqlTableType != null) {
                    tableType = new String[] { sqlTableType.code() };
                }

                rs = connection.getMetaData().getTables(null, schemaName, null, tableType);
                while (rs.next()) {
                    String type = rs.getString("TABLE_TYPE");
                    String tableName = rs.getString("TABLE_NAME");
                    tableInfoList.add(new SqlTableInfo(SqlTableType.fromCode(type), tableName));
                }

                return tableInfoList;
            } catch (SQLException e) {
                throwOperationErrorException(e);
            } finally {
                SqlUtils.close(rs);
                restoreConnection(connection);
            }
        }

        return Collections.emptyList();
    }

    @Override
    public List<SqlColumnInfo> getColumnList(String schemaName, String tableName) throws UnifyException {
        if (schemaName != null && tableName != null) {
            Connection connection = getConnection();
            ResultSet rs = null;
            try {
                List<SqlColumnInfo> columnInfoList = new ArrayList<SqlColumnInfo>();
                rs = connection.getMetaData().getColumns(null, schemaName, tableName, null);
                while (rs.next()) {
                    String columnName = rs.getString("COLUMN_NAME");
                    int sqlType = rs.getInt("DATA_TYPE");
                    if (SqlUtils.isSupportedSqlType(sqlType)) {
                        Class<?> type = SqlUtils.getJavaType(sqlType);
                        String defaultVal = rs.getString("COLUMN_DEF");
                        if (defaultVal != null) {
                            defaultVal = getDialect().normalizeDefault(defaultVal.trim());
                        }

                        String decimalDigitsStr = rs.getString("DECIMAL_DIGITS");
                        int decimalDigits = decimalDigitsStr == null ? 0 : Integer.parseInt(decimalDigitsStr);
                        columnInfoList.add(new SqlColumnInfo(type, rs.getString("TYPE_NAME"), columnName, defaultVal,
                                sqlType, rs.getInt("COLUMN_SIZE"), decimalDigits,
                                "YES".equals(rs.getString("IS_NULLABLE"))));
                    } else {
                        // Unsupported JDBC types are skipped, not treated as errors.
                        logDebug(
                                "Column [{0}] of type [{1}] skipped when obtaining column list for table [{2}] in schema [{3}].",
                                columnName, sqlType, tableName, schemaName);
                    }
                }

                return columnInfoList;
            } catch (SQLException e) {
                throwOperationErrorException(e);
            } finally {
                SqlUtils.close(rs);
                restoreConnection(connection);
            }
        }

        return Collections.emptyList();
    }

    @Override
    public Map<String, SqlColumnInfo> getColumnMap(String schemaName, String tableName) throws UnifyException {
        List<SqlColumnInfo> list = getColumnList(schemaName, tableName);
        if (!list.isEmpty()) {
            Map<String, SqlColumnInfo> map = new LinkedHashMap<String, SqlColumnInfo>();
            for (SqlColumnInfo sqlColumnInfo : list) {
                map.put(sqlColumnInfo.getColumnName(), sqlColumnInfo);
            }

            return map;
        }

        return Collections.emptyMap();
    }

    @Override
    public Set<String> getColumns(String schemaName, String tableName) throws UnifyException {
        if (schemaName != null && tableName != null) {
            Connection connection = getConnection();
            ResultSet rs = null;
            try {
                Set<String> columnNames = new LinkedHashSet<String>();
                rs = connection.getMetaData().getColumns(null, schemaName, tableName, null);
                // Normalize case per dialect so lookups are consistent.
                if (getDialect().isAllObjectsInLowerCase()) {
                    while (rs.next()) {
                        columnNames.add(rs.getString("COLUMN_NAME").toLowerCase());
                    }
                } else {
                    while (rs.next()) {
                        columnNames.add(rs.getString("COLUMN_NAME").toUpperCase());
                    }
                }

                return columnNames;
            } catch (SQLException e) {
                throwOperationErrorException(e);
            } finally {
                SqlUtils.close(rs);
                restoreConnection(connection);
            }
        }

        return Collections.emptySet();
    }

    @Override
    public int testNativeQuery(NativeQuery query) throws UnifyException {
        String nativeSql = getDialect().generateNativeQuery(query);
        return testNativeQuery(nativeSql);
    }

    @Override
    public int testNativeQuery(String nativeSql) throws UnifyException {
        logDebug("Testing native query [{0}]...", nativeSql);
        Connection connection = getConnection();
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            pstmt = connection.prepareStatement(nativeSql);
            rs = pstmt.executeQuery();
            // FIX: count rows explicitly. The old code exhausted the cursor and
            // then returned rs.getRow(), which JDBC defines as 0 once the
            // cursor is past the last row - so it always returned 0.
            int rowCount = 0;
            while (rs.next()) {
                rowCount++;
            }

            logDebug("Native query [{0}] successfully tested.", nativeSql);
            return rowCount;
        } catch (SQLException e) {
            throwOperationErrorException(e);
        } finally {
            SqlUtils.close(rs);
            SqlUtils.close(pstmt);
            restoreConnection(connection);
        }

        return 0;
    }

    @Override
    public int testNativeUpdate(String nativeSql) throws UnifyException {
        logDebug("Testing native update [{0}]...", nativeSql);
        Connection connection = getConnection();
        PreparedStatement pstmt = null;
        try {
            pstmt = connection.prepareStatement(nativeSql);
            int updated = pstmt.executeUpdate();
            logDebug("Native update [{0}] successfully tested.", nativeSql);
            return updated;
        } catch (SQLException e) {
            throwOperationErrorException(e);
        } finally {
            SqlUtils.close(pstmt);
            restoreConnection(connection);
        }

        return 0;
    }

    @Override
    public List<Object[]> getRows(NativeQuery query) throws UnifyException {
        String nativeSql = getDialect().generateNativeQuery(query);
        List<Object[]> resultList = new ArrayList<Object[]>();
        Connection connection = getConnection();
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            pstmt = connection.prepareStatement(nativeSql);
            rs = pstmt.executeQuery();
            int columns = query.columns();
            while (rs.next()) {
                Object[] item = new Object[columns];
                for (int i = 0; i < columns; i++) {
                    item[i] = rs.getObject(i + 1);
                }

                resultList.add(item);
            }
        } catch (SQLException e) {
            throwOperationErrorException(e);
        } finally {
            SqlUtils.close(rs);
            SqlUtils.close(pstmt);
            restoreConnection(connection);
        }

        return resultList;
    }

    @Override
    public SqlDataSourceDialect getDialect() throws UnifyException {
        return (SqlDataSourceDialect) super.getDialect();
    }

    @Override
    public boolean testConnection() throws UnifyException {
        // Borrow/return exercises the pool's connection test (onGetObject).
        sqlConnectionPool.returnObject(sqlConnectionPool.borrowObject());
        return true;
    }

    @Override
    public Connection getConnection() throws UnifyException {
        return sqlConnectionPool.borrowObject();
    }

    @Override
    public boolean restoreConnection(Connection connection) throws UnifyException {
        return sqlConnectionPool.returnObject(connection);
    }

    @Override
    public boolean restoreConnection(Object connection) throws UnifyException {
        return sqlConnectionPool.returnObject((Connection) connection);
    }

    @Override
    public int getAvailableConnections() throws UnifyException {
        return sqlConnectionPool.available();
    }

    @Override
    protected void onInitialize() throws UnifyException {
        logInfo("Initializing datasource [{0}]...", getName());
        super.onInitialize();
        if (driver != null) {
            doInitConnectionPool();
        }

        logInfo("Initialization of datasource [{0}] completed.", getName());
    }

    @Override
    protected void onTerminate() throws UnifyException {
        logInfo("Terminating datasource [{0}]...", getName());
        if (shutdownOnTerminate) {
            // Some embedded databases (e.g. HSQLDB) need an explicit shutdown command.
            SqlShutdownHook sqlShutdownHook = getDialect().getShutdownHook();
            if (sqlShutdownHook != null) {
                Connection connection = getConnection();
                try {
                    sqlShutdownHook.commandShutdown(connection);
                } finally {
                    restoreConnection(connection);
                }
            }
        }

        if (sqlConnectionPool != null) {
            sqlConnectionPool.terminate();
        }

        super.onTerminate();
        logInfo("Datasource [{0}] terminated.", getName());
    }

    protected void doInitConnectionPool() throws UnifyException {
        try {
            logInfo("Setting up connection pool for [{0}]...", getName());
            Class.forName(driver);
            sqlConnectionPool = createSqlConnectionPool();
            sqlConnectionPool.initialize();
        } catch (ClassNotFoundException e) {
            throw new UnifyException(UnifyCoreErrorConstants.DATASOURCE_MISSING_DRIVER, getName(), driver);
        }
    }

    protected SqlConnectionPool getSqlConnectionPool() {
        return sqlConnectionPool;
    }

    protected void setDriver(String driver) {
        this.driver = driver;
    }

    protected void setConnectionUrl(String connectionUrl) {
        this.connectionUrl = connectionUrl;
    }

    protected void setUsername(String username) {
        this.username = username;
    }

    protected void setPassword(String password) {
        this.password = password;
    }

    protected void setGetConnectionTimeout(long getConnectionTimeout) {
        this.getConnectionTimeout = getConnectionTimeout;
    }

    protected void setMaxConnections(int maxConnections) {
        this.maxConnections = maxConnections;
    }

    protected void setMinConnections(int minConnections) {
        this.minConnections = minConnections;
    }

    protected String getDriver() {
        return driver;
    }

    protected String getConnectionUrl() {
        return connectionUrl;
    }

    protected Authentication getPasswordAuthentication() {
        return passwordAuthentication;
    }

    protected void setAppSchema(String appSchema) {
        this.appSchema = appSchema;
    }

    protected String getUsername() {
        return username;
    }

    protected String getPassword() {
        return password;
    }

    protected long getGetConnectionTimeout() {
        return getConnectionTimeout;
    }

    protected int getMaxConnections() {
        return maxConnections;
    }

    protected int getMinConnections() {
        return minConnections;
    }

    protected boolean isShutdownOnTerminate() {
        return shutdownOnTerminate;
    }

    protected void setShutdownOnTerminate(boolean shutdownOnTerminate) {
        this.shutdownOnTerminate = shutdownOnTerminate;
    }

    /**
     * Resolves pool credentials: explicit username/password wins, otherwise
     * the configured {@link Authentication} component, otherwise none.
     */
    private SqlConnectionPool createSqlConnectionPool() throws UnifyException {
        String xUsername = null;
        // NOTE(review): restored from anonymized source; presumably initialized
        // to null like xUsername - confirm against project history.
        String xPassword = null;
        if (!StringUtils.isBlank(username)) {
            xUsername = username;
            xPassword = password;
        } else if (passwordAuthentication != null) {
            xUsername = passwordAuthentication.getUsername();
            xPassword = passwordAuthentication.getPassword();
        }

        return new SqlConnectionPool(connectionUrl, xUsername, xPassword, getConnectionTimeout, minConnections,
                maxConnections);
    }

    /**
     * JDBC connection pool. Connections are created with auto-commit off and
     * validated with the dialect's test SQL on every borrow.
     */
    protected class SqlConnectionPool extends AbstractPool<Connection> {

        private String connectionURL;

        private String username;

        private String password;

        private String testSql;

        public SqlConnectionPool(String connectionURL, String username, String password, long getTimeout,
                int minObjects, int maxObjects) {
            super(getTimeout, minObjects, maxObjects, true);
            this.connectionURL = connectionURL;
            this.username = username;
            this.password = password;
        }

        @Override
        public void initialize() throws UnifyException {
            testSql = getDialect().generateTestSql();
            super.initialize();
        }

        @Override
        protected Connection createObject(Object... params) throws Exception {
            Connection connection = null;
            if (username != null) {
                connection = DriverManager.getConnection(connectionURL, username, password);
            } else {
                connection = DriverManager.getConnection(connectionURL);
            }

            // Transactions are managed by the framework, not JDBC auto-commit.
            connection.setAutoCommit(false);
            return connection;
        }

        @Override
        protected void onGetObject(Connection connection, Object... params) throws Exception {
            if (connection.isClosed()) {
                throw new UnifyException(UnifyCoreErrorConstants.DATASOURCE_BAD_CONNECTION, getName());
            }

            // Full connection test
            PreparedStatement pStmt = null;
            ResultSet rs = null;
            try {
                pStmt = connection.prepareStatement(testSql);
                rs = pStmt.executeQuery();
            } finally {
                SqlUtils.close(rs);
                SqlUtils.close(pStmt);
            }
        }

        @Override
        protected void destroyObject(Connection connection) {
            try {
                // Roll back any uncommitted work before discarding the connection.
                connection.rollback();
                logDebug("Destroyed connection...");
            } catch (Exception e) {
            } finally {
                SqlUtils.close(connection);
            }
        }
    }
}
jon-wehner/MyPantry
react-app/src/components/Inventory/InventoryItem.js
<filename>react-app/src/components/Inventory/InventoryItem.js import React, { useState } from 'react'; import PropTypes from 'prop-types'; import { Modal } from '../../context/Modal'; import EditInventoryItem from './forms/EditInventoryItem'; import { getExpirationString, getQuantityString } from '../../services/utils'; export default function InventoryItem({ row }) { const [showModal, setShowModal] = useState(false); return ( <> <tr className="pantryListItem" onClick={() => setShowModal(true)}> <td> ( {row.quantity} ) {getQuantityString(row.quantity, row.measurement.unit)} </td> <td> {row.item.name} </td> <td> {row.expirationDate ? getExpirationString(row.expirationDate) : null} </td> </tr> {showModal && ( <Modal onClose={() => setShowModal(false)}> <EditInventoryItem row={row} setShowModal={setShowModal} /> </Modal> )} </> ); } InventoryItem.propTypes = { row: PropTypes.shape({ id: PropTypes.number.isRequired, quantity: PropTypes.number.isRequired, expirationDate: PropTypes.string, measurement: PropTypes.shape({ id: PropTypes.number.isRequired, unit: PropTypes.string.isRequired, }), item: PropTypes.shape({ id: PropTypes.number.isRequired, name: PropTypes.string.isRequired, fridge: PropTypes.bool.isRequired, categoryId: PropTypes.number.isRequired, }).isRequired, }).isRequired, };
Soyandroid/bemanitools
src/main/aciodrv/kfca.c
<filename>src/main/aciodrv/kfca.c
/*
 * Driver helpers for the ACIO KFCA node (audio amp / IO board):
 * watchdog start, amplifier volume control, init and polling.
 */
#define LOG_MODULE "aciodrv-kfca"

#include <stdio.h>
#include <string.h>

#include "aciodrv/device.h"

#include "util/log.h"

/* Start the node's watchdog timer. node_id is 0-based; the ACIO message
   address is node_id + 1 (presumably addresses are 1-based on the bus -
   TODO confirm against the ACIO protocol docs). */
static bool aciodrv_kfca_watchdog_start(
    struct aciodrv_device_ctx *device, uint8_t node_id)
{
    struct ac_io_message msg;

    log_assert(device);

    msg.addr = node_id + 1;
    msg.cmd.code = ac_io_u16(AC_IO_CMD_KFCA_WATCHDOG);
    msg.cmd.nbytes = 2;
    /* uint16_t payload, big-endian: 23 * 256 + 112 = 6000.
       Presumably the watchdog timeout in milliseconds - TODO confirm. */
    // uint16_t: 6000
    msg.cmd.raw[0] = 23;
    msg.cmd.raw[1] = 112;

    if (!aciodrv_send_and_recv(
            device, &msg, offsetof(struct ac_io_message, cmd.raw) + 2)) {
        log_warning("Starting watchdog failed");
        return false;
    }

    /* NOTE(review): success logged at warning level - presumably should use a
       lower log level; confirm which log macros are available. */
    log_warning(
        "Started watchdog of node %d, sz: %d, status: %d",
        node_id,
        msg.cmd.nbytes,
        msg.cmd.status);

    return true;
}

/* Set the four amplifier channel volumes on the node. Values are raw
   device units; SOURCE does not establish their range or polarity. */
bool aciodrv_kfca_amp(
    struct aciodrv_device_ctx *device,
    uint8_t node_id,
    uint8_t primary,
    uint8_t headphone,
    uint8_t unused,
    uint8_t subwoofer)
{
    struct ac_io_message msg;

    log_assert(device);

    msg.addr = node_id + 1;
    msg.cmd.code = ac_io_u16(AC_IO_CMD_KFCA_AMP_CONTROL);
    msg.cmd.nbytes = 4;
    msg.cmd.raw[0] = primary;
    msg.cmd.raw[1] = headphone;
    msg.cmd.raw[2] = unused;
    msg.cmd.raw[3] = subwoofer;

    if (!aciodrv_send_and_recv(
            device, &msg, offsetof(struct ac_io_message, cmd.raw) + 1)) {
        log_warning("Setting AMP failed");
        return false;
    }

    /* NOTE(review): success logged at warning level - see note above. */
    log_warning("Started AMP node %d", node_id);

    return true;
}

/* Initialize a KFCA node: start its watchdog and zero all amp channels. */
bool aciodrv_kfca_init(
    struct aciodrv_device_ctx *device, uint8_t node_id)
{
    log_assert(device);

    if (!aciodrv_kfca_watchdog_start(device, node_id)) {
        return false;
    }

    if (!aciodrv_kfca_amp(device, node_id, 0, 0, 0, 0)) {
        return false;
    }

    return true;
}

/* Exchange one poll frame with the node: send *pout, and when pin is
   non-NULL copy the node's response into *pin. */
bool aciodrv_kfca_poll(
    struct aciodrv_device_ctx *device,
    uint8_t node_id,
    const struct ac_io_kfca_poll_out *pout,
    struct ac_io_kfca_poll_in *pin)
{
    struct ac_io_message msg;

    log_assert(device);

    msg.addr = node_id + 1;
    msg.cmd.code = ac_io_u16(AC_IO_CMD_KFCA_POLL);
    msg.cmd.nbytes = sizeof(*pout);
    /* buffer size of data we expect */
    msg.cmd.kfca_poll_out = *pout;

    if (!aciodrv_send_and_recv(
            device, &msg, offsetof(struct ac_io_message, cmd.raw) + sizeof(*pin))) {
        log_warning("Polling of node %d failed", node_id + 1);
        return false;
    }

    if (pin != NULL) {
        memcpy(pin, &msg.cmd.kfca_poll_in, sizeof(*pin));
    }

    return true;
}
soezen/sur-snapps-sentoff
sentoff-rest/src/main/java/sur/snapps/sentoff/domain/table/PurchasesTable.java
<filename>sentoff-rest/src/main/java/sur/snapps/sentoff/domain/table/PurchasesTable.java package sur.snapps.sentoff.domain.table; import sur.snapps.sentoff.domain.Spending; import sur.snapps.sentoff.domain.repo.SpendingRowMapper; import sur.snapps.sentoff.domain.repo.Table; import java.util.HashMap; import java.util.Map; import org.springframework.jdbc.core.RowMapper; /** * @author sur * @since 01/04/2016 */ public class PurchasesTable implements Table<Spending> { @Override public RowMapper<Spending> getRowMapper() { return new SpendingRowMapper(); } public Map<String, Object> getInsertValues(Spending spending) { Map<String, Object> values = new HashMap<>(); values.put("date", spending.getDate()); values.put("amount", spending.getAmount()); values.put("store_location_id", spending.getStoreLocationId()); return values; } @Override public String getTableName() { return "PURCHASES"; } @Override public Map<String, Object> getUpdateValues(Spending row) { // TODO Auto-generated method stub return null; } }
Mu-L/kotlin
compiler/testData/asJava/lightClasses/publicField/Simple.java
public final class C /* C*/ { @kotlin.jvm.JvmField() @org.jetbrains.annotations.NotNull() public final java.lang.String foo; public C();// .ctor() }
DIXTRA/slack-bookshelf
src/helpers/topics.helper.js
const { Topic } = require('../models'); async function topicExists(name, TeamId) { const count = await Topic.count({ where: { name, TeamId } }); return count != 0; } module.exports = { topicExists };
JulienPradet/pigment
packages/pigment-demo/src/client/pages/index.js
<filename>packages/pigment-demo/src/client/pages/index.js import React from "react"; import DefaultLayout from "./_layout"; const Home = () => { return <div>Welcome to the first page of your Pigment.js application !</div>; }; Home.layout = DefaultLayout; export default Home;
diogocs1/comps
web/addons/website_blog/models/website_blog.py
<reponame>diogocs1/comps<gh_stars>0 # -*- coding: utf-8 -*- from datetime import datetime import difflib import lxml import random from openerp import tools from openerp import SUPERUSER_ID from openerp.osv import osv, fields from openerp.tools.translate import _ class Blog(osv.Model): _name = 'blog.blog' _description = 'Blogs' _inherit = ['mail.thread', 'website.seo.metadata'] _order = 'name' _columns = { 'name': fields.char('Blog Name', required=True), 'subtitle': fields.char('Blog Subtitle'), 'description': fields.text('Description'), } class BlogTag(osv.Model): _name = 'blog.tag' _description = 'Blog Tag' _inherit = ['website.seo.metadata'] _order = 'name' _columns = { 'name': fields.char('Name', required=True), } class BlogPost(osv.Model): _name = "blog.post" _description = "Blog Post" _inherit = ['mail.thread', 'website.seo.metadata'] _order = 'id DESC' def _compute_ranking(self, cr, uid, ids, name, arg, context=None): res = {} for blog_post in self.browse(cr, uid, ids, context=context): age = datetime.now() - datetime.strptime(blog_post.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT) res[blog_post.id] = blog_post.visits * (0.5+random.random()) / max(3, age.days) return res _columns = { 'name': fields.char('Title', required=True, translate=True), 'subtitle': fields.char('Sub Title', translate=True), 'author_id': fields.many2one('res.partner', 'Author'), 'background_image': fields.binary('Background Image', oldname='content_image'), 'blog_id': fields.many2one( 'blog.blog', 'Blog', required=True, ondelete='cascade', ), 'tag_ids': fields.many2many( 'blog.tag', string='Tags', ), 'content': fields.html('Content', translate=True, sanitize=False), # website control 'website_published': fields.boolean( 'Publish', help="Publish on the website", copy=False, ), 'website_message_ids': fields.one2many( 'mail.message', 'res_id', domain=lambda self: [ '&', '&', ('model', '=', self._name), ('type', '=', 'comment'), ('path', '=', False) ], string='Website Messages', 
help="Website communication history", ), 'history_ids': fields.one2many( 'blog.post.history', 'post_id', 'History', help='Last post modifications', ), # creation / update stuff 'create_date': fields.datetime( 'Created on', select=True, readonly=True, ), 'create_uid': fields.many2one( 'res.users', 'Author', select=True, readonly=True, ), 'write_date': fields.datetime( 'Last Modified on', select=True, readonly=True, ), 'write_uid': fields.many2one( 'res.users', '<NAME>', select=True, readonly=True, ), 'visits': fields.integer('No of Views'), 'ranking': fields.function(_compute_ranking, string='Ranking', type='float'), } _defaults = { 'name': _('Blog Post Title'), 'subtitle': _('Subtitle'), 'author_id': lambda self, cr, uid, ctx=None: self.pool['res.users'].browse(cr, uid, uid, context=ctx).partner_id.id, } def html_tag_nodes(self, html, attribute=None, tags=None, context=None): """ Processing of html content to tag paragraphs and set them an unique ID. :return result: (html, mappin), where html is the updated html with ID and mapping is a list of (old_ID, new_ID), where old_ID is None is the paragraph is a new one. 
""" mapping = [] if not html: return html, mapping if tags is None: tags = ['p'] if attribute is None: attribute = 'data-unique-id' counter = 0 # form a tree root = lxml.html.fragment_fromstring(html, create_parent='div') if not len(root) and root.text is None and root.tail is None: return html, mapping # check all nodes, replace : # - img src -> check URL # - a href -> check URL for node in root.iter(): if not node.tag in tags: continue ancestor_tags = [parent.tag for parent in node.iterancestors()] if ancestor_tags: ancestor_tags.pop() ancestor_tags.append('counter_%s' % counter) new_attribute = '/'.join(reversed(ancestor_tags)) old_attribute = node.get(attribute) node.set(attribute, new_attribute) mapping.append((old_attribute, counter)) counter += 1 html = lxml.html.tostring(root, pretty_print=False, method='html') # this is ugly, but lxml/etree tostring want to put everything in a 'div' that breaks the editor -> remove that if html.startswith('<div>') and html.endswith('</div>'): html = html[5:-6] return html, mapping def _postproces_content(self, cr, uid, id, content=None, context=None): if content is None: content = self.browse(cr, uid, id, context=context).content if content is False: return content content, mapping = self.html_tag_nodes(content, attribute='data-chatter-id', tags=['p'], context=context) for old_attribute, new_attribute in mapping: if not old_attribute: continue msg_ids = self.pool['mail.message'].search(cr, SUPERUSER_ID, [('path', '=', old_attribute)], context=context) self.pool['mail.message'].write(cr, SUPERUSER_ID, msg_ids, {'path': new_attribute}, context=context) return content def create_history(self, cr, uid, ids, vals, context=None): for i in ids: history = self.pool.get('blog.post.history') if vals.get('content'): res = { 'content': vals.get('content', ''), 'post_id': i, } history.create(cr, uid, res) def create(self, cr, uid, vals, context=None): if context is None: context = {} if 'content' in vals: vals['content'] = 
self._postproces_content(cr, uid, None, vals['content'], context=context) create_context = dict(context, mail_create_nolog=True) post_id = super(BlogPost, self).create(cr, uid, vals, context=create_context) self.create_history(cr, uid, [post_id], vals, context) return post_id def write(self, cr, uid, ids, vals, context=None): if 'content' in vals: vals['content'] = self._postproces_content(cr, uid, None, vals['content'], context=context) result = super(BlogPost, self).write(cr, uid, ids, vals, context) self.create_history(cr, uid, ids, vals, context) return result class BlogPostHistory(osv.Model): _name = "blog.post.history" _description = "Blog Post History" _order = 'id DESC' _rec_name = "create_date" _columns = { 'post_id': fields.many2one('blog.post', 'Blog Post'), 'summary': fields.char('Summary', select=True), 'content': fields.text("Content"), 'create_date': fields.datetime("Date"), 'create_uid': fields.many2one('res.users', "Modified By"), } def getDiff(self, cr, uid, v1, v2, context=None): history_pool = self.pool.get('blog.post.history') text1 = history_pool.read(cr, uid, [v1], ['content'])[0]['content'] text2 = history_pool.read(cr, uid, [v2], ['content'])[0]['content'] line1 = line2 = '' if text1: line1 = text1.splitlines(1) if text2: line2 = text2.splitlines(1) if (not line1 and not line2) or (line1 == line2): raise osv.except_osv(_('Warning!'), _('There are no changes in revisions.')) diff = difflib.HtmlDiff() return diff.make_table(line1, line2, "Revision-%s" % (v1), "Revision-%s" % (v2), context=True)
colinw7/CJavaScript
qsrc/CQJDialog.cpp
#include <CQJDialog.h> #include <CJavaScript.h> #include <CQHistoryLineEdit.h> #include <CJavaScript.h> #include <QVBoxLayout> #include <QTabWidget> #include <QTextEdit> #include <QListWidget> #include <QPushButton> #include <QFileDialog> #include <QDir> CQJDialog:: CQJDialog(CJavaScript *js) : js_(js) { setWindowTitle("JavaScript"); QVBoxLayout *layout = new QVBoxLayout(this); layout->setMargin(0); layout->setSpacing(2); //--- QTabWidget *tab = new QTabWidget; log_ = new QTextEdit; log_->setReadOnly(true); tab->addTab(log_, "Log"); history_ = new QListWidget; connect(history_, SIGNAL(itemClicked(QListWidgetItem *)), this, SLOT(historySlot(QListWidgetItem *))); tab->addTab(history_, "History"); //---- QFrame *variables = new QFrame; QVBoxLayout *variablesLayout = new QVBoxLayout(variables); variablesLayout->setMargin(0); variablesLayout->setSpacing(2); variablesList_ = new QListWidget; variablesLayout->addWidget(variablesList_); QPushButton *variablesLoad = new QPushButton("Load"); connect(variablesLoad, SIGNAL(clicked()), this, SLOT(loadVariables())); variablesLayout->addWidget(variablesLoad); tab->addTab(variables, "Variables"); //---- QFrame *functions = new QFrame; QVBoxLayout *functionsLayout = new QVBoxLayout(functions); functionsLayout->setMargin(0); functionsLayout->setSpacing(2); functionsList_ = new QListWidget; functionsLayout->addWidget(functionsList_); QPushButton *functionsLoad = new QPushButton("Load"); connect(functionsLoad, SIGNAL(clicked()), this, SLOT(loadFunctions())); functionsLayout->addWidget(functionsLoad); tab->addTab(functions, "Functions"); //--- layout->addWidget(tab); //--- QFrame *entryFrame = new QFrame; layout->addWidget(entryFrame); QHBoxLayout *entryLayout = new QHBoxLayout(entryFrame); entryLayout->setMargin(0); entryLayout->setSpacing(2); input_ = new CQHistoryLineEdit; entryLayout->addWidget(input_); connect(input_, SIGNAL(exec(const QString &)), this, SLOT(execCmd(const QString &))); QPushButton *load = new QPushButton("Load"); 
entryLayout->addWidget(load); connect(load, SIGNAL(clicked()), this, SLOT(loadFile())); } void CQJDialog:: historySlot(QListWidgetItem *item) { input_->setText(item->text()); } void CQJDialog:: execCmd(const QString &cmd) { if (! cmd.length()) return; history_->addItem(cmd); //--- js_->loadString(cmd.toStdString()); CJValueP value = js_->exec(); if (value) { std::string str = value->toString(); QTextCursor cursor = log_->textCursor(); cursor.insertText(str.c_str()); cursor.insertText("\n"); } } void CQJDialog:: loadFile() { QString title = "Load File"; QString cwd = QDir::currentPath(); QString filter = "JavaScript Files (*.js)"; QString filename = QFileDialog::getOpenFileName(this, title, cwd, filter); if (filename == "") return; js_->loadFile(filename.toStdString()); js_->exec(); } void CQJDialog:: loadVariables() { variablesList_->clear(); std::vector<std::string> names = js_->getVariableNames(); for (const auto &n : names) variablesList_->addItem(n.c_str()); } void CQJDialog:: loadFunctions() { functionsList_->clear(); std::vector<std::string> names = js_->getFunctionNames(); for (const auto &n : names) functionsList_->addItem(n.c_str()); } QSize CQJDialog:: sizeHint() const { QFontMetrics fm(font()); int w = fm.width("X")*50; int h = fm.height() *25; return QSize(w, h); }
zillionn/core
packages/core-graphql/__tests__/graphql.test.js
const app = require('./__support__/setup') let graphql beforeAll(async () => { const container = await app.setUp() graphql = await container.resolvePlugin('graphql') }) afterAll(() => { app.tearDown() }) describe('GraphQL', () => { it('should be an object', () => { expect(graphql).toBeObject() }) })
MaxSobolMark/mbrl-lib
mbrl/env/reward_functions/faucet_close_reward_function.py
from .base_reward_function import BaseRewardFunction import torch import numpy as np from .metaworld_reward_utils import tolerance class FaucetCloseRewardFunction(BaseRewardFunction): OBS_DIM = 39 def get_reward(self, observations: torch.Tensor, actions: torch.Tensor, device: str): gripper = observations[:, :3] obj = observations[:, 4:7] tcp = gripper # self.tcp_center target = torch.Tensor( self._env.unwrapped._target_pos.copy()).to(device) target_to_obj = (obj - target) target_to_obj = torch.norm(target_to_obj, dim=-1) target_to_obj_init = ( torch.Tensor(self._env.unwrapped.obj_init_pos).to(device) - target) target_to_obj_init = torch.norm(target_to_obj_init, dim=-1) in_place = tolerance( target_to_obj, bounds=(0, self._env.unwrapped._target_radius), margin=torch.Tensor([ abs(target_to_obj_init - self._env.unwrapped._target_radius) ]).to(device), sigmoid='long_tail', device=device, ) faucet_reach_radius = 0.01 tcp_to_obj = torch.norm(obj - tcp, dim=-1) tcp_to_obj_init = np.linalg.norm(self._env.unwrapped.obj_init_pos - self._env.unwrapped.init_tcp, axis=-1) reach = tolerance(tcp_to_obj, bounds=(0, faucet_reach_radius), margin=torch.Tensor([ abs(tcp_to_obj_init - faucet_reach_radius) ]).to(device), sigmoid='gaussian', device=device) reward = 2 * reach + 3 * in_place reward *= 2 reward = torch.where( target_to_obj <= self._env.unwrapped._target_radius, torch.Tensor([10.]).to(device), reward) # reward = 10 if target_to_obj <= self._target_radius else reward return reward
Chromico/bk-base
src/api/dataflow/tests/test_modeling/test_algorithm/test_algorithm_controller.py
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available. Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. BK-BASE 蓝鲸基础平台 is licensed under the MIT License. License for BK-BASE 蓝鲸基础平台: -------------------------------------------------------------------- Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" from datetime import datetime import pytest from rest_framework.test import APITestCase from dataflow.modeling.algorithm.algorithm_controller import AlgorithmController from dataflow.modeling.model.platform_model import PlatformModel from dataflow.modeling.models import Algorithm, AlgorithmVersion from dataflow.shared.log import modeling_logger as logger def init_algrithm_list(): def mock_func(*args, **kwargs): algorithm = Algorithm() algorithm.algorithm_name = "spark_decision_tree_classifier" algorithm.algorithm_alias = "decision_tree_classifier" algorithm.description = "description" algorithm.algorithm_original_name = "decision_tree_classifier" algorithm.algorithm_type = "classification" algorithm.generate_type = "system" algorithm.sensitivity = "public" algorithm.project_id = 0 algorithm.run_env = "spark_cluster" algorithm.framework = "spark_mllib" algorithm.created_by = "xxxx" algorithm.created_at = datetime.now() algorithm.updated_by = "xxxx" algorithm.updated_at = datetime.now() return [algorithm] return mock_func def init_algorithm_detail(): def mock_func(*args, **kwargs): algorithm_version = AlgorithmVersion() algorithm_version.id = 1 algorithm_version.algorithm_name = "spark_decision_tree_classifier" algorithm_version.version = 1 algorithm_version.logic = "{}" algorithm_version.config = ( "{" '"feature_columns": [], ' '"label_columns": [' '{"field_name": "label", ' '"field_type": "double", ' '"field_alias": "alias", ' '"field_index": 1, ' '"value": null, ' '"default_value": "label", ' '"allowed_values": null, ' '"sample_value": null, ' '"real_name": "label_col"}' "], " '"predict_output": [' '{"field_name": "features_col", ' '"field_type": "double", ' '"field_alias": "alias", ' '"field_index": 0, ' '"value": null, ' '"default_value": "prediction", ' '"allowed_values": null, ' '"sample_value": null, ' '"real_name": "features_col"}], ' '"training_args": [' '{"field_name": "cache_node_ids", ' '"field_type": "boolean", ' '"field_alias": "alias", ' 
'"field_index": 0, "value": null, ' '"default_value": "false", ' '"allowed_values": ["true", "false"], ' '"sample_value": null, "real_name": "cache_node_ids", "is_advanced": false}], ' '"predict_args": [' '{"field_name": "thresholds", ' '"field_type": "double[]", ' '"field_alias": "alias", ' '"field_index": 0, ' '"value": null, "default_value": null, "allowed_values": null, ' '"sample_value": null, "real_name": "thresholds"}], ' '"feature_columns_mapping": {"spark": ' '{"multi": [{' '"field_name": "features_col", "field_type": "double", ' '"field_alias": "alias", "field_index": 0, "value": null, ' '"default_value": "features", ' '"allowed_values": null, "sample_value": null, ' '"real_name": "features_col", "composite_type": "Vector", ' '"need_interprete": true}], ' '"single": []}}, "feature_colunms_changeable": true}' ) algorithm_version.execute_config = "" algorithm_version.properties = "" algorithm_version.created_by = "xxxx" algorithm_version.created_at = datetime.now() algorithm_version.updated_by = "xxxx" algorithm_version.updated_at = datetime.now() algorithm_version.description = "description" return algorithm_version return mock_func @pytest.fixture(scope="function") def mock_get_user_algorithm_list(): PlatformModel.get_algrithm_by_user = init_algrithm_list() @pytest.fixture(scope="function") def mock_get_algorithm_by_name(): PlatformModel.get_algorithm_by_name = init_algorithm_detail() class TestAlgorithmController(APITestCase): def setUp(self): pass @pytest.mark.usefixtures("mock_get_user_algorithm_list") @pytest.mark.usefixtures("mock_get_algorithm_by_name") def test_get_algorithm_list(self): controller = AlgorithmController() result = controller.get_algorithm_list("spark_mllib") assert "spark_mllib" in result assert len(result["spark_mllib"]) == 1 group = result["spark_mllib"][0] assert len(group["alg_info"]) == 1 algorithm_item = group["alg_info"][0] sql = algorithm_item["sql"] logger.info("result sql:" + sql) expected_sql = "train model 模型名称 \n 
options(algorithm='{algorithm_name}', {param_list})\n from 查询结果集名称".format( algorithm_name="decision_tree_classifier", param_list="features_col=<字段1,字段2,字段3>, label_col=字段名, cache_node_ids='false'", ) logger.info("expected sql:" + expected_sql) assert expected_sql == sql
scipionyx/butterfly-effect
butterfly-effect-frontend-configuration/src/main/java/com/scipionyx/butterflyeffect/frontend/configuration/services/LeftConfigurationMenuService.java
<reponame>scipionyx/butterfly-effect package com.scipionyx.butterflyeffect.frontend.configuration.services; import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import com.scipionyx.butterflyeffect.configuration.model.leftmenu.LeftConfigurationMenuItem; /** * TODO: This must be changed to comply with a API capable of creating such * extra menu on the right side of the top menu bar. * * * @author <NAME> * */ @Component() public class LeftConfigurationMenuService extends AbstractConfigurationMenuService<LeftConfigurationMenuItem> { private static final Logger LOGGER = LoggerFactory.getLogger(LeftConfigurationMenuService.class); /** * */ @Override public void readConfigurations() throws IOException { // List<InputStream> configurations = loadResources("left_configuration_menu.info", null); // LOGGER.info("loading configuration menus, {} found", configurations.size()); // for (InputStream inputStream : configurations) { LeftConfigurationMenuItem[] menuGroups = objectMapper.readValue(inputStream, LeftConfigurationMenuItem[].class); // Add Navigation Collections.addAll(getConfigurations(), menuGroups); } } @Override public Class<LeftConfigurationMenuItem[]> getArrayJavaType() { return null; } }
kdoomsday/kaminalapp
test/daos/doobie/MascotaDaoDoobieSpec.scala
package daos.doobie import doobie.specs2.imports.AnalysisSpec import org.joda.time.DateTime import org.specs2.mutable.Specification import testutil.TestUtil object MascotaDaoDoobieSpec extends Specification with AnalysisSpec { val transactor = TestUtil.transactor() check(MascotaDaoDoobie.qGuardarMascota("", Some(""), Some(0), Some(new DateTime()), 1L)) check(MascotaDaoDoobie.qById(0L)) check(MascotaDaoDoobie.qEditMascota(0L, "", Some(""), Some(0), Some(new DateTime()))) check(MascotaDaoDoobie.qMascotaConCliente(0L)) check(MascotaDaoDoobie.qByLoginCliente("")) }
markazmierczak/Polonite
Stp/Base/Containers/Join.h
// Copyright 2017 Polonite Authors. All rights reserved. // Distributed under MIT license that can be found in the LICENSE file. #ifndef STP_BASE_CONTAINERS_JOIN_H_ #define STP_BASE_CONTAINERS_JOIN_H_ #include "Base/Containers/List.h" namespace stp { template<typename TResult, typename TArray> inline TResult Concat(const TArray& inputs) { TResult result; int result_length = 0; for (int i = 0; i < inputs.size(); ++i) result_length += inputs[i].size(); result.ensureCapacity(result_length); for (int i = 0; i < inputs.size(); ++i) result.append(inputs[i]); return result; } template<typename TResult, typename... TArgs> inline TResult ConcatMany(const TArgs&... args) { using ItemType = typename TResult::ItemType; InitializerList<Span<ItemType>> ilist = { args... }; return Concat<TResult>(makeSpan(ilist)); } template<typename TResult, typename TArray> inline TResult Join(typename TResult::ItemType separator, const TArray& inputs) { TResult result; // Must check for empty due to later decrement when computing output length. if (!inputs.isEmpty()) { int result_length = 0; for (int i = 0; i < inputs.size(); ++i) result_length += inputs[i].size(); result_length += inputs.size() - 1; result.ensureCapacity(result_length); for (int i = 0; i < inputs.size(); ++i) { if (i != 0) { result.add(separator); } result.append(inputs[i]); } } return result; } template<typename TResult, typename... TArgs> inline TResult JoinMany(typename TResult::ItemType separator, const TArgs&... args) { using ItemType = typename TResult::ItemType; InitializerList<Span<ItemType>> ilist = { args... }; return Join<TResult>(move(separator), makeSpan(ilist)); } template<typename TResult, typename TArray> inline TResult Join(Span<typename TResult::ItemType> separator, const TArray& inputs) { TResult result; // Must check for empty due to later decrement when computing output length. 
if (!inputs.isEmpty()) { int result_length = 0; for (int i = 0; i < inputs.size(); ++i) result_length += inputs[i].size(); result_length += (inputs.size() - 1) * separator.size(); result.ensureCapacity(result_length); for (int i = 0; i < inputs.size(); ++i) { if (i != 0) { result.append(separator); } result.append(inputs[i]); } } return result; } template<typename TResult, typename... TArgs> inline TResult JoinMany(Span<typename TResult::ItemType> separator, const TArgs&... args) { using ItemType = typename TResult::ItemType; InitializerList<Span<ItemType>> ilist = { args... }; return Join<TResult>(separator, makeSpan(ilist)); } } // namespace stp #endif // STP_BASE_CONTAINERS_JOIN_H_
AgeOfLearning/material-design-icons
av/icon-twotone-album-element/index.js
<reponame>AgeOfLearning/material-design-icons import styles from './template.css'; import template from './template'; import AoflElement from '@aofl/web-components/aofl-element'; /** * @summary IconTwotoneAlbumElement * @class IconTwotoneAlbumElement * @extends {AoflElement} */ class IconTwotoneAlbumElement extends AoflElement { /** * Creates an instance of IconTwotoneAlbumElement. */ constructor() { super(); } /** * @readonly */ static get is() { return 'icon-twotone-album'; } /** * * @return {Object} */ render() { return super.render(template, [styles]); } } window.customElements.define(IconTwotoneAlbumElement.is, IconTwotoneAlbumElement); export default IconTwotoneAlbumElement;
sshcherbakov/cf-service-tester
src/main/java/io/pivotal/cf/tester/service/StateService.java
package io.pivotal.cf.tester.service; import java.util.concurrent.atomic.AtomicLong; import javax.annotation.PostConstruct; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.data.redis.core.RedisTemplate; import org.springframework.stereotype.Component; @Component public class StateService implements InitializingBean { private static Logger log = LoggerFactory.getLogger(StateService.class); private volatile boolean isRabbitUp = false; private volatile boolean isRedisUp = false; private volatile boolean isDatabaseUp = false; private AtomicLong nextId = new AtomicLong(); @Value("${vcap.application.name:cf-tester}") private String applicationName; @Value("${max.message.id:10000}") private int maxMessageId = 10000; @Autowired(required=false) RedisTemplate<String, String> redisTemplate; @Autowired JdbcChecker jdbcChecker; @PostConstruct public void init() { resetCheckpoints(); } public boolean isRabbitUp() { return isRabbitUp; } public void setRabbitUp() { this.isRabbitUp = true; } public void setRabbitDown() { this.isRabbitUp = false; } public boolean isRedisUp() { return isRedisUp; } public boolean isDatabaseUp() { return isDatabaseUp; } public void setRedisUp() { this.isRedisUp = true; } public void setRedisDown() { this.isRedisUp = false; } public void setDatabaseUp(){ this.isDatabaseUp = true; } public void setDatabaseDown(){ this.isDatabaseUp = false; } @Override public void afterPropertiesSet() throws Exception { if( redisTemplate == null ) { isRedisUp = false; return; } try { isRedisUp = redisTemplate.getConnectionFactory().getConnection() != null; } catch(Exception ex) { isRedisUp = false; } } public long getNextId() { long id = nextId.getAndIncrement(); if( id == maxMessageId ) { // only one thread will see maxMessageId id = 0; nextId.set(id + 1); 
resetCheckpoints(); } return id; } private void resetCheckpoints() { try { if( redisTemplate != null ) { redisTemplate.boundValueOps(applicationName).getOperations().delete(applicationName); } } catch(Exception ex) { log.error("Redis cannot be located. Is it down?"); } } }
joaquinfilipic/get-out
server-side/interface/src/main/java/ar/edu/itba/paw/interfaces/functional/Mergeable.java
<reponame>joaquinfilipic/get-out package ar.edu.itba.paw.interfaces.functional; /** * <p>Esta interfaz permite proporcionar un método para sincronizar el * estado de un entidad modificada con el estado almacenado en el contexto * de persistencia. De esta forma los cambios se reflejan inmediatamente en * los modelos.</p> */ @FunctionalInterface public interface Mergeable<T> { public T update(final T client); }
xwder/health-shop-manage
gd-pojo/src/main/java/com/cqust/pojo/TUserlog.java
package com.cqust.pojo; import java.util.Date; public class TUserlog { private Integer id; private Integer uid; private String username; private Date logindate; private String loginip; private Integer loginstatus; private String loginps; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public Integer getUid() { return uid; } public void setUid(Integer uid) { this.uid = uid; } public String getUsername() { return username; } public void setUsername(String username) { this.username = username == null ? null : username.trim(); } public Date getLogindate() { return logindate; } public void setLogindate(Date logindate) { this.logindate = logindate; } public String getLoginip() { return loginip; } public void setLoginip(String loginip) { this.loginip = loginip == null ? null : loginip.trim(); } public Integer getLoginstatus() { return loginstatus; } public void setLoginstatus(Integer loginstatus) { this.loginstatus = loginstatus; } public String getLoginps() { return loginps; } public void setLoginps(String loginps) { this.loginps = loginps == null ? null : loginps.trim(); } }
helene-t/SciDataTool
SciDataTool/Methods/DataND/transform.py
<gh_stars>0 # -*- coding: utf-8 -*- from SciDataTool.Functions.fft_functions import comp_fft, comp_ifft from SciDataTool.Functions.conversions import rphiz_to_xyz_field, xyz_to_rphiz_field from numpy import apply_along_axis def transform(self, values, axes_list): """Returns the values of the field transformed or converted. Parameters ---------- self: Data a Data object values: ndarray array of the field axes_list: list a list of RequestedAxis objects Returns ------- values: ndarray values of the transformed field """ for axis_requested in axes_list: # Transform (fft, coordinates, etc) if axis_requested.transform == "fft": values = apply_along_axis(comp_fft, axis_requested.index, values) elif axis_requested.transform == "ifft": values = apply_along_axis(comp_ifft, axis_requested.index, values) elif axis_requested.transform == "pol2cart": values = apply_along_axis(rphiz_to_xyz_field, axis_requested.index, values, axis_requested.values[:,1]) elif axis_requested.transform == "cart2pol": values = apply_along_axis(xyz_to_rphiz_field, axis_requested.index, values, axis_requested.values[:,1]) return values
kant/Hackerrank
Tutorials/30DaysofCode/Day14:Scope.java
Difference(int[] a){ elements = new int[a.length]; elements=a; } public void computeDifference(){ int min=100; int max=0; for(int i=0;i<elements.length;i++){ if(elements[i]>max) max=elements[i]; if(elements[i]<min) min=elements[i]; } maximumDifference=max-min; }
arezae4/adversarial-multivariate
mpg_java/src/test/java/edu/uic/cs/purposeful/mpg/target/linear_chain/f1/TestLinearChainF1Classifier.java
<reponame>arezae4/adversarial-multivariate<filename>mpg_java/src/test/java/edu/uic/cs/purposeful/mpg/target/linear_chain/f1/TestLinearChainF1Classifier.java
package edu.uic.cs.purposeful.mpg.target.linear_chain.f1;

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.util.BitSet;
import java.util.List;

import org.apache.commons.lang3.tuple.Triple;
import org.junit.Ignore;
import org.junit.Test;

import com.google.common.collect.Iterables;

import edu.uic.cs.purposeful.common.assertion.Assert;
import edu.uic.cs.purposeful.mpg.common.Misc;
import edu.uic.cs.purposeful.mpg.common.Regularization;
import edu.uic.cs.purposeful.mpg.common.ValuePrecision;
import edu.uic.cs.purposeful.mpg.learning.MaximizerPredictor.Prediction;
import edu.uic.cs.purposeful.mpg.learning.linear_chain.LinearChainDataSet;
import edu.uic.cs.purposeful.mpg.learning.linear_chain.MPGLinearChainClassifer;
import edu.uic.cs.purposeful.mpg.target.linear_chain.LinearChain;

/**
 * Round-trip test for {@link LinearChainF1}: learns model weights on a small bundled data set and
 * then predicts on the same data, expecting (a) the classifier's reported score to equal an
 * independently computed F1, and (b) the predicted tag sequence to reproduce the golden one.
 */
public class TestLinearChainF1Classifier {

  /** Tags exercised by both tests; each tag gets a fresh classifier instance. */
  private static final int[] TARGET_TAGS = {1, 2, 5};

  /** L2 regularization weight used when learning. */
  private static final double L2_WEIGHT = 0.001;

  @Ignore
  @Test
  public void testLearnAndPredictOnSameData_binarized() throws Exception {
    learnPredictAndVerify(true);
  }

  @Test
  public void testLearnAndPredictOnSameData() throws Exception {
    learnPredictAndVerify(false);
  }

  /**
   * Shared body of both tests; they previously duplicated this code and differed only in whether
   * the data set is binarized against the target tag before learning.
   *
   * @param binarize whether to binarize the training data w.r.t. the target tag
   */
  private static void learnPredictAndVerify(boolean binarize) throws Exception {
    for (int targetTag : TARGET_TAGS) {
      MPGLinearChainClassifer classifier =
          new MPGLinearChainClassifer(LinearChainF1.class, targetTag);
      File file = new File(TestLinearChainF1Classifier.class
          .getResource("TestLinearChainF1Classifier.train").toURI());
      LinearChainDataSet dataSet = LinearChainDataSet.loadFromFile(file);
      if (binarize) {
        dataSet = LinearChainDataSet.binarize(dataSet, targetTag);
      }

      double[] thetas = classifier.learn(dataSet, Regularization.l2(L2_WEIGHT));
      List<Prediction<LinearChain>> predictions = classifier.predict(dataSet);
      // The bundled data set contains a single chain — anything else is a test-setup error.
      Prediction<LinearChain> prediction = Iterables.getOnlyElement(predictions);
      Triple<Double, Double, Double> precisionRecallF1 = evaluate(predictions, targetTag);

      System.out.println("targetTag:\t" + targetTag);
      System.out.println("Thetas:\t" + Misc.toDisplay(thetas));
      System.out.println("Golden:\t" + prediction.getGoldenPermutation());
      System.out.println("Prediction:\t" + prediction.getPredictionPermutation());
      System.out.println("Precision:\t" + precisionRecallF1.getLeft());
      System.out.println("Recall:\t" + precisionRecallF1.getMiddle());
      System.out.println("F1:\t" + precisionRecallF1.getRight());

      // The score the classifier reports must agree with the F1 computed here from the
      // confusion matrix.
      assertEquals(prediction.getScore(), precisionRecallF1.getRight(),
          ValuePrecision.POINT_8_ZEROS_ONE.getValuePrecision());
      System.out.println("Probability:\t" + prediction.getProbability());
      // Learning and predicting on the same data should reproduce the golden binary
      // existence sequence exactly.
      assertEquals(prediction.getGoldenPermutation().getExistenceSequence(targetTag),
          prediction.getPredictionPermutation().getExistenceSequence(targetTag));
      System.out.println();
    }
  }

  /**
   * Computes (precision, recall, F1) of the predicted tag sequences against the golden ones,
   * treating presence of the target tag as the positive class.
   */
  private static Triple<Double, Double, Double> evaluate(
      List<Prediction<LinearChain>> predictions, int targetTag) {
    // confusionMatrix[golden][predicted]; index 1 means "target tag present".
    int[][] confusionMatrix = new int[2][2];
    for (Prediction<LinearChain> prediction : predictions) {
      LinearChain goldenLinearChain = prediction.getGoldenPermutation();
      LinearChain predictedLinearChain = prediction.getPredictionPermutation();
      Assert.isTrue(goldenLinearChain.getLength() == predictedLinearChain.getLength());

      BitSet goldenBinarySequence = goldenLinearChain.getExistenceSequence(targetTag);
      BitSet predictedBinarySequence = predictedLinearChain.getExistenceSequence(targetTag);
      for (int index = 0; index < goldenLinearChain.getLength(); index++) {
        int golden = goldenBinarySequence.get(index) ? 1 : 0;
        int predicted = predictedBinarySequence.get(index) ? 1 : 0;
        confusionMatrix[golden][predicted] += 1;
      }
    }

    double precision = precision(confusionMatrix);
    double recall = recall(confusionMatrix);
    double f1 = f1(precision, recall);
    return Triple.of(precision, recall, f1);
  }

  /** Harmonic mean of precision and recall; 0 when both are 0. */
  private static double f1(double precision, double recall) {
    if ((precision + recall) == 0) {
      return 0;
    }
    return 2 * precision * recall / (precision + recall);
  }

  /** True positives over all predicted positives; 0 when nothing was predicted positive. */
  private static double precision(int[][] confusionMatrix) {
    double predictedPositive = confusionMatrix[0][1] + confusionMatrix[1][1];
    if (predictedPositive == 0) {
      return 0;
    }
    return confusionMatrix[1][1] / predictedPositive;
  }

  /** True positives over all golden positives; 0 when there are no golden positives. */
  private static double recall(int[][] confusionMatrix) {
    double goldenPositive = confusionMatrix[1][0] + confusionMatrix[1][1];
    if (goldenPositive == 0) {
      return 0;
    }
    return confusionMatrix[1][1] / goldenPositive;
  }
}
Soum-Soum/Tensorflow_Face_Finder
venv1/Lib/site-packages/tensorflow/include/tensorflow/core/framework/reader_interface.h
<reponame>Soum-Soum/Tensorflow_Face_Finder
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_FRAMEWORK_READER_INTERFACE_H_
#define TENSORFLOW_FRAMEWORK_READER_INTERFACE_H_

#include <memory>
#include <string>

#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/resource_mgr.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/platform/types.h"

namespace tensorflow {

class QueueInterface;
class ReaderInterface;

// Readers are the mechanism for reading records from files in
// TensorFlow graphs.  Each supported file format has a corresponding
// ReaderInterface descendant and a corresponding Op & OpKernel
// (implemented using ReaderOpKernel from reader_op_kernel.h).
//
// To use a Reader, you first encode "work" (some string, typically a
// filename) in the Reader's "work queue".  It then processes the
// "work" (reading records from the file), to produce key/value
// strings.  The methods of this class are called by ReaderFoo ops,
// so see ../ops/io_ops.cc for detailed descriptions.
//
// All descendants of this class must be thread-safe.
//
// Pure-virtual interface: implementations live elsewhere; this header only
// declares the contract.  Lifetime is managed through ResourceBase
// (reference-counted resource in the resource manager).
class ReaderInterface : public ResourceBase {
 public:
  // Read a single record into *key / *value.  May get more work from
  // *queue if the current work is complete.  Sets the status on
  // *context with an OutOfRange Status if the current work is
  // complete and the queue is done (closed and empty).
  // This method may block.
  virtual void Read(QueueInterface* queue, string* key, string* value,
                    OpKernelContext* context) = 0;

  // Read up to num_records records into keys / values.  May get more work from
  // *queue if the current work is complete.  Sets the status on
  // *context with an OutOfRange Status if the current work is
  // complete and the queue is done (closed and empty).
  // This method may block.
  // The std::vector keys/value pointers are assumed to point to empty
  // structures (that have most likely been reserve(num_records)).
  // Returns how many records were actually read (may be fewer than
  // num_records, e.g. when the queue runs dry).
  virtual int64 ReadUpTo(const int64 num_records, QueueInterface* queue,
                         std::vector<string>* keys, std::vector<string>* value,
                         OpKernelContext* context) = 0;

  // Restore this reader to its newly-constructed state.
  virtual Status Reset() = 0;

  // Accessors for progress counters (monotonic within a reader's lifetime,
  // until Reset()).
  virtual int64 NumRecordsProduced() = 0;
  virtual int64 NumWorkUnitsCompleted() = 0;

  // -- Serialization/Restoration support --
  // Not all readers will support saving and restoring state.
  virtual Status SerializeState(string* state) = 0;
  // Note: Must Reset on error.
  virtual Status RestoreState(const string& state) = 0;

  // Human-readable description used by the resource manager.
  string DebugString() override { return "a reader"; }

 protected:
  // Protected: instances are destroyed via ResourceBase's ref-counting,
  // never deleted directly by callers.
  virtual ~ReaderInterface() {}
};

}  // namespace tensorflow

#endif  // TENSORFLOW_FRAMEWORK_READER_INTERFACE_H_
joshvh/ruote
lib/openwfe/expool/threaded_expstorage.rb
<gh_stars>1-10
#--
# Copyright (c) 2007-2009, <NAME>, <EMAIL>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++

require 'thread'

module OpenWFE

  #
  # This mixin gathers all the logic for a threaded expression storage:
  # one that doesn't persist expressions immediately, but batches queued
  # :update / :remove events and collapses redundant operations on the
  # same expression (only the latest operation per fei survives a batch).
  # Using this threaded storage brings a very important perf benefit.
  #
  module ThreadedStorageMixin

    #
    # Will take care of stopping the 'queue processing' thread.
    # After this, further events are persisted synchronously (see queue()).
    # The :stop symbol is a sentinel pushed onto @queue; the worker thread
    # breaks out of its loop when it pops it.
    #
    def stop

      @stopped = true
      @queue.push :stop
    end

    #
    # makes sure that the queue isn't actually preparing a batch
    # before returning a result.
    # Thread.pass gives the worker thread a chance to grab @mutex first;
    # the synchronize then blocks until any in-flight batch has been
    # persisted, so the query sees up-to-date storage.
    #
    def find_expressions (options={})

      Thread.pass
      @mutex.synchronize do
        super(options)
      end
    end

    protected

    #
    # starts the thread that does the actual persistence.
    # Must be called by the including class before any events are queued
    # (it initializes @mutex and @queue).
    #
    def start_queue

      @mutex = Mutex.new
      @queue = Queue.new

      Thread.new do
        loop do

          # block until at least one event is available
          events = [ @queue.pop ]

          @mutex.synchronize do

            # NOTE(review): yielding 14 times is a heuristic to let more
            # events accumulate before persisting a batch; the exact count
            # appears arbitrary.
            14.times { Thread.pass }
              #
              # gather some steam :
              # let jobs accumulate

            @queue.size.times do
              events << @queue.pop
            end

            process_events events
          end

          # the :stop sentinel (pushed by stop()) ends the worker thread
          break if events.include?(:stop)
        end
      end
    end

    #
    # queues an event for later (well within a second) persistence.
    # Once stop() has been called, falls back to immediate, synchronous
    # persistence so no event is lost after shutdown.
    #
    def queue (event, fei, fexp=nil)

      if @stopped
        process_event event, fei, fexp
      else
        @queue.push [ event, fei, fexp ]
      end
    end

    #
    # persists a batch of events, first collapsing them so that only the
    # most recent operation per fei (event[1]) is performed; the :stop
    # sentinel is filtered out here.
    #
    def process_events (events)

      ldebug { "process_events() #{events.size} events" }

      # reducing the operation count
      # (hash keyed by fei; later events overwrite earlier ones)

      events = events.inject({}) do |r, event|
        r[event[1]] = event if event != :stop
        r
      end

      ldebug { "process_events() #{events.size} events remaining" }

      # perform the remaining operations

      events.each_value do |event, fei, fexp|
        process_event event, fei, fexp
      end
    end

    #
    # persists a single event: :update stores the expression, anything
    # else removes it. Exceptions are logged and swallowed so one bad
    # event cannot kill the worker thread.
    #
    def process_event (event, fei, fexp)

      begin
        if event == :update
          self[fei] = fexp
        else
          self.delete(fei)
        end
      rescue Exception => e
        lwarn do
          "process_event() ':#{event}' exception\n" +
          OpenWFE::exception_to_s(e)
        end
      end
    end

    #
    # Adds the queue() method as an observer to the update and remove
    # events of the expression pool.
    # :update and :remove mean changes to expressions in the persistence
    # that's why they are observed.
    #
    def observe_expool

      get_expression_pool.add_observer(:update) do |event, fei, fe|
        #ldebug { ":update for #{fei.to_debug_s}" }
        queue(event, fei, fe)
      end
      get_expression_pool.add_observer(:remove) do |event, fei|
        #ldebug { ":remove for #{fei.to_debug_s}" }
        queue(event, fei)
      end
    end
  end
end
antonioalfa22/Compilador
Compilador/src/ast/tipo/Char.java
<reponame>antonioalfa22/Compilador<filename>Compilador/src/ast/tipo/Char.java<gh_stars>0 package ast.tipo; import visitor.Visitor; /** * @author <NAME> * */ public class Char extends AbstractTipo { private static Char instancia; public Char(int line, int column) { super(line, column); } public static Char getInstancia() { if (instancia == null) { instancia = new Char(0, 0); } return instancia; } @Override public Object accept(Visitor v, Object object) { return v.visit(this, object); } @Override public boolean esLogico() { return true; } @Override public Tipo aritmetica(Tipo t) { if((t instanceof Entero) || (t instanceof TipoError) || (t instanceof Real)) return t; else if(t instanceof Char) return Entero.getInstancia(); return null; } @Override public Tipo aritmetica() { return this; } @Override public Tipo comparacion(Tipo t) { if((t instanceof Entero) || (t instanceof Char) || (t instanceof Real)) return Entero.getInstancia(); else if(t instanceof TipoError) return t; return null; } @Override public Tipo logica(Tipo t) { if((t instanceof Entero) || (t instanceof Char)) return Entero.getInstancia(); else if(t instanceof TipoError) return t; return null; } @Override public Tipo logica() { return Entero.getInstancia(); } @Override public Tipo cast(Tipo t) { return t; } @Override public boolean esBasico() { return true; } @Override public Tipo promocionaA(Tipo t) { if((t instanceof TipoError) || (t instanceof Entero) || (t instanceof Char)) return t; return null; } @Override public int getNumeroBytes() { return 1; } @Override public String sufijo() { return "b"; } @Override public Tipo esMayor(Tipo c) { if(c instanceof Entero || c instanceof Real) return c; else if(c instanceof Char) return Entero.getInstancia(); return null; } @Override public String toString() { return "char"; } }
zhaoze1991/dropbox-sdk-obj-c
Source/ObjectiveDropboxOfficial/Shared/Generated/ApiObjects/Sharing/Headers/DBSHARINGAlphaResolvedVisibility.h
///
/// Copyright (c) 2016 Dropbox, Inc. All rights reserved.
///
/// Auto-generated by Stone, do not modify.
///

#import <Foundation/Foundation.h>

#import "DBSerializableProtocol.h"

@class DBSHARINGAlphaResolvedVisibility;

NS_ASSUME_NONNULL_BEGIN

#pragma mark - API Object

///
/// The `AlphaResolvedVisibility` union.
///
/// See the documentation for `ResolvedVisibility`.
///
/// This class implements the `DBSerializable` protocol (serialize and
/// deserialize instance methods), which is required for all Obj-C SDK API route
/// objects.
///
@interface DBSHARINGAlphaResolvedVisibility : NSObject <DBSerializable, NSCopying>

#pragma mark - Instance fields

/// The `DBSHARINGAlphaResolvedVisibilityTag` enum type represents the possible
/// tag states with which the `DBSHARINGAlphaResolvedVisibility` union can
/// exist. (Closed enum: no cases beyond these will ever be added.)
typedef NS_CLOSED_ENUM(NSInteger, DBSHARINGAlphaResolvedVisibilityTag){
    /// Anyone who has received the link can access it. No login required.
    DBSHARINGAlphaResolvedVisibilityPublic,

    /// Only members of the same team can access the link. Login is required.
    DBSHARINGAlphaResolvedVisibilityTeamOnly,

    /// A link-specific password is required to access the link. Login is not
    /// required.
    DBSHARINGAlphaResolvedVisibilityPassword,

    /// Only members of the same team who have the link-specific password can
    /// access the link. Login is required.
    DBSHARINGAlphaResolvedVisibilityTeamAndPassword,

    /// Only members of the shared folder containing the linked file can access
    /// the link. Login is required.
    DBSHARINGAlphaResolvedVisibilitySharedFolderOnly,

    /// The link merely points the user to the content, and does not grant any
    /// additional rights. Existing members of the content who use this link can
    /// only access the content with their pre-existing access rights. Either on
    /// the file directly, or inherited from a parent folder.
    DBSHARINGAlphaResolvedVisibilityNoOne,

    /// Only the current user can view this link.
    DBSHARINGAlphaResolvedVisibilityOnlyYou,

    /// (no description).
    DBSHARINGAlphaResolvedVisibilityOther,

};

/// Represents the union's current tag state.
@property (nonatomic, readonly) DBSHARINGAlphaResolvedVisibilityTag tag;

#pragma mark - Constructors

///
/// Initializes union class with tag state of "public".
///
/// Description of the "public" tag state: Anyone who has received the link can
/// access it. No login required.
///
/// @return An initialized instance.
///
- (instancetype)initWithPublic;

///
/// Initializes union class with tag state of "team_only".
///
/// Description of the "team_only" tag state: Only members of the same team can
/// access the link. Login is required.
///
/// @return An initialized instance.
///
- (instancetype)initWithTeamOnly;

///
/// Initializes union class with tag state of "password".
///
/// Description of the "password" tag state: A link-specific password is
/// required to access the link. Login is not required.
///
/// @return An initialized instance.
///
- (instancetype)initWithPassword;

///
/// Initializes union class with tag state of "team_and_password".
///
/// Description of the "team_and_password" tag state: Only members of the same
/// team who have the link-specific password can access the link. Login is
/// required.
///
/// @return An initialized instance.
///
- (instancetype)initWithTeamAndPassword;

///
/// Initializes union class with tag state of "shared_folder_only".
///
/// Description of the "shared_folder_only" tag state: Only members of the
/// shared folder containing the linked file can access the link. Login is
/// required.
///
/// @return An initialized instance.
///
- (instancetype)initWithSharedFolderOnly;

///
/// Initializes union class with tag state of "no_one".
///
/// Description of the "no_one" tag state: The link merely points the user to
/// the content, and does not grant any additional rights. Existing members of
/// the content who use this link can only access the content with their
/// pre-existing access rights. Either on the file directly, or inherited from a
/// parent folder.
///
/// @return An initialized instance.
///
- (instancetype)initWithNoOne;

///
/// Initializes union class with tag state of "only_you".
///
/// Description of the "only_you" tag state: Only the current user can view this
/// link.
///
/// @return An initialized instance.
///
- (instancetype)initWithOnlyYou;

///
/// Initializes union class with tag state of "other".
///
/// @return An initialized instance.
///
- (instancetype)initWithOther;

- (instancetype)init NS_UNAVAILABLE;

#pragma mark - Tag state methods

///
/// Retrieves whether the union's current tag state has value "public".
///
/// @return Whether the union's current tag state has value "public".
///
- (BOOL)isPublic;

///
/// Retrieves whether the union's current tag state has value "team_only".
///
/// @return Whether the union's current tag state has value "team_only".
///
- (BOOL)isTeamOnly;

///
/// Retrieves whether the union's current tag state has value "password".
///
/// @return Whether the union's current tag state has value "password".
///
- (BOOL)isPassword;

///
/// Retrieves whether the union's current tag state has value
/// "team_and_password".
///
/// @return Whether the union's current tag state has value "team_and_password".
///
- (BOOL)isTeamAndPassword;

///
/// Retrieves whether the union's current tag state has value
/// "shared_folder_only".
///
/// @return Whether the union's current tag state has value
/// "shared_folder_only".
///
- (BOOL)isSharedFolderOnly;

///
/// Retrieves whether the union's current tag state has value "no_one".
///
/// @return Whether the union's current tag state has value "no_one".
///
- (BOOL)isNoOne;

///
/// Retrieves whether the union's current tag state has value "only_you".
///
/// @return Whether the union's current tag state has value "only_you".
///
- (BOOL)isOnlyYou;

///
/// Retrieves whether the union's current tag state has value "other".
///
/// @return Whether the union's current tag state has value "other".
///
- (BOOL)isOther;

///
/// Retrieves string value of union's current tag state.
///
/// @return A human-readable string representing the union's current tag state.
///
- (NSString *)tagName;

@end

#pragma mark - Serializer Object

///
/// The serialization class for the `DBSHARINGAlphaResolvedVisibility` union.
///
@interface DBSHARINGAlphaResolvedVisibilitySerializer : NSObject

///
/// Serializes `DBSHARINGAlphaResolvedVisibility` instances.
///
/// @param instance An instance of the `DBSHARINGAlphaResolvedVisibility` API
/// object.
///
/// @return A json-compatible dictionary representation of the
/// `DBSHARINGAlphaResolvedVisibility` API object.
///
+ (nullable NSDictionary<NSString *, id> *)serialize:(DBSHARINGAlphaResolvedVisibility *)instance;

///
/// Deserializes `DBSHARINGAlphaResolvedVisibility` instances.
///
/// @param dict A json-compatible dictionary representation of the
/// `DBSHARINGAlphaResolvedVisibility` API object.
///
/// @return An instantiation of the `DBSHARINGAlphaResolvedVisibility` object.
///
+ (DBSHARINGAlphaResolvedVisibility *)deserialize:(NSDictionary<NSString *, id> *)dict;

@end

NS_ASSUME_NONNULL_END
rlugojr/goojs
visual-test/goo/fsmpack/LightSwitchGame/LightSwitchGame-vtest.js
goo.V.attachToGlobal();

// Builds a two-state finite state machine ('on'/'off') that toggles the given
// lamp's light range between 1 and 10 on every left mouse-down.
function getFSMComponent(entity) {
	var component = new StateMachineComponent();
	var machine = new Machine('switch');

	var onState = new State('on');
	onState.addAction(new MouseDownAction(null, {
		transitions: { mouseLeftDown: 'toOff' }
	}));
	onState.addAction(new SetLightRangeAction(null, {
		entity: entity,
		range: 1,
		everyFrame: false
	}));
	onState.setTransition('toOff', 'off');

	var offState = new State('off');
	offState.addAction(new MouseDownAction(null, {
		transitions: { mouseLeftDown: 'toOn' }
	}));
	offState.addAction(new SetLightRangeAction(null, {
		entity: entity,
		range: 10,
		everyFrame: false
	}));
	offState.setTransition('toOn', 'on');

	machine.addState(onState);
	machine.addState(offState);
	component.addMachine(machine);
	return component;
}

// Places one clickable "switch" box at (x, y, z), wired to control lightEntity.
function addBox(x, y, z, lightEntity) {
	var boxEntity = world.createEntity(new Box(), new Material(ShaderLib.simpleLit), [x, y, z]);
	boxEntity.setComponent(getFSMComponent(lightEntity));
	boxEntity.addToWorld();
}

// Lays out the switch boxes centered on the origin along the x axis.
function addBoxes(lightEntity) {
	var nBoxes = 1;
	var center = (nBoxes - 1) / 2;
	for (var i = 0; i < nBoxes; i++) {
		addBox((i - center) * 4, 0, 0, lightEntity);
	}
}

// Derives an RGB triplet from a position; the three channels are phase-shifted
// cosines of the coordinate sum, mapped into [0, 1].
function getColor(x, y, z) {
	var step = 1.9;
	var base = x + y + z;
	return [
		Math.cos(base) / 2 + 0.5,
		Math.cos(base + step) / 2 + 0.5,
		Math.cos(base + step * 2) / 2 + 0.5
	];
}

// Creates a lamp: a sphere with a point light whose color follows getColor.
function addLamp(x, y, z) {
	var color = getColor(x, y, z);

	var lampMaterial = new Material(ShaderLib.simpleColored);
	lampMaterial.uniforms.color = color;

	var light = new PointLight(Vector3.fromArray(color));
	light.range = 10;

	return world.createEntity(new Sphere(32, 32), lampMaterial, light, 'lamp1', [x, y, z]).addToWorld();
}

// Lays out the lamps centered on the origin, 5 units up, and returns them.
function addLamps() {
	var nLamps = 1;
	var center = (nLamps - 1) / 2;
	var lampEntities = [];
	for (var i = 0; i < nLamps; i++) {
		lampEntities.push(addLamp((i - center) * 4, 5, 0));
	}
	return lampEntities;
}

var gooRunner = V.initGoo();
var world = gooRunner.world;

world.setSystem(new StateMachineSystem(gooRunner));

V.addOrbitCamera();

var lampEntities = addLamps();
addBoxes(lampEntities[0]);

V.process();
liubaixing/febs-dev
src/main/java/com/febs/shangpin/mapper/ColorMapper.java
package com.febs.shangpin.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;

import com.febs.shangpin.entity.Color;

/**
 * Color (颜色) Mapper.
 *
 * Inherits the standard MyBatis-Plus CRUD operations for the {@code Color}
 * entity from {@code BaseMapper}; no custom queries are declared here.
 *
 * @author liubaixing
 * @date 2020-05-22 00:59:30
 */
public interface ColorMapper extends BaseMapper<Color> {

}