code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/**
* Copyright (c) 2015-2022, Michael Yang 杨福海 (fuhai999@gmail.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.jboot.support.metric.annotation;
import java.lang.annotation.*;
/**
 * Method-level marker annotation that enables a metric timer for the
 * annotated method. Runtime-retained and inherited by subclasses.
 */
@Documented
@Target(ElementType.METHOD)
@Inherited
@Retention(RetentionPolicy.RUNTIME)
public @interface EnableMetricTimer {

    // Metric name; the empty-string default presumably means "derive a default
    // name" — confirm against the metric interceptor that reads this annotation.
    String value() default "";
}
| yangfuhai/jboot | src/main/java/io/jboot/support/metric/annotation/EnableMetricTimer.java | Java | apache-2.0 | 888 |
<?php
// Sends the "Estimado Representante" contact-form submission by email.
header("Content-Type: text/html;charset=utf-8");

// Read form input defensively: missing fields become empty strings instead of
// raising undefined-index notices.
$name = isset($_POST['first_name']) ? trim($_POST['first_name']) : '';
$email = isset($_POST['email']) ? trim($_POST['email']) : '';
$message = isset($_POST['comments']) ? $_POST['comments'] : '';

// SECURITY: strip CR/LF from values that end up in mail headers so a visitor
// cannot inject extra headers (classic email header injection).
$name = str_replace(array("\r", "\n"), '', $name);
$email = str_replace(array("\r", "\n"), '', $email);

$to = "jgo@camaraderepresentantes.org";
$subject = "Estimado Representante";

// SECURITY: escape user-supplied values before embedding them in the HTML
// body so markup/script in the form fields is rendered inert.
$safeName = htmlspecialchars($name, ENT_QUOTES, 'UTF-8');
$safeEmail = htmlspecialchars($email, ENT_QUOTES, 'UTF-8');
$safeMessage = nl2br(htmlspecialchars($message, ENT_QUOTES, 'UTF-8'));

$body = '
<html>
<head>
<title>Estimado Representante</title>
</head>
<body>
<p><b>Name: </b> ' . $safeName . '</p>
<p><b>Email: </b> ' . $safeEmail . '</p>
<p><b>Message: </b> ' . $safeMessage . '</p>
</body>
</html>
';

$headers = "MIME-Version: 1.0\r\n";
$headers .= "Content-type: text/html; charset=utf-8\r\n";
$headers .= "Bcc: estimadosenador@gmail.com" . "\r\n";
// Only use the visitor's address in From: when it validates as an email;
// otherwise fall back to the site's own address so delivery still works.
if (filter_var($email, FILTER_VALIDATE_EMAIL)) {
    $headers .= "From: " . $name . " <" . $email . ">\r\n";
} else {
    $headers .= "From: Estimado Representante <" . $to . ">\r\n";
}
$sended = mail($to, $subject, $body, $headers);
?>
<html>
<head>
<title>Estimado Representante</title>
<link rel="stylesheet" type="text/css" href="style.css">
</head>
<body>
<div class="bigone">
<div class="menu clearfix">
<a href="index.html"><h2>Home</h2></a>
<a href="about.html"><h2>About</h2></a>
</div>
<div class="line"></div>
<div class="cta">
<h1>Estimado Representante,</h1>
<h4>
Esta página es dedicada al pueblo puertorriqueño para ejercer nuestro derecho de libertad de expresión <br>y exigir el más alto respeto y cumplimiento a nuestros representantes legislativos. Este canal será una fuente de ideas, uno que fomente la unidad y el progreso, uno que demande fiscalización, responsabilidad, ética, e igualdad.<br>
<br><b>Puertorriqueño</b>, felicita, comenta, y protesta pero siempre con propiedad y respeto. Di lo que ves. Di lo que piensas. La libertad de expresión te lo permite. La democracia te lo pide. Porque todo representante tiene que escuchar para poder cumplir.
</h4>
</div>
<div class="about">
<h5><b>Tu mensaje fue enviado.</b><br>
<br>Si te gustó la página, por favor compártela, y así lograremos que más puertorriqueños se expresen.<br>
<br>Gracias por usar Estimado Representante.</h5>
</div>
</body>
</html>
| efrenpagan/estimadolegislador | jgo.php | PHP | apache-2.0 | 2,041 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.fhuss.kafka.streams.cep.core.state.internal;
import java.util.Objects;
/**
* Class for aggregated state.
*
* @param <K> the record key type.
*/
/**
 * Immutable pairing of a record key with its {@link Aggregate} state.
 *
 * @param <K> the record key type.
 */
public class Aggregated<K> {

    private final K key;
    private final Aggregate aggregate;

    /**
     * Creates a new {@link Aggregated} instance.
     *
     * @param key       the record key
     * @param aggregate the instance of {@link Aggregate}.
     */
    public Aggregated(final K key, final Aggregate aggregate) {
        this.key = key;
        this.aggregate = aggregate;
    }

    public K getKey() {
        return key;
    }

    public Aggregate getAggregate() {
        return aggregate;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        // Exact-class comparison keeps equals symmetric across subclasses.
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        final Aggregated<?> other = (Aggregated<?>) o;
        return Objects.equals(this.key, other.key)
            && Objects.equals(this.aggregate, other.aggregate);
    }

    @Override
    public int hashCode() {
        return Objects.hash(key, aggregate);
    }

    @Override
    public String toString() {
        return "Aggregated{" + "key=" + key + ", aggregate=" + aggregate + '}';
    }
}
| fhussonnois/kafkastreams-cep | core/src/main/java/com/github/fhuss/kafka/streams/cep/core/state/internal/Aggregated.java | Java | apache-2.0 | 2,076 |
/*
* ! ${copyright}
*/
sap.ui.define([
	"delegates/odata/v4/TableDelegate",
	"sap/ui/core/Core"
], function(
	TableDelegate,
	Core
) {
	"use strict";

	/**
	 * Test delegate for OData V4.
	 */
	var ODataTableDelegate = Object.assign({}, TableDelegate);

	/**
	 * Updates the binding info with the relevant path and model from the metadata.
	 *
	 * @param {Object} oTable The MDC table instance
	 * @param {Object} oBindingInfo The bindingInfo of the table
	 */
	ODataTableDelegate.updateBindingInfo = function(oTable, oBindingInfo) {
		TableDelegate.updateBindingInfo.apply(this, arguments);

		var oFilterBar = Core.byId(oTable.getFilter());
		if (oFilterBar) {
			// get the basic search
			var sSearchText = oFilterBar.getSearch instanceof Function ? oFilterBar.getSearch() : "";
			if (sSearchText && sSearchText.indexOf(" ") === -1) { // to allow search for "(".....
				// Quote single-word terms so special characters match literally;
				// embedded double quotes are backslash-escaped per the OData
				// $search phrase syntax (this was a long-standing TODO).
				sSearchText = '"' + sSearchText.replace(/"/g, '\\"') + '"';
			} // if it contains spaces allow operators like OR...
			oBindingInfo.parameters.$search = sSearchText || undefined;
		}
	};

	return ODataTableDelegate;
});
| SAP/openui5 | src/sap.ui.mdc/test/sap/ui/mdc/internal/TableWithFilterBar/delegate/GridTable.delegate.js | JavaScript | apache-2.0 | 1,110 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigateway.model.transform;
import java.util.List;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.apigateway.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* UpdateMethodResponseRequestMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
/**
 * UpdateMethodResponseRequestMarshaller
 *
 * <p>Generated marshaller: binds the four REST identifiers to URI path
 * segments and the patch operations to the JSON payload.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UpdateMethodResponseRequestMarshaller {

    // Path parameter {restapi_id}.
    private static final MarshallingInfo<String> RESTAPIID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
            .marshallLocationName("restapi_id").build();
    // Path parameter {resource_id}.
    private static final MarshallingInfo<String> RESOURCEID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
            .marshallLocationName("resource_id").build();
    // Path parameter {http_method}.
    private static final MarshallingInfo<String> HTTPMETHOD_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
            .marshallLocationName("http_method").build();
    // Path parameter {status_code}.
    private static final MarshallingInfo<String> STATUSCODE_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
            .marshallLocationName("status_code").build();
    // JSON payload member "patchOperations".
    private static final MarshallingInfo<List> PATCHOPERATIONS_BINDING = MarshallingInfo.builder(MarshallingType.LIST)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("patchOperations").build();

    // Stateless singleton shared by all callers.
    private static final UpdateMethodResponseRequestMarshaller instance = new UpdateMethodResponseRequestMarshaller();

    public static UpdateMethodResponseRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(UpdateMethodResponseRequest updateMethodResponseRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateMethodResponseRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateMethodResponseRequest.getRestApiId(), RESTAPIID_BINDING);
            protocolMarshaller.marshall(updateMethodResponseRequest.getResourceId(), RESOURCEID_BINDING);
            protocolMarshaller.marshall(updateMethodResponseRequest.getHttpMethod(), HTTPMETHOD_BINDING);
            protocolMarshaller.marshall(updateMethodResponseRequest.getStatusCode(), STATUSCODE_BINDING);
            protocolMarshaller.marshall(updateMethodResponseRequest.getPatchOperations(), PATCHOPERATIONS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-api-gateway/src/main/java/com/amazonaws/services/apigateway/model/transform/UpdateMethodResponseRequestMarshaller.java | Java | apache-2.0 | 3,397 |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
from past.builtins import basestring
from datetime import datetime
import logging
from urllib.parse import urlparse
from time import sleep
import airflow
from airflow import hooks, settings
from airflow.exceptions import AirflowException, AirflowSensorTimeout, AirflowSkipException
from airflow.models import BaseOperator, TaskInstance, Connection as DB
from airflow.hooks.base_hook import BaseHook
from airflow.utils.state import State
from airflow.utils.decorators import apply_defaults
class BaseSensorOperator(BaseOperator):
    '''
    Sensor operators are derived from this class an inherit these attributes.

    Sensor operators keep executing at a time interval and succeed when
    a criteria is met and fail if and when they time out.

    :param soft_fail: Set to true to mark the task as SKIPPED on failure
    :type soft_fail: bool
    :param poke_interval: Time in seconds that the job should wait in
        between each tries
    :type poke_interval: int
    :param timeout: Time, in seconds before the task times out and fails.
    :type timeout: int
    '''
    ui_color = '#e6f1f2'

    @apply_defaults
    def __init__(
            self,
            poke_interval=60,
            timeout=60*60*24*7,
            soft_fail=False,
            *args, **kwargs):
        super(BaseSensorOperator, self).__init__(*args, **kwargs)
        self.poke_interval = poke_interval
        self.soft_fail = soft_fail
        self.timeout = timeout

    def poke(self, context):
        '''
        Function that the sensors defined while deriving this class should
        override.
        '''
        raise AirflowException('Override me.')

    def execute(self, context):
        # Poll poke() until it returns a truthy value; give up once the
        # elapsed wall-clock time exceeds the configured timeout.
        started_at = datetime.now()
        while not self.poke(context):
            if (datetime.now() - started_at).total_seconds() > self.timeout:
                # soft_fail marks the task SKIPPED rather than FAILED.
                if self.soft_fail:
                    raise AirflowSkipException('Snap. Time is OUT.')
                else:
                    raise AirflowSensorTimeout('Snap. Time is OUT.')
            sleep(self.poke_interval)
        logging.info("Success criteria met. Exiting.")
class SqlSensor(BaseSensorOperator):
    """
    Runs a sql statement until a criteria is met. It will keep trying until
    sql returns no row, or if the first cell in (0, '0', '').

    :param conn_id: The connection to run the sensor against
    :type conn_id: string
    :param sql: The sql to run. To pass, it needs to return at least one cell
        that contains a non-zero / empty string value.
    """
    template_fields = ('sql',)
    template_ext = ('.hql', '.sql',)

    @apply_defaults
    def __init__(self, conn_id, sql, *args, **kwargs):
        self.sql = sql
        self.conn_id = conn_id
        super(SqlSensor, self).__init__(*args, **kwargs)

    def poke(self, context):
        """Return True when the query yields a row whose first cell is not a
        zero / empty marker value.
        """
        hook = BaseHook.get_connection(self.conn_id).get_hook()
        logging.info('Poking: ' + self.sql)
        records = hook.get_records(self.sql)
        if not records:
            return False
        # BUGFIX: the original had an unreachable print() after the return;
        # the branch is also collapsed into a single expression.
        return str(records[0][0]) not in ('0', '')
class MetastorePartitionSensor(SqlSensor):
    """
    An alternative to the HivePartitionSensor that talk directly to the
    MySQL db. This was created as a result of observing sub optimal
    queries generated by the Metastore thrift service when hitting
    subpartitioned tables. The Thrift service's queries were written in a
    way that wouldn't leverage the indexes.

    :param schema: the schema
    :type schema: str
    :param table: the table
    :type table: str
    :param partition_name: the partition name, as defined in the PARTITIONS
        table of the Metastore. Order of the fields does matter.
        Examples: ``ds=2016-01-01`` or
        ``ds=2016-01-01/sub=foo`` for a sub partitioned table
    :type partition_name: str
    :param mysql_conn_id: a reference to the MySQL conn_id for the metastore
    :type mysql_conn_id: str
    """
    template_fields = ('partition_name', 'table', 'schema')

    @apply_defaults
    def __init__(
            self, table, partition_name, schema="default",
            mysql_conn_id="metastore_mysql",
            *args, **kwargs):
        self.partition_name = partition_name
        self.table = table
        self.schema = schema
        self.first_poke = True
        self.conn_id = mysql_conn_id
        # NOTE: deliberately calls super(SqlSensor, ...) to skip
        # SqlSensor.__init__ (which requires `sql` up front); the SQL
        # statement is built lazily on the first poke instead.
        super(SqlSensor, self).__init__(*args, **kwargs)

    def poke(self, context):
        if self.first_poke:
            # Build the metastore query once, after templated fields
            # (table/schema/partition_name) have been resolved.
            self.first_poke = False
            if '.' in self.table:
                # Dot notation ("schema.table") overrides the schema argument.
                self.schema, self.table = self.table.split('.')
            self.sql = """
            SELECT 'X'
            FROM PARTITIONS A0
            LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
            LEFT OUTER JOIN DBS C0 ON B0.DB_ID = C0.DB_ID
            WHERE
                B0.TBL_NAME = '{self.table}' AND
                C0.NAME = '{self.schema}' AND
                A0.PART_NAME = '{self.partition_name}';
            """.format(self=self)
        # Delegate execution of the built SQL to SqlSensor.poke.
        return super(MetastorePartitionSensor, self).poke(context)
class ExternalTaskSensor(BaseSensorOperator):
    """
    Waits for a task to complete in a different DAG

    :param external_dag_id: The dag_id that contains the task you want to
        wait for
    :type external_dag_id: string
    :param external_task_id: The task_id that contains the task you want to
        wait for
    :type external_task_id: string
    :param allowed_states: list of allowed states, default is ``['success']``
    :type allowed_states: list
    :param execution_delta: time difference with the previous execution to
        look at, the default is the same execution_date as the current task.
        For yesterday, use [positive!] datetime.timedelta(days=1). Either
        execution_delta or execution_date_fn can be passed to
        ExternalTaskSensor, but not both.
    :type execution_delta: datetime.timedelta
    :param execution_date_fn: function that receives the current execution date
        and returns the desired execution date to query. Either execution_delta
        or execution_date_fn can be passed to ExternalTaskSensor, but not both.
    :type execution_date_fn: callable
    """

    @apply_defaults
    def __init__(
            self,
            external_dag_id,
            external_task_id,
            allowed_states=None,
            execution_delta=None,
            execution_date_fn=None,
            *args, **kwargs):
        super(ExternalTaskSensor, self).__init__(*args, **kwargs)
        self.allowed_states = allowed_states or [State.SUCCESS]
        if execution_delta is not None and execution_date_fn is not None:
            # BUGFIX: the original message was missing a space between its two
            # fragments (rendering as "maybe provided") and named a
            # non-existent `execution_date` parameter instead of
            # `execution_delta`.
            raise ValueError(
                'Only one of `execution_delta` or `execution_date_fn` may '
                'be provided to ExternalTaskSensor; not both.')
        self.execution_delta = execution_delta
        self.execution_date_fn = execution_date_fn
        self.external_dag_id = external_dag_id
        self.external_task_id = external_task_id

    def poke(self, context):
        # Resolve which execution_date of the external DAG to look at.
        if self.execution_delta:
            dttm = context['execution_date'] - self.execution_delta
        elif self.execution_date_fn:
            dttm = self.execution_date_fn(context['execution_date'])
        else:
            dttm = context['execution_date']
        logging.info(
            'Poking for '
            '{self.external_dag_id}.'
            '{self.external_task_id} on '
            '{dttm} ... '.format(**locals()))
        TI = TaskInstance
        session = settings.Session()
        # A non-zero count of matching task instances in an allowed state is
        # truthy and ends the poke loop.
        count = session.query(TI).filter(
            TI.dag_id == self.external_dag_id,
            TI.task_id == self.external_task_id,
            TI.state.in_(self.allowed_states),
            TI.execution_date == dttm,
        ).count()
        session.commit()
        session.close()
        return count
class NamedHivePartitionSensor(BaseSensorOperator):
    """
    Waits for a set of partitions to show up in Hive.

    :param partition_names: List of fully qualified names of the
        partitions to wait for. A fully qualified name is of the
        form schema.table/pk1=pv1/pk2=pv2, for example,
        default.users/ds=2016-01-01. This is passed as is to the metastore
        Thrift client "get_partitions_by_name" method. Note that
        you cannot use logical operators as in HivePartitionSensor.
    :type partition_names: list of strings
    :param metastore_conn_id: reference to the metastore thrift service
        connection id
    :type metastore_conn_id: str
    """
    template_fields = ('partition_names', )

    @apply_defaults
    def __init__(
            self,
            partition_names,
            metastore_conn_id='metastore_default',
            poke_interval=60*3,
            *args,
            **kwargs):
        super(NamedHivePartitionSensor, self).__init__(
            poke_interval=poke_interval, *args, **kwargs)
        if isinstance(partition_names, basestring):
            raise TypeError('partition_names must be an array of strings')
        self.metastore_conn_id = metastore_conn_id
        self.partition_names = partition_names
        # Index of the first partition not yet confirmed; lets successive
        # pokes skip partitions that have already been found.
        self.next_poke_idx = 0

    def parse_partition_name(self, partition):
        """Split 'schema.table/partition' into (schema, table, partition)."""
        try:
            schema, table_partition = partition.split('.')
            table, partition = table_partition.split('/', 1)
            return schema, table, partition
        # BUGFIX: the original bound the exception (`as e`) without using it.
        except ValueError:
            raise ValueError('Could not parse ' + partition)

    def poke(self, context):
        if not hasattr(self, 'hook'):
            # Lazily create and cache the metastore hook on the instance.
            self.hook = airflow.hooks.hive_hooks.HiveMetastoreHook(
                metastore_conn_id=self.metastore_conn_id)

        def poke_partition(partition):
            schema, table, partition = self.parse_partition_name(partition)
            logging.info(
                'Poking for {schema}.{table}/{partition}'.format(**locals())
            )
            return self.hook.check_for_named_partition(
                schema, table, partition)

        # Check partitions in order, remembering progress across pokes.
        while self.next_poke_idx < len(self.partition_names):
            if poke_partition(self.partition_names[self.next_poke_idx]):
                self.next_poke_idx += 1
            else:
                return False
        return True
class HivePartitionSensor(BaseSensorOperator):
    """
    Waits for a partition to show up in Hive.

    Note: Because @partition supports general logical operators, it
    can be inefficient. Consider using NamedHivePartitionSensor instead if
    you don't need the full flexibility of HivePartitionSensor.

    :param table: The name of the table to wait for, supports the dot
        notation (my_database.my_table)
    :type table: string
    :param partition: The partition clause to wait for. This is passed as
        is to the metastore Thrift client "get_partitions_by_filter" method,
        and apparently supports SQL like notation as in `ds='2015-01-01'
        AND type='value'` and > < sings as in "ds>=2015-01-01"
    :type partition: string
    :param metastore_conn_id: reference to the metastore thrift service
        connection id
    :type metastore_conn_id: str
    """
    template_fields = ('schema', 'table', 'partition',)

    @apply_defaults
    def __init__(
            self,
            table, partition="ds='{{ ds }}'",
            metastore_conn_id='metastore_default',
            schema='default',
            poke_interval=60*3,
            *args, **kwargs):
        super(HivePartitionSensor, self).__init__(
            poke_interval=poke_interval, *args, **kwargs)
        if not partition:
            # An explicit None/empty partition falls back to the default clause.
            partition = "ds='{{ ds }}'"
        self.metastore_conn_id = metastore_conn_id
        self.table = table
        self.partition = partition
        self.schema = schema

    def poke(self, context):
        # Dot notation in `table` overrides the schema argument. Note this
        # mutates instance state, so later pokes see the already-split names.
        if '.' in self.table:
            self.schema, self.table = self.table.split('.')
        logging.info(
            'Poking for table {self.schema}.{self.table}, '
            'partition {self.partition}'.format(**locals()))
        if not hasattr(self, 'hook'):
            # Lazily create and cache the metastore hook on the instance.
            self.hook = airflow.hooks.hive_hooks.HiveMetastoreHook(
                metastore_conn_id=self.metastore_conn_id)
        return self.hook.check_for_partition(
            self.schema, self.table, self.partition)
class HdfsSensor(BaseSensorOperator):
    """
    Waits for a file or folder to land in HDFS
    """
    template_fields = ('filepath',)

    @apply_defaults
    def __init__(
            self,
            filepath,
            hdfs_conn_id='hdfs_default',
            *args, **kwargs):
        super(HdfsSensor, self).__init__(*args, **kwargs)
        self.filepath = filepath
        self.hdfs_conn_id = hdfs_conn_id

    def poke(self, context):
        """Return True once `filepath` can be listed in HDFS."""
        import airflow.hooks.hdfs_hook
        sb = airflow.hooks.hdfs_hook.HDFSHook(self.hdfs_conn_id).get_conn()
        # snakebite is chatty at INFO; quiet it down while poking.
        logging.getLogger("snakebite").setLevel(logging.WARNING)
        logging.info(
            'Poking for file {self.filepath} '.format(**locals()))
        try:
            # ls() is lazy, so force evaluation to trigger any error for a
            # missing path.
            list(sb.ls([self.filepath]))
        # BUGFIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; the result list was also bound but never used.
        except Exception:
            return False
        return True
class WebHdfsSensor(BaseSensorOperator):
    """
    Waits for a file or folder to land in HDFS
    """
    template_fields = ('filepath',)

    @apply_defaults
    def __init__(
            self,
            filepath,
            webhdfs_conn_id='webhdfs_default',
            *args, **kwargs):
        super(WebHdfsSensor, self).__init__(*args, **kwargs)
        # The HDFS path being waited on and the WebHDFS connection to use.
        self.webhdfs_conn_id = webhdfs_conn_id
        self.filepath = filepath

    def poke(self, context):
        """Return True once the configured path exists in HDFS."""
        hook = airflow.hooks.webhdfs_hook.WebHDFSHook(self.webhdfs_conn_id)
        logging.info(
            'Poking for file {self.filepath} '.format(**locals()))
        return hook.check_for_path(hdfs_path=self.filepath)
class S3KeySensor(BaseSensorOperator):
    """
    Waits for a key (a file-like instance on S3) to be present in a S3 bucket.
    S3 being a key/value it does not support folders. The path is just a key
    a resource.

    :param bucket_key: The key being waited on. Supports full s3:// style url
        or relative path from root level.
    :type bucket_key: str
    :param bucket_name: Name of the S3 bucket
    :type bucket_name: str
    :param wildcard_match: whether the bucket_key should be interpreted as a
        Unix wildcard pattern
    :type wildcard_match: bool
    :param s3_conn_id: a reference to the s3 connection
    :type s3_conn_id: str
    """
    template_fields = ('bucket_key', 'bucket_name')

    @apply_defaults
    def __init__(
            self, bucket_key,
            bucket_name=None,
            wildcard_match=False,
            s3_conn_id='s3_default',
            *args, **kwargs):
        super(S3KeySensor, self).__init__(*args, **kwargs)
        # Fail fast at construction time when the connection id is unknown.
        session = settings.Session()
        db = session.query(DB).filter(DB.conn_id == s3_conn_id).first()
        if not db:
            raise AirflowException("conn_id doesn't exist in the repository")
        # Parse
        # Without an explicit bucket_name, bucket_key must be a full s3:// URL;
        # split it into bucket (netloc) and key (path minus leading slash).
        if bucket_name is None:
            parsed_url = urlparse(bucket_key)
            if parsed_url.netloc == '':
                raise AirflowException('Please provide a bucket_name')
            else:
                bucket_name = parsed_url.netloc
                if parsed_url.path[0] == '/':
                    bucket_key = parsed_url.path[1:]
                else:
                    bucket_key = parsed_url.path
        self.bucket_name = bucket_name
        self.bucket_key = bucket_key
        self.wildcard_match = wildcard_match
        self.s3_conn_id = s3_conn_id
        session.commit()
        session.close()

    def poke(self, context):
        import airflow.hooks.S3_hook
        hook = airflow.hooks.S3_hook.S3Hook(s3_conn_id=self.s3_conn_id)
        full_url = "s3://" + self.bucket_name + "/" + self.bucket_key
        logging.info('Poking for key : {full_url}'.format(**locals()))
        # Wildcard keys use a pattern match; plain keys use an exact lookup.
        if self.wildcard_match:
            return hook.check_for_wildcard_key(self.bucket_key,
                                               self.bucket_name)
        else:
            return hook.check_for_key(self.bucket_key, self.bucket_name)
class S3PrefixSensor(BaseSensorOperator):
    """
    Waits for a prefix to exist. A prefix is the first part of a key,
    thus enabling checking of constructs similar to glob airfl* or
    SQL LIKE 'airfl%'. There is the possibility to precise a delimiter to
    indicate the hierarchy or keys, meaning that the match will stop at that
    delimiter. Current code accepts sane delimiters, i.e. characters that
    are NOT special characters in the Python regex engine.

    :param bucket_name: Name of the S3 bucket
    :type bucket_name: str
    :param prefix: The prefix being waited on. Relative path from bucket root level.
    :type prefix: str
    :param delimiter: The delimiter intended to show hierarchy.
        Defaults to '/'.
    :type delimiter: str
    """
    template_fields = ('prefix', 'bucket_name')

    @apply_defaults
    def __init__(
            self, bucket_name,
            prefix, delimiter='/',
            s3_conn_id='s3_default',
            *args, **kwargs):
        super(S3PrefixSensor, self).__init__(*args, **kwargs)
        # Fail fast at construction time when the connection id is unknown.
        session = settings.Session()
        db = session.query(DB).filter(DB.conn_id == s3_conn_id).first()
        if not db:
            raise AirflowException("conn_id doesn't exist in the repository")
        # Parse
        self.bucket_name = bucket_name
        self.prefix = prefix
        self.delimiter = delimiter
        # Precomputed for readability in logs/debugging.
        self.full_url = "s3://" + bucket_name + '/' + prefix
        self.s3_conn_id = s3_conn_id
        session.commit()
        session.close()

    def poke(self, context):
        logging.info('Poking for prefix : {self.prefix}\n'
                     'in bucket s3://{self.bucket_name}'.format(**locals()))
        import airflow.hooks.S3_hook
        hook = airflow.hooks.S3_hook.S3Hook(s3_conn_id=self.s3_conn_id)
        return hook.check_for_prefix(
            prefix=self.prefix,
            delimiter=self.delimiter,
            bucket_name=self.bucket_name)
class TimeSensor(BaseSensorOperator):
    """
    Waits until the specified time of the day.

    :param target_time: time after which the job succeeds
    :type target_time: datetime.time
    """
    template_fields = tuple()

    @apply_defaults
    def __init__(self, target_time, *args, **kwargs):
        super(TimeSensor, self).__init__(*args, **kwargs)
        self.target_time = target_time

    def poke(self, context):
        """Succeed once the current wall-clock time passes target_time."""
        logging.info(
            'Checking if the time ({0}) has come'.format(self.target_time))
        now = datetime.now().time()
        return now > self.target_time
class TimeDeltaSensor(BaseSensorOperator):
    """
    Waits for a timedelta after the task's execution_date + schedule_interval.
    In Airflow, the daily task stamped with ``execution_date``
    2016-01-01 can only start running on 2016-01-02. The timedelta here
    represents the time after the execution period has closed.

    :param delta: time length to wait after execution_date before succeeding
    :type delta: datetime.timedelta
    """
    template_fields = tuple()

    @apply_defaults
    def __init__(self, delta, *args, **kwargs):
        super(TimeDeltaSensor, self).__init__(*args, **kwargs)
        self.delta = delta

    def poke(self, context):
        dag = context['dag']
        # Target is the close of the execution period (the DAG's following
        # schedule point) plus the configured delta.
        target_dttm = dag.following_schedule(context['execution_date'])
        target_dttm += self.delta
        logging.info('Checking if the time ({0}) has come'.format(target_dttm))
        return datetime.now() > target_dttm
class HttpSensor(BaseSensorOperator):
    """
    Executes a HTTP get statement and returns False on failure:
    404 not found or response_check function returned False

    :param http_conn_id: The connection to run the sensor against
    :type http_conn_id: string
    :param endpoint: The relative part of the full url
    :type endpoint: string
    :param params: The parameters to be added to the GET url
    :type params: a dictionary of string key/value pairs
    :param headers: The HTTP headers to be added to the GET request
    :type headers: a dictionary of string key/value pairs
    :param response_check: A check against the 'requests' response object.
        Returns True for 'pass' and False otherwise.
    :type response_check: A lambda or defined function.
    :param extra_options: Extra options for the 'requests' library, see the
        'requests' documentation (options to modify timeout, ssl, etc.)
    :type extra_options: A dictionary of options, where key is string and value
        depends on the option that's being modified.
    """
    template_fields = ('endpoint',)

    @apply_defaults
    def __init__(self,
                 endpoint,
                 http_conn_id='http_default',
                 params=None,
                 headers=None,
                 response_check=None,
                 extra_options=None, *args, **kwargs):
        super(HttpSensor, self).__init__(*args, **kwargs)
        self.endpoint = endpoint
        self.http_conn_id = http_conn_id
        self.params = params or {}
        self.headers = headers or {}
        self.extra_options = extra_options or {}
        self.response_check = response_check
        # The HTTP hook is created once, at construction time, with GET.
        self.hook = hooks.http_hook.HttpHook(method='GET', http_conn_id=http_conn_id)

    def poke(self, context):
        logging.info('Poking: ' + self.endpoint)
        try:
            response = self.hook.run(self.endpoint,
                                     data=self.params,
                                     headers=self.headers,
                                     extra_options=self.extra_options)
            if self.response_check:
                # run content check on response
                return self.response_check(response)
        except AirflowException as ae:
            # A 404 means "not there yet": keep poking. Any other failure
            # propagates and fails the task.
            if str(ae).startswith("404"):
                return False
            raise ae
        return True
| d-lee/airflow | airflow/operators/sensors.py | Python | apache-2.0 | 22,890 |
/*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package multiwriter provides an io.Writer that duplicates its writes to multiple writers concurrently.
package multiwriter
import (
"io"
)
// multiWriter duplicates writes to multiple writers.
type multiWriter []io.Writer
// New returns an io.Writer that duplicates writes to all provided writers.
func New(w ...io.Writer) io.Writer {
return multiWriter(w)
}
// Write writes p to all writers concurrently. If any errors occur, the shortest write is returned.
func (mw multiWriter) Write(p []byte) (int, error) {
done := make(chan result, len(mw))
for _, w := range mw {
go send(w, p, done)
}
endResult := result{n: len(p)}
for _ = range mw {
res := <-done
if res.err != nil && (endResult.err == nil || res.n < endResult.n) {
endResult = res
}
}
return endResult.n, endResult.err
}
func send(w io.Writer, p []byte, done chan<- result) {
var res result
res.n, res.err = w.Write(p)
if res.n < len(p) && res.err == nil {
res.err = io.ErrShortWrite
}
done <- res
}
type result struct {
n int
err error
}
| zombiezen/cardcpx | multiwriter/multiwriter.go | GO | apache-2.0 | 1,627 |
package org.aksw.servicecat.web.api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import org.aksw.servicecat.core.ServiceAnalyzerProcessor;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * JAX-RS resource exposing service-registration endpoints under {@code /services}.
 */
@org.springframework.stereotype.Service
@Path("/services")
public class ServletServiceApi {

    // Analyzer pipeline for submitted service URLs; injected by Spring.
    @Autowired
    private ServiceAnalyzerProcessor processor;

    /**
     * Registers the given service URL by running it through the analyzer.
     *
     * <p>NOTE(review): this is a GET with side effects; a PUT/POST would be
     * more appropriate REST semantics — confirm before changing clients.</p>
     *
     * @param serviceUrl the service URL, passed as the {@code url} query parameter
     * @return an empty JSON object ({@code {}}) once processing completes
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/put")
    public String registerService(@QueryParam("url") String serviceUrl)
    {
        processor.process(serviceUrl);
        String result = "{}";
        return result;
    }
}
| GeoKnow/SparqlServiceCatalogue | servicecat-webapp/src/main/java/org/aksw/servicecat/web/api/ServletServiceApi.java | Java | apache-2.0 | 728 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.macie2.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.macie2.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* AccountDetailMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
/**
 * AccountDetailMarshaller
 *
 * <p>Maps an {@link AccountDetail} onto the JSON payload members
 * {@code accountId} and {@code email}.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class AccountDetailMarshaller {

    // Payload member "accountId".
    private static final MarshallingInfo<String> ACCOUNTID_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("accountId").build();

    // Payload member "email".
    private static final MarshallingInfo<String> EMAIL_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("email").build();

    // Stateless singleton shared by all callers.
    private static final AccountDetailMarshaller instance = new AccountDetailMarshaller();

    public static AccountDetailMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(AccountDetail model, ProtocolMarshaller marshaller) {
        if (model == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            marshaller.marshall(model.getAccountId(), ACCOUNTID_BINDING);
            marshaller.marshall(model.getEmail(), EMAIL_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| aws/aws-sdk-java | aws-java-sdk-macie2/src/main/java/com/amazonaws/services/macie2/model/transform/AccountDetailMarshaller.java | Java | apache-2.0 | 2,226 |
// Copyright 2007-2008 The Apache Software Foundation.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit
{
    using System;

    /// <summary>
    /// Specifies the elapsed time before a message expires. When a message expires, the content is no longer
    /// important and it can be automatically discarded by the message service.
    /// </summary>
    [AttributeUsage(AttributeTargets.Class)]
    public class ExpiresInAttribute : Attribute
    {
        private readonly TimeSpan _timeToLive;

        /// <summary>
        /// Specifies the elapsed time before the message expires.
        /// </summary>
        /// <param name="timeToLive">The duration of the time period, in any format accepted by
        /// <see cref="TimeSpan.TryParse(string, out TimeSpan)"/> (e.g. "00:05:00").</param>
        /// <exception cref="ArgumentException">Thrown when the string cannot be parsed as a TimeSpan.</exception>
        public ExpiresInAttribute(string timeToLive)
        {
            TimeSpan value;
            if (!TimeSpan.TryParse(timeToLive, out value))
                throw new ArgumentException("Unable to convert string to TimeSpan", "timeToLive");

            _timeToLive = value;
        }

        /// <summary>
        /// Returns the TimeSpan for the message expiration
        /// </summary>
        public TimeSpan TimeToLive
        {
            get { return _timeToLive; }
        }
    }
}
# aws included via metadata.rb
# if node[:ebs_volumes]
# node[:ebs_volumes].each do |name, conf|
# aws_ebs_volume "attach hdfs volume #{conf.inspect}" do
# provider "aws_ebs_volume"
# aws_access_key node[:aws][:aws_access_key]
# aws_secret_access_key node[:aws][:aws_secret_access_key]
# aws_region node[:aws][:aws_region]
# availability_zone node[:aws][:availability_zone]
# volume_id conf[:volume_id]
# device conf[:device]
# action :attach
# end
# end
# end
| infochimps-away/cluster_chef | site-cookbooks/ebs/recipes/attach_volumes.rb | Ruby | apache-2.0 | 571 |
package org.dominokit.domino.api.client;
/**
 * Callback notified when the Domino client application has started.
 * Being a {@link FunctionalInterface}, it can be supplied as a lambda.
 */
@FunctionalInterface
public interface ApplicationStartHandler {

    /** Invoked once the application start-up has completed. */
    void onApplicationStarted();
}
| GwtDomino/domino | domino-api-client/src/main/java/org/dominokit/domino/api/client/ApplicationStartHandler.java | Java | apache-2.0 | 141 |
# Routes for the mountable Arkivo engine.
Arkivo::Engine.routes.draw do
  # API namespace; every route in it defaults to the JSON format.
  namespace :api, defaults: { format: :json } do
    # RESTful item routes; the collection index action is excluded.
    resources :items, except: :index
  end
end
| psu-stewardship/hydra-arkivo | config/routes.rb | Ruby | apache-2.0 | 126 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.warp.utils;
import java.io.UnsupportedEncodingException;
import org.apache.commons.codec.DecoderException;
/**
 * Abstract superclass for Base-N encoders and decoders.
 *
 * <p>
 * This class is not thread-safe. Each thread should use its own instance.
 * </p>
 */
public abstract class BaseNCodec {
    /**
     * MIME chunk size per RFC 2045 section 6.8.
     *
     * <p>
     * The {@value} character limit does not count the trailing CRLF, but counts all other characters, including any equal
     * signs.
     * </p>
     *
     * @see <a href="http://www.ietf.org/rfc/rfc2045.txt">RFC 2045 section 6.8</a>
     */
    public static final int MIME_CHUNK_SIZE = 76;
    /**
     * PEM chunk size per RFC 1421 section 4.3.2.4.
     *
     * <p>
     * The {@value} character limit does not count the trailing CRLF, but counts all other characters, including any equal
     * signs.
     * </p>
     *
     * @see <a href="http://tools.ietf.org/html/rfc1421">RFC 1421 section 4.3.2.4</a>
     */
    public static final int PEM_CHUNK_SIZE = 64;
    /** Growth factor used by {@link #resizeBuffer()} each time the internal buffer fills. */
    private static final int DEFAULT_BUFFER_RESIZE_FACTOR = 2;
    /**
     * Defines the default buffer size - currently {@value} - must be large enough for at least one encoded block+separator
     */
    private static final int DEFAULT_BUFFER_SIZE = 8192;
    /** Mask used to extract 8 bits, used in decoding bytes */
    protected static final int MASK_8BITS = 0xff;
    /**
     * Byte used to pad output.
     */
    protected static final byte PAD_DEFAULT = '='; // Allow static access to default
    protected final byte PAD = PAD_DEFAULT; // instance variable just in case it needs to vary later
    /** Number of bytes in each full block of unencoded data, e.g. 4 for Base64 and 5 for Base32 */
    private final int unencodedBlockSize;
    /** Number of bytes in each full block of encoded data, e.g. 3 for Base64 and 8 for Base32 */
    private final int encodedBlockSize;
    /**
     * Chunksize for encoding. Not used when decoding. A value of zero or less implies no chunking of the encoded data. Rounded
     * down to nearest multiple of encodedBlockSize.
     */
    protected final int lineLength;
    /**
     * Size of chunk separator. Not used unless {@link #lineLength} > 0.
     */
    private final int chunkSeparatorLength;
    /**
     * Buffer for streaming. Allocated lazily; null means "no buffered data".
     */
    protected byte[] buffer;
    /**
     * Position where next character should be written in the buffer.
     */
    protected int pos;
    /**
     * Position where next character should be read from the buffer.
     */
    private int readPos;
    /**
     * Boolean flag to indicate the EOF has been reached. Once EOF has been reached, this object becomes useless, and must be
     * thrown away.
     */
    protected boolean eof;
    /**
     * Variable tracks how many characters have been written to the current line. Only used when encoding. We use it to make
     * sure each encoded line never goes beyond lineLength (if lineLength > 0).
     */
    protected int currentLinePos;
    /**
     * Writes to the buffer only occur after every 3/5 reads when encoding, and every 4/8 reads when decoding. This variable
     * helps track that.
     */
    protected int modulus;
    /**
     * Note <code>lineLength</code> is rounded down to the nearest multiple of {@link #encodedBlockSize} If
     * <code>chunkSeparatorLength</code> is zero, then chunking is disabled.
     *
     * @param unencodedBlockSize the size of an unencoded block (e.g. Base64 = 3)
     * @param encodedBlockSize the size of an encoded block (e.g. Base64 = 4)
     * @param lineLength if > 0, use chunking with a length <code>lineLength</code>
     * @param chunkSeparatorLength the chunk separator length, if relevant
     */
    protected BaseNCodec(int unencodedBlockSize, int encodedBlockSize, int lineLength, int chunkSeparatorLength) {
        this.unencodedBlockSize = unencodedBlockSize;
        this.encodedBlockSize = encodedBlockSize;
        // chunking is enabled only when both a positive line length and a separator length are given
        this.lineLength = (lineLength > 0 && chunkSeparatorLength > 0) ? (lineLength / encodedBlockSize) * encodedBlockSize : 0;
        this.chunkSeparatorLength = chunkSeparatorLength;
    }
    /**
     * Returns true if this object has buffered data for reading.
     *
     * @return true if there is data still available for reading.
     */
    boolean hasData() { // package protected for access from I/O streams
        return this.buffer != null;
    }
    /**
     * Returns the amount of buffered data available for reading.
     *
     * @return The amount of buffered data available for reading.
     */
    int available() { // package protected for access from I/O streams
        return buffer != null ? pos - readPos : 0;
    }
    /**
     * Get the default buffer size. Can be overridden.
     *
     * @return {@link #DEFAULT_BUFFER_SIZE}
     */
    protected int getDefaultBufferSize() {
        return DEFAULT_BUFFER_SIZE;
    }
    /** Increases our buffer by the {@link #DEFAULT_BUFFER_RESIZE_FACTOR}. */
    private void resizeBuffer() {
        if (buffer == null) {
            // first use: allocate lazily at the default size
            buffer = new byte[getDefaultBufferSize()];
            pos = 0;
            readPos = 0;
        } else {
            // grow by the resize factor, preserving existing content
            byte[] b = new byte[buffer.length * DEFAULT_BUFFER_RESIZE_FACTOR];
            System.arraycopy(buffer, 0, b, 0, buffer.length);
            buffer = b;
        }
    }
    /**
     * Ensure that the buffer has room for <code>size</code> bytes
     *
     * @param size minimum spare space required
     */
    protected void ensureBufferSize(int size) {
        if ((buffer == null) || (buffer.length < pos + size)) {
            resizeBuffer();
        }
    }
    /**
     * Extracts buffered data into the provided byte[] array, starting at position bPos, up to a maximum of bAvail bytes.
     * Returns how many bytes were actually extracted.
     *
     * @param b byte[] array to extract the buffered data into.
     * @param bPos position in byte[] array to start extraction at.
     * @param bAvail amount of bytes we're allowed to extract. We may extract fewer (if fewer are available).
     * @return The number of bytes successfully extracted into the provided byte[] array.
     */
    int readResults(byte[] b, int bPos, int bAvail) { // package protected for access from I/O streams
        if (buffer != null) {
            int len = Math.min(available(), bAvail);
            System.arraycopy(buffer, readPos, b, bPos, len);
            readPos += len;
            if (readPos >= pos) {
                // everything consumed: release the buffer
                buffer = null; // so hasData() will return false, and this method can return -1
            }
            return len;
        }
        // -1 signals end-of-stream once EOF has been seen and the buffer is drained
        return eof ? -1 : 0;
    }
    /**
     * Checks if a byte value is whitespace or not. Whitespace is taken to mean: space, tab, CR, LF
     *
     * @param byteToCheck the byte to check
     * @return true if byte is whitespace, false otherwise
     */
    protected static boolean isWhiteSpace(byte byteToCheck) {
        switch (byteToCheck) {
            case ' ':
            case '\n':
            case '\r':
            case '\t':
                return true;
            default:
                return false;
        }
    }
    /**
     * Resets this object to its initial newly constructed state.
     */
    private void reset() {
        buffer = null;
        pos = 0;
        readPos = 0;
        currentLinePos = 0;
        modulus = 0;
        eof = false;
    }
    /**
     * Encodes an Object using the Base-N algorithm. This method is provided in order to satisfy the requirements of the Encoder
     * interface, and will throw an IllegalStateException if the supplied object is not of type byte[].
     *
     * @param pObject Object to encode
     * @return An object (of type byte[]) containing the Base-N encoded data which corresponds to the byte[] supplied.
     * @throws IllegalStateException if the parameter supplied is not of type byte[]
     */
    public Object encode(Object pObject) {
        if (!(pObject instanceof byte[])) {
            throw new IllegalStateException("Parameter supplied to Base-N encode is not a byte[]");
        }
        return encode((byte[]) pObject);
    }
    /**
     * Encodes a byte[] containing binary data, into a String containing characters in the Base-N alphabet.
     *
     * @param pArray a byte array containing binary data
     * @return A String containing only Base-N character data
     */
    public String encodeToString(byte[] pArray) {
        return newStringUtf8(encode(pArray));
    }
    /**
     * Decodes an Object using the Base-N algorithm. This method is provided in order to satisfy the requirements of the Decoder
     * interface, and will throw a DecoderException if the supplied object is not of type byte[] or String.
     *
     * @param pObject Object to decode
     * @return An object (of type byte[]) containing the binary data which corresponds to the byte[] or String supplied.
     * @throws IllegalStateException if the parameter supplied is not of type byte[]
     */
    public Object decode(Object pObject) throws IllegalStateException {
        if (pObject instanceof byte[]) {
            return decode((byte[]) pObject);
        } else if (pObject instanceof String) {
            return decode((String) pObject);
        } else {
            throw new IllegalStateException("Parameter supplied to Base-N decode is not a byte[] or a String");
        }
    }
    /**
     * Decodes a String containing characters in the Base-N alphabet.
     *
     * @param pArray A String containing Base-N character data
     * @return a byte array containing binary data
     */
    public byte[] decode(String pArray) {
        return decode(getBytesUtf8(pArray));
    }
    /**
     * Decodes a byte[] containing characters in the Base-N alphabet.
     *
     * @param pArray A byte array containing Base-N character data
     * @return a byte array containing binary data
     */
    public byte[] decode(byte[] pArray) {
        reset();
        if (pArray == null || pArray.length == 0) {
            return pArray;
        }
        // two-phase: feed all input, then signal EOF with a negative length
        decode(pArray, 0, pArray.length);
        decode(pArray, 0, -1); // Notify decoder of EOF.
        byte[] result = new byte[pos];
        readResults(result, 0, result.length);
        return result;
    }
    /**
     * Encodes a byte[] containing binary data, into a byte[] containing characters in the alphabet.
     *
     * @param pArray a byte array containing binary data
     * @return A byte array containing only the basen alphabetic character data
     */
    public byte[] encode(byte[] pArray) {
        reset();
        if (pArray == null || pArray.length == 0) {
            return pArray;
        }
        // two-phase: feed all input, then signal EOF with a negative length
        encode(pArray, 0, pArray.length);
        encode(pArray, 0, -1); // Notify encoder of EOF.
        byte[] buf = new byte[pos - readPos];
        readResults(buf, 0, buf.length);
        return buf;
    }
    /**
     * Encodes a byte[] containing binary data, into a String containing characters in the appropriate alphabet. Uses UTF8
     * encoding.
     *
     * @param pArray a byte array containing binary data
     * @return String containing only character data in the appropriate alphabet.
     */
    public String encodeAsString(byte[] pArray) {
        return newStringUtf8(encode(pArray));
    }
    abstract void encode(byte[] pArray, int i, int length); // package protected for access from I/O streams
    abstract void decode(byte[] pArray, int i, int length); // package protected for access from I/O streams
    /**
     * Returns whether or not the <code>octet</code> is in the current alphabet. Does not allow whitespace or pad.
     *
     * @param value The value to test
     *
     * @return <code>true</code> if the value is defined in the current alphabet, <code>false</code> otherwise.
     */
    protected abstract boolean isInAlphabet(byte value);
    /**
     * Tests a given byte array to see if it contains only valid characters within the alphabet. The method optionally treats
     * whitespace and pad as valid.
     *
     * @param arrayOctet byte array to test
     * @param allowWSPad if <code>true</code>, then whitespace and PAD are also allowed
     *
     * @return <code>true</code> if all bytes are valid characters in the alphabet or if the byte array is empty;
     * <code>false</code>, otherwise
     */
    public boolean isInAlphabet(byte[] arrayOctet, boolean allowWSPad) {
        for (int i = 0; i < arrayOctet.length; i++) {
            // reject a byte outside the alphabet unless WS/pad are allowed and the byte is PAD or whitespace
            if (!isInAlphabet(arrayOctet[i]) && (!allowWSPad || (arrayOctet[i] != PAD) && !isWhiteSpace(arrayOctet[i]))) {
                return false;
            }
        }
        return true;
    }
    /**
     * Tests a given String to see if it contains only valid characters within the alphabet. The method treats whitespace and
     * PAD as valid.
     *
     * @param basen String to test
     * @return <code>true</code> if all characters in the String are valid characters in the alphabet or if the String is empty;
     * <code>false</code>, otherwise
     * @see #isInAlphabet(byte[], boolean)
     */
    public boolean isInAlphabet(String basen) {
        return isInAlphabet(getBytesUtf8(basen), true);
    }
    /**
     * Tests a given byte array to see if it contains any characters within the alphabet or PAD.
     *
     * Intended for use in checking line-ending arrays
     *
     * @param arrayOctet byte array to test
     * @return <code>true</code> if any byte is a valid character in the alphabet or PAD; <code>false</code> otherwise
     */
    protected boolean containsAlphabetOrPad(byte[] arrayOctet) {
        if (arrayOctet == null) {
            return false;
        }
        for (byte element : arrayOctet) {
            if (PAD == element || isInAlphabet(element)) {
                return true;
            }
        }
        return false;
    }
    /**
     * Calculates the amount of space needed to encode the supplied array.
     *
     * @param pArray byte[] array which will later be encoded
     *
     * @return amount of space needed to encoded the supplied array. Returns a long since a max-len array will require >
     * Integer.MAX_VALUE
     */
    public long getEncodedLength(byte[] pArray) {
        // Calculate non-chunked size - rounded up to allow for padding
        // cast to long is needed to avoid possibility of overflow
        long len = ((pArray.length + unencodedBlockSize - 1) / unencodedBlockSize) * (long) encodedBlockSize;
        if (lineLength > 0) { // We're using chunking
            // Round up to nearest multiple
            len += ((len + lineLength - 1) / lineLength) * chunkSeparatorLength;
        }
        return len;
    }
    /**
     * Constructs a new <code>String</code> by decoding the specified array of bytes using the UTF-8 charset.
     *
     * @param bytes The bytes to be decoded into characters
     * @return A new <code>String</code> decoded from the specified array of bytes using the UTF-8 charset, or <code>null</code>
     * if the input byte array was <code>null</code>.
     * @throws IllegalStateException Thrown when a {@link UnsupportedEncodingException} is caught, which should never happen
     * since the charset is required.
     */
    public static String newStringUtf8(byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        try {
            return new String(bytes, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException("UTF-8", e);
        }
    }
    /**
     * Encodes the given string into a sequence of bytes using the UTF-8 charset, storing the result into a new byte array.
     *
     * @param string the String to encode, may be <code>null</code>
     * @return encoded bytes, or <code>null</code> if the input string was <code>null</code>
     * @throws IllegalStateException Thrown when the charset is missing, which should be never according the the Java
     * specification.
     * @see <a href="http://download.oracle.com/javase/1.5.0/docs/api/java/nio/charset/Charset.html">Standard charsets</a>
     * @see #getBytesUnchecked(String, String)
     */
    public static byte[] getBytesUtf8(String string) {
        if (string == null) {
            return null;
        }
        try {
            return string.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException("UTF-8", e);
        }
    }
}
| aslakknutsen/arquillian-extension-warp | impl/src/main/java/org/jboss/arquillian/warp/utils/BaseNCodec.java | Java | apache-2.0 | 17,879 |
/// <summary>
/// Static lookup of known customer names.
/// </summary>
public class Customers
{
    /// <summary>
    /// All known customer display names. Declared readonly so the array reference cannot be
    /// reassigned from outside; note the array *contents* remain mutable.
    /// </summary>
    public static readonly string[] allCustomers = {"Peter Parker", "Klark Kent", "Bruce Vayne"};
}
package dk.lessismore.nojpa.reflection.db.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method whose value should be indexed as a search field (Solr-backed,
 * judging by {@link #dynamicSolrPostName()}).
 *
 * Created : with IntelliJ IDEA.
 * User: seb
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SearchField {

    /** Sentinel meaning "no dynamic Solr post-name configured". */
    String NULL = "";

    /** Whether the field value should be translated before indexing. */
    boolean translate() default false;

    /** Whether a reversed copy of the value should also be indexed. */
    boolean searchReverse() default false;

    /** Boost factor applied to the field. */
    float boostFactor() default 3f;

    /** Boost factor applied to the reversed variant of the field. */
    float reverseBoostFactor() default 0.3f;

    /** Dynamic Solr field post-name; {@link #NULL} when unset. */
    String dynamicSolrPostName() default NULL;
}
| NoJPA-LESS-IS-MORE/NoJPA | nojpa_orm/src/main/java/dk/lessismore/nojpa/reflection/db/annotations/SearchField.java | Java | apache-2.0 | 664 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.hoang.fu;
/**
 * A Teacher is an {@code Employee} with a flat salary; bonus and allowance
 * calculations are not implemented yet.
 *
 * @author hoangpt
 */
public class Teacher extends Employee implements ITeacher {

    // NOTE(review): constructor is package-private — confirm Teacher is only created
    // from within this package.
    Teacher(String name) {
        // `name` is presumably a field inherited from Employee — TODO confirm.
        this.name = name;
    }

    /** Returns the flat teacher salary (500, no other factors visible here). */
    @Override
    float calculateSalary(){
        return 500f;
    }

    /** Not implemented yet; always throws {@link UnsupportedOperationException}. */
    @Override
    public int calculateBonus() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /** Not implemented yet; always throws {@link UnsupportedOperationException}. */
    @Override
    public float calculateAllowance() {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }
}
| hoangphantich/fu.java192x17.1 | hoangpt/Assignment_21/src/com/hoang/fu/Teacher.java | Java | apache-2.0 | 786 |
// Copyright (c) 1999-2004 Brian Wellington (bwelling@xbill.org)
package org.xbill.DNS;
import java.io.*;
import java.util.*;
import org.xbill.DNS.utils.*;
/**
 * Transaction Signature - this record is automatically generated by the
 * resolver. TSIG records provide transaction security between the
 * sender and receiver of a message, using a shared key.
 * @see org.xbill.DNS.Resolver
 * @see org.xbill.DNS.TSIG
 *
 * @author Brian Wellington
 */
public class TSIGRecord extends Record {

private static final long serialVersionUID = -88820909016649306L;

private Name alg;           // algorithm name of the shared key
private Date timeSigned;    // time the signature was generated (second resolution)
private int fudge;          // permitted clock skew in seconds
private byte [] signature;  // the MAC itself
private int originalID;     // message ID at signing time
private int error;          // extended RCODE (0 in queries)
private byte [] other;      // extra data; used for BADTIME responses

TSIGRecord() {}

// Factory hook: returns an empty instance to be filled in via rrFromWire
// (dnsjava record-construction convention).
Record
getObject() {
	return new TSIGRecord();
}

/**
 * Creates a TSIG Record from the given data. This is normally called by
 * the TSIG class
 * @param alg The shared key's algorithm
 * @param timeSigned The time that this record was generated
 * @param fudge The fudge factor for time - if the time that the message is
 * received is not in the range [now - fudge, now + fudge], the signature
 * fails
 * @param signature The signature
 * @param originalID The message ID at the time of its generation
 * @param error The extended error field. Should be 0 in queries.
 * @param other The other data field. Currently used only in BADTIME
 * responses.
 * @see org.xbill.DNS.TSIG
 */
public
TSIGRecord(Name name, int dclass, long ttl, Name alg, Date timeSigned,
	   int fudge, byte [] signature, int originalID, int error,
	   byte other[])
{
	super(name, Type.TSIG, dclass, ttl);
	this.alg = checkName("alg", alg);
	this.timeSigned = timeSigned;
	this.fudge = checkU16("fudge", fudge);
	this.signature = signature;
	this.originalID = checkU16("originalID", originalID);
	this.error = checkU16("error", error);
	this.other = other;
}

// Parses the rdata from wire format. The signing time is a 48-bit seconds
// value split into 16 high bits and 32 low bits.
void
rrFromWire(DNSInput in) throws IOException {
	alg = new Name(in);
	long timeHigh = in.readU16();
	long timeLow = in.readU32();
	long time = (timeHigh << 32) + timeLow;
	timeSigned = new Date(time * 1000);
	fudge = in.readU16();
	int sigLen = in.readU16();
	signature = in.readByteArray(sigLen);
	originalID = in.readU16();
	error = in.readU16();
	int otherLen = in.readU16();
	if (otherLen > 0)
		other = in.readByteArray(otherLen);
	else
		other = null;
}

// TSIG records are generated, never written by hand, so there is no text format.
void
rdataFromString(Tokenizer st, Name origin) throws IOException {
	throw st.exception("no text format defined for TSIG");
}

/** Converts rdata to a String */
String
rrToString() {
	StringBuffer sb = new StringBuffer();
	sb.append(alg);
	sb.append(" ");
	if (Options.check("multiline"))
		sb.append("(\n\t");
	sb.append (timeSigned.getTime() / 1000);
	sb.append (" ");
	sb.append (fudge);
	sb.append (" ");
	sb.append (signature.length);
	if (Options.check("multiline")) {
		sb.append ("\n");
		sb.append (base64.formatString(signature, 64, "\t", false));
	} else {
		sb.append (" ");
		sb.append (base64.toString(signature));
	}
	sb.append (" ");
	sb.append (Rcode.TSIGstring(error));
	sb.append (" ");
	if (other == null)
		sb.append (0);
	else {
		sb.append (other.length);
		if (Options.check("multiline"))
			sb.append("\n\n\n\t");
		else
			sb.append(" ");
		if (error == Rcode.BADTIME) {
			// for BADTIME the other data is the server's 48-bit time, big-endian
			if (other.length != 6) {
				sb.append("<invalid BADTIME other data>");
			} else {
				long time = ((long)(other[0] & 0xFF) << 40) +
					    ((long)(other[1] & 0xFF) << 32) +
					    ((other[2] & 0xFF) << 24) +
					    ((other[3] & 0xFF) << 16) +
					    ((other[4] & 0xFF) << 8) +
					    ((other[5] & 0xFF) );
				sb.append("<server time: ");
				sb.append(new Date(time * 1000));
				sb.append(">");
			}
		} else {
			sb.append("<");
			sb.append(base64.toString(other));
			sb.append(">");
		}
	}
	if (Options.check("multiline"))
		sb.append(" )");
	return sb.toString();
}

/** Returns the shared key's algorithm */
public Name
getAlgorithm() {
	return alg;
}

/** Returns the time that this record was generated */
public Date
getTimeSigned() {
	return timeSigned;
}

/** Returns the time fudge factor */
public int
getFudge() {
	return fudge;
}

/** Returns the signature */
public byte []
getSignature() {
	return signature;
}

/** Returns the original message ID */
public int
getOriginalID() {
	return originalID;
}

/** Returns the extended error */
public int
getError() {
	return error;
}

/** Returns the other data */
public byte []
getOther() {
	return other;
}

// Writes the rdata in wire format; the 48-bit signing time is emitted as
// 16 high bits followed by 32 low bits (mirrors rrFromWire).
void
rrToWire(DNSOutput out, Compression c, boolean canonical) {
	alg.toWire(out, null, canonical);
	long time = timeSigned.getTime() / 1000;
	int timeHigh = (int) (time >> 32);
	long timeLow = (time & 0xFFFFFFFFL);
	out.writeU16(timeHigh);
	out.writeU32(timeLow);
	out.writeU16(fudge);
	out.writeU16(signature.length);
	out.writeByteArray(signature);
	out.writeU16(originalID);
	out.writeU16(error);
	if (other != null) {
		out.writeU16(other.length);
		out.writeByteArray(other);
	}
	else
		out.writeU16(0);
}

}
| msdx/AndroidPNClient | androidpn/src/main/java/org/xbill/DNS/TSIGRecord.java | Java | apache-2.0 | 4,956 |
package app.monitor.job;
import core.framework.internal.log.LogManager;
import core.framework.json.JSON;
import core.framework.kafka.MessagePublisher;
import core.framework.log.message.StatMessage;
import core.framework.scheduler.Job;
import core.framework.scheduler.JobContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Map;
/**
 * Scheduled job that polls Kubernetes pods in the configured namespaces and publishes a
 * {@code POD_FAILURE} stat message for every pod that appears unhealthy or stuck.
 *
 * @author neo
 */
public class KubeMonitorJob implements Job {
    // Grace period before a pod lingering in a transitional state (deleting, unschedulable,
    // not ready) is reported as a failure.
    private static final long STUCK_THRESHOLD_SECONDS = 300;

    public final MessagePublisher<StatMessage> publisher;
    public final KubeClient kubeClient;
    public final List<String> namespaces;
    private final Logger logger = LoggerFactory.getLogger(KubeMonitorJob.class);

    public KubeMonitorJob(List<String> namespaces, KubeClient kubeClient, MessagePublisher<StatMessage> publisher) {
        this.publisher = publisher;
        this.kubeClient = kubeClient;
        this.namespaces = namespaces;
    }

    @Override
    public void execute(JobContext context) {
        try {
            var now = ZonedDateTime.now();
            for (String namespace : namespaces) {
                KubePodList pods = kubeClient.listPods(namespace);
                for (KubePodList.Pod pod : pods.items) {
                    String errorMessage = check(pod, now);
                    if (errorMessage != null) {
                        publishPodFailure(pod, errorMessage);
                    }
                }
            }
        } catch (Throwable e) {
            // the monitor must keep running; report collection failures as a stat message instead
            logger.error(e.getMessage(), e);
            publisher.publish(StatMessageFactory.failedToCollect(LogManager.APP_NAME, null, e));
        }
    }

    /**
     * Inspects one pod and decides whether it should be reported as failing.
     *
     * @param pod the pod to inspect
     * @param now reference time used for all elapsed-time checks
     * @return a human-readable failure message, or {@code null} when the pod looks healthy
     */
    String check(KubePodList.Pod pod, ZonedDateTime now) {
        if (pod.metadata.deletionTimestamp != null) {
            Duration elapsed = Duration.between(pod.metadata.deletionTimestamp, now);
            if (elapsed.toSeconds() >= STUCK_THRESHOLD_SECONDS) {
                return "pod is still in deletion, elapsed=" + elapsed;
            }
            return null;    // deletion still within the grace period
        }
        String phase = pod.status.phase;
        if ("Succeeded".equals(phase)) return null; // terminated normally
        if ("Failed".equals(phase) || "Unknown".equals(phase)) return "unexpected pod phase, phase=" + phase;
        if ("Pending".equals(phase)) {
            // newly created pod may not have container status yet, containerStatuses is initialized as empty
            for (KubePodList.ContainerStatus status : pod.status.containerStatuses) {
                if (status.state.waiting != null && "ImagePullBackOff".equals(status.state.waiting.reason)) {
                    return "ImagePullBackOff: " + status.state.waiting.message;
                }
            }
            // for unschedulable pod
            for (KubePodList.PodCondition condition : pod.status.conditions) {
                if ("PodScheduled".equals(condition.type) && "False".equals(condition.status)
                    && Duration.between(condition.lastTransitionTime, now).toSeconds() >= STUCK_THRESHOLD_SECONDS) {
                    return condition.reason + ": " + condition.message;
                }
            }
        }
        if ("Running".equals(phase)) {
            boolean ready = true;
            for (KubePodList.ContainerStatus status : pod.status.containerStatuses) {
                if (status.state.waiting != null && "CrashLoopBackOff".equals(status.state.waiting.reason)) {
                    return "CrashLoopBackOff: " + status.state.waiting.message;
                }
                boolean containerReady = Boolean.TRUE.equals(status.ready);
                if (!containerReady && status.lastState != null && status.lastState.terminated != null) {
                    var terminated = status.lastState.terminated;
                    return "pod was terminated, reason=" + terminated.reason + ", exitCode=" + terminated.exitCode;
                }
                if (!containerReady) {
                    ready = false;
                }
            }
            if (ready) return null; // all running, all ready
        }
        ZonedDateTime startTime = pod.status.startTime != null ? pod.status.startTime : pod.metadata.creationTimestamp; // startTime may not be populated yet if pod is just created
        Duration elapsed = Duration.between(startTime, now);
        if (elapsed.toSeconds() >= STUCK_THRESHOLD_SECONDS) {
            // can be: 1) took long to be ready after start, or 2) readiness check failed in the middle run
            return "pod is not in ready state, uptime=" + elapsed;
        }
        return null;
    }

    /** Builds and publishes an ERROR stat message describing the failing pod. */
    private void publishPodFailure(KubePodList.Pod pod, String errorMessage) {
        var now = Instant.now();
        var message = new StatMessage();
        message.id = LogManager.ID_GENERATOR.next(now);
        message.date = now;
        message.result = "ERROR";
        message.app = pod.metadata.labels.getOrDefault("app", pod.metadata.name);
        message.host = pod.metadata.name;
        message.errorCode = "POD_FAILURE";
        message.errorMessage = errorMessage;
        message.info = Map.of("pod", JSON.toJSON(pod));
        publisher.publish(message);
    }
}
| neowu/core-ng-project | ext/monitor/src/main/java/app/monitor/job/KubeMonitorJob.java | Java | apache-2.0 | 5,150 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AfterViewInit, ElementRef, EventEmitter, Injector, OnDestroy, OnInit, Output} from '@angular/core';
import {AbstractComponent} from '@common/component/abstract.component';
import {Field, Rule} from '@domain/data-preparation/pr-dataset';
export abstract class EditRuleComponent extends AbstractComponent implements OnInit, AfterViewInit, OnDestroy {
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Protected Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
public isShow: boolean = false;
public mode: string = 'APPEND';
public ruleVO: Rule;
public colDescs: any;
public fields: Field[];
public selectedFields: Field[] = [];
public forceFormula: string = '';
public forceCondition: string = '';
@Output()
public onEvent: EventEmitter<any> = new EventEmitter();
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Constructor
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
  // Constructor: injected ElementRef/Injector are forwarded to AbstractComponent.
  protected constructor(
    protected elementRef: ElementRef,
    protected injector: Injector) {
    super(elementRef, injector);
  }
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Override Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
  /**
   * Component initialization (Angular lifecycle hook); delegates to the base class.
   */
  public ngOnInit() {
    super.ngOnInit();
  } // function - ngOnInit
  /**
   * View initialization (Angular lifecycle hook); delegates to the base class.
   */
  public ngAfterViewInit() {
    super.ngAfterViewInit();
  } // function - ngAfterViewInit
  /**
   * Component teardown (Angular lifecycle hook); delegates to the base class.
   */
  public ngOnDestroy() {
    super.ngOnDestroy();
  } // function - ngOnDestroy
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method - API
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
public init(fields: Field[], selectedFields: Field[], data?: { ruleString?: string, jsonRuleString: any }) {
this.fields = fields;
this.selectedFields = selectedFields;
if (!this.isNullOrUndefined(data)) {
this.parsingRuleString(data);
}
this.beforeShowComp();
this.isShow = true;
this.safelyDetectChanges();
this.afterShowComp();
this.safelyDetectChanges();
} // function - init
public setValue(key: string, value: any) {
Object.keys(this).some(item => {
if (key === item && 'function' !== typeof this[key]) {
this[key] = value;
return true;
} else {
return false;
}
});
this.safelyDetectChanges();
} // function - setValue
/**
* Apply formula using Advanced formula popup
* @param {{command: string, formula: string}} data
*/
public doneInputFormula(data: { command: string, formula: string }) {
if (data.command === 'setCondition') {
this.setValue('forceCondition', data.formula);
} else {
this.setValue('forceFormula', data.formula);
}
}
/**
* Returns value of variable name equals the key
* @param {string} key
* @returns {string}
*/
public getValue(key: string): string {
let returnValue: string = undefined;
if (!this.isNullOrUndefined(this[key])) {
returnValue = this[key];
}
this.safelyDetectChanges();
return returnValue;
} // function - setValue
/**
* Rule 형식 정의 및 반환
*/
public abstract getRuleData();
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Protected Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
* 컴포넌트 표시 전 실행
* @protected
*/
protected abstract beforeShowComp();
/**
* 컴포넌트 표시 후 실행
* @protected
*/
protected abstract afterShowComp();
/**
* rule string 을 분석한다.
* @param ruleString
* @protected
*/
protected abstract parsingRuleString(ruleString: any);
protected getColumnNamesInArray(fields: Field[], isWrap: boolean = false): string[] {
return fields.map((item) => {
if (isWrap) {
return '`' + item.name + '`'
} else {
return item.name
}
});
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
}
| metatron-app/metatron-discovery | discovery-frontend/src/app/data-preparation/dataflow/dataflow-detail/component/edit-dataflow-rule/edit-rule/edit-rule.component.ts | TypeScript | apache-2.0 | 5,051 |
package mahjong
type Hand []Pai
// remove returns a copy of list with the first occurrence of p dropped.
// The input slice is left untouched.
func remove(list []Pai, p Pai) []Pai {
	var out []Pai
	skipped := false
	for _, cur := range list {
		if !skipped && cur == p {
			skipped = true
			continue
		}
		out = append(out, cur)
	}
	return out
}
// contain reports whether p occurs at least once in list.
func contain(list []Pai, p Pai) bool {
	for i := range list {
		if list[i] == p {
			return true
		}
	}
	return false
}
// contain2 reports whether p occurs at least twice in list.
//
// Fix: the previous version always scanned the whole slice; we can stop
// as soon as a second copy is seen.
func contain2(list []Pai, p Pai) bool {
	count := 0
	for _, a := range list {
		if a == p {
			count++
			if count >= 2 {
				return true
			}
		}
	}
	return false
}
// createCandidates enumerates, depth-first, the ways the remaining tiles can
// be partitioned into groups: complete runs (n, n+1, n+2), two-tile partial
// runs, triplets, pairs and singles. cand accumulates partial groupings; the
// function always extends the *last* grouping in cand, and each branch first
// snapshots that grouping (tmp) so the alternative branches can continue
// from the un-extended state appended back onto cand.
// NOTE(review): the snapshot/append backtracking makes statement order
// load-bearing, so the body is intentionally left untouched.
func createCandidates(list []Pai, cand [][][]Pai) [][][]Pai {
	if len(list) <= 0 {
		return cand
	}
	// The first tile drives this step; the rest is searched recursively.
	current := list[0]
	remain := list[1:]
	nextOne := current + 1
	nextTwo := current + 2
	if current.IsNumber() {
		// Branch 1: complete run current, current+1, current+2 (same suit).
		if current.Suit() == nextOne.Suit() && current.Suit() == nextTwo.Suit() &&
			contain(remain, nextOne) && contain(remain, nextTwo) {
			idx := len(cand) - 1
			tmp := make([][]Pai, len(cand[idx]))
			copy(tmp, cand[idx]) // snapshot for backtracking
			cand[idx] = append(cand[idx], []Pai{current, nextOne, nextTwo})
			_remain := remove(remove(remain, nextOne), nextTwo)
			cand = createCandidates(_remain, cand)
			cand = append(cand, tmp) // restore: later branches start from the snapshot
		}
		// Branch 2: adjacent partial run current, current+1.
		if current.Suit() == nextOne.Suit() && contain(remain, nextOne) {
			idx := len(cand) - 1
			tmp := make([][]Pai, len(cand[idx]))
			copy(tmp, cand[idx])
			cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, nextOne})
			_remain := remove(remain, nextOne)
			cand = createCandidates(_remain, cand)
			cand = append(cand, tmp)
		}
		// Branch 3: gapped partial run current, current+2 (kanchan).
		if current.Suit() == nextTwo.Suit() && contain(remain, nextTwo) {
			idx := len(cand) - 1
			tmp := make([][]Pai, len(cand[idx]))
			copy(tmp, cand[idx])
			cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, nextTwo})
			_remain := remove(remain, nextTwo)
			cand = createCandidates(_remain, cand)
			cand = append(cand, tmp)
		}
	}
	// Branch 4: triplet of current (two more copies in the remainder).
	if contain2(remain, current) {
		idx := len(cand) - 1
		tmp := make([][]Pai, len(cand[idx]))
		copy(tmp, cand[idx])
		cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, current, current})
		_remain := remove(remove(remain, current), current)
		cand = createCandidates(_remain, cand)
		cand = append(cand, tmp)
	}
	// Branch 5: pair of current.
	if contain(remain, current) {
		idx := len(cand) - 1
		tmp := make([][]Pai, len(cand[idx]))
		copy(tmp, cand[idx])
		cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current, current})
		_remain := remove(remain, current)
		cand = createCandidates(_remain, cand)
		cand = append(cand, tmp)
	}
	// Fallback: keep current as a lone tile and continue with the rest.
	cand[len(cand)-1] = append(cand[len(cand)-1], []Pai{current})
	return createCandidates(remain, cand)
}
// isUnique reports whether list contains no duplicate tiles.
func isUnique(list []Pai) bool {
	seen := make(map[Pai]bool, len(list))
	for _, p := range list {
		if seen[p] {
			return false
		}
		seen[p] = true
	}
	return true
}
// isSevenPairs reports whether the candidate grouping matches the
// seven-pairs (chiitoitsu) tenpai shape: exactly seven groups, no
// mismatched two-tile group, and all group heads pairwise distinct.
func isSevenPairs(list [][]Pai) bool {
	if len(list) != 7 {
		return false
	}
	heads := make([]Pai, 0, len(list))
	for _, group := range list {
		if len(group) == 2 && group[0] != group[1] {
			return false
		}
		heads = append(heads, group[0])
	}
	return isUnique(heads)
}
// isThirteenOrphans reports whether every tile in the grouping is a
// terminal or honor ("orphan"), with 12 or 13 groups present.
func isThirteenOrphans(list [][]Pai) bool {
	if len(list) != 12 && len(list) != 13 {
		return false
	}
	for _, group := range list {
		for _, pai := range group {
			if !pai.IsOrphan() {
				return false
			}
		}
	}
	return true
}
// IsTenpai reports whether the hand is one tile away from a winning
// shape: a regular decomposition of five groups, seven pairs, or
// thirteen orphans.
func (hand *Hand) IsTenpai() bool {
	candidates := createCandidates(*hand, [][][]Pai{{}})
	for _, grouping := range candidates {
		if len(grouping) == 5 || isSevenPairs(grouping) || isThirteenOrphans(grouping) {
			return true
		}
	}
	return false
}
| tanaka51/tenpai-wakaru-man | mahjong/hand.go | GO | apache-2.0 | 3,584 |
/*
** Stack.cpp for cpp_abstractvm in /var/projects/cpp_abstractvm/Stack.cpp
**
** Made by kevin labbe
** Login <labbe_k@epitech.net>
**
** Started on Mar 1, 2014 2:15:13 AM 2014 kevin labbe
** Last update Mar 1, 2014 2:15:13 AM 2014 kevin labbe
*/
#include "EmptyStackException.hpp"
#include "AssertFailedException.hpp"
#include "WrongParameterException.hpp"
#include "Stack.hpp"
namespace Arithmetic
{
  /**
   * Builds the instruction dispatch table: each mnemonic is bound to the
   * member function implementing it, so execFunc() can resolve an
   * instruction name at runtime.
   */
  Stack::Stack()
  {
    _funcs["add"] = &Stack::add;
    _funcs["sub"] = &Stack::sub;
    _funcs["mul"] = &Stack::mul;
    _funcs["div"] = &Stack::div;
    _funcs["mod"] = &Stack::mod;
    _funcs["pop"] = &Stack::pop;
    _funcs["dump"] = &Stack::dump;
    _funcs["print"] = &Stack::print;
  }

  Stack::~Stack()
  {
  }

  /**
   * Executes the registered instruction named `name`; unknown names are
   * silently ignored (unchanged behaviour).
   *
   * Fix: the previous code indexed the map with operator[], which
   * default-inserts a null member-function pointer for every unknown
   * name (unbounded map growth on bad input), and needlessly rebuilt the
   * key from name.c_str() on each access.
   */
  void
  Stack::execFunc(const std::string& name)
  {
    if (_funcs.count(name) != 0)
      (this->*_funcs[name])();
  }

  /**
   * Pushes `op` on top of the stack. The stack takes ownership and frees
   * the operand in pop() / _pushResult().
   */
  void
  Stack::push(IOperand* op)
  {
    _stack.push_front(op);
  }

  /**
   * Removes and frees the top operand.
   * Throws Exception::EmptyStackException when the stack is empty.
   */
  void
  Stack::pop()
  {
    if (_stack.empty())
      throw Exception::EmptyStackException("pop");
    delete _stack.front();
    _stack.pop_front();
  }

  /**
   * Verifies that the operand on top of the stack equals `op` (same type,
   * precision and textual value); throws AssertFailedException otherwise.
   * Does not take ownership of `op`.
   * NOTE(review): the name matches the C `assert` macro; harmless while
   * <cassert> is not included in this translation unit, but renaming
   * would be safer.
   */
  void
  Stack::assert(IOperand* op)
  {
    if (_stack.empty())
      throw Exception::AssertFailedException("Stack empty");
    if (_stack.front()->getPrecision() != op->getPrecision()
        || _stack.front()->getType() != op->getType()
        || _stack.front()->toString() != op->toString())
      throw Exception::AssertFailedException("Operand different at the top of the stack");
  }

  /**
   * Prints every operand, top of stack first, one per line.
   */
  void
  Stack::dump()
  {
    for (std::deque<IOperand*>::iterator it = _stack.begin(); it != _stack.end(); it++)
      std::cout << (*it)->toString() << std::endl;
  }

  /**
   * Interprets the top operand (which must be an int8) as an ASCII code
   * and prints the corresponding character.
   * Throws EmptyStackException / WrongParameterException accordingly.
   */
  void
  Stack::print()
  {
    std::stringstream stream;
    char chr;
    int nbr;

    if (_stack.empty())
      throw Exception::EmptyStackException("print");
    if (_stack.front()->getType() != INT8)
      throw Exception::WrongParameterException("print expects an int8 at the top of the stack");
    stream << _stack.front()->toString();
    stream >> nbr; // parse the textual value...
    chr = nbr;     // ...then narrow it to its ASCII character
    std::cout << chr << std::endl;
  }

  /** Pops the two top operands and pushes their sum. */
  void
  Stack::add()
  {
    _loadOperands();
    _pushResult(*_op1 + *_op2);
  }

  /** Pops the two top operands and pushes their difference. */
  void
  Stack::sub()
  {
    _loadOperands();
    _pushResult(*_op1 - *_op2);
  }

  /** Pops the two top operands and pushes their product. */
  void
  Stack::mul()
  {
    _loadOperands();
    _pushResult(*_op1 * *_op2);
  }

  /** Pops the two top operands and pushes their quotient. */
  void
  Stack::div()
  {
    _loadOperands();
    _pushResult(*_op1 / *_op2);
  }

  /** Pops the two top operands and pushes the remainder. */
  void
  Stack::mod()
  {
    _loadOperands();
    _pushResult(*_op1 % *_op2);
  }

  /**
   * Pops the two operands of a binary operation: _op1 is the former top
   * of stack, _op2 the operand below it.
   * Throws Exception::EmptyStackException when fewer than two operands
   * are available.
   */
  void
  Stack::_loadOperands()
  {
    if (_stack.size() < 2)
      throw Exception::EmptyStackException("Calc");
    _op1 = _stack.front();
    _stack.pop_front();
    _op2 = _stack.front();
    _stack.pop_front();
  }

  /**
   * Pushes the freshly allocated result and frees the two operands
   * consumed by _loadOperands().
   */
  void
  Stack::_pushResult(IOperand* result)
  {
    _stack.push_front(result);
    delete _op1;
    delete _op2;
  }
} /* namespace Arithmetic */
| Kael95/Epitech | Virtual Machine/src/Arithmetic/Stack.cpp | C++ | apache-2.0 | 2,906 |
import { Upload } from './../models/upload/upload.model';
import { SUPController } from './sup.server.controller';
// ANSI escape prefix used to render log messages in yellow.
const yellow = '\x1b[33m%s\x1b[0m: ';

/**
 * Socket entry point for the upload protocol: wires per-connection
 * socket.io events to the SUPController and re-exports its
 * request/response handlers as static pass-throughs.
 */
export class SUP {
  constructor(private io: SocketIOClient.Manager) { }

  /** Registers the connection handler and, per socket, the upload events. */
  registerIO() {
    this.io.on('connection', (socket: SocketIOClient.Socket) => {
      console.log(yellow, 'Socket connected!');
      this.wireSocket(socket);
    });
  }

  /** Attaches the chunk/file events of a single connected socket. */
  private wireSocket(socket: SocketIOClient.Socket) {
    socket.on('NextChunk', (data) => {
      console.log(yellow, 'Receiving data.');
      SUPController.nextChunk(data, socket);
    });
    socket.on('NextFile', (data) => {
      console.log(yellow, 'Receiving next File.');
      SUPController.nextFile(data, socket);
    });
  }

  /** Delegates the protocol handshake to the controller. */
  static handshake(data, cb): void {
    SUPController.handshake(data, cb);
  }

  /** Delegates a pause request to the controller. */
  static pause(data, cb): void {
    SUPController.pause(data, cb);
  }

  /** Delegates a continue request to the controller. */
  static continue(data, cb): void {
    SUPController.continue(data, cb);
  }

  /** Delegates an abort request to the controller. */
  static abort(data, cb): void {
    SUPController.abort(data, cb);
  }
}
| Stejnar/compressor-angular2 | src/server/protocols/sup.server.protocol.ts | TypeScript | apache-2.0 | 1,052 |
package com.yueny.demo.job.controller;
import java.util.Collections;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.yueny.demo.common.example.bo.ModifyDemoBo;
import com.yueny.demo.common.example.service.IDataPrecipitationService;
import lombok.extern.slf4j.Slf4j;
/**
* @author yueny09 <deep_blue_yang@163.com>
*
* @DATE 2016年2月16日 下午8:23:11
*
*/
@Controller
@Slf4j
public class DemoController {
    @Autowired
    private IDataPrecipitationService dataPrecipitationService;

    /**
     * Lists every persisted demo record.
     *
     * @return all {@link ModifyDemoBo} known to the precipitation service;
     *         an empty list when the lookup fails (the failure is logged).
     */
    @RequestMapping(value = { "/", "welcome" }, method = RequestMethod.GET)
    @ResponseBody
    public List<ModifyDemoBo> bar() {
        try {
            return dataPrecipitationService.queryAll();
        } catch (final Exception e) {
            log.error("exception:", e);
            // Fix: return an empty list instead of null so clients always
            // receive a well-formed JSON array, even on failure.
            return Collections.emptyList();
        }
    }

    /**
     * Liveness probe endpoint.
     *
     * @return the literal string {@code "OK"}
     */
    @RequestMapping(value = "/healthy", method = RequestMethod.GET)
    @ResponseBody
    public String healthy() {
        return "OK";
    }
}
| yueny/pra | job/job_elastic/src/main/java/com/yueny/demo/job/controller/DemoController.java | Java | apache-2.0 | 1,201 |
/*
* Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#pragma once
#include "plugin_common.hpp"
#include "serialize.hpp"
#include "onnx2trt_common.hpp"
#include <NvInferPlugin.h>
#include <memory>
#include <vector>
namespace onnx2trt {
// A convenient base class for plugins. Provides default implementations of
// some methods (no workspace, trivial initialize/terminate) and caches the
// configuration TensorRT hands over in configureWithFormat() -- input dims,
// max batch size, data type and format -- for subclasses to query.
// Adapts a plugin so that its type is automatically serialized, enabling it
// to be identified when deserializing.
class Plugin : public nvinfer1::IPluginExt, public IOwnable {
public:
  // Unique type string identifying the concrete plugin; used to match a
  // serialized plugin back to its implementation on deserialization.
  virtual const char* getPluginType() const = 0;
  // Accessors for the configuration captured by configureWithFormat().
  nvinfer1::Dims const& getInputDims(int index) const { return _input_dims.at(index); }
  size_t getMaxBatchSize() const { return _max_batch_size; }
  nvinfer1::DataType getDataType() const { return _data_type; }
  nvinfer1::PluginFormat getDataFormat() const { return _data_format; }
  // Defaults: no scratch workspace, nothing to set up or tear down.
  size_t getWorkspaceSize(int) const override { return 0; }
  int initialize() override { return 0;}
  void terminate() override {}
  bool supportsFormat(nvinfer1::DataType type,
                      nvinfer1::PluginFormat format) const override;
  void configureWithFormat(const nvinfer1::Dims* inputDims, int nbInputs,
                           const nvinfer1::Dims* outputDims, int nbOutputs,
                           nvinfer1::DataType type,
                           nvinfer1::PluginFormat format,
                           int maxBatchSize) override;
  void destroy() override { delete this; }
protected:
  // Helpers for subclasses to (de)serialize the cached base configuration.
  void deserializeBase(void const*& serialData, size_t& serialLength);
  size_t getBaseSerializationSize();
  void serializeBase(void*& buffer);
  // Configuration captured from TensorRT in configureWithFormat().
  std::vector<nvinfer1::Dims> _input_dims;
  size_t _max_batch_size;
  nvinfer1::DataType _data_type;
  nvinfer1::PluginFormat _data_format;
  // Protected: instances are destroyed via destroy(), not direct delete.
  virtual ~Plugin() {}
};
// Forwards every IPlugin/IPluginExt call to a wrapped nvinfer1::IPlugin.
// _ext is the same object down-cast to IPluginExt, or null when the wrapped
// plugin does not implement the extended interface; the out-of-line
// definitions (elsewhere) presumably branch on that -- confirm in plugin.cpp.
class PluginAdapter : public Plugin {
protected:
  nvinfer1::IPlugin* _plugin;
  nvinfer1::IPluginExt* _ext;
public:
  PluginAdapter(nvinfer1::IPlugin* plugin) :
    _plugin(plugin), _ext(dynamic_cast<IPluginExt*>(plugin)) {}
  virtual int getNbOutputs() const override;
  virtual nvinfer1::Dims getOutputDimensions(int index,
                                             const nvinfer1::Dims *inputDims,
                                             int nbInputs) override ;
  virtual void serialize(void* buffer) override;
  virtual size_t getSerializationSize() override;
  virtual int initialize() override;
  virtual void terminate() override;
  virtual bool supportsFormat(nvinfer1::DataType type, nvinfer1::PluginFormat format) const override;
  virtual void configureWithFormat(const nvinfer1::Dims *inputDims, int nbInputs,
                                   const nvinfer1::Dims *outputDims, int nbOutputs,
                                   nvinfer1::DataType type,
                                   nvinfer1::PluginFormat format,
                                   int maxBatchSize);
  virtual size_t getWorkspaceSize(int maxBatchSize) const override;
  virtual int enqueue(int batchSize,
                      const void *const *inputs, void **outputs,
                      void *workspace, cudaStream_t stream) override;
};
// This makes a plugin compatible with onnx2trt::PluginFactory by prefixing
// its serialized form with a magic marker plus the plugin-type string, so
// the concrete class can be identified when deserializing an engine.
// Owns the wrapped plugin via _owned_plugin.
class TypeSerializingPlugin : public PluginAdapter {
  UniqueOwnable _owned_plugin;
  Plugin* _plugin;
public:
  TypeSerializingPlugin(Plugin* plugin)
    : PluginAdapter(plugin), _owned_plugin(plugin), _plugin(plugin) {}
  void serialize(void* buffer) override {
    const char* plugin_type = _plugin->getPluginType();
    // Layout: magic string, then type string, then the plugin's payload.
    serialize_value(&buffer, (const char*)REGISTERABLE_PLUGIN_MAGIC_STRING);
    serialize_value(&buffer, plugin_type);
    return _plugin->serialize(buffer);
  }
  size_t getSerializationSize() override {
    const char* plugin_type = _plugin->getPluginType();
    // Note: +1 for NULL-terminated string
    // NOTE(review): assumes serialize_value writes strings including their
    // NUL terminator and that the magic macro is a string literal (so
    // sizeof already counts its NUL) -- confirm against serialize.hpp.
    return (sizeof(REGISTERABLE_PLUGIN_MAGIC_STRING) + 1 +
            strlen(plugin_type) +
            _plugin->getSerializationSize());
  }
  const char* getPluginType() const override {
    return _plugin->getPluginType();
  }
  void destroy() override { delete this; }
};
// Adapts nvinfer1::plugin::INvPlugin into onnx2trt::Plugin
// (this enables existing NV stock plugins to be used in this plugin
// infrastructure); type name and destruction are defined out of line.
class NvPlugin : public PluginAdapter {
  nvinfer1::plugin::INvPlugin* _plugin;
public:
  NvPlugin(nvinfer1::plugin::INvPlugin* plugin)
    : PluginAdapter(plugin), _plugin(plugin) {}
  virtual const char* getPluginType() const override;
  virtual void destroy() override;
};
} // namespace onnx2trt
| mlperf/training_results_v0.6 | Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/onnx-tensorrt/plugin.hpp | C++ | apache-2.0 | 5,795 |
/**
* Copyright Intellectual Reserve, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gedcomx.build.enunciate;
import org.codehaus.enunciate.main.ClasspathHandler;
import org.codehaus.enunciate.main.ClasspathResource;
import org.codehaus.enunciate.main.Enunciate;
import org.gedcomx.test.Recipe;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
* @author Ryan Heaton
*/
public class RecipeClasspathHandler implements ClasspathHandler {
private final Enunciate enunciate;
private final List<Recipe> recipes = new ArrayList<Recipe>();
private final Unmarshaller unmarshaller;
public RecipeClasspathHandler(Enunciate enunciate) {
this.enunciate = enunciate;
try {
unmarshaller = JAXBContext.newInstance(Recipe.class).createUnmarshaller();
}
catch (JAXBException e) {
throw new RuntimeException(e);
}
}
public List<Recipe> getRecipes() {
return recipes;
}
@Override
public void startPathEntry(File pathEntry) {
}
@Override
public void handleResource(ClasspathResource resource) {
if (resource.getPath().endsWith(".recipe.xml")) {
try {
this.recipes.add((Recipe) unmarshaller.unmarshal(resource.read()));
}
catch (Exception e) {
this.enunciate.error("Unable to unmarshal recipe %s: %s.", resource.getPath(), e.getMessage());
}
}
}
@Override
public boolean endPathEntry(File pathEntry) {
return false;
}
}
| brenthale/gedcomx-java | enunciate-gedcomx-support/src/main/java/org/gedcomx/build/enunciate/RecipeClasspathHandler.java | Java | apache-2.0 | 2,169 |
// Generated from /POI/java/org/apache/poi/hssf/record/BoundSheetRecord.java
#include <org/apache/poi/hssf/record/BoundSheetRecord.hpp>
#include <java/lang/ArrayStoreException.hpp>
#include <java/lang/NullPointerException.hpp>
#include <java/lang/String.hpp>
#include <java/lang/StringBuffer.hpp>
#include <java/util/Arrays.hpp>
#include <java/util/Comparator.hpp>
#include <java/util/List.hpp>
#include <org/apache/poi/hssf/record/BoundSheetRecord_1.hpp>
#include <org/apache/poi/hssf/record/Record.hpp>
#include <org/apache/poi/hssf/record/RecordBase.hpp>
#include <org/apache/poi/hssf/record/RecordInputStream.hpp>
#include <org/apache/poi/hssf/record/StandardRecord.hpp>
#include <org/apache/poi/ss/util/WorkbookUtil.hpp>
#include <org/apache/poi/util/BitField.hpp>
#include <org/apache/poi/util/BitFieldFactory.hpp>
#include <org/apache/poi/util/HexDump.hpp>
#include <org/apache/poi/util/LittleEndian.hpp>
#include <org/apache/poi/util/LittleEndianConsts.hpp>
#include <org/apache/poi/util/LittleEndianOutput.hpp>
#include <org/apache/poi/util/StringUtil.hpp>
#include <Array.hpp>
#include <ObjectArray.hpp>
#include <SubArray.hpp>
// Forward declaration of the transpiler's array helper template.
template<typename ComponentType, typename... Bases> struct SubArray;
namespace poi
{
    namespace hssf
    {
        namespace record
        {
            // Aliases reproducing the Java array hierarchy
            // (BoundSheetRecord[] -> StandardRecord[] -> Record[] ->
            // RecordBase[] -> Object[]) for the generated code below.
            typedef ::SubArray< ::poi::hssf::record::RecordBase, ::java::lang::ObjectArray > RecordBaseArray;
            typedef ::SubArray< ::poi::hssf::record::Record, RecordBaseArray > RecordArray;
            typedef ::SubArray< ::poi::hssf::record::StandardRecord, RecordArray > StandardRecordArray;
            typedef ::SubArray< ::poi::hssf::record::BoundSheetRecord, StandardRecordArray > BoundSheetRecordArray;
        } // record
    } // hssf
} // poi
// Emulates the JVM's implicit null check: every pointer dereference in the
// generated code goes through npc(), which throws NullPointerException on
// null just as the original Java would.
template<typename T>
static T* npc(T* t)
{
    if(!t) throw new ::java::lang::NullPointerException();
    return t;
}
// Generated (J2C) constructor scheme: the default_init_tag overload only
// allocates and runs static init (clinit); the public constructors then
// call the matching ctor() body, mirroring Java construction order.
poi::hssf::record::BoundSheetRecord::BoundSheetRecord(const ::default_init_tag&)
    : super(*static_cast< ::default_init_tag* >(0))
{
    clinit();
}

poi::hssf::record::BoundSheetRecord::BoundSheetRecord(::java::lang::String* sheetname)
    : BoundSheetRecord(*static_cast< ::default_init_tag* >(0))
{
    ctor(sheetname);
}

poi::hssf::record::BoundSheetRecord::BoundSheetRecord(RecordInputStream* in)
    : BoundSheetRecord(*static_cast< ::default_init_tag* >(0))
{
    ctor(in);
}

// Out-of-line definition of the BOUNDSHEET record sid constant.
constexpr int16_t poi::hssf::record::BoundSheetRecord::sid;

// Static-field accessors run clinit() first, emulating lazy Java class
// initialization before the field is read.
poi::util::BitField*& poi::hssf::record::BoundSheetRecord::hiddenFlag()
{
    clinit();
    return hiddenFlag_;
}
poi::util::BitField* poi::hssf::record::BoundSheetRecord::hiddenFlag_;

poi::util::BitField*& poi::hssf::record::BoundSheetRecord::veryHiddenFlag()
{
    clinit();
    return veryHiddenFlag_;
}
poi::util::BitField* poi::hssf::record::BoundSheetRecord::veryHiddenFlag_;

// Body of BoundSheetRecord(String): zero option flags, then validate and
// store the sheet name.
void poi::hssf::record::BoundSheetRecord::ctor(::java::lang::String* sheetname)
{
    super::ctor();
    field_2_option_flags = 0;
    setSheetname(sheetname);
}

// Body of BoundSheetRecord(RecordInputStream): reads BOF position, option
// flags, name length, the unicode flag, and finally the sheet name in the
// encoding selected by that flag.
void poi::hssf::record::BoundSheetRecord::ctor(RecordInputStream* in)
{
    super::ctor();
    auto buf = new ::int8_tArray(::poi::util::LittleEndianConsts::INT_SIZE);
    npc(in)->readPlain(buf, int32_t(0), npc(buf)->length);
    field_1_position_of_BOF = ::poi::util::LittleEndian::getInt(buf);
    field_2_option_flags = npc(in)->readUShort();
    auto field_3_sheetname_length = npc(in)->readUByte();
    field_4_isMultibyteUnicode = npc(in)->readByte();
    if(isMultibyte()) {
        field_5_sheetname = npc(in)->readUnicodeLEString(field_3_sheetname_length);
    } else {
        field_5_sheetname = npc(in)->readCompressedUnicode(field_3_sheetname_length);
    }
}
// Sets the stream offset of the sheet's BOF record.
void poi::hssf::record::BoundSheetRecord::setPositionOfBof(int32_t pos)
{
    field_1_position_of_BOF = pos;
}

// Validates and stores the sheet name, and records whether it needs a
// multibyte (UTF-16LE) encoding when serialized.
void poi::hssf::record::BoundSheetRecord::setSheetname(::java::lang::String* sheetName)
{
    ::poi::ss::util::WorkbookUtil::validateSheetName(sheetName);
    field_5_sheetname = sheetName;
    field_4_isMultibyteUnicode = ::poi::util::StringUtil::hasMultibyte(sheetName) ? int32_t(1) : int32_t(0);
}

int32_t poi::hssf::record::BoundSheetRecord::getPositionOfBof()
{
    return field_1_position_of_BOF;
}

// True when bit 0 of the unicode flag is set, i.e. the name is stored as
// 16-bit characters.
bool poi::hssf::record::BoundSheetRecord::isMultibyte()
{
    return (field_4_isMultibyteUnicode & int32_t(1)) != 0;
}

java::lang::String* poi::hssf::record::BoundSheetRecord::getSheetname()
{
    return field_5_sheetname;
}

// Debug dump of all record fields, one per line.
java::lang::String* poi::hssf::record::BoundSheetRecord::toString()
{
    auto buffer = new ::java::lang::StringBuffer();
    npc(buffer)->append(u"[BOUNDSHEET]\n"_j);
    npc(npc(npc(buffer)->append(u" .bof = "_j))->append(::poi::util::HexDump::intToHex(getPositionOfBof())))->append(u"\n"_j);
    npc(npc(npc(buffer)->append(u" .options = "_j))->append(::poi::util::HexDump::shortToHex(field_2_option_flags)))->append(u"\n"_j);
    npc(npc(npc(buffer)->append(u" .unicodeflag= "_j))->append(::poi::util::HexDump::byteToHex(field_4_isMultibyteUnicode)))->append(u"\n"_j);
    npc(npc(npc(buffer)->append(u" .sheetname = "_j))->append(field_5_sheetname))->append(u"\n"_j);
    npc(buffer)->append(u"[/BOUNDSHEET]\n"_j);
    return npc(buffer)->toString();
}

// 8 fixed header bytes plus the name, 2 bytes per char when multibyte.
int32_t poi::hssf::record::BoundSheetRecord::getDataSize()
{
    return int32_t(8) + npc(field_5_sheetname)->length() * (isMultibyte() ? int32_t(2) : int32_t(1));
}

// Writes the record body in the same layout the stream constructor reads.
void poi::hssf::record::BoundSheetRecord::serialize(::poi::util::LittleEndianOutput* out)
{
    npc(out)->writeInt(getPositionOfBof());
    npc(out)->writeShort(field_2_option_flags);
    auto name = field_5_sheetname;
    npc(out)->writeByte(npc(name)->length());
    npc(out)->writeByte(field_4_isMultibyteUnicode);
    if(isMultibyte()) {
        ::poi::util::StringUtil::putUnicodeLE(name, out);
    } else {
        ::poi::util::StringUtil::putCompressedUnicode(name, out);
    }
}

int16_t poi::hssf::record::BoundSheetRecord::getSid()
{
    return sid;
}

// Hidden / very-hidden state lives in bits 0 and 1 of the option flags.
bool poi::hssf::record::BoundSheetRecord::isHidden()
{
    return npc(hiddenFlag_)->isSet(field_2_option_flags);
}

void poi::hssf::record::BoundSheetRecord::setHidden(bool hidden)
{
    field_2_option_flags = npc(hiddenFlag_)->setBoolean(field_2_option_flags, hidden);
}

bool poi::hssf::record::BoundSheetRecord::isVeryHidden()
{
    return npc(veryHiddenFlag_)->isSet(field_2_option_flags);
}

void poi::hssf::record::BoundSheetRecord::setVeryHidden(bool veryHidden)
{
    field_2_option_flags = npc(veryHiddenFlag_)->setBoolean(field_2_option_flags, veryHidden);
}
// Copies the list into an array and sorts it by BOF position using the
// generated comparator (BoundSheetRecord_1).
poi::hssf::record::BoundSheetRecordArray* poi::hssf::record::BoundSheetRecord::orderByBofPosition(::java::util::List* boundSheetRecords)
{
    clinit();
    auto bsrs = new BoundSheetRecordArray(npc(boundSheetRecords)->size());
    npc(boundSheetRecords)->toArray_(static_cast< ::java::lang::ObjectArray* >(bsrs));
    ::java::util::Arrays::sort(bsrs, BOFComparator_);
    return bsrs;
}

java::util::Comparator*& poi::hssf::record::BoundSheetRecord::BOFComparator()
{
    clinit();
    return BOFComparator_;
}
java::util::Comparator* poi::hssf::record::BoundSheetRecord::BOFComparator_;

extern java::lang::Class *class_(const char16_t *c, int n);

// Lazily constructed java.lang.Class mirror for this type.
java::lang::Class* poi::hssf::record::BoundSheetRecord::class_()
{
    static ::java::lang::Class* c = ::class_(u"org.apache.poi.hssf.record.BoundSheetRecord", 43);
    return c;
}

// Emulates Java <clinit>: the nested struct's constructor runs exactly once
// (function-local static) to initialize the static fields; the in_cl_init
// flag guards against re-entrancy while initialization is in progress.
void poi::hssf::record::BoundSheetRecord::clinit()
{
    super::clinit();
    static bool in_cl_init = false;
    struct clinit_ {
        clinit_() {
            in_cl_init = true;
            hiddenFlag_ = ::poi::util::BitFieldFactory::getInstance(1);
            veryHiddenFlag_ = ::poi::util::BitFieldFactory::getInstance(2);
            BOFComparator_ = new BoundSheetRecord_1();
        }
    };
    if(!in_cl_init) {
        static clinit_ clinit_instance;
    }
}

// Inherited serialize overloads simply forward to the base implementation.
int32_t poi::hssf::record::BoundSheetRecord::serialize(int32_t offset, ::int8_tArray* data)
{
    return super::serialize(offset, data);
}

int8_tArray* poi::hssf::record::BoundSheetRecord::serialize()
{
    return super::serialize();
}

java::lang::Class* poi::hssf::record::BoundSheetRecord::getClass0()
{
    return class_();
}
| pebble2015/cpoi | src/org/apache/poi/hssf/record/BoundSheetRecord.cpp | C++ | apache-2.0 | 8,028 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.devicefarm.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents a specific warning or failure.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/devicefarm-2015-06-23/Problem" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Problem implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Information about the associated run.
* </p>
*/
private ProblemDetail run;
/**
* <p>
* Information about the associated job.
* </p>
*/
private ProblemDetail job;
/**
* <p>
* Information about the associated suite.
* </p>
*/
private ProblemDetail suite;
/**
* <p>
* Information about the associated test.
* </p>
*/
private ProblemDetail test;
/**
* <p>
* Information about the associated device.
* </p>
*/
private Device device;
/**
* <p>
* The problem's result.
* </p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* </ul>
*/
private String result;
/**
* <p>
* A message about the problem's result.
* </p>
*/
private String message;
/**
* <p>
* Information about the associated run.
* </p>
*
* @param run
* Information about the associated run.
*/
public void setRun(ProblemDetail run) {
this.run = run;
}
/**
* <p>
* Information about the associated run.
* </p>
*
* @return Information about the associated run.
*/
public ProblemDetail getRun() {
return this.run;
}
/**
* <p>
* Information about the associated run.
* </p>
*
* @param run
* Information about the associated run.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Problem withRun(ProblemDetail run) {
setRun(run);
return this;
}
/**
* <p>
* Information about the associated job.
* </p>
*
* @param job
* Information about the associated job.
*/
public void setJob(ProblemDetail job) {
this.job = job;
}
/**
* <p>
* Information about the associated job.
* </p>
*
* @return Information about the associated job.
*/
public ProblemDetail getJob() {
return this.job;
}
/**
* <p>
* Information about the associated job.
* </p>
*
* @param job
* Information about the associated job.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Problem withJob(ProblemDetail job) {
setJob(job);
return this;
}
/**
* <p>
* Information about the associated suite.
* </p>
*
* @param suite
* Information about the associated suite.
*/
public void setSuite(ProblemDetail suite) {
this.suite = suite;
}
/**
* <p>
* Information about the associated suite.
* </p>
*
* @return Information about the associated suite.
*/
public ProblemDetail getSuite() {
return this.suite;
}
/**
* <p>
* Information about the associated suite.
* </p>
*
* @param suite
* Information about the associated suite.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Problem withSuite(ProblemDetail suite) {
setSuite(suite);
return this;
}
/**
* <p>
* Information about the associated test.
* </p>
*
* @param test
* Information about the associated test.
*/
public void setTest(ProblemDetail test) {
this.test = test;
}
/**
* <p>
* Information about the associated test.
* </p>
*
* @return Information about the associated test.
*/
public ProblemDetail getTest() {
return this.test;
}
/**
* <p>
* Information about the associated test.
* </p>
*
* @param test
* Information about the associated test.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Problem withTest(ProblemDetail test) {
setTest(test);
return this;
}
/**
* <p>
* Information about the associated device.
* </p>
*
* @param device
* Information about the associated device.
*/
public void setDevice(Device device) {
this.device = device;
}
/**
* <p>
* Information about the associated device.
* </p>
*
* @return Information about the associated device.
*/
public Device getDevice() {
return this.device;
}
/**
* <p>
* Information about the associated device.
* </p>
*
* @param device
* Information about the associated device.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Problem withDevice(Device device) {
setDevice(device);
return this;
}
/**
* <p>
* The problem's result.
* </p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* </ul>
*
* @param result
* The problem's result.</p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* @see ExecutionResult
*/
public void setResult(String result) {
this.result = result;
}
/**
* <p>
* The problem's result.
* </p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* </ul>
*
* @return The problem's result.</p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* @see ExecutionResult
*/
public String getResult() {
return this.result;
}
/**
* <p>
* The problem's result.
* </p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* </ul>
*
* @param result
* The problem's result.</p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see ExecutionResult
*/
public Problem withResult(String result) {
setResult(result);
return this;
}
/**
* <p>
* The problem's result.
* </p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* </ul>
*
* @param result
* The problem's result.</p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* @see ExecutionResult
*/
public void setResult(ExecutionResult result) {
withResult(result);
}
/**
* <p>
* The problem's result.
* </p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* </ul>
*
* @param result
* The problem's result.</p>
* <p>
* Allowed values include:
* </p>
* <ul>
* <li>
* <p>
* PENDING
* </p>
* </li>
* <li>
* <p>
* PASSED
* </p>
* </li>
* <li>
* <p>
* WARNED
* </p>
* </li>
* <li>
* <p>
* FAILED
* </p>
* </li>
* <li>
* <p>
* SKIPPED
* </p>
* </li>
* <li>
* <p>
* ERRORED
* </p>
* </li>
* <li>
* <p>
* STOPPED
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see ExecutionResult
*/
public Problem withResult(ExecutionResult result) {
this.result = result.toString();
return this;
}
/**
* <p>
* A message about the problem's result.
* </p>
*
* @param message
* A message about the problem's result.
*/
public void setMessage(String message) {
this.message = message;
}
/**
* <p>
* A message about the problem's result.
* </p>
*
* @return A message about the problem's result.
*/
public String getMessage() {
return this.message;
}
/**
* <p>
* A message about the problem's result.
* </p>
*
* @param message
* A message about the problem's result.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Problem withMessage(String message) {
setMessage(message);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getRun() != null)
sb.append("Run: ").append(getRun()).append(",");
if (getJob() != null)
sb.append("Job: ").append(getJob()).append(",");
if (getSuite() != null)
sb.append("Suite: ").append(getSuite()).append(",");
if (getTest() != null)
sb.append("Test: ").append(getTest()).append(",");
if (getDevice() != null)
sb.append("Device: ").append(getDevice()).append(",");
if (getResult() != null)
sb.append("Result: ").append(getResult()).append(",");
if (getMessage() != null)
sb.append("Message: ").append(getMessage());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof Problem == false)
return false;
Problem other = (Problem) obj;
if (other.getRun() == null ^ this.getRun() == null)
return false;
if (other.getRun() != null && other.getRun().equals(this.getRun()) == false)
return false;
if (other.getJob() == null ^ this.getJob() == null)
return false;
if (other.getJob() != null && other.getJob().equals(this.getJob()) == false)
return false;
if (other.getSuite() == null ^ this.getSuite() == null)
return false;
if (other.getSuite() != null && other.getSuite().equals(this.getSuite()) == false)
return false;
if (other.getTest() == null ^ this.getTest() == null)
return false;
if (other.getTest() != null && other.getTest().equals(this.getTest()) == false)
return false;
if (other.getDevice() == null ^ this.getDevice() == null)
return false;
if (other.getDevice() != null && other.getDevice().equals(this.getDevice()) == false)
return false;
if (other.getResult() == null ^ this.getResult() == null)
return false;
if (other.getResult() != null && other.getResult().equals(this.getResult()) == false)
return false;
if (other.getMessage() == null ^ this.getMessage() == null)
return false;
if (other.getMessage() != null && other.getMessage().equals(this.getMessage()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getRun() == null) ? 0 : getRun().hashCode());
hashCode = prime * hashCode + ((getJob() == null) ? 0 : getJob().hashCode());
hashCode = prime * hashCode + ((getSuite() == null) ? 0 : getSuite().hashCode());
hashCode = prime * hashCode + ((getTest() == null) ? 0 : getTest().hashCode());
hashCode = prime * hashCode + ((getDevice() == null) ? 0 : getDevice().hashCode());
hashCode = prime * hashCode + ((getResult() == null) ? 0 : getResult().hashCode());
hashCode = prime * hashCode + ((getMessage() == null) ? 0 : getMessage().hashCode());
return hashCode;
}
    /**
     * Creates a shallow copy of this object via {@link Object#clone()}.
     *
     * @return a shallow clone of this {@code Problem}.
     */
    @Override
    public Problem clone() {
        try {
            return (Problem) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice: the class hierarchy implements Cloneable,
            // so Object.clone() cannot refuse. Wrap defensively anyway.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    /**
     * Marshals this object's fields using the given protocol marshaller.
     * SDK-internal plumbing; not intended for direct use by client code.
     *
     * @param protocolMarshaller the marshaller that receives this object's state.
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.devicefarm.model.transform.ProblemMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| aws/aws-sdk-java | aws-java-sdk-devicefarm/src/main/java/com/amazonaws/services/devicefarm/model/Problem.java | Java | apache-2.0 | 19,577 |
var logger = require('../logging').getLogger(__LOGGER__);
var {PAGE_CSS_NODE_ID} = require('../constants');
var Q = require('q');
var PageUtil = require('./PageUtil')
// Cache of stylesheet nodes currently attached to the document, keyed by
// normalized href (for <link> tags) or by inline CSS text (for <style> tags).
var loadedCss = {};
module.exports = {

	// Seeds the loadedCss cache from the CSS nodes the server already wrote
	// into <head>, so later page-to-page transitions can diff against them.
	// Client-side only.
	registerPageLoad: function registerPageLoad() {
		if (SERVER_SIDE) {
			throw new Error("ClientCssHelper.registerPageLoad can't be called server-side");
		}

		// for each css node in the head that the react-server server wrote to the response, note it down in the cache, so that
		// we can remove it on a page to page transition.
		var serverWrittenLinkNodes = document.head.querySelectorAll(`link[${PAGE_CSS_NODE_ID}],style[${PAGE_CSS_NODE_ID}]`);
		for (var i = 0; i < serverWrittenLinkNodes.length; i++) {
			var key, styleNode = serverWrittenLinkNodes[i];
			if (styleNode.href) {
				// External stylesheet: key by normalized URL.
				key = normalizeLocalUrl(styleNode.href);
			} else {
				// Inline stylesheet: key by its CSS text.
				key = styleNode.innerHTML;
			}
			loadedCss[key] = styleNode;
		}
	},

	// Reconciles the document's stylesheets with the set required by
	// pageObject: removes cached nodes no longer needed, then appends the
	// missing ones. Returns a promise that resolves once any newly added
	// <link> stylesheets have loaded (where the browser supports onload).
	// Client-side only.
	ensureCss: function ensureCss(routeName, pageObject) {
		if (SERVER_SIDE) {
			// NOTE(review): message says "registerPageLoad" — looks copy-pasted;
			// should presumably read "ensureCss". Left unchanged here.
			throw new Error("ClientCssHelper.registerPageLoad can't be called server-side");
		}

		return Q.all(PageUtil.standardizeStyles(pageObject.getHeadStylesheets())).then(newCss => {
			// Index the desired stylesheets by the same keys used in loadedCss.
			var newCssByKey = {};
			newCss
				.filter(style => !!style)
				.forEach(style => {newCssByKey[this._keyFromStyleSheet(style)] = style});

			// first, remove the unneeded CSS link elements.
			Object.keys(loadedCss).forEach(loadedCssKey => {
				if (!newCssByKey[loadedCssKey]) {
					// remove the corresponding node from the DOM.
					logger.debug("Removing stylesheet: " + loadedCssKey);
					var node = loadedCss[loadedCssKey];
					node.parentNode.removeChild(node);
					delete loadedCss[loadedCssKey];
				}
			});

			// next add the style URLs that weren't already loaded.
			return Q.all(Object.keys(newCssByKey).map(newCssKey => {
				// Undefined unless we end up waiting on a <link> load below.
				var retval;

				if (!loadedCss[newCssKey]) {
					// this means that the CSS is not currently present in the
					// document, so we need to add it.
					logger.debug("Adding stylesheet: " + newCssKey);

					var style = newCssByKey[newCssKey];

					var styleTag;
					if (style.href) {
						styleTag = document.createElement('link');
						styleTag.rel = 'stylesheet';
						styleTag.href = style.href;

						// If we _can_ wait for the CSS to be loaded before
						// proceeding, let's do so.
						if ('onload' in styleTag) {
							var dfd = Q.defer();
							styleTag.onload = dfd.resolve;
							retval = dfd.promise;
						}
					} else {
						// Inline CSS: no load event to wait for.
						styleTag = document.createElement('style');
						styleTag.innerHTML = style.text;
					}
					styleTag.type = style.type;
					styleTag.media = style.media;
					loadedCss[newCssKey] = styleTag;
					document.head.appendChild(styleTag);
				} else {
					logger.debug(`Stylesheet already loaded (no-op): ${newCssKey}`);
				}
				return retval;
			}));
		});
	},

	// Maps a standardized style descriptor to its cache key: normalized href
	// for external sheets, falling back to the inline CSS text.
	_keyFromStyleSheet: function(style) {
		return normalizeLocalUrl(style.href) || style.text;
	},
}
// Normalizes a stylesheet URL so that differently written references to the
// same local file compare equal. Non-string inputs pass through untouched.
function normalizeLocalUrl(url) {
	// Step 1: make the url protocol less first. This helps recognizing http://0.0.0.0:3001/common.css
	//         and //0.0.0.0:3001/common.css as the same file.
	// Step 2: The browser will give us a full URL even if we only put a
	//         path in on the server. So, if we're comparing against just
	//         a path here we need to strip the base off to avoid a flash
	//         of unstyled content.
	if (typeof url === 'string') {
		url = url.replace(/^http[s]?:/, '');
		// Fix: the previous code interpolated location.host unescaped into
		// `new RegExp("^//" + host)`, so dots (and any other regex
		// metacharacters) in the host acted as wildcards. An exact prefix
		// comparison avoids that false-match risk entirely.
		var hostPrefix = "//" + location.host;
		if (url.lastIndexOf(hostPrefix, 0) === 0) {
			url = url.slice(hostPrefix.length);
		}
	}
	return url;
}
| redfin/react-server | packages/react-server/core/util/ClientCssHelper.js | JavaScript | apache-2.0 | 3,519 |
package lesson.types;
public class Classes {

    /**
     * Demo entry point: builds two JustClass instances — one via the default
     * constructor (fields left at 0 / null) and one fully initialized — and
     * prints each, exercising JustClass#toString().
     */
    public static void main(String[] args) {
        JustClass defaultInstance = new JustClass();
        JustClass initialized = new JustClass(123, "sdf");

        System.out.println(defaultInstance);
        System.out.println(initialized);
    }
}
/**
 * Simple demo bean holding a number and a name, with a custom string form
 * of the shape {@code JustClass {name, number}}.
 */
class JustClass {

    private int number;
    private String name;

    /** Creates an instance with default field values (0 and null). */
    public JustClass() { }

    /**
     * Creates a fully initialized instance.
     *
     * @param number numeric value to store
     * @param name   name to store
     */
    public JustClass(int number, String name) {
        this.number = number;
        this.name = name;
    }

    /** Renders as {@code JustClass {name, number}}, e.g. {@code JustClass {sdf, 123}}. */
    @Override
    public String toString() {
        return "JustClass {" + name + ", " + number + "}";
    }
}
| nesterione/JavaTrainings | src/lesson/types/Classes.java | Java | apache-2.0 | 602 |
using GeneticCreatures.Classes.UtilityClasses;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Tester.classes;
namespace GeneticCreatures.Classes.Objects.InanimateObjects
{
class Wall : Selectable
{
public Wall(Position pos)
: base(pos, defaultColour)
{
allWalls.Add(this);
}
public Wall(Position pos, Colour col)
: base(pos, col)
{
allWalls.Add(this);
}
public static List<Wall> allWalls = new List<Wall>();
private static Colour defaultColour = new Colour(100, 100, 100);
private static Colour selectedColour = Colour.White;
public override void Draw()
{
if (GameState.GetState() != GameStates.CreatingNet)
{
Colour chosenCol = colour;
if (this.isSelected)
chosenCol = selectedColour;
ShapeDrawer.DrawCircle(position.Location, DrawRadius, chosenCol);
}
}
protected override void DestroyWorldObject()
{
allWalls.Remove(this);
base.DestroyWorldObject();
}
}
}
| joeeyles/geneticcreatures | GeneticCreatures/GeneticCreatures/Classes/Objects/InanimateObjects/Wall.cs | C# | apache-2.0 | 1,296 |
/**
* Utility classes for converting between granularities of SI (power-of-ten) and IEC (power-of-two)
* byte units and bit units.
* <p>
* <h3>Example Usage</h3>
* What's the difference in hard drive space between perception and actual?
* <pre><code>
* long perception = BinaryByteUnit.TEBIBYTES.toBytes(2);
* long usable = DecimalByteUnit.TERABYTES.toBytes(2);
* long lost = BinaryByteUnit.BYTES.toGibibytes(perception - usable);
* System.out.println(lost + " GiB lost on a 2TB drive.");
* </code></pre>
* <p>
* Method parameter for specifying a resource size.
* <pre><code>
* public void installDiskCache(long count, ByteUnit unit) {
* long size = unit.toBytes(count);
* // TODO Install disk cache of 'size' bytes.
* }
* </code></pre>
*/
package com.jakewharton.byteunits;
| JakeWharton/byteunits | src/main/java/com/jakewharton/byteunits/package-info.java | Java | apache-2.0 | 799 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.experimental.logical.relational;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.util.Pair;
/**
* Schema, from a logical perspective.
*/
public class LogicalSchema {

    /**
     * One column of a {@link LogicalSchema}: an optional alias, a
     * {@link DataType} byte, a uid used for column tracking across operators,
     * and (for tuples/bags) a nested schema.
     */
    public static class LogicalFieldSchema {
        // Column alias; may be null or a fully qualified "a::b::c" name.
        public String alias;
        // DataType constant (e.g. DataType.BAG, DataType.TUPLE).
        public byte type;
        // Unique column id; -1 means "not yet assigned".
        public long uid;
        // Nested schema for complex types; null for scalars.
        public LogicalSchema schema;

        /** Creates a field with no uid assigned yet (uid = -1). */
        public LogicalFieldSchema(String alias, LogicalSchema schema, byte type) {
            this(alias, schema, type, -1);
        }

        /**
         * Creates a field with an explicit uid.
         *
         * @param alias  column alias, possibly null
         * @param schema nested schema for complex types, possibly null
         * @param type   {@link DataType} constant
         * @param uid    unique column id, or -1 if unassigned
         */
        public LogicalFieldSchema(String alias, LogicalSchema schema, byte type, long uid) {
            this.alias = alias;
            this.type = type;
            this.schema = schema;
            this.uid = uid;
        }

        /**
         * Equality is defined as having the same type and either the same schema
         * or both null schema. Alias and uid are not checked.
         */
        public boolean isEqual(Object other) {
            if (other instanceof LogicalFieldSchema) {
                LogicalFieldSchema ofs = (LogicalFieldSchema)other;
                if (type != ofs.type) return false;
                if (schema == null && ofs.schema == null) return true;
                if (schema == null) return false;
                else return schema.isEqual(ofs.schema);
            } else {
                return false;
            }
        }

        /**
         * Renders as {@code alias#uid:typename}, recursing into nested schemas
         * for bags and tuples.
         */
        public String toString() {
            if( type == DataType.BAG ) {
                if( schema == null ) {
                    // NOTE(review): the trailing "#" after "bag{}" looks like a
                    // typo (compare the tuple branch below) — confirm before
                    // changing, since output may be parsed elsewhere.
                    return ( alias + "#" + uid + ":bag{}#" );
                }
                return ( alias + "#" + uid + ":bag{" + schema.toString() + "}" );
            } else if( type == DataType.TUPLE ) {
                if( schema == null ) {
                    return ( alias + "#" + uid + ":tuple{}" );
                }
                return ( alias + "#" + uid + ":tuple(" + schema.toString() + ")" );
            }
            return ( alias + "#" + uid + ":" + DataType.findTypeName(type) );
        }
    }

    // Columns in declaration order.
    private List<LogicalFieldSchema> fields;
    // alias -> (field index, isFullName). isFullName=true marks the complete
    // "a::b::c" alias, which may never be evicted; short-name entries may be
    // removed when two columns would share the same short name.
    private Map<String, Pair<Integer, Boolean>> aliases;

    public LogicalSchema() {
        fields = new ArrayList<LogicalFieldSchema>();
        aliases = new HashMap<String, Pair<Integer, Boolean>>();
    }

    /**
     * Add a field to this schema.
     * @param field to be added to the schema
     */
    public void addField(LogicalFieldSchema field) {
        fields.add(field);
        if (field.alias != null && !field.alias.equals("")) {
            // put the full name of this field into aliases map
            // boolean in the pair indicates if this alias is full name
            aliases.put(field.alias, new Pair<Integer, Boolean>(fields.size()-1, true));

            int index = 0;

            // check and put short names into alias map if there is no conflict
            // e.g. for "a::b::c" the candidate short names are "b::c" and "c".

            while(index != -1) {
                index = field.alias.indexOf("::", index);
                if (index != -1) {
                    String a = field.alias.substring(index+2);
                    if (aliases.containsKey(a)) {
                        // remove conflict if the conflict is not full name
                        // we can never remove full name
                        if (!aliases.get(a).second) {
                            aliases.remove(a);
                        }
                    }else{
                        // put alias into map and indicate it is a short name
                        aliases.put(a, new Pair<Integer, Boolean>(fields.size()-1, false));
                    }

                    // advance past this "::" so the next iteration finds the
                    // following separator (if any)
                    index = index +2;
                }
            }
        }
    }

    /**
     * Fetch a field by alias
     * @param alias
     * @return field associated with alias, or null if no such field
     */
    public LogicalFieldSchema getField(String alias) {
        Pair<Integer, Boolean> p = aliases.get(alias);
        if (p == null) {
            return null;
        }

        return fields.get(p.first);
    }

    /**
     * Fetch a field by field number
     * @param fieldNum field number to fetch
     * @return field
     */
    public LogicalFieldSchema getField(int fieldNum) {
        return fields.get(fieldNum);
    }

    /**
     * Get all fields
     * @return list of all fields
     */
    public List<LogicalFieldSchema> getFields() {
        return fields;
    }

    /**
     * Get the size of the schema.
     * @return size
     */
    public int size() {
       return fields.size();
    }

    /**
     * Two schemas are equal if they are of equal size and their fields
     * schemas considered in order are equal.
     */
    public boolean isEqual(Object other) {
        if (other != null && other instanceof LogicalSchema) {
            LogicalSchema os = (LogicalSchema)other;
            if (size() != os.size()) return false;
            for (int i = 0; i < size(); i++) {
                if (!getField(i).isEqual(os.getField(i))) return false;
            }
            return true;
        } else {
            return false;
        }

    }

    /**
     * Look for the index of the field that contains the specified uid
     * @param uid the uid to look for
     * @return the index of the field, -1 if not found
     */
    public int findField(long uid) {

        for(int i=0; i< size(); i++) {
            LogicalFieldSchema f = getField(i);
            // if this field has the same uid, then return this field
            if (f.uid == uid) {
                return i;
            }

            // if this field has a schema, check its schema
            if (f.schema != null) {
                if (f.schema.findField(uid) != -1) {
                    return i;
                }
            }
        }

        return -1;
    }

    /**
     * Merge two schemas.
     * @param s1
     * @param s2
     * @return a merged schema, or null if the merge fails
     */
    public static LogicalSchema merge(LogicalSchema s1, LogicalSchema s2) {
        // TODO: not yet implemented — currently always reports failure (null).
        return null;
    }

    /** Comma-separated rendering of all fields, without a trailing comma. */
    public String toString() {
        StringBuilder str = new StringBuilder();

        for( LogicalFieldSchema field : fields ) {
            str.append( field.toString() + "," );
        }
        if( fields.size() != 0 ) {
            str.deleteCharAt( str.length() -1 );
        }
        return str.toString();
    }

}
| hirohanin/pig7hadoop21 | src/org/apache/pig/experimental/logical/relational/LogicalSchema.java | Java | apache-2.0 | 7,432 |
package com.github.nikolaymakhonin.android_app_example.di.factories;
import android.content.Context;
import android.support.annotation.NonNull;
import com.github.nikolaymakhonin.android_app_example.di.components.AppComponent;
import com.github.nikolaymakhonin.android_app_example.di.components.DaggerAppComponent;
import com.github.nikolaymakhonin.android_app_example.di.components.DaggerServiceComponent;
import com.github.nikolaymakhonin.android_app_example.di.components.ServiceComponent;
import com.github.nikolaymakhonin.common_di.modules.service.ServiceModuleBase;
/**
 * Static factory for the application's Dagger components.
 */
// NOTE(review): a private constructor would prevent accidental instantiation
// of this all-static class; left out here to keep the public surface unchanged.
public final class ComponentsFactory {

    /**
     * Builds the application-level component on top of a freshly built
     * service component.
     *
     * @param appContext application context used to construct the service module
     * @return a fully wired {@link AppComponent}
     */
    public static AppComponent buildAppComponent(@NonNull Context appContext) {
        return DaggerAppComponent.builder()
                .serviceComponent(buildServiceComponent(appContext))
                .build();
    }

    /**
     * Builds the service-level component from a {@link ServiceModuleBase}
     * created around the given application context.
     *
     * @param appContext application context handed to the service module
     * @return a fully wired {@link ServiceComponent}
     */
    public static ServiceComponent buildServiceComponent(@NonNull Context appContext) {
        return DaggerServiceComponent.builder()
                .serviceModuleBase(new ServiceModuleBase(appContext))
                .build();
    }
}
| NikolayMakhonin/AndroidAppExample | AndroidAppExample/AppExample/src/main/java/com/github/nikolaymakhonin/android_app_example/di/factories/ComponentsFactory.java | Java | apache-2.0 | 1,243 |
/* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.objectdiagram.command;
import net.sourceforge.plantuml.LineLocation;
import net.sourceforge.plantuml.command.CommandExecutionResult;
import net.sourceforge.plantuml.command.SingleLineCommand2;
import net.sourceforge.plantuml.command.regex.IRegex;
import net.sourceforge.plantuml.command.regex.RegexConcat;
import net.sourceforge.plantuml.command.regex.RegexLeaf;
import net.sourceforge.plantuml.command.regex.RegexResult;
import net.sourceforge.plantuml.cucadiagram.IEntity;
import net.sourceforge.plantuml.objectdiagram.AbstractClassOrObjectDiagram;
import net.sourceforge.plantuml.skin.VisibilityModifier;
import net.sourceforge.plantuml.ugraphic.color.NoSuchColorException;
/**
 * Single-line command of the form {@code name : member} that appends a field
 * or method line to the named entity's body in a class/object diagram.
 */
public class CommandAddData extends SingleLineCommand2<AbstractClassOrObjectDiagram> {

	public CommandAddData() {
		super(getRegexConcat());
	}

	/** Pattern: an identifier, a colon (spaces allowed around it), then the member text. */
	static IRegex getRegexConcat() {
		return RegexConcat.build(CommandAddData.class.getName(), RegexLeaf.start(), //
				new RegexLeaf("NAME", "([%pLN_.]+)"), //
				RegexLeaf.spaceZeroOrMore(), //
				new RegexLeaf(":"), //
				RegexLeaf.spaceZeroOrMore(), //
				new RegexLeaf("DATA", "(.*)"), RegexLeaf.end()); //
	}

	@Override
	protected CommandExecutionResult executeArg(AbstractClassOrObjectDiagram diagram, LineLocation location,
			RegexResult arg) throws NoSuchColorException {
		final String entityName = arg.get("NAME", 0);
		// Look up the target leaf, creating it on first mention.
		final IEntity target = diagram.getOrCreateLeaf(diagram.buildLeafIdent(entityName),
				diagram.buildCode(entityName), null, null);

		final String member = arg.get("DATA", 0);
		// A leading visibility character (+, -, #, ~) switches the diagram
		// into visibility-modifier rendering mode.
		final boolean hasVisibilityPrefix = member.length() > 0 && VisibilityModifier.isVisibilityCharacter(member);
		if (hasVisibilityPrefix) {
			diagram.setVisibilityModifierPresent(true);
		}
		target.getBodier().addFieldOrMethod(member);
		return CommandExecutionResult.ok();
	}
}
| talsma-ict/umldoclet | src/plantuml-asl/src/net/sourceforge/plantuml/objectdiagram/command/CommandAddData.java | Java | apache-2.0 | 2,860 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.xml.security.test.stax.c14n;
import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_Excl;
import org.junit.Before;
import org.apache.xml.security.stax.ext.stax.XMLSecEvent;
import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_ExclOmitCommentsTransformer;
import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_ExclWithCommentsTransformer;
import org.apache.xml.security.test.stax.utils.XMLSecEventAllocator;
import org.apache.xml.security.utils.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.*;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author $Author: coheigea $
* @version $Revision: 1721336 $ $Date: 2015-12-22 10:45:18 +0000 (Tue, 22 Dec 2015) $
*/
public class Canonicalizer20010315ExclusiveTest extends org.junit.Assert {
private XMLInputFactory xmlInputFactory;
@Before
public void setUp() throws Exception {
this.xmlInputFactory = XMLInputFactory.newInstance();
this.xmlInputFactory.setEventAllocator(new XMLSecEventAllocator());
}
@org.junit.Test
public void test221excl() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer();
c.setOutputStream(baos);
XMLEventReader xmlSecEventReader = xmlInputFactory.createXMLEventReader(
this.getClass().getClassLoader().getResourceAsStream(
"org/apache/xml/security/c14n/inExcl/example2_2_1.xml")
);
XMLSecEvent xmlSecEvent = null;
while (xmlSecEventReader.hasNext()) {
xmlSecEvent = (XMLSecEvent) xmlSecEventReader.nextEvent();
if (xmlSecEvent.isStartElement() && xmlSecEvent.asStartElement().getName().equals(new QName("http://example.net", "elem2"))) {
break;
}
}
while (xmlSecEventReader.hasNext()) {
c.transform(xmlSecEvent);
if (xmlSecEvent.isEndElement() && xmlSecEvent.asEndElement().getName().equals(new QName("http://example.net", "elem2"))) {
break;
}
xmlSecEvent = (XMLSecEvent) xmlSecEventReader.nextEvent();
}
byte[] reference =
getBytesFromResource(this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/inExcl/example2_2_c14nized_exclusive.xml"));
boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray());
if (!equals) {
System.out.println("Expected:\n" + new String(reference, "UTF-8"));
System.out.println("");
System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8"));
}
assertTrue(equals);
}
@org.junit.Test
public void test222excl() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer();
c.setOutputStream(baos);
canonicalize(c,
this.getClass().getClassLoader().getResourceAsStream(
"org/apache/xml/security/c14n/inExcl/example2_2_2.xml"),
new QName("http://example.net", "elem2")
);
byte[] reference =
getBytesFromResource(this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/inExcl/example2_2_c14nized_exclusive.xml"));
boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray());
if (!equals) {
System.out.println("Expected:\n" + new String(reference, "UTF-8"));
System.out.println("");
System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8"));
}
assertTrue(equals);
}
@org.junit.Test
public void test24excl() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer();
c.setOutputStream(baos);
canonicalize(c,
this.getClass().getClassLoader().getResourceAsStream(
"org/apache/xml/security/c14n/inExcl/example2_4.xml"),
new QName("http://example.net", "elem2")
);
byte[] reference =
getBytesFromResource(this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/inExcl/example2_4_c14nized.xml"));
boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray());
if (!equals) {
System.out.println("Expected:\n" + new String(reference, "UTF-8"));
System.out.println("");
System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8"));
}
assertTrue(equals);
}
@org.junit.Test
public void testComplexDocexcl() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer();
c.setOutputStream(baos);
canonicalize(c,
this.getClass().getClassLoader().getResourceAsStream(
"org/apache/xml/security/c14n/inExcl/plain-soap-1.1.xml"),
new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body", "env")
);
byte[] reference =
getBytesFromResource(this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/inExcl/plain-soap-c14nized.xml"));
boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray());
if (!equals) {
System.out.println("Expected:\n" + new String(reference, "UTF-8"));
System.out.println("");
System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8"));
}
assertTrue(equals);
}
@org.junit.Test
public void testNodeSet() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("env");
inclusiveNamespaces.add("ns0");
inclusiveNamespaces.add("xsi");
inclusiveNamespaces.add("wsu");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
/**
* Method test24Aexcl - a testcase for SANTUARIO-263
* "Canonicalizer can't handle dynamical created DOM correctly"
* https://issues.apache.org/jira/browse/SANTUARIO-263
*/
@org.junit.Test
public void test24Aexcl() throws Exception {
Document doc = XMLUtils.createDocumentBuilder(false).newDocument();
Element local = doc.createElementNS("foo:bar", "dsig:local");
Element test = doc.createElementNS("http://example.net", "etsi:test");
Element elem2 = doc.createElementNS("http://example.net", "etsi:elem2");
Element stuff = doc.createElementNS("foo:bar", "dsig:stuff");
elem2.appendChild(stuff);
test.appendChild(elem2);
local.appendChild(test);
doc.appendChild(local);
TransformerFactory tf = TransformerFactory.newInstance();
Transformer t = tf.newTransformer();
StringWriter stringWriter = new StringWriter();
StreamResult streamResult = new StreamResult(stringWriter);
t.transform(new DOMSource(doc), streamResult);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
Canonicalizer20010315_ExclWithCommentsTransformer c =
new Canonicalizer20010315_ExclWithCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(stringWriter.toString()), new QName("http://example.net", "elem2"));
byte[] reference =
getBytesFromResource(this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/inExcl/example2_4_c14nized.xml"));
boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray());
assertTrue(equals);
}
/**
* Tests default-namespace behavior when "#default" is in the
* InclusiveNamespaces prefix list. The input declares no default namespace,
* so including "#default" must make no difference: both scenarios below
* (with and without "#default") are expected to produce identical output.
*
* @throws Exception
*/
@org.junit.Test
public void testDefaultNSInInclusiveNamespacePrefixList1() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
{
// Scenario 1: "#default" plus "xsi" in the prefix list.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
// NOTE(review): JUnit's assertEquals convention is (expected, actual);
// arguments throughout this class are swapped — affects failure messages only.
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
{
//exactly the same outcome is expected if #default is not set:
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
}
/**
* Tests default-namespace behavior when "#default" is in the
* InclusiveNamespaces prefix list and the input DOES declare a default
* namespace (http://example.com). With "#default" the inherited default
* namespace (and the redundant xmlns="" on ns0:text) must be preserved
* (c14nXML1); without it, both are dropped as not visibly utilized (c14nXML2).
*
* @throws Exception
*/
@org.junit.Test
public void testDefaultNSInInclusiveNamespacePrefixList2() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns=\"http://example.com\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML1 =
"<env:Body"
+ " xmlns=\"http://example.com\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
final String c14nXML2 =
"<env:Body"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
{
// Scenario 1: "#default" in the list — default namespace is retained.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML1);
}
{
// Scenario 2: no "#default" — default namespace declarations are dropped.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML2);
}
}
/**
* Tests default-namespace behavior when "#default" is in the
* InclusiveNamespaces prefix list and the input declares an EMPTY default
* namespace (xmlns="") on the root. An empty default declaration is a no-op,
* so both scenarios (with and without "#default") must yield identical output.
*
* @throws Exception
*/
@org.junit.Test
public void testDefaultNSInInclusiveNamespacePrefixList3() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns=\"\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
{
// Scenario 1: "#default" plus "xsi" in the prefix list.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
{
//exactly the same outcome is expected if #default is not set:
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
}
/**
* Tests default-namespace behavior when "#default" is in the
* InclusiveNamespaces prefix list. No default namespace is in scope, but
* ns0:text carries a superfluous xmlns=""; that declaration must be dropped
* in both scenarios, so the output is identical with and without "#default".
*
* @throws Exception
*/
@org.junit.Test
public void testDefaultNSInInclusiveNamespacePrefixList4() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
{
// Scenario 1: "#default" plus "xsi" in the prefix list.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
{
//exactly the same outcome is expected if #default is not set:
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("xsi");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
}
/**
* Tests the PROPAGATE_DEFAULT_NAMESPACE option (the previous javadoc was a
* copy-paste from the prefix-list tests). With the option enabled and
* "#default" in the prefix list, the default namespace in scope at the apex
* element is emitted explicitly — here xmlns="" on env:Body, since the input
* declares no default namespace at all.
*
* @throws Exception
*/
@org.junit.Test
public void testPropagateDefaultNs1() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns=\"\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
/**
* PROPAGATE_DEFAULT_NAMESPACE: the inherited non-empty default namespace
* (http://example.com) declared on the Envelope is re-declared explicitly
* on the canonicalized env:Body.
*
* @throws Exception
*/
@org.junit.Test
public void testPropagateDefaultNs2() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns=\"http://example.com\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns=\"http://example.com\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
/**
* PROPAGATE_DEFAULT_NAMESPACE: the default namespace is propagated onto
* env:Body, and the explicit xmlns="" override on ns0:text is preserved
* because it is needed to undeclare the inherited default.
*
* @throws Exception
*/
@org.junit.Test
public void testPropagateDefaultNs3() throws Exception {
final String XML =
"<Envelope"
+ " xmlns=\"http://example.com\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns=\"http://example.com\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
/**
* PROPAGATE_DEFAULT_NAMESPACE: an explicit empty default namespace
* (xmlns="") declared on the Envelope is propagated onto the canonicalized
* env:Body.
*
* @throws Exception
*/
@org.junit.Test
public void testPropagateDefaultNs4() throws Exception {
final String XML =
"<Envelope"
+ " xmlns=\"\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</Envelope>";
final String c14nXML =
"<env:Body"
+ " xmlns=\"\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\""
+ " wsu:Id=\"body\">"
+ "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>";
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
/**
* PROPAGATE_DEFAULT_NAMESPACE with a deeper apex element: canonicalization
* starts at ns0:Ping, whose nearest default-namespace declaration is the
* xmlns="" on env:Body; that empty default is propagated onto ns0:Ping.
*
* @throws Exception
*/
@org.junit.Test
public void testPropagateDefaultNs5() throws Exception {
final String XML =
"<env:Envelope"
+ " xmlns=\"http://example.com\""
+ " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\""
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
+ " xmlns:ns0=\"http://xmlsoap.org/Ping\""
+ " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">"
+ "<env:Body xmlns=\"\" wsu:Id=\"body\">"
+ "<ns0:Ping xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>"
+ "</env:Body>"
+ "</env:Envelope>";
final String c14nXML =
"<ns0:Ping xmlns=\"\" xmlns:ns0=\"http://xmlsoap.org/Ping\" " +
"xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">"
+ "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>"
+ "</ns0:Ping>";
ByteArrayOutputStream baos = new ByteArrayOutputStream();
List<String> inclusiveNamespaces = new ArrayList<String>();
inclusiveNamespaces.add("#default");
Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer();
Map<String, Object> transformerProperties = new HashMap<String, Object>();
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces);
transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE);
c.setProperties(transformerProperties);
c.setOutputStream(baos);
canonicalize(c, new StringReader(XML), new QName("http://xmlsoap.org/Ping", "Ping"));
assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML);
}
/**
* Canonicalizes the element named {@code elementName} read from a raw byte
* stream by wrapping it in a StAX event reader and delegating to the
* event-based overload.
*/
private void canonicalize(
Canonicalizer20010315_Excl c, InputStream inputStream, QName elementName)
throws XMLStreamException {
final XMLEventReader eventReader = xmlInputFactory.createXMLEventReader(inputStream);
canonicalize(c, eventReader, elementName);
}
/**
* Canonicalizes the element named {@code elementName} read from a character
* stream by wrapping it in a StAX event reader and delegating to the
* event-based overload.
*/
private void canonicalize(
Canonicalizer20010315_Excl c, Reader reader, QName elementName)
throws XMLStreamException {
final XMLEventReader eventReader = xmlInputFactory.createXMLEventReader(reader);
canonicalize(c, eventReader, elementName);
}
/**
* Feeds the events of the element named {@code elementName} (inclusive of
* its start and end events) through the canonicalizer transform.
*
* The first loop skips forward until the matching start element; the second
* loop transforms each event and stops once the matching end element has
* been transformed. Note the ordering is deliberate: transform() is called
* on the current event BEFORE advancing, so both the start- and end-element
* events are passed to the transform exactly once.
*/
private void canonicalize(
Canonicalizer20010315_Excl c, XMLEventReader xmlEventReader, QName elementName)
throws XMLStreamException {
XMLSecEvent xmlSecEvent = null;
while (xmlEventReader.hasNext()) {
xmlSecEvent = (XMLSecEvent) xmlEventReader.nextEvent();
if (xmlSecEvent.isStartElement() && xmlSecEvent.asStartElement().getName().equals(elementName)) {
break;
}
}
while (xmlEventReader.hasNext()) {
c.transform(xmlSecEvent);
if (xmlSecEvent.isEndElement() && xmlSecEvent.asEndElement().getName().equals(elementName)) {
break;
}
xmlSecEvent = (XMLSecEvent) xmlEventReader.nextEvent();
}
}
/**
* Reads the entire content of the given resource URL into a byte array.
*
* @param resource the resource to read; its stream is always closed
* @return the resource content
* @throws IOException if the resource cannot be opened or read
*/
public static byte[] getBytesFromResource(URL resource) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
InputStream inputStream = resource.openStream();
try {
byte[] buf = new byte[1024];
int len;
// -1 is the documented end-of-stream sentinel; the previous "> 0" test
// would have stopped early on a (legal) zero-length read.
while ((len = inputStream.read(buf)) != -1) {
baos.write(buf, 0, len);
}
return baos.toByteArray();
} finally {
inputStream.close();
}
}
} | Legostaev/xmlsec-gost | src/test/java/org/apache/xml/security/test/stax/c14n/Canonicalizer20010315ExclusiveTest.java | Java | apache-2.0 | 40,733 |
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.biodata.formats.io;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Base class for readers of record-oriented file formats, producing items of
* type {@code T}.
*/
public abstract class AbstractFormatReader<T> {

    /** File being read; {@code null} when constructed without a path. */
    protected Path path;
    protected Logger logger;

    protected AbstractFormatReader() {
        path = null;
        logger = LoggerFactory.getLogger(AbstractFormatReader.class);
//        logger.setLevel(Logger.DEBUG_LEVEL);
    }

    /**
     * Creates a reader for the given file.
     *
     * @param f the file to read
     * @throws IOException if {@code f} does not exist
     */
    protected AbstractFormatReader(Path f) throws IOException {
        // Bug fix: the original code called Files.exists(f) and discarded the
        // result, so a non-existent path was silently accepted. Fail fast instead.
        if (!Files.exists(f)) {
            throw new IOException("File not found: " + f);
        }
        this.path = f;
        logger = LoggerFactory.getLogger(AbstractFormatReader.class);
//        logger.setLevel(Logger.DEBUG_LEVEL);
    }

    /** @return the number of records in the file. */
    public abstract int size() throws IOException, FileFormatException;

    /** @return the next record, or {@code null}-like sentinel per subclass contract. */
    public abstract T read() throws FileFormatException;

    /** @return the next record whose raw line matches {@code regexFilter}. */
    public abstract T read(String regexFilter) throws FileFormatException;

    /** @return up to {@code size} records. */
    public abstract List<T> read(int size) throws FileFormatException;

    /** @return all remaining records. */
    public abstract List<T> readAll() throws FileFormatException, IOException;

    /** @return all remaining records matching {@code pattern}. */
    public abstract List<T> readAll(String pattern) throws FileFormatException;

    /** Releases any underlying resources. */
    public abstract void close() throws IOException;
}
| kalyanreddyemani/biodata | biodata-formats/src/main/java/org/opencb/biodata/formats/io/AbstractFormatReader.java | Java | apache-2.0 | 1,817 |
<?php
if ( ! function_exists('env') ) {
    /**
     * Get the value of an environment variable.
     *
     * As the original docblock promised (but the code did not implement),
     * the special string values "true"/"false"/"null"/"empty" — optionally
     * parenthesised — are converted to their PHP equivalents, and surrounding
     * double quotes are stripped.
     *
     * @param string $key
     * @param mixed $default returned (via value()) when the variable is unset
     * @return mixed
     */
    function env($key, $default = null) {
        $value = getenv($key);
        if ($value === false) {
            // value() resolves Closure defaults; assumed to be defined
            // elsewhere in this helpers package (Laravel-style) — unchanged.
            return value($default);
        }
        switch (strtolower($value)) {
            case 'true':
            case '(true)':
                return true;
            case 'false':
            case '(false)':
                return false;
            case 'empty':
            case '(empty)':
                return '';
            case 'null':
            case '(null)':
                return null;
        }
        // Strip a matching pair of surrounding double quotes.
        if (strlen($value) > 1 && substr($value, 0, 1) === '"' && substr($value, -1) === '"') {
            return substr($value, 1, -1);
        }
        return $value;
    }
}
if ( ! function_exists('dump') ) {
/**
* Browser-friendly variable dump.
* @param mixed $var variable to dump
* @param boolean $echo whether to echo the output (default true); when false the formatted string is returned instead
* @param string $label optional label prepended to the output (default none)
* @param boolean $strict strict mode (default true): uses var_dump; non-strict uses print_r
* @return void|string null when echoed, otherwise the formatted dump string
*/
function dump($var, $echo=true, $label=null, $strict=true) {
$label = ($label === null) ? '' : rtrim($label) . ' ';
if (!$strict) {
// Non-strict: print_r, HTML-escaped only when html_errors is on.
if (ini_get('html_errors')) {
$output = print_r($var, true);
$output = '<pre>' . $label . htmlspecialchars($output, ENT_QUOTES) . '</pre>';
} else {
$output = $label . print_r($var, true);
}
} else {
// Strict: capture var_dump() output via output buffering.
ob_start();
var_dump($var);
$output = ob_get_clean();
// When xdebug is loaded it already pretty-prints/escapes; otherwise
// collapse "]=>\n    " to "] => " and wrap in an escaped <pre> block.
if (!extension_loaded('xdebug')) {
$output = preg_replace('/\]\=\>\n(\s+)/m', '] => ', $output);
$output = '<pre>' . $label . htmlspecialchars($output, ENT_QUOTES) . '</pre>';
}
}
if ($echo) {
echo($output);
return null;
}else
return $output;
}
}
package com.jpattern.core.command;
import com.jpattern.core.IProvider;
import com.jpattern.core.exception.NullProviderException;
import com.jpattern.logger.ILogger;
import com.jpattern.logger.SystemOutLoggerFactory;
/**
* Base class for commands (and chains of commands) executed against an
* {@link IProvider}. A command tracks whether it actually executed
* successfully ({@code executed}) and whether it has already been rolled
* back ({@code rolledback}), so a rollback is performed at most once and
* only after a successful execution.
*
* @author Francesco Cina'
*
* 11/set/2011
*/
public abstract class ACommand<T extends IProvider> {
// Executor driving execute/rollback; lazily defaulted to DefaultCommandExecutor.
private ICommandExecutor commandExecutor;
// Strategy applied when a RuntimeException escapes execute()/rollback();
// lazily defaulted to CatchOnExceptionStrategy (log and continue).
private IOnExceptionStrategy onExceptionStrategy;
private T provider;
private ILogger logger = null;
// True once execute() completed without adding error messages.
private boolean executed = false;
// True once a rollback has been performed for the last execution.
private boolean rolledback = false;
/**
* This method launch the execution of the command (or chain of commands) using the
* default Executor and catching every runtime exception.
* Equivalent to: exec(provider, new DefaultCommandExecutor());
* @return the result of the execution
*/
public final ACommandResult exec(T provider) {
return exec(provider, new DefaultCommandExecutor());
}
/**
* This method launch the execution of the command (or chain of commands).
* Every command in the chain will be managed by an ICommandExecutor object.
* @param commandExecutor the executor in which the command will run
* @return the result of the execution
*/
public final ACommandResult exec(T provider, ICommandExecutor commandExecutor) {
visit(provider);
return exec( commandExecutor, new CommandResult());
}
/**
* This method launch the rollback of the command execution (or chain of commands) using the
* default Executor and catching every runtime exception.
* The rollback is effectively performed only if the command has been executed with a positive result, otherwise
* the command is intended as "not executed" then no rollback will be performed.
* Equivalent to: rollback(provider, new DefaultCommandExecutor());
* @return the result of the rollback
*/
public final ACommandResult rollback(T provider) {
return rollback(provider, new DefaultCommandExecutor());
}
/**
* This method launch the rollback of the command execution (or chain of commands) using a custom command executor.
* The rollback is effectively performed only if the command has been executed with a positive result, otherwise
* the command is intended as "not executed" then no rollback will be performed.
* @return the result of the rollback
*/
public final ACommandResult rollback(T provider, ICommandExecutor commandExecutor) {
visit(provider);
return rollback(commandExecutor, new CommandResult());
}
// Stores the provider for use by getProvider()/getLogger().
void visit(T provider) {
this.provider = provider;
}
// Internal entry point: records the executor, stamps the start time and
// delegates to the executor, which calls back into doExec().
protected final ACommandResult exec(ICommandExecutor commandExecutor, ACommandResult commandResult) {
this.commandExecutor = commandExecutor;
commandResult.setExecutionStart(this);
getCommandExecutor().execute(this, commandResult);
return commandResult;
}
// Internal entry point: records the executor, stamps the start time and
// delegates to the executor, which calls back into doRollback().
protected final ACommandResult rollback(ICommandExecutor commandExecutor, ACommandResult commandResult) {
this.commandExecutor = commandExecutor;
commandResult.setExecutionStart(this);
getCommandExecutor().rollback(this, commandResult);
return commandResult;
}
// Lazily defaults to DefaultCommandExecutor.
protected final ICommandExecutor getCommandExecutor() {
if (commandExecutor==null) {
commandExecutor = new DefaultCommandExecutor();
}
return commandExecutor;
}
// Never returns null: fails fast if no provider has been supplied yet.
protected final T getProvider() {
if (provider==null) {
throw new NullProviderException();
}
return provider;
}
// Lazily created; falls back to a System.out logger when no provider is set.
protected final ILogger getLogger() {
if (logger == null) {
if (provider == null) {
logger = new SystemOutLoggerFactory().logger(getClass());
} else {
logger = getProvider().getLoggerService().logger(this.getClass());
}
}
return logger;
}
public void setOnExceptionStrategy(IOnExceptionStrategy onExceptionStrategy) {
this.onExceptionStrategy = onExceptionStrategy;
}
public IOnExceptionStrategy getOnExceptionStrategy() {
if (onExceptionStrategy == null) {
onExceptionStrategy = new CatchOnExceptionStrategy();
}
return onExceptionStrategy;
}
// Runs execute() guarded by the exception strategy. The command counts as
// "executed" only if execute() added no new error messages; the flags are
// reset first so a re-run can be rolled back again. postExecute() and the
// end timestamp always run (nested finally preserves both on exception).
protected final void doExec(ACommandResult commandResult) {
try {
int errorSize = commandResult.getErrorMessages().size();
executed = false;
rolledback = false;
execute(commandResult);
executed = commandResult.getErrorMessages().size() == errorSize;
} catch (RuntimeException e) {
getOnExceptionStrategy().onException(e, getLogger(), commandResult, "RuntimeException thrown");
} finally {
try {
postExecute(commandResult);
} finally {
commandResult.setExecutionEnd(this);
}
}
}
// Hook invoked after execute(), even on failure. Default: no-op.
void postExecute(ACommandResult commandResult) {
}
// Hook invoked after rollback(), even on failure. Default: no-op.
void postRollback(ACommandResult commandResult) {
}
// Rolls back at most once, and only if the last execution succeeded.
protected final void doRollback(ACommandResult commandResult) {
try {
if (executed && !rolledback) {
rollback(commandResult);
rolledback = true;
}
} catch (RuntimeException e) {
getOnExceptionStrategy().onException(e, getLogger(), commandResult, "RuntimeException thrown while rollbacking");
} finally {
try {
postRollback(commandResult);
} finally {
commandResult.setExecutionEnd(this);
}
}
}
/** The command's work; add error messages to the result to signal failure. */
protected abstract void execute(ACommandResult commandResult);
/** Undoes the work of a previously successful execute(). */
protected abstract void rollback(ACommandResult commandResult);
void setExecuted(boolean executed) {
this.executed = executed;
}
boolean isExecuted() {
return executed;
}
void setRolledback(boolean rolledback) {
this.rolledback = rolledback;
}
boolean isRolledback() {
return rolledback;
}
}
| ufoscout/jpattern | core/src/main/java/com/jpattern/core/command/ACommand.java | Java | apache-2.0 | 5,668 |
<?php
namespace App\Http\Controllers\Sadmin;
use App\Http\Controllers\Controller;
use App\Driver;
use App\Customer;
use App\User;
use App\Detail;
use Illuminate\Http\Request;
use Illuminate\Pagination\Paginator;
use Illuminate\Support\Facades\Input;
use Illuminate\Support\Facades\DB;
use Illuminate\Foundation\Auth\AuthenticatesUsers;
use Illuminate\Support\Facades\Auth;
use File;
use Mail;
use PDF;
class BillingController extends Controller
{
public function __construct()
{
// Per-request middleware: resolve the Customer record matching the
// authenticated user's e-mail and cache its id for the billing queries
// used by the other actions of this controller.
$this->middleware(function ($request, $next) {
$user = Auth::user();
$customer_email = Auth::user()->email;
$customer = Customer::where("email", $customer_email)->get();
// NOTE(review): assumes a matching Customer row always exists;
// $customer[0] errors if the collection is empty — TODO confirm.
$this->customer_id = $customer[0]->id;
return $next($request);
});
}
/**
 * List invoiced billing details for the authenticated customer's company.
 *
 * Supports an optional free-text search via the "s" query parameter and
 * paginates 10 rows per page, newest first. Redirects to the sadmin login
 * when no user is authenticated.
 */
public function index(Request $request)
{
    if (Auth::user() == NULL) {
        return redirect('sadmin');
    }
    $customer_email = Auth::user()->email;
    $s = $request->s;

    // Only rows that already have an invoice generated are shown here.
    // (Removed the unused $login_user_id local from the original.)
    $billings = Detail::join('drivers', 'details.driver_id', '=', 'drivers.id')
        ->select('details.*', 'drivers.user_id')
        ->where('drivers.company_id', $this->customer_id)
        ->where('details.invoice_created', '<>', '');
    if (isset($s)) {
        $billings->search($s);
    }
    $billings = $billings->orderBy('details.created_at', 'desc')->paginate(10);

    return view('sadmin.billing.index', compact('billings', 's', 'customer_email'));
}
/**
 * AJAX endpoint: mark a billing detail as paid.
 *
 * Responds with a JSON status payload and terminates the request via die(),
 * matching the contract the front-end already relies on.
 */
public function set_paymentmark(Request $request) {
    $id = $request->id;
    $detail = Detail::find($id);
    // Guard against an unknown id: Detail::find() returns null when the row
    // does not exist, and dereferencing null was a fatal error before.
    if ($detail === null) {
        $result['status'] = 'error';
        die(json_encode($result));
    }
    $detail->paid_status = 1;
    $detail->save();
    $result['status'] = 'success';
    die(json_encode($result));
}
public function create_invoice($id){
$detail_id = $id;
$detail = Detail::join('contact_lists as c','details.contact_id','c.id')->
select('details.*','c.d_company_name','c.address1','c.city','c.state','c.zipcode')->where('details.id', $detail_id)->get();
$driver = Driver::join('details', 'details.driver_id','=', 'drivers.id')
->join('contact_lists','details.contact_id','contact_lists.id')
->join('users','users.id','drivers.user_id')
->where('details.id',$detail_id)->get();
$driver = $driver[0];
$customer = Customer::find($this->customer_id);
return view('sadmin.billing.invoice_template', compact('detail_id','detail','driver','customer'));
}
public function generate_invoice(Request $request) {
$detail_id = $request['detail_id'];
$activity = $request['activity'];
$amount = $request['sp_rate'];
$charge_array = array();
for($i=0; $i< count($activity); $i++) {
$item['text'] = $activity[$i];
$item['rate'] = $amount[$i];
array_push($charge_array,$item);
}
$invoice_details = $this->arrayToObject($charge_array);
//return response()->json(['add_charge'=>$add_charge[0]->text], $this->successStatus);
$drivers = Driver::join('details', 'details.driver_id','=', 'drivers.id')
->join('contact_lists','details.contact_id','contact_lists.id')
->join('users','users.id','drivers.user_id')
->where('details.id',$detail_id)->get();
// $contact = Detail::where('id', $drivers[0]->contact_id)->get();
$customers = Customer::where('id', $drivers[0]->company_id)->get();
$filename ='Invoice_'. uniqid(). ".pdf" ;
$filepath = public_path('files').'/'.$filename;
// $pdf=PDF::loadView('driver_invoice_pdf',['drivers' => $drivers, 'customers' => $customers, 'invoice_details' => $invoice_details])->setPaper('a4')->save($filepath);
$pdf=PDF::setOptions([
'logOutputFile' => storage_path('logs/log.htm'),
'tempDir' => storage_path('logs/')
])->loadView('driver_invoice_pdf',['drivers' => $drivers, 'customers' => $customers, 'invoice_details' => $invoice_details])->setPaper('a4')->save($filepath);
$filepath_str = asset('/files/'.$filename);
$detail = Detail::findOrFail($detail_id);
$files = $detail->upload;
$names = $detail->filename;
$detail->upload = ($files=="")?$filepath_str:$files.",".$filepath_str ;
$detail->filename = ($names=="")?$filename:$names.",".$filename;
$detail->invoice_created = date("Y-m-d");
$detail->save();
$result['status'] = 'ok';
die(json_encode($result));
}
private function array_to_obj($array, &$obj)
{
foreach ($array as $key => $value)
{
if (is_array($value))
{
$obj->$key = new \stdClass();
$this->array_to_obj($value, $obj->$key);
}
else
{
$obj->$key = $value;
}
}
return $obj;
}
private function arrayToObject($array)
{
$object= new \stdClass();
return $this->array_to_obj($array,$object);
}
public function send_invoice(Request $request){
$detail_id = $request->detail_id;
///////
$from = Auth::user();
$to = $request->to;
$subject = $request->subject;
$content = $request->message;
$attach = json_decode($request->attach);
$message_arr = json_decode($content);
$data = array(
'from' => $from,
'to' => $to,
'subject' => $subject,
'content' => $message_arr,
'attach' => $attach
);
$mail_status = Mail::send('sadmin.invoice.invoice_mail', $data,function($message) use($data){
$message->to($data['to'])->subject($data['subject']);
$message->from($data['from']->email, $data['from']->firstname." " .$data['from']->lastname);
$message->replyTo($data['from']->email, $data['from']->firstname." " .$data['from']->lastname);
foreach($data['attach'] as $filePath){
$message->attach($filePath);
}
});
if(count(Mail::failures()) > 0){
$result['msg'] = 'Failed to send invoice email, please try again.';
$result['status'] = "fail";
}else{
$result['msg'] = 'Sent the invoice email succesfully.';
$result['status'] = "success";
}
die(json_encode($result));
}
/*
public function set_payment(Request $request){
$invoice_id = $request->id;
$invoice = Invoice::find($invoice_id);
if($invoice->send_status == 0){
$result['msg'] = "The invoice is not sent yet.\n Please confirm before.";
$result['status']="fail";
}else{
$invoice->paid_status=1;
if($invoice->save()){
$result['status']="success";
}else{
$result['status']="fail";
$result['msg'] = "Failed the save.";
}
}
die(json_encode($result));
}
//invoice delete
public function destroy($id)
{
$invoice_detail = Invoice_detail::where('inv_id','=',$id)->get();
foreach ($invoice_detail as $recode) {
$recode -> delete();
}
$invoice_special = Invoice_special::where('inv_id',$id)->get();
foreach ($invoice_special as $recode) {
$recode -> delete();
}
$invoice = Invoice::find($id);
$invoice->delete();
// return redirect('admin/invoice');
return back();
}
*/
}
| widedeveloper/laravel-transport | app/Http/Controllers/Sadmin/BillingController.php | PHP | apache-2.0 | 8,073 |
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.hash;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Output stream decorator that hashes data written to the stream.
* Inspired by the Google Guava project.
*/
/**
 * Output stream decorator that hashes data written to the stream.
 * Inspired by the Google Guava project.
 *
 * <p>Each byte is fed to the {@link Hasher} before being forwarded to the
 * wrapped stream; call {@link #hash()} once writing is complete to obtain the
 * digest of everything written so far.
 *
 * <p>Not thread-safe.
 */
public final class HashingOutputStream extends FilterOutputStream {
    private final Hasher hasher;

    /**
     * @param hashFunction the function whose {@link Hasher} accumulates the written bytes
     * @param out the stream the bytes are forwarded to; must not be null
     */
    public HashingOutputStream(HashFunction hashFunction, OutputStream out) {
        super(checkNotNull(out));
        this.hasher = checkNotNull(hashFunction.newHasher());
    }

    @Override
    public void write(int b) throws IOException {
        // Hash first, then forward: on an I/O failure the hasher has still
        // seen the byte, matching the bulk write below.
        hasher.putByte((byte) b);
        out.write(b);
    }

    // Overrides FilterOutputStream's byte-at-a-time default with a single bulk
    // call, keeping the hasher and the delegate in sync.
    @Override
    public void write(byte[] bytes, int off, int len) throws IOException {
        hasher.putBytes(bytes, off, len);
        out.write(bytes, off, len);
    }

    /**
     * Returns the hash of all bytes written so far. The underlying hasher is
     * single-use, so call this at most once, after writing is finished.
     */
    public HashCode hash() {
        return hasher.hash();
    }

    @Override
    public void close() throws IOException {
        // Close the delegate directly (no explicit flush here).
        out.close();
    }
}
| gstevey/gradle | subprojects/base-services/src/main/java/org/gradle/internal/hash/HashingOutputStream.java | Java | apache-2.0 | 1,665 |
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.helios.agent;
import com.spotify.docker.client.ContainerNotFoundException;
import com.spotify.docker.client.DockerClient;
import com.spotify.docker.client.DockerException;
import com.spotify.docker.client.messages.ContainerInfo;
import com.spotify.helios.common.descriptors.Goal;
import com.spotify.helios.common.descriptors.Job;
import com.spotify.helios.servicescommon.DefaultReactor;
import com.spotify.helios.servicescommon.Reactor;
import com.spotify.helios.servicescommon.statistics.MetricsContext;
import com.spotify.helios.servicescommon.statistics.SupervisorMetrics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InterruptedIOException;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static com.spotify.helios.common.descriptors.TaskStatus.State.STOPPED;
import static com.spotify.helios.common.descriptors.TaskStatus.State.STOPPING;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Supervises docker containers for a single job.
*/
/**
 * Supervises docker containers for a single job.
 *
 * <p>Goal changes ({@link #setGoal}) are turned into {@link Command}s and
 * executed asynchronously on a single-threaded {@link Reactor}, so Start/Stop
 * handling never races with itself. Mutable state shared between the caller
 * thread and the reactor thread is kept in {@code volatile} fields.
 */
public class Supervisor {
  /** Callback notified after a start/stop command has been performed. */
  public interface Listener {
    void stateChanged(Supervisor supervisor);
  }
  private static final Logger log = LoggerFactory.getLogger(Supervisor.class);
  // Collaborators, all supplied via the Builder.
  private final DockerClient docker;
  private final Job job;
  private final RestartPolicy restartPolicy;
  private final SupervisorMetrics metrics;
  private final Reactor reactor;
  private final Listener listener;
  private final TaskRunnerFactory runnerFactory;
  private final StatusUpdater statusUpdater;
  private final TaskMonitor monitor;
  private final Sleeper sleeper;
  // Mutable state, shared between caller threads and the reactor thread.
  private volatile Goal goal;
  private volatile String containerId;
  private volatile TaskRunner runner;
  private volatile Command currentCommand;      // most recently requested command
  private volatile Command performedCommand;    // last command the reactor performed
  public Supervisor(final Builder builder) {
    this.job = checkNotNull(builder.job, "job");
    this.docker = checkNotNull(builder.dockerClient, "docker");
    this.restartPolicy = checkNotNull(builder.restartPolicy, "restartPolicy");
    this.metrics = checkNotNull(builder.metrics, "metrics");
    this.listener = checkNotNull(builder.listener, "listener");
    this.currentCommand = new Nop();
    // May be non-null when adopting a container that survived an agent restart.
    this.containerId = builder.existingContainerId;
    this.runnerFactory = checkNotNull(builder.runnerFactory, "runnerFactory");
    this.statusUpdater = checkNotNull(builder.statusUpdater, "statusUpdater");
    this.monitor = checkNotNull(builder.monitor, "monitor");
    // The reactor re-runs Update at least every 30s even without a signal.
    this.reactor = new DefaultReactor("supervisor-" + job.getId(), new Update(),
                                      SECONDS.toMillis(30));
    this.reactor.startAsync();
    statusUpdater.setContainerId(containerId);
    this.sleeper = builder.sleeper;
  }
  /**
   * Sets the desired goal; a change enqueues the matching Start/Stop command
   * on the reactor. Idempotent for repeated identical goals.
   */
  public void setGoal(final Goal goal) {
    if (this.goal == goal) {
      return;
    }
    log.debug("Supervisor {}: setting goal: {}", job.getId(), goal);
    this.goal = goal;
    statusUpdater.setGoal(goal);
    switch (goal) {
      case START:
        currentCommand = new Start();
        reactor.signal();
        metrics.supervisorStarted();
        break;
      case STOP:
      case UNDEPLOY:
        currentCommand = new Stop();
        reactor.signal();
        metrics.supervisorStopped();
        break;
      // NOTE(review): no default branch — a new Goal value would be silently ignored.
    }
  }
  /**
   * Close this supervisor. The actual container is left as-is.
   */
  public void close() {
    reactor.stopAsync();
    if (runner != null) {
      runner.stopAsync();
    }
    metrics.supervisorClosed();
    monitor.close();
  }
  /**
   * Wait for supervisor to stop after closing it.
   */
  public void join() {
    reactor.awaitTerminated();
    if (runner != null) {
      // Stop the runner again in case it was rewritten by the reactor before it terminated.
      runner.stopAsync();
      runner.awaitTerminated();
    }
  }
  /**
   * Check if the current command is start.
   * @return True if current command is start, otherwise false.
   */
  public boolean isStarting() {
    return currentCommand instanceof Start;
  }
  /**
   * Check if the current command is stop.
   * @return True if current command is stop, otherwise false.
   */
  public boolean isStopping() {
    return currentCommand instanceof Stop;
  }
  /**
   * Check whether the last start/stop command is done.
   * @return True if last start/stop command is done, otherwise false.
   */
  public boolean isDone() {
    return currentCommand == performedCommand;
  }
  /**
   * Get the current container id
   * @return The container id.
   */
  public String containerId() {
    return containerId;
  }
  /**
   * Reactor callback: performs the current command, telling it whether it has
   * already been performed ("done"), and fires the listener on transitions.
   */
  private class Update implements Reactor.Callback {
    @Override
    public void run(final boolean timeout) throws InterruptedException {
      // Snapshot the volatile so the command and the done-check agree.
      final Command command = currentCommand;
      final boolean done = performedCommand == command;
      log.debug("Supervisor {}: update: performedCommand={}, command={}, done={}",
                job.getId(), performedCommand, command, done);
      command.perform(done);
      if (!done) {
        performedCommand = command;
        fireStateChanged();
      }
    }
  }
  // Notifies the listener, shielding the reactor from listener exceptions.
  private void fireStateChanged() {
    log.debug("Supervisor {}: state changed", job.getId());
    try {
      listener.stateChanged(this);
    } catch (Exception e) {
      log.error("Listener threw exception", e);
    }
  }
  public static Builder newBuilder() {
    return new Builder();
  }
  /** Builder for {@link Supervisor}; all collaborators except listener/sleeper are required. */
  public static class Builder {
    private Builder() {
    }
    private Job job;
    private String existingContainerId;
    private DockerClient dockerClient;
    private RestartPolicy restartPolicy;
    private SupervisorMetrics metrics;
    private Listener listener = new NopListener();
    private TaskRunnerFactory runnerFactory;
    private StatusUpdater statusUpdater;
    private TaskMonitor monitor;
    private Sleeper sleeper = new ThreadSleeper();
    public Builder setJob(final Job job) {
      this.job = job;
      return this;
    }
    public Builder setExistingContainerId(final String existingContainerId) {
      this.existingContainerId = existingContainerId;
      return this;
    }
    public Builder setRestartPolicy(final RestartPolicy restartPolicy) {
      this.restartPolicy = restartPolicy;
      return this;
    }
    public Builder setDockerClient(final DockerClient dockerClient) {
      this.dockerClient = dockerClient;
      return this;
    }
    public Builder setMetrics(SupervisorMetrics metrics) {
      this.metrics = metrics;
      return this;
    }
    public Builder setListener(final Listener listener) {
      this.listener = listener;
      return this;
    }
    public Builder setRunnerFactory(final TaskRunnerFactory runnerFactory) {
      this.runnerFactory = runnerFactory;
      return this;
    }
    public Builder setStatusUpdater(final StatusUpdater statusUpdater) {
      this.statusUpdater = statusUpdater;
      return this;
    }
    public Builder setMonitor(final TaskMonitor monitor) {
      this.monitor = monitor;
      return this;
    }
    public Builder setSleeper(final Sleeper sleeper) {
      this.sleeper = sleeper;
      return this;
    }
    public Supervisor build() {
      return new Supervisor(this);
    }
    /** Default no-op listener used when none is configured. */
    private class NopListener implements Listener {
      @Override
      public void stateChanged(final Supervisor supervisor) {
      }
    }
  }
  private interface Command {
    /**
     * Perform the command. Although this is declared to throw InterruptedException, this will only
     * happen when the supervisor is being shut down. During normal operations, the operation will
     * be allowed to run until it's done.
     * @param done Flag indicating if operation is done.
     * @throws InterruptedException If thread is interrupted.
     */
    void perform(final boolean done) throws InterruptedException;
  }
  /**
   * Starts a container and attempts to keep it up indefinitely, restarting it when it exits.
   */
  private class Start implements Command {
    @Override
    public void perform(final boolean done) throws InterruptedException {
      if (runner == null) {
        // There's no active runner, start it to bring up the container.
        startAfter(0);
        return;
      }
      if (runner.isRunning()) {
        // There's an active runner, brought up by this or another Start command previously.
        return;
      }
      // Check if the runner exited normally or threw an exception
      final Result<Integer> result = runner.result();
      if (!result.isSuccess()) {
        // Runner threw an exception, inspect it.
        final Throwable t = result.getException();
        if (t instanceof InterruptedException || t instanceof InterruptedIOException) {
          // We're probably shutting down, remove the runner and bail.
          log.debug("task runner interrupted");
          runner = null;
          reactor.signal();
          return;
        } else if (t instanceof DockerException) {
          log.error("docker error", t);
        } else {
          log.error("task runner threw exception", t);
        }
      }
      // Restart the task
      startAfter(restartPolicy.delay(monitor.throttle()));
    }
    // Creates and starts a new runner after the given delay (ms) and arranges
    // for the reactor to be signalled when the runner completes.
    private void startAfter(final long delay) {
      log.debug("starting job (delay={}): {}", delay, job);
      runner = runnerFactory.create(delay, containerId, new TaskListener());
      runner.startAsync();
      runner.resultFuture().addListener(reactor.signalRunnable(), directExecutor());
    }
  }
  /**
   * Stops a container, making sure that the runner spawned by {@link Start} is stopped and the
   * container is not running.
   */
  private class Stop implements Command {
    @Override
    public void perform(final boolean done) throws InterruptedException {
      if (done) {
        return;
      }
      final Integer gracePeriod = job.getGracePeriod();
      if (gracePeriod != null && gracePeriod > 0) {
        log.info("Unregistering from service discovery for {} seconds before stopping",
                 gracePeriod);
        statusUpdater.setState(STOPPING);
        statusUpdater.update();
        // NOTE(review): runner is dereferenced here without a null check,
        // unlike below — confirm a grace period implies an active runner.
        if (runner.unregister()) {
          log.info("Unregistered. Now sleeping for {} seconds.", gracePeriod);
          sleeper.sleep(TimeUnit.MILLISECONDS.convert(gracePeriod, TimeUnit.SECONDS));
        }
      }
      log.info("stopping job: {}", job);
      // Stop the runner
      if (runner != null) {
        runner.stop();
        runner = null;
      }
      // Retry killing with randomized exponential backoff (1s..30s).
      final RetryScheduler retryScheduler = BoundedRandomExponentialBackoff.newBuilder()
          .setMinIntervalMillis(SECONDS.toMillis(1))
          .setMaxIntervalMillis(SECONDS.toMillis(30))
          .build().newScheduler();
      // Kill the container after stopping the runner
      while (!containerNotRunning()) {
        killContainer();
        Thread.sleep(retryScheduler.nextMillis());
      }
      statusUpdater.setState(STOPPED);
      statusUpdater.setContainerError(containerError());
      statusUpdater.update();
    }
    // Best-effort kill: docker errors are logged and the caller's loop retries.
    private void killContainer() throws InterruptedException {
      if (containerId == null) {
        return;
      }
      try {
        docker.killContainer(containerId);
      } catch (DockerException e) {
        log.error("failed to kill container {}", containerId, e);
      }
    }
    // True when there is no container or docker says it is not running; a
    // docker query failure is treated as "still running" so we keep retrying.
    private boolean containerNotRunning()
        throws InterruptedException {
      if (containerId == null) {
        return true;
      }
      final ContainerInfo containerInfo;
      try {
        containerInfo = docker.inspectContainer(containerId);
      } catch (ContainerNotFoundException e) {
        return true;
      } catch (DockerException e) {
        log.error("failed to query container {}", containerId, e);
        return false;
      }
      return !containerInfo.state().running();
    }
    // Fetches docker's error string for the container, or null when unavailable.
    private String containerError() throws InterruptedException {
      if (containerId == null) {
        return null;
      }
      final ContainerInfo containerInfo;
      try {
        containerInfo = docker.inspectContainer(containerId);
      } catch (ContainerNotFoundException e) {
        return null;
      } catch (DockerException e) {
        log.error("failed to query container {}", containerId, e);
        return null;
      }
      return containerInfo.state().error();
    }
  }
  /** Initial command before any goal has been set; performs nothing. */
  private static class Nop implements Command {
    @Override
    public void perform(final boolean done) {
    }
  }
  @Override
  public String toString() {
    return "Supervisor{" +
           "job=" + job +
           ", currentCommand=" + currentCommand +
           ", performedCommand=" + performedCommand +
           '}';
  }
  /** Records metrics for runner events and captures the created container id. */
  private class TaskListener extends TaskRunner.NopListener {
    private MetricsContext pullContext;
    @Override
    public void failed(final Throwable t, final String containerError) {
      metrics.containersThrewException();
    }
    @Override
    public void pulling() {
      pullContext = metrics.containerPull();
    }
    @Override
    public void pullFailed() {
      if (pullContext != null) {
        pullContext.failure();
      }
    }
    @Override
    public void pulled() {
      if (pullContext != null) {
        pullContext.success();
      }
    }
    @Override
    public void created(final String createdContainerId) {
      containerId = createdContainerId;
    }
  }
}
| gtonic/helios | helios-services/src/main/java/com/spotify/helios/agent/Supervisor.java | Java | apache-2.0 | 14,062 |
package de.saxsys.mvvmfx.examples.contacts.model;
/**
 * Immutable value object for a country subdivision (e.g. a federal state or
 * province), identified by its full name, its abbreviation and the
 * {@link Country} it belongs to.
 */
public class Subdivision {
	/** Full name of the subdivision, e.g. "Bavaria". */
	private final String name;
	/** Abbreviation/code of the subdivision, e.g. "BY". */
	private final String abbr;
	/** The country this subdivision belongs to. */
	private final Country country;

	/**
	 * Creates an immutable subdivision.
	 *
	 * @param name full name of the subdivision
	 * @param abbr abbreviation of the subdivision
	 * @param country the country the subdivision belongs to
	 */
	public Subdivision(String name, String abbr, Country country) {
		this.name = name;
		this.abbr = abbr;
		this.country = country;
	}

	public String getName() {
		return name;
	}

	public String getAbbr() {
		return abbr;
	}

	public Country getCountry() {
		return country;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		Subdivision that = (Subdivision) o;
		// Objects.equals is null-safe: instances with equal (possibly null)
		// fields now compare equal instead of throwing a NullPointerException.
		return java.util.Objects.equals(name, that.name)
				&& java.util.Objects.equals(abbr, that.abbr)
				&& java.util.Objects.equals(country, that.country);
	}

	@Override
	public int hashCode() {
		// Null-safe and consistent with equals.
		return java.util.Objects.hash(name, abbr, country);
	}
}
| sialcasa/mvvmFX | examples/contacts-example/src/main/java/de/saxsys/mvvmfx/examples/contacts/model/Subdivision.java | Java | apache-2.0 | 1,010 |
package http
import (
bm "go-common/library/net/http/blademaster"
)
// debugCache handles the cache-debugging HTTP endpoint: it binds the required
// "keys" form parameter and responds with the JSON map produced by
// srv.DebugCache for those keys.
func debugCache(c *bm.Context) {
	opt := new(struct {
		Keys string `form:"keys" validate:"required"`
	})
	// Bind writes the error response itself, so on failure we just return.
	if err := c.Bind(opt); err != nil {
		return
	}
	c.JSONMap(srv.DebugCache(opt.Keys), nil)
}
| LQJJ/demo | 126-go-common-master/app/job/main/aegis/server/http/debug.go | GO | apache-2.0 | 270 |
package fr.jmini.asciidoctorj.testcases;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.asciidoctor.AttributesBuilder;
import org.asciidoctor.OptionsBuilder;
import org.asciidoctor.ast.Block;
import org.asciidoctor.ast.Document;
import org.asciidoctor.ast.Title;
public class ShowTitleTrueTestCase implements AdocTestCase {
    /** AsciiDoc input fixture: a document title followed by one paragraph. */
    public static final String ASCIIDOC = "" +
            "= My page\n" +
            "\n" +
            "Some text\n" +
            "";
    // Returns the AsciiDoc source for this test case.
    @Override
    public String getAdocInput() {
        return ASCIIDOC;
    }
    // Conversion options for this case: the "showtitle" attribute is enabled,
    // so the document title is rendered into the HTML output.
    @Override
    public Map<String, Object> getInputOptions() {
        AttributesBuilder attributesBuilder = AttributesBuilder.attributes()
                .showTitle(true);
        return OptionsBuilder.options()
                .attributes(attributesBuilder)
                .asMap();
    }
    // tag::expected-html[]
    /** Expected HTML: with showtitle=true the title is emitted as an &lt;h1&gt;. */
    public static final String EXPECTED_HTML = "" +
            "<h1>My page</h1>\n" +
            "<div class=\"paragraph\">\n" +
            "<p>Some text</p>\n" +
            "</div>";
    // end::expected-html[]
    // Returns the expected HTML output for this test case.
    @Override
    public String getHtmlOutput() {
        return EXPECTED_HTML;
    }
    // Generated assertion walk over the parsed AST: one document node holding
    // a single paragraph block, plus the structured/plain document title.
    @Override
    // tag::assert-code[]
    public void checkAst(Document astDocument) {
        // Document node: attributes reflect the showtitle option.
        Document document1 = astDocument;
        assertThat(document1.getId()).isNull();
        assertThat(document1.getNodeName()).isEqualTo("document");
        assertThat(document1.getParent()).isNull();
        assertThat(document1.getContext()).isEqualTo("document");
        assertThat(document1.getDocument()).isSameAs(document1);
        assertThat(document1.isInline()).isFalse();
        assertThat(document1.isBlock()).isTrue();
        assertThat(document1.getAttributes()).containsEntry("doctitle", "My page")
                .containsEntry("doctype", "article")
                .containsEntry("example-caption", "Example")
                .containsEntry("figure-caption", "Figure")
                .containsEntry("filetype", "html")
                .containsEntry("notitle", "")
                .containsEntry("prewrap", "")
                .containsEntry("showtitle", true)
                .containsEntry("table-caption", "Table");
        assertThat(document1.getRoles()).isNullOrEmpty();
        assertThat(document1.isReftext()).isFalse();
        assertThat(document1.getReftext()).isNull();
        assertThat(document1.getCaption()).isNull();
        assertThat(document1.getTitle()).isNull();
        assertThat(document1.getStyle()).isNull();
        assertThat(document1.getLevel()).isEqualTo(0);
        assertThat(document1.getContentModel()).isEqualTo("compound");
        assertThat(document1.getSourceLocation()).isNull();
        assertThat(document1.getSubstitutions()).isNullOrEmpty();
        assertThat(document1.getBlocks()).hasSize(1);
        // Single paragraph block holding "Some text".
        Block block1 = (Block) document1.getBlocks()
                .get(0);
        assertThat(block1.getId()).isNull();
        assertThat(block1.getNodeName()).isEqualTo("paragraph");
        assertThat(block1.getParent()).isSameAs(document1);
        assertThat(block1.getContext()).isEqualTo("paragraph");
        assertThat(block1.getDocument()).isSameAs(document1);
        assertThat(block1.isInline()).isFalse();
        assertThat(block1.isBlock()).isTrue();
        assertThat(block1.getAttributes()).isNullOrEmpty();
        assertThat(block1.getRoles()).isNullOrEmpty();
        assertThat(block1.isReftext()).isFalse();
        assertThat(block1.getReftext()).isNull();
        assertThat(block1.getCaption()).isNull();
        assertThat(block1.getTitle()).isNull();
        assertThat(block1.getStyle()).isNull();
        assertThat(block1.getLevel()).isEqualTo(0);
        assertThat(block1.getContentModel()).isEqualTo("simple");
        assertThat(block1.getSourceLocation()).isNull();
        assertThat(block1.getSubstitutions()).containsExactly("specialcharacters", "quotes", "attributes", "replacements", "macros", "post_replacements");
        assertThat(block1.getBlocks()).isNullOrEmpty();
        assertThat(block1.getLines()).containsExactly("Some text");
        assertThat(block1.getSource()).isEqualTo("Some text");
        // Document title, both structured and plain.
        Title title1 = document1.getStructuredDoctitle();
        assertThat(title1.getMain()).isEqualTo("My page");
        assertThat(title1.getSubtitle()).isNull();
        assertThat(title1.getCombined()).isEqualTo("My page");
        assertThat(title1.isSanitized()).isFalse();
        assertThat(document1.getDoctitle()).isEqualTo("My page");
        assertThat(document1.getOptions()).containsEntry("header_footer", false);
    }
    // end::assert-code[]
    // Generated Mockito mock mirroring the AST asserted in checkAst: a
    // document mock with one paragraph block mock and a title mock.
    @Override
    // tag::mock-code[]
    public Document createMock() {
        Document mockDocument1 = mock(Document.class);
        when(mockDocument1.getId()).thenReturn(null);
        when(mockDocument1.getNodeName()).thenReturn("document");
        when(mockDocument1.getParent()).thenReturn(null);
        when(mockDocument1.getContext()).thenReturn("document");
        when(mockDocument1.getDocument()).thenReturn(mockDocument1);
        when(mockDocument1.isInline()).thenReturn(false);
        when(mockDocument1.isBlock()).thenReturn(true);
        Map<String, Object> map1 = new HashMap<>();
        map1.put("doctitle", "My page");
        map1.put("doctype", "article");
        map1.put("example-caption", "Example");
        map1.put("figure-caption", "Figure");
        map1.put("filetype", "html");
        map1.put("notitle", "");
        map1.put("prewrap", "");
        map1.put("showtitle", true);
        map1.put("table-caption", "Table");
        when(mockDocument1.getAttributes()).thenReturn(map1);
        when(mockDocument1.getRoles()).thenReturn(Collections.emptyList());
        when(mockDocument1.isReftext()).thenReturn(false);
        when(mockDocument1.getReftext()).thenReturn(null);
        when(mockDocument1.getCaption()).thenReturn(null);
        when(mockDocument1.getTitle()).thenReturn(null);
        when(mockDocument1.getStyle()).thenReturn(null);
        when(mockDocument1.getLevel()).thenReturn(0);
        when(mockDocument1.getContentModel()).thenReturn("compound");
        when(mockDocument1.getSourceLocation()).thenReturn(null);
        when(mockDocument1.getSubstitutions()).thenReturn(Collections.emptyList());
        // Paragraph block mock.
        Block mockBlock1 = mock(Block.class);
        when(mockBlock1.getId()).thenReturn(null);
        when(mockBlock1.getNodeName()).thenReturn("paragraph");
        when(mockBlock1.getParent()).thenReturn(mockDocument1);
        when(mockBlock1.getContext()).thenReturn("paragraph");
        when(mockBlock1.getDocument()).thenReturn(mockDocument1);
        when(mockBlock1.isInline()).thenReturn(false);
        when(mockBlock1.isBlock()).thenReturn(true);
        when(mockBlock1.getAttributes()).thenReturn(Collections.emptyMap());
        when(mockBlock1.getRoles()).thenReturn(Collections.emptyList());
        when(mockBlock1.isReftext()).thenReturn(false);
        when(mockBlock1.getReftext()).thenReturn(null);
        when(mockBlock1.getCaption()).thenReturn(null);
        when(mockBlock1.getTitle()).thenReturn(null);
        when(mockBlock1.getStyle()).thenReturn(null);
        when(mockBlock1.getLevel()).thenReturn(0);
        when(mockBlock1.getContentModel()).thenReturn("simple");
        when(mockBlock1.getSourceLocation()).thenReturn(null);
        when(mockBlock1.getSubstitutions()).thenReturn(Arrays.asList("specialcharacters", "quotes", "attributes", "replacements", "macros", "post_replacements"));
        when(mockBlock1.getBlocks()).thenReturn(Collections.emptyList());
        when(mockBlock1.getLines()).thenReturn(Collections.singletonList("Some text"));
        when(mockBlock1.getSource()).thenReturn("Some text");
        when(mockDocument1.getBlocks()).thenReturn(Collections.singletonList(mockBlock1));
        // Title mock.
        Title mockTitle1 = mock(Title.class);
        when(mockTitle1.getMain()).thenReturn("My page");
        when(mockTitle1.getSubtitle()).thenReturn(null);
        when(mockTitle1.getCombined()).thenReturn("My page");
        when(mockTitle1.isSanitized()).thenReturn(false);
        when(mockDocument1.getStructuredDoctitle()).thenReturn(mockTitle1);
        when(mockDocument1.getDoctitle()).thenReturn("My page");
        Map<Object, Object> map2 = new HashMap<>();
        map2.put("attributes", "{\"showtitle\"=>true}");
        map2.put("header_footer", false);
        when(mockDocument1.getOptions()).thenReturn(map2);
        return mockDocument1;
    }
    // end::mock-code[]
} | jmini/asciidoctorj-experiments | test-cases/adoc-test-cases/src/main/java/fr/jmini/asciidoctorj/testcases/ShowTitleTrueTestCase.java | Java | apache-2.0 | 8,764 |
package fr.sii.ogham.sms.message;
import fr.sii.ogham.core.util.EqualsBuilder;
import fr.sii.ogham.core.util.HashCodeBuilder;
/**
* Represents a phone number. It wraps a simple string. The aim is to abstracts
* the concept and to be able to provide other fields latter if needed.
*
* @author Aurélien Baudet
*
*/
public class PhoneNumber {
	/**
	 * The phone number as string
	 */
	private String number;
	/**
	 * Initialize the phone number with the provided number.
	 * 
	 * @param number
	 *            the phone number
	 */
	public PhoneNumber(String number) {
		super();
		this.number = number;
	}
	public String getNumber() {
		return number;
	}
	public void setNumber(String number) {
		this.number = number;
	}
	@Override
	public String toString() {
		// A phone number renders as its raw digit string.
		return number;
	}
	@Override
	public int hashCode() {
		// NOTE(review): relies on the project's HashCodeBuilder overriding
		// Object#hashCode to yield the built value (commons-lang style
		// builders expose toHashCode() instead) — confirm against
		// fr.sii.ogham.core.util.HashCodeBuilder.
		return new HashCodeBuilder().append(number).hashCode();
	}
	@Override
	public boolean equals(Object obj) {
		// Field-reflective equality on "number" via the project's EqualsBuilder.
		return new EqualsBuilder(this, obj).appendFields("number").isEqual();
	}
}
| groupe-sii/ogham | ogham-core/src/main/java/fr/sii/ogham/sms/message/PhoneNumber.java | Java | apache-2.0 | 1,017 |
package org.fastnate.generator.converter;
import java.time.Duration;
import org.fastnate.generator.context.GeneratorContext;
import org.fastnate.generator.statements.ColumnExpression;
import org.fastnate.generator.statements.PrimitiveColumnExpression;
/**
* Converts a {@link Duration} to an SQL expression.
*
* @author Tobias Liefke
*/
public class DurationConverter implements ValueConverter<Duration> {
	/**
	 * Converts the duration to its total length in nanoseconds and wraps it as
	 * a primitive column expression for the current SQL dialect.
	 */
	@Override
	public ColumnExpression getExpression(final Duration value, final GeneratorContext context) {
		return PrimitiveColumnExpression.create(value.toNanos(), context.getDialect());
	}
	/**
	 * Parses the given default value as an ISO-8601 duration (as accepted by
	 * {@link Duration#parse}, e.g. "PT15M") and converts it like any other
	 * duration value.
	 */
	@Override
	public ColumnExpression getExpression(final String defaultValue, final GeneratorContext context) {
		return getExpression(Duration.parse(defaultValue), context);
	}
}
| liefke/org.fastnate | fastnate-generator/src/main/java/org/fastnate/generator/converter/DurationConverter.java | Java | apache-2.0 | 786 |
module AssemblyAndServiceOperationsMixin
# Commands used from new dtk client
def check_if_instance_running(node_address, port, path)
endpoint = node_address + ":" + port
response = request_response(path, {}, 'get', endpoint);
response.code == 200
end
def get_node_by_name(service_instance_name, node_name)
nodes_response = send_request("/rest/api/v1/services/#{service_instance_name}/nodes", {}, 'get')
nodes = nodes_response['data']
if nodes.empty?
puts "No nodes found";
return false
end
selected_node_arr = nodes.select { |node| node['display_name'] == node_name } if nodes
if selected_node_arr.empty? || selected_node_arr.length > 1
puts "Expected only one node, but found: #{selected_node_arr}"
return false
end
node = selected_node_arr.first
puts "Found requested node: #{node}"
node
end
def verify_service_instance_nodes_terminated(service_instance_name)
require 'aws-sdk-ec2'
puts "Verify service instance nodes have been terminated", "-----------------------------------------------------"
nodes_terminated = true
ec2 = Aws::EC2::Client.new(region: 'us-east-1')
ec2_instance = ec2.describe_instances(filters:[{ name: 'tag:Name', values: ["*" + service_instance_name + "*"] }])
ec2_instance.reservations.each do |status|
puts "Instance details: #{status}"
if status.instances.first.state.name == "running"
nodes_terminated = false
puts "Service instance: #{service_instance_name} nodes have not been terminated"
end
end
puts ""
puts "Service instance: #{service_instance_name} nodes have been terminated" if nodes_terminated
nodes_terminated
end
def check_if_service_instance_exists(service_instance_name)
puts "Check if service instance exists", "-----------------------------------"
service_instance_exists = false
service_instances_list = send_request("/rest/api/v1/services/list", {}, 'get')
ap service_instances_list
if service_instances_list['status'] == 'ok' && !service_instances_list['data'].empty?
service_instances_list['data'].each do |instance|
if instance['display_name'] == service_instance_name
puts "Service instance: #{service_instance_name} found!"
service_instance_exists = true
end
end
else
puts "Service instance #{service_instance_name} is not found!"
end
puts "Service instance #{service_instance_name} is not found!" unless service_instance_exists
puts ""
service_instance_exists
end
# Returns true when a node with the given display name exists on the
# service instance, false otherwise.
def check_if_node_exists_in_service_instance(service_instance_name, node_name)
  puts "Check if node exists in service instance", "---------------------------------------"
  found = false
  response = send_request("/rest/api/v1/services/#{service_instance_name}/nodes", {}, 'get')
  ap response
  if response['status'] == 'ok' && !response['data'].empty?
    response['data'].each do |node|
      next unless node['display_name'] == node_name
      puts "Node: #{node_name} found!"
      found = true
    end
  else
    puts "Node #{node_name} is not found in #{service_instance_name}"
  end
  puts "Node #{node_name} is not found in #{service_instance_name}" unless found
  puts ""
  found
end
# Returns true when the service instance contains exactly +cardinality+ node
# group members for +node_group_name+ (members are nodes whose display name
# contains "group_name:").
def check_if_node_group_exists_in_service_instance(service_instance_name, node_group_name, cardinality)
  puts "Check if node group exists in service instance", "-------------------------------------------"
  group_found = false
  response = send_request("/rest/api/v1/services/#{service_instance_name}/nodes", {}, 'get')
  ap response
  if response['status'] == 'ok' && !response['data'].empty?
    member_prefix = node_group_name + ":" # indicator it is node group member
    members = response['data'].map { |n| n['display_name'] }.select { |dn| dn.include? member_prefix }
    if members.size == cardinality
      puts "Node group #{node_group_name} is found in #{service_instance_name}"
      group_found = true
    end
  else
    puts "Node group #{node_group_name} is not found in #{service_instance_name}"
  end
  puts "Node group #{node_group_name} is not found in #{service_instance_name}" unless group_found
  puts ""
  group_found
end
# Returns true when a component with the given display name exists on the
# service instance, false otherwise.
def check_if_component_exists_in_service_instance(service_instance_name, component_name)
  puts "Check if component exists in service instance", "-----------------------------------------"
  found = false
  response = send_request("/rest/api/v1/services/#{service_instance_name}/components", {}, 'get')
  ap response
  if response['status'] == 'ok' && !response['data'].empty?
    response['data'].each do |cmp|
      next unless cmp['display_name'] == component_name
      puts "Component: #{component_name} found!"
      found = true
    end
  else
    puts "Component #{component_name} is not found in #{service_instance_name}"
  end
  puts "Component #{component_name} is not found in #{service_instance_name}" unless found
  puts ""
  found
end
# Returns true when an action with the given display name is available on the
# service instance, false otherwise.
def check_if_action_exists_in_service_instance(service_instance_name, action_to_check)
  puts "Check if action exists in service instance", "------------------------------------------"
  found = false
  response = send_request("/rest/api/v1/services/#{service_instance_name}/actions", {}, 'get')
  ap response
  if response['status'] == 'ok' && !response['data'].empty?
    response['data'].each do |action|
      next unless action['display_name'] == action_to_check
      puts "Action: #{action_to_check} found!"
      found = true
    end
  else
    puts "Action #{action_to_check} is not found in #{service_instance_name}"
  end
  puts "Action #{action_to_check} is not found in #{service_instance_name}" unless found
  puts ""
  found
end
# Verifies that every expected attribute name/value pair in
# +attributes_to_check+ (a Hash of name => value) is present with the matching
# value on the service instance. Returns true only when all pairs are found.
#
# Bug fix: the original matched names and values independently
# (keys.include?(name) && values.include?(value)), so a value belonging to one
# attribute could satisfy the check for a different attribute; the value is
# now required to match under its own attribute name.
def check_if_attributes_exists_in_service_instance(service_instance_name, attributes_to_check)
  puts "Check if attributes exist and are correct in service instance", "---------------------------------------------------"
  attributes_exist = false
  attributes_list = send_request("/rest/api/v1/services/#{service_instance_name}/attributes?all&format=yaml", {}, 'get')
  puts "Attributes to check:"
  ap attributes_to_check
  puts ""
  puts "Attributes on service instance:"
  ap attributes_list
  puts ""
  if attributes_list['status'] == 'ok' && !attributes_list['data'].empty?
    # Collect the names whose value matches the expectation for that same name.
    matched_names = attributes_list['data'].select do |attr|
      (attributes_to_check.key? attr['name']) && (attributes_to_check[attr['name']] == attr['value'])
    end.map { |attr| attr['name'] }.uniq
    if matched_names.count == attributes_to_check.count
      puts "All attributes: #{attributes_to_check} are verified and exist on service instance"
      attributes_exist = true
    else
      puts "Some attributes are missing or they don't have expected values on service instance"
    end
  else
    puts "Attributes #{attributes_to_check} are not found in #{service_instance_name}"
  end
  puts ""
  attributes_exist
end
# Polls the service instance task status every 10s (up to 80 attempts) until
# the top-level task reports 'succeeded' or 'failed', or the request errors.
# Returns { pass: Boolean, error: response data or nil }.
# (Removed the unused `end_loop` flag and manual counter from the original.)
def check_task_status(service_instance_name)
  puts "Check task status", "----------------"
  service_converged = { pass: false, error: nil }
  max_num_of_retries = 80
  max_num_of_retries.times do
    sleep 10
    task_status_response = send_request("/rest/api/v1/services/#{service_instance_name}/task_status", {}, 'get')
    if task_status_response['status'] == 'ok'
      case task_status_response['data'].first['status']
      when 'succeeded'
        puts "Service was converged successfully!"
        service_converged[:pass] = true
        break
      when 'failed'
        puts 'Service was not converged successfully!'
        ap task_status_response['data']
        service_converged[:error] = task_status_response['data']
        break
      end
      # Any other status (e.g. executing): keep polling.
    else
      ap task_status_response['data']
      service_converged[:error] = task_status_response['data']
      puts "Service was not converged successfully!"
      break
    end
  end
  puts ''
  service_converged
end
# Polls task status every 10s (up to 30 attempts) until the task enters
# 'debugging', then attaches to the byebug remote session advertised in the
# breakpointed subtask's info message and sends 'c' (continue).
# Returns true when the debugger output reports "Connected".
# (Removed the unused `end_loop` flag and collapsed the redundant boolean
# if/else from the original.)
def check_task_status_with_breakpoint(service_instance_name, subtask_name_with_breakpoint)
  puts "Check task status with breakpoint", "---------------------------------"
  debug_passed = false
  max_num_of_retries = 30
  max_num_of_retries.times do
    sleep 10
    task_status_response = send_request("/rest/api/v1/services/#{service_instance_name}/task_status", {}, 'get')
    ap task_status_response
    if task_status_response['status'] == 'ok'
      next unless task_status_response['data'].first['status'] == 'debugging'
      subtask = task_status_response['data'].find { |st| st['type'].include? subtask_name_with_breakpoint }
      # The subtask message embeds the remote-debug command, e.g. "byebug -R host:port".
      debug_command = subtask['info']['message'].match(/(byebug -R.+)'/)[1]
      debug_execution = `echo c | #{debug_command}`
      puts debug_execution
      debug_passed = debug_execution.include? "Connected"
      break
    else
      ap task_status_response['data']
      puts "Service was not converged successfully! Debug cannot proceed"
      break
    end
  end
  puts ''
  debug_passed
end
# Polls the delete-task status every 10s (up to 50 attempts). A "No context
# with the name ... exists" error also counts as a successful delete, since
# the instance is gone. Returns { pass: Boolean, error: response data or nil }.
# (Removed the unused `end_loop` flag and manual counter from the original.)
def check_delete_task_status(service_instance_name)
  puts "Check delete task status", "------------------------"
  service_deleted = { pass: false, error: nil }
  max_num_of_retries = 50
  max_num_of_retries.times do
    sleep 10
    task_status_response = send_request("/rest/api/v1/services/#{service_instance_name}/task_status", {}, 'get')
    if task_status_response['status'] == 'ok'
      case task_status_response['data'].first['status']
      when 'succeeded'
        puts "Service was deleted successfully!"
        service_deleted[:pass] = true
        break
      when 'failed'
        puts 'Service was not deleted successfully!'
        ap task_status_response['data']
        service_deleted[:error] = task_status_response['data']
        break
      end
      # Any other status: keep polling.
    else
      ap task_status_response
      if task_status_response['errors'].first['message'] == "No context with the name '#{service_instance_name}' exists"
        puts "Service was deleted successfully!"
        service_deleted[:pass] = true
      else
        puts "Service was not deleted successfully!"
        service_deleted[:error] = task_status_response['data']
      end
      break
    end
  end
  puts ''
  service_deleted
end
# Stages the @assembly template as a new service named @service_name, using
# +service_instance_name+ as the service module name. +context+, when given,
# is passed through as context_id; @is_context toggles staging as a context.
# On success stores the new service id in self.service_id.
def stage_service_instance(service_instance_name, context = nil)
  # Get list of assemblies, extract selected assembly, stage service and record its id
  puts "Stage service:", "--------------"
  extract_id_regex = /id: (\d+)/
  assembly_list = send_request('/rest/assembly/list', {:subtype=>'template'})
  puts "List of avaliable assemblies: "
  pretty_print_JSON(assembly_list)
  test_template = assembly_list['data'].select { |x| x['display_name'] == @assembly }.first
  if test_template.nil?
    # Bug fix: the original interpolated @service_name here although it is the
    # assembly lookup that failed.
    puts "Assembly #{@assembly} not found!"
  else
    puts "Assembly #{@assembly} found!"
    assembly_id = test_template['id']
    puts "Assembly id: #{assembly_id}"
    # Build one params hash instead of the original unless/else branching.
    stage_params = {:assembly_id=>assembly_id, :name=>@service_name, :service_module_name => service_instance_name}
    if @is_context
      stage_params[:is_context] = @is_context
    elsif context
      stage_params[:context_id] = context
    end
    stage_service_response = send_request('/rest/assembly/stage', stage_params)
    pretty_print_JSON(stage_service_response)
    if (stage_service_response['data'].include? "name: #{@service_name}")
      puts "Stage of #{@service_name} assembly completed successfully!"
      service_id_match = stage_service_response['data'].match(extract_id_regex)
      self.service_id = service_id_match[1].to_i
      puts "Service id for a staged service: #{self.service_id}"
    else
      puts "Stage service didnt pass!"
    end
  end
  puts ""
end
# Commands used from old dtk client
# Stages the @assembly template as a new service named @service_name.
# +context+, when given, is passed through as context_id; @is_context toggles
# staging as a context. On success stores the new id in self.service_id.
def stage_service(context = nil)
  # Get list of assemblies, extract selected assembly, stage service and record its id
  puts "Stage service:", "--------------"
  extract_id_regex = /id: (\d+)/
  assembly_list = send_request('/rest/assembly/list', {:subtype=>'template'})
  puts "List of avaliable assemblies: "
  pretty_print_JSON(assembly_list)
  test_template = assembly_list['data'].select { |x| x['display_name'] == @assembly }.first
  if test_template.nil?
    # Bug fix: the original interpolated @service_name here although it is the
    # assembly lookup that failed.
    puts "Assembly #{@assembly} not found!"
  else
    puts "Assembly #{@assembly} found!"
    assembly_id = test_template['id']
    puts "Assembly id: #{assembly_id}"
    # Build one params hash instead of the original unless/else branching.
    stage_params = {:assembly_id=>assembly_id, :name=>@service_name}
    if @is_context
      stage_params[:is_context] = @is_context
    elsif context
      stage_params[:context_id] = context
    end
    stage_service_response = send_request('/rest/assembly/stage', stage_params)
    pretty_print_JSON(stage_service_response)
    if (stage_service_response['data'].include? "name: #{@service_name}")
      puts "Stage of #{@service_name} assembly completed successfully!"
      service_id_match = stage_service_response['data'].match(extract_id_regex)
      self.service_id = service_id_match[1].to_i
      puts "Service id for a staged service: #{self.service_id}"
    else
      puts "Stage service didnt pass!"
    end
  end
  puts ""
end
# Returns the list of component versions installed on the given service.
def get_components_versions(service_id)
  puts "Get all component versions from service:", "-----------------------------"
  response = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :node_id => nil, :component_id => nil, :subtype=>'instance', :about=>'components'})
  versions = response['data'].map { |component| component['version'] }
  puts ""
  versions
end
# Returns the id of the default context service instance, or nil when the
# request does not report status 'ok'.
def get_default_context_service
  puts "Get default context service instance id:", "---------------------------------------"
  service_id = nil
  response = send_request('/rest/assembly/get_default_context', {})
  if response['status'] == 'ok'
    puts "Default context service instance succesfully found."
    service_id = response['data']['id']
  else
    puts "Default context service was not succesfully found."
  end
  puts ''
  service_id
end
# Stages the @assembly template from the given namespace as @service_name.
# On success stores the new service id in self.service_id.
def stage_service_with_namespace(namespace)
  puts "Stage service:", "--------------"
  extract_id_regex = /id: (\d+)/
  assembly_list = send_request('/rest/assembly/list', {:subtype=>'template'})
  puts "List of avaliable assemblies: "
  pretty_print_JSON(assembly_list)
  template = assembly_list['data'].find { |a| a['display_name'] == @assembly && a['namespace'] == namespace }
  if template
    puts "Assembly #{@assembly} from namespace #{namespace} found!"
    assembly_id = template['id']
    puts "Assembly id: #{assembly_id}"
    stage_response = send_request('/rest/assembly/stage', {:assembly_id=>assembly_id, :name=>@service_name})
    pretty_print_JSON(stage_response)
    if stage_response['data'].include? "name: #{@service_name}"
      puts "Stage of #{@service_name} assembly completed successfully!"
      self.service_id = stage_response['data'].match(extract_id_regex)[1].to_i
      puts "Service id for a staged service: #{self.service_id}"
    else
      puts "Stage service didnt pass!"
    end
  else
    puts "Assembly #{@service_name} not found!"
  end
  puts ""
end
# Returns true when the service's info output contains +info_to_check+.
def check_service_info(service_id, info_to_check)
  puts "Show service info:", "------------------"
  response = send_request('/rest/assembly/info', {:assembly_id=>service_id, :subtype=>:instance})
  pretty_print_JSON(response)
  info_exist = response['data'].include? info_to_check
  if info_exist
    puts "#{info_to_check} exists in info output!"
  else
    puts "#{info_to_check} does not exist in info output!"
  end
  puts ""
  return info_exist
end
# Renames the service with +service_id+ to +new_service_name+.
# Returns true on success.
def rename_service(service_id, new_service_name)
  puts "Rename service:", "---------------"
  service_renamed = false
  service_list = send_request('/rest/assembly/list', {:detail_level=>'nodes', :subtype=>'instance'})
  service = service_list['data'].find { |x| x['id'] == service_id }
  if service
    # Bug fix: the original passed the raw Array returned by select as the
    # assembly name (and printed it); use the display name string instead.
    service_name = service['display_name']
    puts "Old service name is: #{service_name}. Proceed with renaming it to #{new_service_name}..."
    rename_status = send_request('/rest/assembly/rename', {:assembly_id=>service_id, :assembly_name=>service_name, :new_assembly_name=>new_service_name})
    if rename_status['status'] == 'ok'
      puts "Service #{service_name} renamed to #{new_service_name} successfully!"
      service_renamed = true
    else
      puts "Service #{service_name} was not renamed to #{new_service_name} successfully!"
    end
  else
    puts "Service with id #{service_id} does not exist!"
  end
  puts ""
  return service_renamed
end
# Creates an ad-hoc attribute on the service and verifies it appears in the
# service attribute list. Returns true when the attribute is found afterwards.
def create_attribute(service_id, attribute_name)
  #Create attribute
  puts "Create attribute:", "-----------------"
  attributes_created = false
  send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :create=>true, :pattern=>attribute_name})
  puts "List of service attributes:"
  service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id})
  pretty_print_JSON(service_attributes)
  # Bug fix: guard against the attribute being absent — the original crashed
  # with NoMethodError on nil when no attribute display name matched.
  created = service_attributes['data'].find { |x| x['display_name'].include? attribute_name }
  if created && created['display_name'] == attribute_name
    puts "Creating #{attribute_name} attribute completed successfully!"
    attributes_created = true
  end
  puts ""
  return attributes_created
end
# Returns true when an attribute named exactly +attribute_name+ exists on the
# service. Bug fix: the original called .first['display_name'] on a possibly
# empty selection, crashing with NoMethodError and making the "does not
# exist" branch unreachable; a nil-safe lookup restores it.
def check_if_attribute_exists(service_id, attribute_name)
  puts "Check if attribute exists:", "--------------------------"
  attribute_exists = false
  puts "List of service attributes:"
  service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id})
  pretty_print_JSON(service_attributes)
  match = service_attributes['data'].find { |x| x['display_name'].include? attribute_name }
  if match && match['display_name'] == attribute_name
    puts "#{attribute_name} attribute exists!"
    attribute_exists = true
  else
    puts "#{attribute_name} attribute does not exist!"
  end
  puts ""
  return attribute_exists
end
# Creates an ad-hoc attribute link from +source_attribute+ to
# +context_attribute+ on the service. Returns true on success.
def link_attributes(service_id, source_attribute, context_attribute)
  puts "Link attributes:", "----------------"
  response = send_request('/rest/assembly/add_ad_hoc_attribute_links', {:assembly_id=>service_id, :context_attribute_term=>context_attribute, :source_attribute_term=>"$#{source_attribute}"})
  pretty_print_JSON(response)
  attributes_linked = (response['status'] == 'ok')
  if attributes_linked
    puts "Link between #{source_attribute} attribute and #{context_attribute} attribute is established!"
  else
    puts "Link between #{source_attribute} attribute and #{context_attribute} attribute is not established!"
  end
  puts ""
  return attributes_linked
end
# Looks up a service instance by display name in the assembly instance list.
# NOTE(review): despite the name, this returns the full service-instance hash
# (filtered_services[0]) when exactly one match exists, and nil (the value of
# puts) in the zero/multiple-match branches — it never returns a bare id.
# Confirm callers extract the id themselves before changing this.
def get_service_id_by_name(service_name)
  puts "Get service instance id by its name", "-----------------------------------"
  service_list = send_request('/rest/assembly/list', {:detail_level=>'nodes', :subtype=>'instance'})
  puts "List of all services and its content:"
  service_instance = nil
  # Collect every instance whose display name matches exactly.
  filtered_services = service_list['data'].select { |x| x['display_name'] == service_name }
  if filtered_services.length == 1
    puts "Service instance with name #{service_name} exists: "
    pretty_print_JSON(filtered_services)
    service_instance = filtered_services[0]
  elsif filtered_services.length.zero?
    puts "Service instance with name #{service_name} does not exist."
  else
    puts "Multiple service instances with name #{service_name} exist."
  end
end
# Returns true when a service with +service_id+ exists in the instance list
# and its execution status is 'staged'.
def check_if_service_exists(service_id)
  #Get list of existing services and check if staged service exists
  puts "Check if service exists:", "------------------------"
  service_exists = false
  service_list = send_request('/rest/assembly/list', {:detail_level=>'nodes', :subtype=>'instance'})
  puts "List of all services and its content:"
  pretty_print_JSON(service_list)
  test_service = service_list['data'].select { |x| x['id'] == service_id }
  puts "Service with id #{service_id}: "
  pretty_print_JSON(test_service)
  if test_service.any?
    first_match = test_service.first
    if first_match['id'] == service_id && first_match['execution_status'] == 'staged'
      puts "Service with id #{service_id} exists!"
      service_exists = true
    end
  else
    puts "Service with id #{service_id} does not exist!"
  end
  puts ""
  return service_exists
end
# Returns all service instances whose display name equals +service_name+ and
# whose execution status is 'succeeded'.
def list_specific_success_service(service_name)
  puts "List success services:", "------------------------"
  service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'})
  matches = service_list['data'].select do |svc|
    svc['display_name'] == service_name && svc['execution_status'] == 'succeeded'
  end
  pretty_print_JSON(matches)
  matches
end
# Returns all service instances whose display name contains +service_name+
# and whose execution status is 'succeeded'.
def list_matched_success_service(service_name)
  puts "List success services:", "------------------------"
  service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'})
  matches = service_list['data'].select do |svc|
    (svc['display_name'].include? service_name) && (svc['execution_status'] == 'succeeded')
  end
  pretty_print_JSON(matches)
  matches
end
# Returns all service instances whose display name equals +service_name+ and
# whose execution status is 'failed'.
def list_specific_failed_service(service_name)
  puts "List failed services:", "-------------------------"
  service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'})
  matches = service_list['data'].select do |svc|
    svc['display_name'] == service_name && svc['execution_status'] == 'failed'
  end
  pretty_print_JSON(matches)
  matches
end
# Returns all service instances whose display name contains +service_name+
# and whose execution status is 'failed'.
def list_matched_failed_service(service_name)
  puts "List failed services:", "-------------------------"
  service_list = send_request('/rest/assembly/list', {:subtype=>'instance', :detail_level => 'nodes'})
  matches = service_list['data'].select do |svc|
    (svc['display_name'].include? service_name) && (svc['execution_status'] == 'failed')
  end
  pretty_print_JSON(matches)
  matches
end
# Polls /rest/assembly/list and /rest/assembly/info every 5s until the service
# with +service_id+ reports op_status equal to +status_to_check+, the service
# drops out of the list, or 50 retries are exhausted.
# Returns true when the expected op status was observed.
def check_service_status(service_id, status_to_check)
  #Get list of services and check if service exists and its status
  puts "Check service status:", "---------------------"
  service_exists = false
  end_loop = false
  count = 0
  max_num_of_retries = 50
  while (end_loop == false)
    sleep 5
    count += 1
    service_list = send_request('/rest/assembly/list', {:subtype=>'instance'})
    service = service_list['data'].select { |x| x['id'] == service_id }.first
    if (!service.nil?)
      # Fetch full service info to read the current op_status.
      test_service = send_request('/rest/assembly/info', {:assembly_id=>service_id,:subtype=>:instance})
      op_status = test_service['data']['op_status']
      extract_service_id = service['id']
      if ((extract_service_id == service_id) && (op_status == status_to_check))
        puts "Service with id #{extract_service_id} has current op status: #{status_to_check}"
        service_exists = true
        end_loop = true
      else
        puts "Service with id #{extract_service_id} still does not have current op status: #{status_to_check}"
      end
    else
      # Service disappeared from the list: stop polling immediately.
      puts "Service with id #{service_id} not found in list"
      end_loop = true
    end
    if (count > max_num_of_retries)
      puts "Max number of retries reached..."
      end_loop = true
    end
  end
  puts ""
  return service_exists
end
# Sets +attribute_value+ for +attribute_name+ on the service. If no attribute
# matches the name yet, setting by the name pattern creates it; otherwise the
# attribute is set by id and the new value is verified by re-reading the
# attribute list. Returns true when the set (and verification) succeeded.
def set_attribute(service_id, attribute_name, attribute_value)
  #Set attribute on particular service
  puts "Set attribute:", "--------------"
  is_attributes_set = false
  service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id})
  matching_attributes = service_attributes['data'].select { |x| x['display_name'].include? attribute_name }
  if matching_attributes.empty?
    # Attribute not present yet: setting by name pattern creates it.
    set_attribute_value_response = send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :value=>attribute_value, :pattern=>attribute_name})
    if set_attribute_value_response['status'] == 'ok'
      puts "Setting of attribute #{attribute_name} completed successfully!"
      is_attributes_set = true
    end
  else
    send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :value=>attribute_value, :pattern=>matching_attributes.first['id']})
    # Verify by re-reading. Bug fix: the original called .first['value'] on a
    # possibly empty selection (NoMethodError on nil) and then compared the
    # result against nil, which was always true when it did not crash.
    service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id})
    updated = service_attributes['data'].find { |x| x['value'] == attribute_value }
    unless updated.nil?
      puts "Setting of attribute #{attribute_name} completed successfully!"
      is_attributes_set = true
    end
  end
  puts ""
  return is_attributes_set
end
# Sets +attribute_value+ for a service-level component attribute matching
# +attribute_name+ and verifies the value was applied by re-reading the
# attribute list. Returns true on verified success.
def set_attribute_on_service_level_component(service_id, attribute_name, attribute_value)
  #Set attribute on particular service
  puts "Set attribute:", "--------------"
  is_attributes_set = false
  #Get attribute id for which value will be set
  service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id})
  # Bug fix: guard against a missing attribute (the original crashed with
  # NoMethodError on nil) and drop the confusing chained double assignment.
  attribute = service_attributes['data'].find { |x| x['display_name'].include? attribute_name }
  if attribute.nil?
    puts "Attribute #{attribute_name} not found on service!"
  else
    #Set attribute value for given attribute id
    send_request('/rest/assembly/set_attributes', {:assembly_id=>service_id, :value=>attribute_value, :pattern=>attribute['id']})
    # Re-read and verify the value was applied.
    service_attributes = send_request('/rest/assembly/info_about', {:about=>'attributes', :filter=>nil, :subtype=>'instance', :assembly_id=>service_id})
    updated = service_attributes['data'].find { |x| x['display_name'].include? attribute_name }
    if updated && updated['value'] == attribute_value
      puts "Setting of attribute #{attribute_name} completed successfully!"
      is_attributes_set = true
    end
  end
  puts ""
  return is_attributes_set
end
# Returns the value of the attribute "node/component/attribute" from the
# service attribute list, or nil when no such attribute exists.
# (The original ran the same select twice; a single find is used instead.)
def get_attribute_value(service_id, node_name, component_name, attribute_name)
  puts "Get attribute value by name:", "----------------------------"
  puts "List of service attributes:"
  service_attributes = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :filter=>nil, :about=>'attributes', :subtype=>'instance'})
  pretty_print_JSON(service_attributes)
  attribute_value = nil
  full_name = "#{node_name}/#{component_name}/#{attribute_name}"
  attribute = service_attributes['data'].find { |x| x['display_name'] == full_name }
  if attribute
    attribute_value = attribute['value']
    puts "Attribute value is: #{attribute_value}"
  else
    puts "Some of the input parameters is incorrect or missing. Node name: #{node_name}, Component name: #{component_name}, Attribute name: #{attribute_name}"
  end
  puts ""
  return attribute_value
end
# new client
# Checks that +source_component+ has a dependency link to
# +dependency_component+ of the given +type+. Returns true when found.
def check_component_depedency(service_instance_name, source_component, dependency_component, type)
  puts "Check component dependency:", "---------------------------"
  dependency_found = false
  puts "List service components with dependencies:"
  components_list = send_request("/rest/api/v1/services/#{service_instance_name}/component_links", {}, 'get')
  component = components_list['data'].select { |x| x['base_component'] == source_component}
  # Bug fix: Array#select never returns nil, so the original `!component.nil?`
  # was always true and the "does not exist" branch was unreachable; test for
  # emptiness instead.
  if !component.empty?
    puts "Component #{source_component} exists. Check its dependencies..."
    component.each do |deps|
      if (deps['dependent_component'] == dependency_component) && (deps['type'] == type)
        dependency_found = true
        puts "Component #{source_component} has expected dependency component #{dependency_component} with type #{type}"
      else
        puts "Component #{source_component} does not have expected dependency component #{dependency_component} with type #{type}"
      end
    end
  else
    puts "Component #{source_component} does not exist and therefore it does not have any dependencies"
  end
  puts ""
  return dependency_found
end
# Creates and executes a converge task for the service, then polls task status
# every 20s until it succeeds, fails, or +max_num_of_retries+ polls elapse.
# Returns true when the converge task succeeded.
# NOTE(review): @error_message is presumably populated by send_request on
# failure — confirm; an empty string is treated as "task created OK".
def converge_service(service_id, max_num_of_retries=15)
  puts "Converge service:", "-----------------"
  service_converged = false
  puts "Converge process for service with id #{service_id} started!"
  # Violations are fetched before task creation (the result is not inspected here).
  find_violations = send_request('/rest/assembly/find_violations', {'assembly_id' => service_id})
  create_task_response = send_request('/rest/assembly/create_task', {'assembly_id' => service_id})
  if (@error_message == "")
    task_id = create_task_response['data']['task_id']
    puts "Task id: #{task_id}"
    task_execute_response = send_request('/rest/task/execute', {'task_id' => task_id})
    end_loop = false
    count = 0
    task_status = 'executing'
    while ((task_status.include? 'executing') && (end_loop == false))
      sleep 20
      count += 1
      response_task_status = send_request('/rest/assembly/task_status', {'assembly_id'=> service_id})
      status = response_task_status['data'].first['status']
      unless status.nil?
        if (status.include? 'succeeded')
          service_converged = true
          puts "Task execution status: #{status}"
          puts "Converge process finished successfully!"
          end_loop = true
        elsif (status.include? 'failed')
          # Dump error details for every failed subtask.
          puts "Error details on subtasks:"
          ap response_task_status['data']
          response_task_status['data'].each do |error_message|
            unless error_message['errors'].nil?
              puts error_message['errors']['message']
              puts error_message['errors']['type']
            end
          end
          puts "Task execution status: #{status}"
          puts "Converge process was not finished successfully! Some tasks failed!"
          end_loop = true
        end
        puts "Task execution status: #{status}"
      end
      if (count > max_num_of_retries)
        puts "Max number of retries reached..."
        puts "Converge process was not finished successfully!"
        end_loop = true
      end
    end
  else
    puts "Service was not converged successfully!"
  end
  puts ""
  return service_converged
end
# Requests the server to stop a running service. Returns true on 'ok' status.
def stop_running_service(service_id)
  puts "Stop running service:", "---------------------"
  response = send_request('/rest/assembly/stop', {:assembly_id => service_id})
  service_stopped = (response['status'] == "ok")
  if service_stopped
    puts "Service stopped successfully!"
  else
    puts "Service was not stopped successfully!"
  end
  puts ""
  return service_stopped
end
# Promotes a service instance to an assembly template inside the given service
# module (optionally under a namespace). Returns true on success.
def create_assembly_from_service(service_id, service_module_name, assembly_name, namespace=nil)
  puts "Create assembly from service:", "-----------------------------"
  response = send_request('/rest/assembly/promote_to_template', {:service_module_name=>service_module_name, :mode=>:create, :assembly_id=>service_id, :assembly_template_name=>assembly_name, :namespace=>namespace})
  assembly_created = (response['status'] == 'ok')
  if assembly_created
    puts "Assembly #{assembly_name} created in service module #{service_module_name}"
  else
    puts "Assembly #{assembly_name} was not created in service module #{service_module_name}"
  end
  puts ""
  return assembly_created
end
# Initiates a netstats collection on the service's nodes and polls the action
# results, checking whether +port+ appears among the reported ports.
# The outer loop re-initiates collection every 10s (up to 15 attempts); for
# each attempt the inner loop polls results up to 5 times, 1s apart.
# Returns true when the port was reported.
def netstats_check(service_id, port)
  puts "Netstats check:", "---------------"
  netstats_check = false
  end_loop = false
  count = 0
  max_num_of_retries = 15
  while (end_loop == false)
    sleep 10
    count += 1
    if (count > max_num_of_retries)
      puts "Max number of retries for getting netstats reached..."
      end_loop = true
    end
    response = send_request('/rest/assembly/initiate_get_netstats', {:node_id=>nil, :assembly_id=>service_id})
    pretty_print_JSON(response)
    action_results_id = response['data']['action_results_id']
    5.downto(1) do |i|
      sleep 1
      response = send_request('/rest/assembly/get_action_results', {:disable_post_processing=>false, :return_only_if_complete=>true, :action_results_id=>action_results_id, :sort_key=>"port"})
      puts "Netstats check:"
      pretty_print_JSON(response)
      if response['data']['is_complete']
        port_to_check = response['data']['results'].select { |x| x['port'] == port}.first
        if (!port_to_check.nil?)
          # Port found: stop both loops.
          puts "Netstats check completed! Port #{port} available!"
          netstats_check = true
          end_loop = true
          break
        else
          # Complete but port missing: break inner loop; outer loop retries
          # until max retries are reached.
          puts "Netstats check completed! Port #{port} is not available!"
          netstats_check = false
          break
        end
      end
    end
  end
  puts ""
  return netstats_check
end
# Starts the service's nodes (node_pattern nil means all nodes) and executes
# the returned task, then polls task info every 10s (up to 30 polls) until no
# task is in 'executing' state. Returns true when the start completed.
def start_running_service(service_id)
  puts "Start service:", "--------------"
  service_started = false
  response = send_request('/rest/assembly/start', {:assembly_id => service_id, :node_pattern=>nil})
  pretty_print_JSON(response)
  task_id = response['data']['task_id']
  response = send_request('/rest/task/execute', {:task_id=>task_id})
  if (response['status'] == 'ok')
    end_loop = false
    count = 0
    max_num_of_retries = 30
    while (end_loop == false)
      sleep 10
      count += 1
      response = send_request('/rest/assembly/info_about', {:assembly_id => service_id, :subtype => 'instance', :about => 'tasks'})
      puts "Start instance check:"
      # Any task still 'executing' means the start has not finished yet.
      status = response['data'].select { |x| x['status'] == 'executing'}.first
      pretty_print_JSON(status)
      if (count > max_num_of_retries)
        puts "Max number of retries for starting instance reached..."
        end_loop = true
      elsif (status.nil?)
        puts "Instance started!"
        service_started = true
        end_loop = true
      end
    end
  else
    puts "Start instance is not completed successfully!"
  end
  puts ""
  return service_started
end
# Adds component +component_name+ ("namespace:component") to the named node of
# the service, or as a service-level component (node_id nil) when the node is
# not present in the service. Returns true when the add request succeeded.
def add_component_by_name_to_service_node(service_id, node_name, component_name)
  puts "Add component to service:", "--------------------------"
  component_added = false
  component_namespace = component_name.split(":").first
  component_template = component_name.split(":").last
  service_nodes = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :filter=>nil, :about=>'nodes', :subtype=>'instance'})
  # Resolve the target node once; nil node_id means service-level add.
  node = service_nodes['data'].find { |x| x['display_name'] == node_name }
  node_id = nil
  if node
    puts "Node #{node_name} exists in service. Get node id..."
    node_id = node['id']
  end
  component_add_response = send_request('/rest/assembly/add_component', {:node_id=>node_id, :component_template_id=>component_template, :assembly_id=>service_id, :namespace=>component_namespace})
  if component_add_response['status'] == 'ok'
    puts "Component #{component_name} added to service!"
    component_added = true
  end
  puts ""
  return component_added
end
# Requests deletion of the service instance. Returns true on 'ok' status.
def delete_and_destroy_service(service_id)
  puts "Delete and destroy service:", "---------------------------"
  response = send_request('/rest/assembly/delete', {:assembly_id=>service_id})
  service_deleted = (response['status'] == "ok")
  if service_deleted
    puts "Service deleted successfully!"
  else
    puts "Service was not deleted successfully!"
  end
  puts ""
  return service_deleted
end
# Polls /rest/assembly/task_status every 2 seconds until the running delete task
# leaves the 'executing' state, succeeds, fails, or the retry limit is reached.
#
# service_id                     - assembly instance whose task is being polled
# component_to_delete            - component whose own task entry must also succeed
# delete_type                    - 'delete_service', 'delete_node' or 'delete_component';
#                                  used in log output and for the vanished-instance case
# check_component_in_task_status - when false, the per-component status check is
#                                  skipped (delete action on a staged service)
#
# Returns true when the task (and, if requested, the component entry) succeeded.
def delete_task_status(service_id, component_to_delete, delete_type, check_component_in_task_status=true)
  service_deleted = false
  end_loop = false
  count = 0
  max_num_of_retries = 50
  task_status = 'executing'
  while ((task_status.include? 'executing') && (end_loop == false))
    sleep 2
    count += 1
    response_task_status = send_request('/rest/assembly/task_status', {'assembly_id'=> service_id})
    # overall status of the first (top-level) task entry
    delete_status = response_task_status['data'].first['status']
    if !delete_status.nil?
      if check_component_in_task_status
        # status of the task entry whose type mentions the deleted component
        component_delete_status = response_task_status['data'].select { |x| x['type'].include? component_to_delete }.first['status']
      else
        # case when performing delete action on staged service
        component_delete_status = 'succeeded'
      end
      if (delete_status.include? "succeeded") && (component_delete_status.include? "succeeded")
        service_deleted = true
        task_status = delete_status
        puts "Task execution status: #{delete_status}"
        puts "#{delete_type} finished successfully!"
        end_loop = true
      end
      if (delete_status.include? 'failed')
        # dump every error entry returned by the task before giving up
        puts "Error details:"
        ap response_task_status['data']
        response_task_status['data'].each do |error_message|
          unless error_message['errors'].nil?
            puts error_message['errors']['message']
            puts error_message['errors']['type']
          end
        end
        puts "Task execution status: #{delete_status}"
        puts "#{delete_type} with workflow did not finish successfully!"
        task_status = delete_status
        end_loop = true
      end
      puts "Task execution status: #{delete_status}"
    else
      if delete_type == 'delete_service'
        # This is set to true only in case when we delete service instance
        # Reason: we cannot get task status details on instance that does not exist anymore
        service_deleted = true
        break
      end
    end
    # bail out after max_num_of_retries polls regardless of state
    if (count > max_num_of_retries)
      puts "Max number of retries reached..."
      puts "#{delete_type} with workflow did not finish successfully!"
      break
    end
  end
  service_deleted
end
# Deletes a service instance using the delete workflow and waits for the task
# to complete. Returns true only when the workflow actually succeeded.
#
# Fix: the success message was previously printed unconditionally, even when
# delete_task_status reported a failed workflow.
def delete_service_with_workflow(service_id, component_to_delete, check_component_in_task_status)
  puts "Delete and destroy service with workflow:", "-----------------------------------------"
  service_deleted_successfully = false
  delete_service_response = send_request('/rest/assembly/delete_using_workflow', {:assembly_id=>service_id, :subtype => :instance})
  if delete_service_response['status'] == 'ok'
    service_deleted_successfully = delete_task_status(service_id, component_to_delete, 'delete_service', check_component_in_task_status)
    if service_deleted_successfully
      puts "Service was deleted successfully!"
    else
      puts "Service was not deleted successfully!"
    end
  else
    puts "Service was not deleted successfully!"
  end
  puts ""
  return service_deleted_successfully
end
# Deletes a single node from a service instance using the delete workflow.
# Returns true only when the workflow task finished successfully.
#
# Fix: the success message was previously printed even when the workflow task
# failed or timed out.
def delete_node_with_workflow(service_id, node_name, component_to_delete, check_component_in_task_status)
  puts "Delete node with workflow:", "----------------------------------"
  node_deleted_successfully = false
  delete_node_response = send_request('/rest/assembly/delete_node_using_workflow', {:assembly_id=>service_id, :subtype => :instance, :node_id => node_name})
  if delete_node_response['status'] == 'ok'
    node_deleted_successfully = delete_task_status(service_id, component_to_delete, 'delete_node', check_component_in_task_status)
    if node_deleted_successfully
      puts "Node: #{node_name} was deleted successfully!"
    else
      puts "Node: #{node_name} was not deleted successfully!"
    end
  else
    puts "Node: #{node_name} was not deleted successfully!"
  end
  puts ""
  return node_deleted_successfully
end
# Deletes a single component from a node using the delete workflow.
# Returns true only when the workflow task finished successfully.
#
# Fix: the success message was previously printed even when the workflow task
# failed or timed out.
def delete_component_with_workflow(service_id, node_name, component_to_delete, check_component_in_task_status)
  puts "Delete component with workflow:", "---------------------------------------"
  component_deleted_successfully = false
  delete_component_response = send_request('/rest/assembly/delete_component_using_workflow', {:assembly_id=>service_id, :task_action => "#{component_to_delete}.delete", :task_params => { "node" => node_name }, :component_id => component_to_delete, :noop_if_no_action => nil, :cmp_full_name => "#{node_name}/#{component_to_delete}", :node_id => node_name })
  if delete_component_response['status'] == 'ok'
    component_deleted_successfully = delete_task_status(service_id, component_to_delete, 'delete_component', check_component_in_task_status)
    if component_deleted_successfully
      puts "Component: #{component_to_delete} was deleted successfully!"
    else
      puts "Component: #{component_to_delete} was not deleted successfully!"
    end
  else
    puts "Component: #{component_to_delete} was not deleted successfully!"
  end
  puts ""
  return component_deleted_successfully
end
# Deletes a context service (addressed by name) via the assembly delete endpoint.
# Returns true on success.
def delete_context(context_name)
  puts "Delete context:", "-----------------"
  response = send_request('/rest/assembly/delete', :assembly_id => context_name)
  context_deleted = response['status'] == "ok"
  if context_deleted
    puts "context service deleted successfully!"
  else
    puts "context service was not deleted successfully!"
  end
  puts ""
  context_deleted
end
# Promotes in-place assembly edits back to the service-module template.
# Returns true when the API succeeds and the promoted module's full name
# matches +service_module+.
def push_assembly_updates(service_id, service_module)
  puts "Push assembly updates:", "---------------------"
  response = send_request('/rest/assembly/promote_to_template', :assembly_id => service_id, :mode => 'update', :use_module_namespace => true)
  pretty_print_JSON(response)
  assembly_updated = response['status'] == 'ok' && response['data']['full_module_name'] == service_module
  puts ""
  return assembly_updated
end
# Promotes component-module updates for a service instance and returns the raw
# API response (no status interpretation is done here).
def push_component_module_updates_without_changes(service_id, component_module)
  puts "Push component module updates:", "-------------------------------"
  send_request('/rest/assembly/promote_module_updates',
               :assembly_id => service_id,
               :module_name => component_module,
               :module_type => "component_module")
end
# Returns the display names of all nodes in the given service instance.
def get_nodes(service_id)
  puts "Get all nodes from service:", "-----------------------------"
  response = send_request('/rest/assembly/info_about', :assembly_id => service_id, :node_id => nil, :component_id => nil, :subtype => 'instance', :about => 'nodes')
  node_names = response['data'].map { |node| node['display_name'] }
  pretty_print_JSON(node_names)
  puts ""
  node_names
end
# Returns the display names of all components in the given service instance.
def get_components(service_id)
  puts "Get all components from service:", "-----------------------------"
  response = send_request('/rest/assembly/info_about', :assembly_id => service_id, :node_id => nil, :component_id => nil, :subtype => 'instance', :about => 'components')
  component_names = response['data'].map { |component| component['display_name'] }
  puts ""
  component_names
end
# Reads the service's attributes (YAML format) and returns the integer
# 'cardinality' attribute of the node named +node_name+, or nil when the node
# or the attribute is absent.
def get_cardinality(service_id, node_name)
  puts "Get cardinality from service:", "-----------------------------"
  response = send_request('/rest/assembly/info_about', :assembly_id => service_id, :node_id => nil, :component_id => nil, :subtype => 'instance', :about => 'attributes', :format => 'yaml')
  content = YAML.load(response['data'])
  puts content
  # node keys carry a trailing slash in the attributes document
  node_attributes = (content["nodes"]["#{node_name}/"] || {})['attributes'] || {}
  puts ""
  cardinality = node_attributes['cardinality']
  cardinality && cardinality.to_i
end
# Fetches the current task/workflow info for a service instance and returns it
# parsed from YAML.
def get_workflow_info(service_id)
  puts "Get workflow info:", "----------------------"
  response = send_request('/rest/assembly/info_about_task', :assembly_id => service_id, :subtype => 'instance')
  parsed_info = YAML.load(response['data'])
  puts parsed_info
  puts ""
  parsed_info
end
# Grants ssh public-key access for +system_user+ on the service's nodes.
# Returns the raw API response.
def grant_access(service_id, system_user, rsa_pub_name, ssh_key)
  puts "Grant access:", "-----------------"
  request_params = {
    :agent_action => :grant_access,
    :assembly_id  => service_id,
    :system_user  => system_user,
    :rsa_pub_name => rsa_pub_name,
    :rsa_pub_key  => ssh_key
  }
  response = send_request('/rest/assembly/initiate_ssh_pub_access', request_params)
  pretty_print_JSON(response)
  puts ""
  response
end
# Revokes previously granted ssh public-key access. When the revoke request is
# accepted, waits for and returns the agent action results; otherwise returns
# the failed revoke response itself.
def revoke_access(service_id, system_user, rsa_pub_name, ssh_key)
  puts "Revoke access:", "-----------------"
  revoke_params = {
    :agent_action => :revoke_access,
    :assembly_id  => service_id,
    :system_user  => system_user,
    :rsa_pub_name => rsa_pub_name,
    :rsa_pub_key  => ssh_key
  }
  resp = send_request('/rest/assembly/initiate_ssh_pub_access', revoke_params)
  pretty_print_JSON(resp)
  if resp['status'] == 'notok'
    response = resp
  else
    response = send_request('/rest/assembly/get_action_results', :action_results_id => resp['data']['action_results_id'], :return_only_if_complete => true, :disable_post_processing => true)
    puts response
  end
  puts ""
  response
end
# Lists workspace service instances whose +key+ property contains +value+.
# Returns the matching entries, or nil when nothing matched or the list call
# itself failed.
def list_services_by_property(key, value)
  # Get list of existing workspace service instances in a specific context
  puts "List service instances with #{value} value for #{key} property:", "----------------------------------------------------------------------------------"
  response = send_request('/rest/assembly/list', :detail_level => 'nodes', :subtype => 'instance', :include_namespaces => true)
  matching_services = nil
  if response['status'] == 'ok'
    matching_services = response['data'].select { |svc| svc[key].include? value }
    if matching_services.empty?
      puts "No service instances with #{value} value for #{key} property been found."
      matching_services = nil
    else
      puts "#{matching_services.length} service instances with #{value} value for #{key} property found: "
    end
  else
    puts "Could not get service instance list."
  end
  puts ''
  matching_services
end
# Lists ssh access entries for the service and returns the key names of the
# entries matching the given user, key name and node set.
def list_ssh_access(service_id, system_user, rsa_pub_name, nodes)
  puts "List ssh access:", "---------------------"
  sleep 5
  response = send_request('/rest/assembly/list_ssh_access', :assembly_id => service_id)
  pretty_print_JSON(response)
  matching_entries = response['data'].select do |entry|
    attrs = entry['attributes']
    attrs['linux_user'] == system_user && attrs['key_name'] == rsa_pub_name && (nodes.include? entry['node_name'])
  end
  puts ""
  matching_entries.map { |entry| entry['attributes']['key_name'] }
end
# Fetches the detail output of a task action and splits it into individual
# "runs" on blank-line boundaries. Returns an empty hash when the request
# failed (callers treat the result as an enumerable either way).
def get_task_action_output(service_id, action_id)
  puts "Get task action output:", "------------------------"
  response = send_request('/rest/assembly/task_action_detail', :assembly_id => service_id, :message_id => action_id)
  pretty_print_JSON(response)
  runs = {}
  if response['status'] == "ok"
    # strip '=' separator characters before splitting into runs
    cleaned_output = response['data'].gsub("=", "")
    runs = cleaned_output.split(/\n \n\n|\n\n\n|\n\n/)
  else
    puts "Task action details were not retrieved successfully!"
  end
  puts ""
  runs
end
# Verifies the 'local_copy' (update) and 'update_saved' flags reported for a
# component module inside a service instance.
#
# Returns true only when both flags equal the expected +update_flag+ /
# +update_saved_flag+ values.
#
# Fix: corrected the garbled log messages "Flags dont not exist in the output"
# and "flags does not match the comparison".
def verify_flags(service_id, component_module_name, update_flag, update_saved_flag)
  puts "Verify update and update saved flags:", "---------------------------------"
  flags_verified = false
  response = send_request('/rest/assembly/info_about', {:assembly_id=>service_id, :subtype=>:instance, :about=>'modules', :detail_to_include=>[:version_info]})
  pretty_print_JSON(response)
  component_module_details = response['data'].select { |x| x['display_name'] == component_module_name }.first
  if !component_module_details.nil?
    puts "Component module found! Check flags..."
    pretty_print_JSON(component_module_details)
    # warn when neither flag key is present in the payload
    unless component_module_details.key?('local_copy') || component_module_details.key?('update_saved')
      puts "Flags do not exist in the output"
    end
    if component_module_details['local_copy'] == update_flag && component_module_details['update_saved'] == update_saved_flag
      puts "Update and update saved flags match the comparison"
      flags_verified = true
    else
      puts "Update and update saved flags do not match the comparison"
    end
  else
    puts "Component module was not found!"
  end
  puts ""
  flags_verified
end
end | dtk/dtk-server | test/functional/rspec/lib/mixins/assembly_and_service_operations_mixin.rb | Ruby | apache-2.0 | 51,221 |
using System.Collections.Specialized;
using System.ComponentModel;
using System.Linq;
using DNTProfiler.Common.JsonToolkit;
using DNTProfiler.Common.Models;
using DNTProfiler.Common.Mvvm;
using DNTProfiler.Infrastructure.Core;
using DNTProfiler.Infrastructure.ScriptDomVisitors;
using DNTProfiler.Infrastructure.ViewModels;
using DNTProfiler.PluginsBase;
namespace DNTProfiler.InCorrectNullComparisons.ViewModels
{
public class MainViewModel : MainViewModelBase
{
private readonly CallbacksManagerBase _callbacksManager;
public MainViewModel(ProfilerPluginBase pluginContext)
: base(pluginContext)
{
if (Designer.IsInDesignModeStatic)
return;
setActions();
setGuiModel();
_callbacksManager = new CallbacksManagerBase(PluginContext, GuiModelData);
setEvenets();
}
private void Commands_CollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
{
switch (e.Action)
{
case NotifyCollectionChangedAction.Add:
foreach (Command command in e.NewItems)
{
var visitor = new NullComparisonVisitor();
foreach (var parameter in command.Parameters.Where(parameter => parameter.Value == "null"))
{
visitor.NullVariableNames.Add(parameter.Name);
}
_callbacksManager.RunAnalysisVisitorOnCommand(visitor, command);
}
break;
}
}
private void GuiModelData_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
switch (e.PropertyName)
{
case "SelectedApplicationIdentity":
_callbacksManager.ShowSelectedApplicationIdentityLocalCommands();
break;
case "SelectedExecutedCommand":
_callbacksManager.ShowSelectedCommandRelatedStackTraces();
break;
}
}
private void setActions()
{
PluginContext.Reset = () =>
{
ResetAll();
};
PluginContext.GetResults = () =>
{
return GuiModelData.RelatedCommands.ToFormattedJson();
};
}
private void setEvenets()
{
PluginContext.ProfilerData.Commands.CollectionChanged += Commands_CollectionChanged;
}
private void setGuiModel()
{
GuiModelData.PropertyChanged += GuiModelData_PropertyChanged;
}
}
} | VahidN/DNTProfiler | Plugins/DNTProfiler.InCorrectNullComparisons/ViewModels/MainViewModel.cs | C# | apache-2.0 | 2,745 |
//
// Questo file è stato generato dall'architettura JavaTM per XML Binding (JAXB) Reference Implementation, v2.2.8-b130911.1802
// Vedere <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Qualsiasi modifica a questo file andrà persa durante la ricompilazione dello schema di origine.
// Generato il: 2014.10.23 alle 11:27:04 AM CEST
//
package org.cumulus.certificate.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Classe Java per HistoryStateType complex type.
*
* <p>Il seguente frammento di schema specifica il contenuto previsto contenuto in questa classe.
*
* <pre>
* <complexType name="HistoryStateType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="stateId" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="refersToStateId" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "HistoryStateType")
public class HistoryStateType {

    @XmlAttribute(name = "stateId", required = true)
    protected String stateId;
    @XmlAttribute(name = "refersToStateId", required = true)
    protected String refersToStateId;

    /**
     * Gets the value of the stateId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getStateId() {
        return stateId;
    }

    /**
     * Sets the value of the stateId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setStateId(String value) {
        this.stateId = value;
    }

    /**
     * Gets the value of the refersToStateId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getRefersToStateId() {
        return refersToStateId;
    }

    /**
     * Sets the value of the refersToStateId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setRefersToStateId(String value) {
        this.refersToStateId = value;
    }

}
| fgaudenzi/testManager | testManager/XMLRepository/CertificationModel/org/cumulus/certificate/model/HistoryStateType.java | Java | apache-2.0 | 2,497 |
//---------------------------------------------------------------------------
// Greenplum Database
// Copyright (C) 2013 EMC Corp.
//
// @filename:
// CMDIdCast.cpp
//
// @doc:
// Implementation of mdids for cast functions
//---------------------------------------------------------------------------
#include "naucrates/md/CMDIdCast.h"
#include "naucrates/dxl/xml/CXMLSerializer.h"
using namespace gpos;
using namespace gpmd;
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::CMDIdCast
//
// @doc:
// Ctor
//
//---------------------------------------------------------------------------
CMDIdCast::CMDIdCast
	(
	CMDIdGPDB *pmdidSrc,
	CMDIdGPDB *pmdidDest
	)
	:
	m_pmdidSrc(pmdidSrc),
	m_pmdidDest(pmdidDest),
	m_str(m_wszBuffer, GPOS_ARRAY_SIZE(m_wszBuffer))
{
	// takes ownership of both component mdids; they are released in the dtor
	GPOS_ASSERT(pmdidSrc->FValid());
	GPOS_ASSERT(pmdidDest->FValid());

	// serialize mdid into static string
	Serialize();
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::~CMDIdCast
//
// @doc:
// Dtor
//
//---------------------------------------------------------------------------
CMDIdCast::~CMDIdCast()
{
	// release the ref-counts acquired in the constructor
	m_pmdidSrc->Release();
	m_pmdidDest->Release();
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::Serialize
//
// @doc:
// Serialize mdid into static string
//
//---------------------------------------------------------------------------
void
CMDIdCast::Serialize()
{
	// serialize mdid as SystemType.mdidSrc.mdidDest
	// layout: <mdid type>.<src oid>.<src major>.<src minor>;<dest oid>.<dest major>.<dest minor>
	m_str.AppendFormat
			(
			GPOS_WSZ_LIT("%d.%d.%d.%d;%d.%d.%d"),
			Emdidt(),
			m_pmdidSrc->OidObjectId(),
			m_pmdidSrc->UlVersionMajor(),
			m_pmdidSrc->UlVersionMinor(),
			m_pmdidDest->OidObjectId(),
			m_pmdidDest->UlVersionMajor(),
			m_pmdidDest->UlVersionMinor()
			);
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::Wsz
//
// @doc:
// Returns the string representation of the mdid
//
//---------------------------------------------------------------------------
const WCHAR *
CMDIdCast::Wsz() const
{
	// buffer was populated once by Serialize() during construction
	return m_str.Wsz();
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::PmdidSrc
//
// @doc:
// Returns the source type id
//
//---------------------------------------------------------------------------
IMDId *
CMDIdCast::PmdidSrc() const
{
	// borrowed reference; ownership stays with this object
	return m_pmdidSrc;
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::PmdidDest
//
// @doc:
// Returns the destination type id
//
//---------------------------------------------------------------------------
IMDId *
CMDIdCast::PmdidDest() const
{
	// borrowed reference; ownership stays with this object
	return m_pmdidDest;
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::FEquals
//
// @doc:
// Checks if the mdids are equal
//
//---------------------------------------------------------------------------
BOOL
CMDIdCast::FEquals
	(
	const IMDId *pmdid
	)
	const
{
	// equal only when the other mdid is also a cast-function mdid and both
	// the source and destination component mdids match
	if (NULL != pmdid && EmdidCastFunc == pmdid->Emdidt())
	{
		const CMDIdCast *pmdidCastFunc = CMDIdCast::PmdidConvert(pmdid);

		return m_pmdidSrc->FEquals(pmdidCastFunc->PmdidSrc()) &&
				m_pmdidDest->FEquals(pmdidCastFunc->PmdidDest());
	}

	return false;
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::Serialize
//
// @doc:
// Serializes the mdid as the value of the given attribute
//
//---------------------------------------------------------------------------
void
CMDIdCast::Serialize
	(
	CXMLSerializer * pxmlser,
	const CWStringConst *pstrAttribute
	)
	const
{
	// emit the pre-serialized mdid string as the value of the given attribute
	pxmlser->AddAttribute(pstrAttribute, &m_str);
}
//---------------------------------------------------------------------------
// @function:
// CMDIdCast::OsPrint
//
// @doc:
// Debug print of the id in the provided stream
//
//---------------------------------------------------------------------------
IOstream &
CMDIdCast::OsPrint
	(
	IOstream &os
	)
	const
{
	// prints as "(<serialized mdid>)"
	os << "(" << m_str.Wsz() << ")";
	return os;
}
// EOF
| PengJi/gporca-comments | libnaucrates/src/md/CMDIdCast.cpp | C++ | apache-2.0 | 4,157 |
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.serverhealth;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.CruiseConfigProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
@Service
public class ServerHealthService implements ApplicationContextAware {
    private static final Logger LOG = LoggerFactory.getLogger(ServerHealthService.class);

    // Rebuilt by the timer thread and swapped in wholesale; volatile so request
    // threads calling getPipelinesWithErrors() see the latest complete snapshot.
    private volatile Map<ServerHealthState, Set<String>> pipelinesWithErrors;
    // Concurrent map: mutated by update()/removeByScope() callers and the timer.
    private final Map<HealthStateType, ServerHealthState> serverHealth;
    private ApplicationContext applicationContext;

    public ServerHealthService() {
        this.serverHealth = new ConcurrentHashMap<>();
        this.pipelinesWithErrors = new HashMap<>();
    }

    /** Removes every recorded health state belonging to the given scope. */
    public void removeByScope(HealthStateScope scope) {
        for (HealthStateType healthStateType : entryKeys()) {
            if (healthStateType.isSameScope(scope)) {
                serverHealth.remove(healthStateType);
            }
        }
    }

    // Defensive copy of the key set so callers may remove entries while iterating.
    private Set<HealthStateType> entryKeys() {
        return new HashSet<>(serverHealth.keySet());
    }

    /** @return all health states whose type matches the given scope, ordered by type. */
    public List<ServerHealthState> filterByScope(HealthStateScope scope) {
        List<ServerHealthState> filtered = new ArrayList<>();
        for (Map.Entry<HealthStateType, ServerHealthState> entry : sortedEntries()) {
            HealthStateType type = entry.getKey();
            if (type.isSameScope(scope)) {
                filtered.add(entry.getValue());
            }
        }
        return filtered;
    }

    /**
     * Records the given state. An OK state clears any previously recorded problem
     * of the same type and returns null; otherwise the state is stored and its
     * type returned.
     */
    public HealthStateType update(ServerHealthState serverHealthState) {
        HealthStateType type = serverHealthState.getType();
        if (serverHealthState.getLogLevel() == HealthStateLevel.OK) {
            // Map.remove is a no-op when absent; no containsKey pre-check needed.
            serverHealth.remove(type);
            return null;
        }
        serverHealth.put(type, serverHealthState);
        return type;
    }

    // called from spring timer
    public synchronized void onTimer() {
        CruiseConfig currentConfig = applicationContext.getBean(CruiseConfigProvider.class).getCurrentConfig();
        purgeStaleHealthMessages(currentConfig);
        LOG.debug("Recomputing material to pipeline mappings.");
        Map<ServerHealthState, Set<String>> erroredPipelines = new HashMap<>();
        for (Map.Entry<HealthStateType, ServerHealthState> entry : serverHealth.entrySet()) {
            erroredPipelines.put(entry.getValue(), entry.getValue().getPipelineNames(currentConfig));
        }
        pipelinesWithErrors = erroredPipelines;
        LOG.debug("Done recomputing material to pipeline mappings.");
    }

    /** @return pipelines affected by the given state, per the last onTimer() run. */
    public Set<String> getPipelinesWithErrors(ServerHealthState serverHealthState) {
        return pipelinesWithErrors.get(serverHealthState);
    }

    /** Drops messages for config elements that no longer exist, then expired messages. */
    void purgeStaleHealthMessages(CruiseConfig cruiseConfig) {
        removeMessagesForElementsNoLongerInConfig(cruiseConfig);
        removeExpiredMessages();
    }

    @Deprecated(forRemoval = true) // Remove once we get rid of SpringJUnitTestRunner
    public void removeAllLogs() {
        serverHealth.clear();
    }

    private void removeMessagesForElementsNoLongerInConfig(CruiseConfig cruiseConfig) {
        for (HealthStateType type : entryKeys()) {
            if (type.isRemovedFromConfig(cruiseConfig)) {
                this.removeByScope(type);
            }
        }
    }

    private void removeExpiredMessages() {
        // Iterate over a copy of the entry set so removal is safe mid-iteration.
        for (Map.Entry<HealthStateType, ServerHealthState> entry : new HashSet<>(serverHealth.entrySet())) {
            ServerHealthState value = entry.getValue();
            if (value.hasExpired()) {
                serverHealth.remove(entry.getKey());
            }
        }
    }

    private void removeByScope(HealthStateType type) {
        removeByScope(type.getScope());
    }

    /** @return all recorded states ordered by their type. */
    public ServerHealthStates logs() {
        ArrayList<ServerHealthState> logs = new ArrayList<>();
        for (Map.Entry<HealthStateType, ServerHealthState> entry : sortedEntries()) {
            logs.add(entry.getValue());
        }
        return new ServerHealthStates(logs);
    }

    private List<Map.Entry<HealthStateType, ServerHealthState>> sortedEntries() {
        List<Map.Entry<HealthStateType, ServerHealthState>> entries = new ArrayList<>(serverHealth.entrySet());
        entries.sort(Comparator.comparing(Map.Entry::getKey));
        return entries;
    }

    /** @return a human-readable dump: description plus message per state. */
    public String getLogsAsText() {
        StringBuilder text = new StringBuilder();
        for (ServerHealthState state : logs()) {
            text.append(state.getDescription());
            text.append("\n\t");
            text.append(state.getMessage());
            text.append("\n");
        }
        return text.toString();
    }

    /** @return true when a state of the given type is recorded at the given level. */
    public boolean containsError(HealthStateType type, HealthStateLevel level) {
        for (ServerHealthState log : logs()) {
            if (log.getType().equals(type) && log.getLogLevel() == level) {
                return true;
            }
        }
        return false;
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }
}
| GaneshSPatil/gocd | common/src/main/java/com/thoughtworks/go/serverhealth/ServerHealthService.java | Java | apache-2.0 | 6,153 |
package gov.ic.geoint.spreadsheet;
/**
*
*/
/**
 * A single spreadsheet cell: a (row, column) coordinate plus its string value.
 * Extends {@link Hashable} so cells can participate in hash-based comparison.
 */
public interface ICell extends Hashable {

    /**
     * @return the column number of this cell
     */
    public int getColumnNum();

    /**
     * @return the row number of this cell
     */
    public int getRowNum();

    /**
     * @return the cell's value as a string
     */
    public String getValue();
}
| GEOINT/spreadsheetDiff | src/main/java/gov/ic/geoint/spreadsheet/ICell.java | Java | apache-2.0 | 298 |
package sdk.chat.demo.examples.api;
import io.reactivex.functions.Consumer;
import sdk.guru.common.DisposableMap;
/**
 * Shared base class for the API examples: holds a {@link DisposableMap} for
 * collecting Rx subscriptions and doubles as an error {@link Consumer}.
 */
public class BaseExample implements Consumer<Throwable> {

    // Add the disposables to a map so you can dispose of them all at one time
    protected DisposableMap dm = new DisposableMap();

    @Override
    public void accept(Throwable throwable) throws Exception {
        // Handle exception
        // NOTE(review): errors are swallowed silently here — acceptable for example
        // code, but real handlers should log or surface the throwable.
    }
}
| chat-sdk/chat-sdk-android | chat-sdk-demo/src/main/java/sdk/chat/demo/examples/api/BaseExample.java | Java | apache-2.0 | 423 |
//Copyright (c) 2014 by Disy Informationssysteme GmbH
package net.disy.eenvplus.tfes.core.api.query;
// NOT_PUBLISHED
/** Source query that additionally carries a free-text suggestion keyword. */
public interface ISuggestionQuery extends ISourceQuery {

  /** @return the keyword for which suggestions are requested */
  String getKeyword();
}
| eENVplus/tf-exploitation-server | TF_Exploitation_Server_core/src/main/java/net/disy/eenvplus/tfes/core/api/query/ISuggestionQuery.java | Java | apache-2.0 | 201 |
package com.jwetherell.algorithms.data_structures.interfaces;
/**
* A tree can be defined recursively (locally) as a collection of nodes (starting at a root node),
* where each node is a data structure consisting of a value, together with a list of nodes (the "children"),
* with the constraints that no node is duplicated. A tree can be defined abstractly as a whole (globally)
* as an ordered tree, with a value assigned to each node.
* <p>
* @see <a href="https://en.wikipedia.org/wiki/Tree_(data_structure)">Tree (Wikipedia)</a>
* <br>
* @author Justin Wetherell <phishman3579@gmail.com>
*/
public interface ITree<T> {

    /**
     * Add value to the tree. Tree can contain multiple equal values.
     * 
     * @param value to add to the tree.
     * @return True if successfully added to tree.
     */
    public boolean add(T value);

    /**
     * Remove first occurrence of value in the tree.
     * 
     * @param value to remove from the tree.
     * @return T value removed from tree, or null if not found.
     */
    public T remove(T value);

    /**
     * Clear the entire tree.
     */
    public void clear();

    /**
     * Does the tree contain the value.
     * 
     * @param value to locate in the tree.
     * @return True if tree contains value.
     */
    public boolean contains(T value);

    /**
     * Get number of nodes in the tree.
     * 
     * @return Number of nodes in the tree.
     */
    public int size();

    /**
     * Validate the tree according to the invariants.
     * 
     * @return True if the tree is valid.
     */
    public boolean validate();

    /**
     * Get Tree as a Java compatible Collection
     * 
     * @return Java compatible Collection
     */
    public java.util.Collection<T> toCollection();

}
| phishman3579/java-algorithms-implementation | src/com/jwetherell/algorithms/data_structures/interfaces/ITree.java | Java | apache-2.0 | 1,766 |
package org.nd4j.linalg.indexing;
import com.google.common.base.Function;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.nd4j.linalg.BaseNd4jTest;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.accum.MatchCondition;
import org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndReplace;
import org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.factory.Nd4jBackend;
import org.nd4j.linalg.indexing.conditions.AbsValueGreaterThan;
import org.nd4j.linalg.indexing.conditions.Condition;
import org.nd4j.linalg.indexing.conditions.Conditions;
import org.nd4j.linalg.indexing.functions.Value;
import java.util.Arrays;
import static org.junit.Assert.*;
/**
* @author raver119@gmail.com
*/
@RunWith(Parameterized.class)
public class BooleanIndexingTest extends BaseNd4jTest {
    // Parameterized runner supplies each configured Nd4j backend in turn.
    public BooleanIndexingTest(Nd4jBackend backend) {
        super(backend);
    }
/*
1D array checks
*/
    @Test
    public void testAnd1() throws Exception {
        // every element (1..5) is > 0.5, so the conjunction must hold
        INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
        assertTrue(BooleanIndexing.and(array, Conditions.greaterThan(0.5f)));
    }
    @Test
    public void testAnd2() throws Exception {
        // every element (1..5) is < 6.0
        INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
        assertTrue(BooleanIndexing.and(array, Conditions.lessThan(6.0f)));
    }
    @Test
    public void testAnd3() throws Exception {
        // 5.0 is not strictly < 5.0, so 'and' must be false
        INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
        assertFalse(BooleanIndexing.and(array, Conditions.lessThan(5.0f)));
    }
    @Test
    public void testAnd4() throws Exception {
        // only 5.0 is > 4.0; 'and' requires all elements to match
        INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
        assertFalse(BooleanIndexing.and(array, Conditions.greaterThan(4.0f)));
    }
    @Test
    public void testAnd5() throws Exception {
        // >= is inclusive, so an array of exactly 1e-5 passes
        INDArray array = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f, 1e-5f, 1e-5f});
        assertTrue(BooleanIndexing.and(array, Conditions.greaterThanOrEqual(1e-5f)));
    }
    @Test
    public void testAnd6() throws Exception {
        // strict < fails for elements equal to the bound
        INDArray array = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f, 1e-5f, 1e-5f});
        assertFalse(BooleanIndexing.and(array, Conditions.lessThan(1e-5f)));
    }
    @Test
    public void testAnd7() throws Exception {
        // exact equality holds for all elements
        INDArray array = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f, 1e-5f, 1e-5f});
        assertTrue(BooleanIndexing.and(array, Conditions.equals(1e-5f)));
    }
    @Test
    public void testOr1() throws Exception {
        // at least one element (4, 5) is > 3
        INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
        assertTrue(BooleanIndexing.or(array, Conditions.greaterThan(3.0f)));
    }
    @Test
    public void testOr2() throws Exception {
        // at least one element (1, 2) is < 3
        INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
        assertTrue(BooleanIndexing.or(array, Conditions.lessThan(3.0f)));
    }
    @Test
    public void testOr3() throws Exception {
        // no element exceeds 6, so the disjunction must fail
        INDArray array = Nd4j.create(new float[] {1.0f, 2.0f, 3.0f, 4.0f, 5.0f});
        assertFalse(BooleanIndexing.or(array, Conditions.greaterThan(6.0f)));
    }
    @Test
    public void testApplyWhere1() throws Exception {
        // every -1 is < EPS_THRESHOLD, so all elements get replaced by it
        INDArray array = Nd4j.create(new float[] {-1f, -1f, -1f, -1f, -1f});
        BooleanIndexing.applyWhere(array, Conditions.lessThan(Nd4j.EPS_THRESHOLD), new Value(Nd4j.EPS_THRESHOLD));
        //System.out.println("Array contains: " + Arrays.toString(array.data().asFloat()));
        assertTrue(BooleanIndexing.and(array, Conditions.equals(Nd4j.EPS_THRESHOLD)));
    }
    @Test
    public void testApplyWhere2() throws Exception {
        // all zeros are < 1 and should become exactly 1.0
        INDArray array = Nd4j.create(new float[] {0f, 0f, 0f, 0f, 0f});
        BooleanIndexing.applyWhere(array, Conditions.lessThan(1.0f), new Value(1.0f));
        assertTrue(BooleanIndexing.and(array, Conditions.equals(1.0f)));
    }
    @Test
    public void testApplyWhere3() throws Exception {
        // 1e-18 is < 1e-12, so every element should be clamped up to 1e-12
        INDArray array = Nd4j.create(new float[] {1e-18f, 1e-18f, 1e-18f, 1e-18f, 1e-18f});
        BooleanIndexing.applyWhere(array, Conditions.lessThan(1e-12f), new Value(1e-12f));
        //System.out.println("Array contains: " + Arrays.toString(array.data().asFloat()));
        assertTrue(BooleanIndexing.and(array, Conditions.equals(1e-12f)));
    }
    @Test
    public void testApplyWhere4() throws Exception {
        // first pass clamps tiny values to 1e-12; NaN survives the lessThan pass
        // and is only replaced by the subsequent isNan pass (with 1e-16)
        INDArray array = Nd4j.create(new float[] {1e-18f, Float.NaN, 1e-18f, 1e-18f, 1e-18f});
        BooleanIndexing.applyWhere(array, Conditions.lessThan(1e-12f), new Value(1e-12f));
        //System.out.println("Array contains: " + Arrays.toString(array.data().asFloat()));
        BooleanIndexing.applyWhere(array, Conditions.isNan(), new Value(1e-16f));
        System.out.println("Array contains: " + Arrays.toString(array.data().asFloat()));
        assertFalse(BooleanIndexing.or(array, Conditions.isNan()));
        assertTrue(BooleanIndexing.or(array, Conditions.equals(1e-12f)));
        assertTrue(BooleanIndexing.or(array, Conditions.equals(1e-16f)));
    }
    /*
        2D array checks: the same and/or/applyWhere semantics on matrices.
     */

    // A fresh zero matrix matches equals(0) everywhere.
    @Test
    public void test2dAnd1() throws Exception {
        INDArray array = Nd4j.zeros(10, 10);
        assertTrue(BooleanIndexing.and(array, Conditions.equals(0f)));
    }

    // A single modified element (via a slice view) breaks the all-zero invariant.
    @Test
    public void test2dAnd2() throws Exception {
        INDArray array = Nd4j.zeros(10, 10);
        array.slice(4).putScalar(2, 1e-5f);
        System.out.println(array);
        assertFalse(BooleanIndexing.and(array, Conditions.equals(0f)));
    }

    // Not all elements are > 0 (only one was raised above zero).
    @Test
    public void test2dAnd3() throws Exception {
        INDArray array = Nd4j.zeros(10, 10);
        array.slice(4).putScalar(2, 1e-5f);
        assertFalse(BooleanIndexing.and(array, Conditions.greaterThan(0f)));
    }

    // But at least one element is > 1e-6, so or() holds.
    @Test
    public void test2dAnd4() throws Exception {
        INDArray array = Nd4j.zeros(10, 10);
        array.slice(4).putScalar(2, 1e-5f);
        assertTrue(BooleanIndexing.or(array, Conditions.greaterThan(1e-6f)));
    }

    // applyWhere on a matrix: only the one small element is rewritten,
    // leaving a mix of 1.0 and 1e-12 values.
    @Test
    public void test2dApplyWhere1() throws Exception {
        INDArray array = Nd4j.ones(4, 4);
        array.slice(3).putScalar(2, 1e-5f);
        //System.out.println("Array before: " + Arrays.toString(array.data().asFloat()));
        BooleanIndexing.applyWhere(array, Conditions.lessThan(1e-4f), new Value(1e-12f));
        //System.out.println("Array after 1: " + Arrays.toString(array.data().asFloat()));
        assertTrue(BooleanIndexing.or(array, Conditions.equals(1e-12f)));
        assertTrue(BooleanIndexing.or(array, Conditions.equals(1.0f)));
        assertFalse(BooleanIndexing.and(array, Conditions.equals(1e-12f)));
    }
    /**
     * Exercises current SpecifiedIndex mechanics: writing a patch through a
     * SpecifiedIndex range on a slice view is expected to reach the backing
     * array, so the array must no longer be all zeros afterwards.
     *
     * NOTE(review): the original comment claimed "This test fails" and then
     * trailed off mid-sentence; the assertion below expects the write to
     * succeed — confirm the intended semantics against SpecifiedIndex docs.
     *
     * @throws Exception
     */
    @Test
    public void testSliceAssign1() throws Exception {
        INDArray array = Nd4j.zeros(4, 4);
        INDArray patch = Nd4j.create(new float[] {1e-5f, 1e-5f, 1e-5f});
        INDArray slice = array.slice(1);
        // Scattered columns 0, 1 and 3 of row 1 receive the patch values.
        int[] idx = new int[] {0, 1, 3};
        INDArrayIndex[] range = new INDArrayIndex[] {new SpecifiedIndex(idx)};
        INDArray subarray = slice.get(range);
        System.out.println("Subarray: " + Arrays.toString(subarray.data().asFloat()) + " isView: " + subarray.isView());
        slice.put(range, patch);
        System.out.println("Array after being patched: " + Arrays.toString(array.data().asFloat()));
        assertFalse(BooleanIndexing.and(array, Conditions.equals(0f)));
    }
    // replaceWhere: elements of array1 greater than 4 are replaced by the
    // corresponding elements of array2.
    @Test
    public void testConditionalAssign1() throws Exception {
        INDArray array1 = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7});
        INDArray array2 = Nd4j.create(new double[] {7, 6, 5, 4, 3, 2, 1});
        INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 3, 2, 1});

        BooleanIndexing.replaceWhere(array1, array2, Conditions.greaterThan(4));

        assertEquals(comp, array1);
    }

    // CompareAndSet with a scalar: set 3 wherever the element equals 0.
    @Test
    public void testCaSTransform1() throws Exception {
        INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndSet(array, 3, Conditions.equals(0)));

        assertEquals(comp, array);
    }

    // CompareAndSet with a scalar: set 3.0 wherever the element is < 2.
    @Test
    public void testCaSTransform2() throws Exception {
        INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray comp = Nd4j.create(new double[] {3, 2, 3, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndSet(array, 3.0, Conditions.lessThan(2)));

        assertEquals(comp, array);
    }

    // Pairwise CompareAndSet: copies values from comp into array; here every
    // element matches lessThan(5) on the source side except where it already agrees.
    @Test
    public void testCaSPairwiseTransform1() throws Exception {
        INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndSet(array, comp, Conditions.lessThan(5)));

        assertEquals(comp, array);
    }

    // Pairwise CompareAndReplace with lessThan(1): only the 0 element is replaced.
    @Test
    public void testCaRPairwiseTransform1() throws Exception {
        INDArray array = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray comp = Nd4j.create(new double[] {1, 2, 3, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndReplace(array, comp, Conditions.lessThan(1)));

        assertEquals(comp, array);
    }

    // Pairwise CompareAndSet with epsNotEquals(0): judging by the expected
    // output, y is copied into x wherever y is non-zero (y[3]==0 is skipped).
    @Test
    public void testCaSPairwiseTransform2() throws Exception {
        INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray y = Nd4j.create(new double[] {2, 4, 3, 0, 5});
        INDArray comp = Nd4j.create(new double[] {2, 4, 3, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndSet(x, y, Conditions.epsNotEquals(0.0)));

        assertEquals(comp, x);
    }

    // Pairwise CompareAndReplace with epsNotEquals(0): per the expected output,
    // the condition is evaluated on x, so x[2]==0 keeps its original value.
    @Test
    public void testCaRPairwiseTransform2() throws Exception {
        INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5});
        INDArray comp = Nd4j.create(new double[] {2, 4, 0, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndReplace(x, y, Conditions.epsNotEquals(0.0)));

        assertEquals(comp, x);
    }

    // CompareAndReplace with lessThan(4): elements of x below 4 take y's value.
    @Test
    public void testCaSPairwiseTransform3() throws Exception {
        INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5});
        INDArray comp = Nd4j.create(new double[] {2, 4, 3, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndReplace(x, y, Conditions.lessThan(4)));

        assertEquals(comp, x);
    }

    // CompareAndReplace with lessThan(2): only x[0]==1 and x[2]==0 qualify.
    @Test
    public void testCaRPairwiseTransform3() throws Exception {
        INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5});
        INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5});
        INDArray comp = Nd4j.create(new double[] {2, 2, 3, 4, 5});

        Nd4j.getExecutioner().exec(new CompareAndReplace(x, y, Conditions.lessThan(2)));

        assertEquals(comp, x);
    }
    // MatchCondition over all dimensions (Integer.MAX_VALUE = full reduce):
    // counts how many elements are < 5.
    @Test
    public void testMatchConditionAllDimensions1() throws Exception {
        INDArray array = Nd4j.create(new double[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});

        int val = (int) Nd4j.getExecutioner().exec(new MatchCondition(array, Conditions.lessThan(5)), Integer.MAX_VALUE)
                        .getDouble(0);

        assertEquals(5, val);
    }

    // Full reduce counting NaN elements.
    @Test
    public void testMatchConditionAllDimensions2() throws Exception {
        INDArray array = Nd4j.create(new double[] {0, 1, 2, 3, Double.NaN, 5, 6, 7, 8, 9});

        int val = (int) Nd4j.getExecutioner().exec(new MatchCondition(array, Conditions.isNan()), Integer.MAX_VALUE)
                        .getDouble(0);

        assertEquals(1, val);
    }

    // Full reduce counting infinite elements.
    @Test
    public void testMatchConditionAllDimensions3() throws Exception {
        INDArray array = Nd4j.create(new double[] {0, 1, 2, 3, Double.NEGATIVE_INFINITY, 5, 6, 7, 8, 9});

        int val = (int) Nd4j.getExecutioner()
                        .exec(new MatchCondition(array, Conditions.isInfinite()), Integer.MAX_VALUE).getDouble(0);

        assertEquals(1, val);
    }
    // Clips values whose absolute value exceeds the threshold: applyWhere with
    // AbsValueGreaterThan plus a clipping Function must match a manual loop.
    @Test
    public void testAbsValueGreaterThan() {
        final double threshold = 2;

        // Only invoked for elements where |value| > threshold, so mapping to
        // +/-threshold based on sign-of-excess is safe here.
        Condition absValueCondition = new AbsValueGreaterThan(threshold);
        Function<Number, Number> clipFn = new Function<Number, Number>() {
            @Override
            public Number apply(Number number) {
                System.out.println("Number: " + number.doubleValue());
                return (number.doubleValue() > threshold ? threshold : -threshold);
            }
        };

        Nd4j.getRandom().setSeed(12345);
        INDArray orig = Nd4j.rand(1, 20).muli(6).subi(3); //Random numbers: -3 to 3

        // Expected result: element-wise clamp to [-threshold, threshold].
        INDArray exp = orig.dup();
        INDArray after = orig.dup();

        for (int i = 0; i < exp.length(); i++) {
            double d = exp.getDouble(i);
            if (d > threshold) {
                exp.putScalar(i, threshold);
            } else if (d < -threshold) {
                exp.putScalar(i, -threshold);
            }
        }

        BooleanIndexing.applyWhere(after, absValueCondition, clipFn);

        System.out.println(orig);
        System.out.println(exp);
        System.out.println(after);

        assertEquals(exp, after);
    }
    // and() along dimension 1 returns one boolean per row; only the zeroed
    // row 2 is entirely equal to 0.
    @Test
    public void testMatchConditionAlongDimension1() throws Exception {
        INDArray array = Nd4j.ones(3, 10);
        array.getRow(2).assign(0.0);

        boolean result[] = BooleanIndexing.and(array, Conditions.equals(0.0), 1);
        boolean comp[] = new boolean[] {false, false, true};

        System.out.println("Result: " + Arrays.toString(result));
        assertArrayEquals(comp, result);
    }

    // or() along dimension 1: row 2 still contains values < 0.9 even after
    // its first element is reset to 1.0.
    @Test
    public void testMatchConditionAlongDimension2() throws Exception {
        INDArray array = Nd4j.ones(3, 10);
        array.getRow(2).assign(0.0).putScalar(0, 1.0);

        System.out.println("Array: " + array);

        boolean result[] = BooleanIndexing.or(array, Conditions.lessThan(0.9), 1);
        boolean comp[] = new boolean[] {false, false, true};

        System.out.println("Result: " + Arrays.toString(result));
        assertArrayEquals(comp, result);
    }

    // and() along dimension 1: no row is entirely negative.
    @Test
    public void testMatchConditionAlongDimension3() throws Exception {
        INDArray array = Nd4j.ones(3, 10);
        array.getRow(2).assign(0.0).putScalar(0, 1.0);

        boolean result[] = BooleanIndexing.and(array, Conditions.lessThan(0.0), 1);
        boolean comp[] = new boolean[] {false, false, false};

        System.out.println("Result: " + Arrays.toString(result));
        assertArrayEquals(comp, result);
    }
    // Four-argument CompareAndSet: where arr equals 0, write arr's value into
    // ones, leaving the rest of ones untouched.
    @Test
    public void testConditionalUpdate() {
        INDArray arr = Nd4j.linspace(-2, 2, 5);
        INDArray ones = Nd4j.ones(5);
        INDArray exp = Nd4j.create(new double[] {1, 1, 0, 1, 1});

        Nd4j.getExecutioner().exec(new CompareAndSet(ones, arr, ones, Conditions.equals(0.0)));

        assertEquals(exp, ones);
    }

    // firstIndex: first position with value >= 3 is index 2.
    @Test
    public void testFirstIndex1() {
        INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0});
        INDArray result = BooleanIndexing.firstIndex(arr, Conditions.greaterThanOrEqual(3));

        assertEquals(2, result.getDouble(0), 0.0);
    }

    // firstIndex: first position with value < 3 is index 0.
    @Test
    public void testFirstIndex2() {
        INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0});
        INDArray result = BooleanIndexing.firstIndex(arr, Conditions.lessThan(3));

        assertEquals(0, result.getDouble(0), 0.0);
    }

    // lastIndex: last position with value >= 3 is index 8 (trailing 0 excluded).
    @Test
    public void testLastIndex1() {
        INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0});
        INDArray result = BooleanIndexing.lastIndex(arr, Conditions.greaterThanOrEqual(3));

        assertEquals(8, result.getDouble(0), 0.0);
    }

    // firstIndex along dimension 1: per-row first match of >= 2.
    @Test
    public void testFirstIndex2D() {
        INDArray arr = Nd4j.create(new double[] {1, 2, 3, 0, 1, 3, 7, 8, 9}).reshape('c', 3, 3);
        INDArray result = BooleanIndexing.firstIndex(arr, Conditions.greaterThanOrEqual(2), 1);
        INDArray exp = Nd4j.create(new double[] {1, 2, 0});

        assertEquals(exp, result);
    }

    // lastIndex along dimension 1: per-row last match of >= 2.
    @Test
    public void testLastIndex2D() {
        INDArray arr = Nd4j.create(new double[] {1, 2, 3, 0, 1, 3, 7, 8, 0}).reshape('c', 3, 3);
        INDArray result = BooleanIndexing.lastIndex(arr, Conditions.greaterThanOrEqual(2), 1);
        INDArray exp = Nd4j.create(new double[] {2, 2, 1});

        assertEquals(exp, result);
    }

    // epsEquals(0) counts values within epsilon of zero: the two 1e-8 entries.
    @Test
    public void testEpsEquals1() throws Exception {
        INDArray array = Nd4j.create(new double[]{-1, -1, -1e-8, 1e-8, 1, 1});
        MatchCondition condition = new MatchCondition(array, Conditions.epsEquals(0.0));
        int numZeroes = Nd4j.getExecutioner().exec(condition, Integer.MAX_VALUE).getInt(0);

        assertEquals(2, numZeroes);
    }
    // Base-class hook selecting the array ordering for this test class
    // ('c' = row-major ordering).
    @Override
    public char ordering() {
        return 'c';
    }
}
| huitseeker/nd4j | nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/indexing/BooleanIndexingTest.java | Java | apache-2.0 | 17,052 |
package com.xiaojinzi.component.error;
/**
 * Thrown when a service is created more than once where only a single
 * creation is allowed. Mirrors the standard {@link RuntimeException}
 * constructor set so callers can attach a message and/or a cause.
 */
public class ServiceRepeatCreateException extends RuntimeException {

    /** Creates the exception without a detail message. */
    public ServiceRepeatCreateException() {
    }

    /** Creates the exception with a detail message. */
    public ServiceRepeatCreateException(String message) {
        super(message);
    }

    /** Creates the exception with a detail message and a cause. */
    public ServiceRepeatCreateException(String message, Throwable cause) {
        super(message, cause);
    }

    /** Creates the exception with a cause only. */
    public ServiceRepeatCreateException(Throwable cause) {
        super(cause);
    }
}
| xiaojinzi123/Component | ComponentImpl/src/main/java/com/xiaojinzi/component/error/ServiceRepeatCreateException.java | Java | apache-2.0 | 453 |
package de.uniulm.omi.cloudiator.sword.multicloud.service;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import de.uniulm.omi.cloudiator.sword.domain.Cloud;
import de.uniulm.omi.cloudiator.sword.domain.Pricing;
import de.uniulm.omi.cloudiator.sword.multicloud.pricing.PricingSupplierFactory;
import de.uniulm.omi.cloudiator.sword.service.PricingService;
import java.util.*;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * {@link PricingService} implementation that aggregates pricing information
 * across the clouds known to the {@link CloudRegistry}.
 *
 * <p>Currently only AWS EC2 clouds are supported: the first registered
 * aws-ec2 cloud's credentials are used to fetch pricing; all other providers
 * are ignored and contribute nothing to the result.
 */
public class MultiCloudPricingService implements PricingService {

    /** Provider name whose pricing retrieval is currently implemented. */
    private static final String AWS_PROVIDER = "aws-ec2";

    private final CloudRegistry cloudRegistry;

    @Inject
    private PricingSupplierFactory pricingSupplierFactory;

    @Inject
    public MultiCloudPricingService(CloudRegistry cloudRegistry) {
        this.cloudRegistry = checkNotNull(cloudRegistry, "cloudRegistry is null");
    }

    /**
     * @return an immutable collection of pricing entries; empty when no
     *         supported (aws-ec2) cloud is registered.
     */
    @Override
    public Iterable<Pricing> listPricing() {
        final ImmutableSet.Builder<Pricing> builder = ImmutableSet.builder();
        cloudRegistry
            .list()
            .stream()
            // Constant-first equals avoids an NPE if a provider name is null.
            .filter(cloud -> AWS_PROVIDER.equals(cloud.api().providerName()))
            .findFirst()
            .ifPresent(cloud -> builder.addAll(
                pricingSupplierFactory.createAWSPricingSupplier(cloud.credential()).get()));
        return builder.build();
    }
}
| cloudiator/sword | multicloud/src/main/java/de/uniulm/omi/cloudiator/sword/multicloud/service/MultiCloudPricingService.java | Java | apache-2.0 | 1,844 |
/*
* Copyright 2011-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.transform;
import java.lang.reflect.Constructor;
import com.amazonaws.AmazonServiceException;
/**
 * Base class for unmarshallers that convert a service error response of type
 * {@code T} into an {@link AmazonServiceException} (or a subclass chosen via
 * the protected constructor).
 */
public abstract class AbstractErrorUnmarshaller<T> implements Unmarshaller<AmazonServiceException, T> {

    /**
     * The type of AmazonServiceException that will be instantiated. Subclasses
     * specialized for a specific type of exception can control this through the
     * protected constructor.
     */
    protected final Class<? extends AmazonServiceException> exceptionClass;

    /**
     * Constructs a new error unmarshaller that will unmarshall error responses
     * into AmazonServiceException objects.
     */
    public AbstractErrorUnmarshaller() {
        this(AmazonServiceException.class);
    }

    /**
     * Constructs a new error unmarshaller that will unmarshall error responses
     * into objects of the specified class, extending AmazonServiceException.
     *
     * @param exceptionClass
     *            The subclass of AmazonServiceException which will be
     *            instantiated and populated by this class.
     */
    public AbstractErrorUnmarshaller(Class<? extends AmazonServiceException> exceptionClass) {
        this.exceptionClass = exceptionClass;
    }

    /**
     * Constructs a new exception object of the type specified in this class's
     * constructor and sets the specified error message.
     *
     * @param message
     *            The error message to set in the new exception object.
     *
     * @return A new exception object of the type specified in this class's
     *         constructor and sets the specified error message.
     *
     * @throws Exception
     *             If there are any problems using reflection to invoke the
     *             exception class's constructor.
     */
    protected AmazonServiceException newException(String message) throws Exception {
        // Relies on every AmazonServiceException subclass exposing a
        // single-String-argument constructor.
        Constructor<? extends AmazonServiceException> constructor = exceptionClass.getConstructor(String.class);
        return constructor.newInstance(message);
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-core/src/main/java/com/amazonaws/transform/AbstractErrorUnmarshaller.java | Java | apache-2.0 | 2,633 |
package com.icfcc.cache.support;
import com.icfcc.cache.Cache;
import java.util.Collection;
/**
* Simple cache manager working against a given collection of caches.
* Useful for testing or simple caching declarations.
*
* @author Costin Leau
* @since 3.1
*/
public class SimpleCacheManager extends AbstractCacheManager {

    // Caches supplied by the user; returned as-is by loadCaches().
    private Collection<? extends Cache> caches;

    /**
     * Specify the collection of Cache instances to use for this CacheManager.
     * Must be set before the manager initializes (loadCaches() returns
     * whatever collection is held at that point).
     */
    public void setCaches(Collection<? extends Cache> caches) {
        this.caches = caches;
    }

    @Override
    protected Collection<? extends Cache> loadCaches() {
        return this.caches;
    }
} | Gitpiece/spring-cache-project | spring-cache/src/main/java/com/icfcc/cache/support/SimpleCacheManager.java | Java | apache-2.0 | 684 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 DNAnexus, Inc.
#
# This file is part of dx-toolkit (DNAnexus platform client libraries).
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
This submodule contains helper functions for parsing and printing the
contents of describe hashes for various DNAnexus entities (projects,
containers, dataobjects, apps, and jobs).
'''
from __future__ import print_function, unicode_literals, division, absolute_import
import datetime, time, json, math, sys, copy
import locale
import subprocess
from collections import defaultdict
import dxpy
from .printing import (RED, GREEN, BLUE, YELLOW, WHITE, BOLD, UNDERLINE, ENDC, DELIMITER, get_delimiter, fill)
from ..compat import basestring, USING_PYTHON2
def JOB_STATES(state):
    """Return a job/analysis state string wrapped in ANSI color codes.

    Terminal states are emphasized (bold red for failures, bold green for
    success); active states are plain green; anything unrecognized (e.g.
    waiting states) renders yellow.
    """
    if state == 'done':
        return BOLD() + GREEN() + state + ENDC()
    if state == 'failed':
        return BOLD() + RED() + state + ENDC()
    if state in ('running', 'in_progress'):
        return GREEN() + state + ENDC()
    if state == 'partially_failed':
        return RED() + state + ENDC()
    return YELLOW() + state + ENDC()
def DATA_STATES(state):
    """Return a data object state string wrapped in ANSI color codes.

    'closed' renders green, transitional states ('open', 'closing') yellow;
    anything else is passed through uncolored.
    """
    if state == 'closed':
        return GREEN() + state + ENDC()
    if state in ('open', 'closing'):
        return YELLOW() + state + ENDC()
    return state
SIZE_LEVEL = ['bytes', 'KB', 'MB', 'GB', 'TB']


def get_size_str(size):
    """
    Formats a byte size as a string.

    The returned string is no more than 9 characters long.
    """
    # None is treated the same as an unknown/zero size.
    if size is None:
        return "0 " + SIZE_LEVEL[0]
    if size == 0:
        level = 0
    else:
        # Pick the unit from the decimal magnitude (groups of 3 digits),
        # capped at TB; the value itself is scaled by powers of 1024.
        digits = math.floor(math.log(size, 10))
        level = int(min(digits // 3, 4))
    scaled = float(size) / 2 ** (level * 10)
    fmt = '%d' if level == 0 else '%.2f'
    return fmt % scaled + ' ' + SIZE_LEVEL[level]
def parse_typespec(thing):
    """Render a type spec (a bare type name, or a dict combining specs under
    '$and'/'$or') as a human-readable boolean expression."""
    if isinstance(thing, basestring):
        return thing
    if '$and' in thing:
        return '(' + ' AND '.join(parse_typespec(t) for t in thing['$and']) + ')'
    if '$or' in thing:
        return '(' + ' OR '.join(parse_typespec(t) for t in thing['$or']) + ')'
    return 'Type spec could not be parsed'
def get_io_desc(parameter, include_class=True, show_opt=True, app_help_version=False):
    """Render a single input/output spec parameter as a one-line description.

    :param parameter: one entry of an app(let)'s inputSpec/outputSpec
    :param include_class: include the parameter's class (e.g. "file") in parens
    :param show_opt: wrap optional parameters (those with a default or
        "optional": true) in square brackets
    :param app_help_version: render in ``dx run -i`` style ("-iNAME=CLASS");
        array parameters are expanded to "-iNAME=CLASS [-iNAME=... [...]]"
    """
    # For interactive help, format array:CLASS inputs as:
    # -iNAME=CLASS [-iNAME=... [...]] # If input is required (needs >=1 inputs)
    # [-iNAME=CLASS [...]] # If input is optional (needs >=0 inputs
    if app_help_version and parameter["class"].startswith("array"):
        scalar_parameter = parameter.copy()
        # Munge the parameter dict (strip off "array:" to turn it into a
        # scalar) and recurse
        scalar_parameter["class"] = scalar_parameter["class"][6:]
        if "default" in parameter or parameter.get("optional"):
            return "[" + get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... [...]]]" % (parameter["name"],)
        else:
            return get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... [...]]" % (parameter["name"],)

    desc = ""
    is_optional = False
    if show_opt:
        # A parameter is optional if it has a default or is flagged optional.
        if "default" in parameter or parameter.get("optional"):
            is_optional = True
            desc += "["
    desc += ('-i' if app_help_version else '') + parameter["name"]
    # Parenthesized detail section: class, type constraint, default value.
    include_parens = include_class or 'type' in parameter or 'default' in parameter
    if include_parens:
        desc += ("=" if app_help_version else " ") + "("
    is_first = True
    if include_class:
        desc += parameter["class"]
        is_first = False
    if "type" in parameter:
        if not is_first:
            desc += ", "
        else:
            is_first = False
        desc += "type " + parse_typespec(parameter["type"])
    if "default" in parameter:
        if not is_first:
            desc += ', '
        desc += 'default=' + json.dumps(parameter['default'])
    if include_parens:
        desc += ")"
    if show_opt and is_optional:
        desc += "]"
    return desc
def get_io_spec(spec, skip_fields=None):
    """Render a whole input/output spec as a multi-line string.

    Parameters are grouped by their optional "group" key (ungrouped first),
    one description per line via get_io_desc(). Parameters named in
    ``skip_fields`` are omitted and a hint line is appended instead.
    Returns 'null' for a missing spec and '-' for an empty one.
    """
    if spec is None:
        return 'null'
    if skip_fields is None:
        skip_fields = []
    filtered_spec = [param for param in spec if param["name"] not in skip_fields]
    groups = defaultdict(list)
    for param in filtered_spec:
        groups[param.get('group')].append(param)

    list_of_params = []
    # Ungrouped parameters come first, then each named group with an
    # indented member list.
    for param in groups.get(None, []):
        list_of_params.append(get_io_desc(param))
    for group in groups:
        if group is None:
            continue
        list_of_params.append("{g}:".format(g=group))
        for param in groups[group]:
            list_of_params.append(" "+get_io_desc(param))

    if len(skip_fields) > 0:
        list_of_params.append("<advanced inputs hidden; use --verbose to see more>")

    if len(list_of_params) == 0:
        return '-'
    if get_delimiter() is not None:
        return ('\n' + get_delimiter()).join(list_of_params)
    else:
        # Wrap each line to the terminal, aligning continuations with the
        # 16-space field column used by print_field().
        return ('\n' + ' '*16).join([fill(param,
                                          subsequent_indent=' '*18,
                                          width_adjustment=-18) for param in list_of_params])
def is_job_ref(thing, reftype=dict):
    '''
    :param thing: something that might be a job-based object reference hash
    :param reftype: type that a job-based object reference would be (default is dict)
    '''
    # A JBOR is either {"job": ..., "field": ...} or the same pair nested
    # one level deeper under "$dnanexus_link".
    if not isinstance(thing, reftype):
        return False
    if len(thing) == 2:
        return (isinstance(thing.get('field'), basestring) and
                isinstance(thing.get('job'), basestring))
    if len(thing) == 1:
        link = thing.get('$dnanexus_link')
        return (isinstance(link, reftype) and
                isinstance(link.get('field'), basestring) and
                isinstance(link.get('job'), basestring))
    return False
def get_job_from_jbor(thing):
    '''
    :returns: Job ID from a JBOR

    Assumes :func:`is_job_ref` evaluates to True
    '''
    # Unwrap the optional "$dnanexus_link" layer, then read the job ID.
    inner = thing.get('$dnanexus_link', thing)
    return inner['job']
def get_field_from_jbor(thing):
    '''
    :returns: Output field name from a JBOR

    Assumes :func:`is_job_ref` evaluates to True
    '''
    # Unwrap the optional "$dnanexus_link" layer, then read the field name.
    inner = thing.get('$dnanexus_link', thing)
    return inner['field']
def get_index_from_jbor(thing):
    '''
    :returns: Array index of the JBOR if applicable; None otherwise

    Assumes :func:`is_job_ref` evaluates to True
    '''
    # Only the "$dnanexus_link" form can carry an "index" key.
    if '$dnanexus_link' not in thing:
        return None
    return thing['$dnanexus_link'].get('index')
def is_metadata_ref(thing, reftype=dict):
    """True when `thing` is a single-key {"$dnanexus_link": {"metadata": ...}}
    reference of the given container type."""
    if not isinstance(thing, reftype) or len(thing) != 1:
        return False
    link = thing.get('$dnanexus_link')
    return isinstance(link, reftype) and isinstance(link.get('metadata'), basestring)
def jbor_to_str(val):
    """Render a JBOR as "job-id:field" or "job-id:field.index"."""
    rendered = get_job_from_jbor(val) + ':' + get_field_from_jbor(val)
    index = get_index_from_jbor(val)
    return rendered if index is None else rendered + '.' + str(index)
def io_val_to_str(val):
    """Render a single input/output value as a human-readable string.

    Dispatch order matters: JBORs are checked before generic dict links,
    and DNAnexus links before plain dicts; lists/dicts recurse; everything
    else is JSON-encoded.
    """
    if is_job_ref(val):
        # Job-based object references
        return jbor_to_str(val)
    elif isinstance(val, dict) and '$dnanexus_link' in val:
        # DNAnexus link
        if isinstance(val['$dnanexus_link'], basestring):
            # simple link
            return val['$dnanexus_link']
        elif 'project' in val['$dnanexus_link'] and 'id' in val['$dnanexus_link']:
            # project-qualified link, rendered as "project:id"
            return val['$dnanexus_link']['project'] + ':' + val['$dnanexus_link']['id']
        else:
            # unrecognized link shape: fall back to raw JSON
            return json.dumps(val)
    elif isinstance(val, list):
        if len(val) == 0:
            return '[]'
        else:
            return '[ ' + ', '.join([io_val_to_str(item) for item in val]) + ' ]'
    elif isinstance(val, dict):
        return '{ ' + ', '.join([key + ': ' + io_val_to_str(value) for key, value in val.items()]) + ' }'
    else:
        return json.dumps(val)
def job_output_to_str(job_output, prefix='\n', title="Output: ", title_len=None):
    """Render a job's output hash as "key = value" lines.

    :param prefix: separator inserted before the title and between lines
    :param title: label preceding the first line ("-" is shown when empty)
    :param title_len: column width used to align continuation lines;
        defaults to len(title)
    """
    if len(job_output) == 0:
        return prefix + title + "-"
    else:
        if title_len is None:
            title_len = len(title)
        return prefix + title + (prefix+' '*title_len).join([fill(key + ' = ' + io_val_to_str(value),
                                                                  subsequent_indent=' '*9,
                                                                  break_long_words=False) for key, value in job_output.items()])
def get_io_field(io_hash, defaults=None, delim='=', highlight_fields=()):
    """Render an input/output hash (plus optional defaults) as aligned
    "key = value" lines for print_nofill_field-style output.

    Defaults are shown in square brackets after the explicit values; keys
    listed in ``highlight_fields`` are colored yellow. Returns '-' when
    there is nothing to show.
    """
    def highlight_value(key, value):
        if key in highlight_fields:
            return YELLOW() + value + ENDC()
        else:
            return value

    if defaults is None:
        defaults = {}
    if io_hash is None:
        return '-'
    if len(io_hash) == 0 and len(defaults) == 0:
        return '-'
    if get_delimiter() is not None:
        # Machine-readable mode: join with the explicit delimiter, no wrapping.
        return ('\n' + get_delimiter()).join([(key + delim + highlight_value(key, io_val_to_str(value))) for key, value in io_hash.items()] +
                                             [('[' + key + delim + io_val_to_str(value) + ']') for key, value in defaults.items()])
    else:
        # Human-readable mode: wrap each entry to the field column; the
        # leading indent of the first line is stripped at the end because
        # the caller prints the label in that column.
        lines = [fill(key + ' ' + delim + ' ' + highlight_value(key, io_val_to_str(value)),
                      initial_indent=' ' * FIELD_NAME_WIDTH,
                      subsequent_indent=' ' * (FIELD_NAME_WIDTH + 1),
                      break_long_words=False)
                 for key, value in io_hash.items()]
        lines.extend([fill('[' + key + ' ' + delim + ' ' + io_val_to_str(value) + ']',
                           initial_indent=' ' * FIELD_NAME_WIDTH,
                           subsequent_indent=' ' * (FIELD_NAME_WIDTH + 1),
                           break_long_words=False)
                      for key, value in defaults.items()])
        return '\n'.join(lines)[FIELD_NAME_WIDTH:]
def get_resolved_jbors(resolved_thing, orig_thing, resolved_jbors):
    """Recursively collect resolved JBOR values.

    Walks ``orig_thing`` (the pre-resolution structure) alongside
    ``resolved_thing`` (the same structure after resolution); for every JBOR
    found in the original, looks up the referenced job's output and records
    it in ``resolved_jbors`` keyed by the JBOR's string form.

    Best-effort: any failure while describing the job or extracting the
    field is silently ignored (no entry is added).
    """
    if resolved_thing == orig_thing:
        return
    if is_job_ref(orig_thing):
        jbor_str = jbor_to_str(orig_thing)
        if jbor_str not in resolved_jbors:
            try:
                from dxpy.api import job_describe
                job_output = job_describe(get_job_from_jbor(orig_thing)).get('output')
                if job_output is not None:
                    field_value = job_output.get(get_field_from_jbor(orig_thing))
                    jbor_index = get_index_from_jbor(orig_thing)
                    if jbor_index is not None:
                        if isinstance(field_value, list):
                            resolved_jbors[jbor_str] = field_value[jbor_index]
                    else:
                        resolved_jbors[jbor_str] = field_value
            # BUG FIX: was a bare "except:", which also swallowed
            # SystemExit/KeyboardInterrupt. Keep the deliberate best-effort
            # behavior but only for ordinary errors.
            except Exception:
                # Just don't report any resolved JBORs if there are
                # any problems
                pass
    elif isinstance(orig_thing, list):
        for i in range(len(orig_thing)):
            get_resolved_jbors(resolved_thing[i], orig_thing[i], resolved_jbors)
    elif isinstance(orig_thing, dict) and '$dnanexus_link' not in orig_thing:
        for key in orig_thing:
            get_resolved_jbors(resolved_thing[key], orig_thing[key], resolved_jbors)
def render_bundleddepends(thing):
    """Render a bundledDepends list as human-readable "name (id)" strings.

    If a bundled file carries an "AssetBundle" property pointing at an asset
    record, the asset's name and record ID are shown instead of the raw file.
    Falls back to the file's own name/ID when the asset record cannot be
    described.
    """
    # BUG FIX: removed unused function-scope import of find_one_data_object.
    from ..exceptions import DXError
    bundles = []
    for item in thing:
        bundle_asset_record = dxpy.DXFile(item["id"]["$dnanexus_link"]).get_properties().get("AssetBundle")
        asset = None
        if bundle_asset_record:
            asset = dxpy.DXRecord(bundle_asset_record)
        if asset:
            try:
                bundles.append(asset.describe().get("name") + " (" + asset.get_id() + ")")
            except DXError:
                # Asset record is inaccessible; fall back to the file itself.
                asset = None

        if not asset:
            bundles.append(item["name"] + " (" + item["id"]["$dnanexus_link"] + ")")

    return bundles
def render_execdepends(thing):
    """Render an execDepends list as "package_manager: name [= version]"
    strings; the package manager defaults to apt."""
    rendered = []
    for item in thing:
        dep = copy.copy(item)
        manager = dep.setdefault('package_manager', 'apt')
        version = ' = ' + dep['version'] if 'version' in dep else ''
        rendered.append('%s: %s%s' % (manager, dep['name'], version))
    return rendered
def render_stage(title, stage, as_stage_of=None):
    """Print the fields describing one workflow stage.

    :param title: label for the first printed line (e.g. "Stage 0")
    :param stage: stage hash (id, name, executable, optional execution info)
    :param as_stage_of: analysis ID this stage is being shown under; when the
        stage's execution belongs to a different parent analysis, it is shown
        bracketed as a cached result
    """
    lines_to_print = []

    if stage['name'] is not None:
        lines_to_print.append((title, "{name} ({id})".format(name=stage['name'], id=stage['id'])))
    else:
        lines_to_print.append((title, stage['id']))

    lines_to_print.append((' Executable', stage['executable'] + \
                           (" (" + RED() + "inaccessible" + ENDC() + ")" \
                            if stage.get('accessible') is False else "")))

    if 'execution' in stage:
        # A stage whose execution's parentAnalysis differs from the analysis
        # being rendered reused a cached result from another analysis.
        is_cached_result = as_stage_of is not None and 'parentAnalysis' in stage['execution'] and \
                           stage['execution']['parentAnalysis'] != as_stage_of
        execution_id_str = stage['execution']['id']
        if is_cached_result:
            execution_id_str = "[" + execution_id_str + "]"

        if 'state' in stage['execution']:
            lines_to_print.append((' Execution', execution_id_str + ' (' + JOB_STATES(stage['execution']['state']) + ')'))
        else:
            lines_to_print.append((' Execution', execution_id_str))

        if is_cached_result:
            lines_to_print.append((' Cached from', stage['execution']['parentAnalysis']))

    for line in lines_to_print:
        print_field(line[0], line[1])
def render_short_timestamp(timestamp):
    # Millisecond epoch timestamp -> local "YYYY-MM-DD HH:MM:SS" string.
    return str(datetime.datetime.fromtimestamp(timestamp//1000))


def render_timestamp(timestamp):
    # Millisecond epoch timestamp -> local ctime()-style string.
    return datetime.datetime.fromtimestamp(timestamp//1000).ctime()
# Column at which field values start in human-readable output.
FIELD_NAME_WIDTH = 22


def print_field(label, value):
    # Prints "label<pad>value", wrapping the value to the terminal width and
    # indenting continuation lines to the value column; in delimited mode the
    # label and value are joined by the delimiter instead.
    if get_delimiter() is not None:
        sys.stdout.write(label + get_delimiter() + value + '\n')
    else:
        sys.stdout.write(
            label + " " * (FIELD_NAME_WIDTH-len(label)) + fill(value,
                                                               subsequent_indent=' '*FIELD_NAME_WIDTH,
                                                               width_adjustment=-FIELD_NAME_WIDTH) +
            '\n')


def print_nofill_field(label, value):
    # Like print_field but without wrapping (value may contain its own layout).
    sys.stdout.write(label + DELIMITER(" " * (FIELD_NAME_WIDTH - len(label))) + value + '\n')


def print_list_field(label, values):
    # Comma-separated list, or '-' when empty.
    print_field(label, ('-' if len(values) == 0 else DELIMITER(', ').join(values)))


def print_json_field(label, json_value):
    # Value rendered as JSON on a single line.
    print_field(label, json.dumps(json_value, ensure_ascii=False))
def print_project_desc(desc, verbose=False):
    """Print a project (or app container) describe hash, one field per line.

    :param desc: describe hash of a project or container
    :param verbose: also print fields normally suppressed (description when a
        summary exists, version, pending transfer when absent)

    Fields not in ``recognized_fields`` are dumped as raw JSON at the end.
    """
    recognized_fields = [
        'id', 'class', 'name', 'summary', 'description', 'protected', 'restricted', 'created', 'modified',
        'dataUsage', 'sponsoredDataUsage', 'tags', 'level', 'folders', 'objects', 'permissions', 'properties',
        'appCaches', 'billTo', 'version', 'createdBy', 'totalSponsoredEgressBytes', 'consumedSponsoredEgressBytes',
        'containsPHI', 'databaseUIViewOnly', 'region', 'storageCost', 'pendingTransfer','atSpendingLimit',
        # Following are app container-specific
        'destroyAt', 'project', 'type', 'app', 'appName'
    ]

    # Basic metadata
    print_field("ID", desc["id"])
    print_field("Class", desc["class"])
    if "name" in desc:
        print_field("Name", desc["name"])
    if 'summary' in desc:
        print_field("Summary", desc["summary"])
    if 'description' in desc and (verbose or 'summary' not in desc):
        print_field("Description", desc['description'])
    if 'version' in desc and verbose:
        print_field("Version", str(desc['version']))

    # Ownership and permissions
    if 'billTo' in desc:
        # Strip the "user-" prefix for readability; org IDs are kept whole.
        print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:])
    if 'pendingTransfer' in desc and (verbose or desc['pendingTransfer'] is not None):
        print_json_field('Pending transfer to', desc['pendingTransfer'])
    if "level" in desc:
        print_field("Access level", desc["level"])
    if 'region' in desc:
        print_field('Region', desc['region'])

    # Project settings
    if 'protected' in desc:
        print_json_field("Protected", desc["protected"])
    if 'restricted' in desc:
        print_json_field("Restricted", desc["restricted"])
    if 'containsPHI' in desc:
        print_json_field('Contains PHI', desc['containsPHI'])
    if 'databaseUIViewOnly' in desc and desc['databaseUIViewOnly']:
        print_json_field('Database UI View Only', desc['databaseUIViewOnly'])

    # Usage
    print_field("Created", render_timestamp(desc['created']))
    if 'createdBy' in desc:
        print_field("Created by", desc['createdBy']['user'][desc['createdBy']['user'].find('-') + 1:])
    print_field("Last modified", render_timestamp(desc['modified']))
    print_field("Data usage", ('%.2f' % desc["dataUsage"]) + ' GB')
    if 'sponsoredDataUsage' in desc:
        print_field("Sponsored data", ('%.2f' % desc["sponsoredDataUsage"]) + ' GB')
    if 'storageCost' in desc:
        print_field("Storage cost", "$%.3f/month" % desc["storageCost"])
    if 'totalSponsoredEgressBytes' in desc or 'consumedSponsoredEgressBytes' in desc:
        total_egress_str = '%.2f GB' % (desc['totalSponsoredEgressBytes'] / 1073741824.,) \
                           if 'totalSponsoredEgressBytes' in desc else '??'
        consumed_egress_str = '%.2f GB' % (desc['consumedSponsoredEgressBytes'] / 1073741824.,) \
                              if 'consumedSponsoredEgressBytes' in desc else '??'
        print_field('Sponsored egress',
                    ('%s used of %s total' % (consumed_egress_str, total_egress_str)))
    if 'atSpendingLimit' in desc:
        print_json_field("At spending limit?", desc['atSpendingLimit'])

    # Misc metadata
    if "objects" in desc:
        print_field("# Files", str(desc["objects"]))
    if "folders" in desc:
        print_list_field("Folders", desc["folders"])
    if "permissions" in desc:
        print_list_field(
            "Permissions",
            [key[5 if key.startswith('user-') else 0:] + ':' + value for key, value in desc["permissions"].items()]
        )
    if 'tags' in desc:
        print_list_field("Tags", desc["tags"])
    if "properties" in desc:
        print_list_field("Properties", [key + '=' + value for key, value in desc["properties"].items()])
    if "appCaches" in desc:
        print_json_field("App caches", desc["appCaches"])

    # Container-specific
    if 'type' in desc:
        print_field("Container type", desc["type"])
    if 'project' in desc:
        print_field("Associated project", desc["project"])
    if 'destroyAt' in desc:
        # BUG FIX: previously rendered desc['modified'] here, so the shown
        # destruction time was actually the last-modified time.
        print_field("To be destroyed", render_timestamp(desc['destroyAt']))
    if 'app' in desc:
        print_field("Associated App ID", desc["app"])
    if 'appName' in desc:
        print_field("Associated App", desc["appName"])

    for field in desc:
        if field not in recognized_fields:
            print_json_field(field, desc[field])
def get_advanced_inputs(desc, verbose):
    """Return the names of "advanced" input fields to hide from output.

    Reads ``desc["details"]["advancedInputs"]`` when present. In verbose
    mode nothing is hidden, so an empty list is returned.
    """
    if not verbose:
        details_section = desc.get("details")
        if isinstance(details_section, dict):
            return details_section.get("advancedInputs", [])
    return []
def print_app_desc(desc, verbose=False):
    """Print a human-readable rendering of an app describe hash.

    :param desc: describe output for an app object
    :type desc: dict
    :param verbose: if False, inputs listed under details.advancedInputs
                    are hidden from the printed input spec
    :type verbose: bool
    """
    # Fields handled explicitly below; anything else is dumped by the
    # generic JSON printer in the final loop.
    recognized_fields = ['id', 'class', 'name', 'version', 'aliases', 'createdBy', 'created', 'modified', 'deleted', 'published', 'title', 'subtitle', 'description', 'categories', 'access', 'dxapi', 'inputSpec', 'outputSpec', 'runSpec', 'resources', 'billTo', 'installed', 'openSource', 'summary', 'applet', 'installs', 'billing', 'details', 'developerNotes',
                         'authorizedUsers']
    print_field("ID", desc["id"])
    print_field("Class", desc["class"])
    if 'billTo' in desc:
        # Strip a leading "user-" prefix for display
        print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:])
    print_field("Name", desc["name"])
    print_field("Version", desc["version"])
    print_list_field("Aliases", desc["aliases"])
    print_field("Created by", desc["createdBy"][5 if desc['createdBy'].startswith('user-') else 0:])
    print_field("Created", render_timestamp(desc['created']))
    print_field("Last modified", render_timestamp(desc['modified']))
    print_field("Created from", desc["applet"])
    print_json_field('Installed', desc['installed'])
    print_json_field('Open source', desc['openSource'])
    print_json_field('Deleted', desc['deleted'])
    if not desc['deleted']:
        advanced_inputs = []
        details = desc["details"]
        if isinstance(details, dict) and "advancedInputs" in details:
            if not verbose:
                advanced_inputs = details["advancedInputs"]
            # NOTE(review): this mutates the caller's desc dict — the
            # advancedInputs key is removed before Details is printed below.
            del details["advancedInputs"]
        if 'published' not in desc or desc["published"] < 0:
            # Negative / missing timestamp means never published
            print_field("Published", "-")
        else:
            print_field("Published", render_timestamp(desc['published']))
        if "title" in desc and desc['title'] is not None:
            print_field("Title", desc["title"])
        if "subtitle" in desc and desc['subtitle'] is not None:
            print_field("Subtitle", desc["subtitle"])
        if 'summary' in desc and desc['summary'] is not None:
            print_field("Summary", desc['summary'])
        print_list_field("Categories", desc["categories"])
        if 'details' in desc:
            print_json_field("Details", desc["details"])
        print_json_field("Access", desc["access"])
        print_field("API version", desc["dxapi"])
        # Run specification (only present together with inputSpec)
        if 'inputSpec' in desc:
            print_nofill_field("Input Spec", get_io_spec(desc["inputSpec"], skip_fields=advanced_inputs))
            print_nofill_field("Output Spec", get_io_spec(desc["outputSpec"]))
            print_field("Interpreter", desc["runSpec"]["interpreter"])
            if "resources" in desc["runSpec"]:
                print_json_field("Resources", desc["runSpec"]["resources"])
            if "bundledDepends" in desc["runSpec"]:
                print_list_field("bundledDepends", render_bundleddepends(desc["runSpec"]["bundledDepends"]))
            if "execDepends" in desc["runSpec"]:
                print_list_field("execDepends", render_execdepends(desc["runSpec"]["execDepends"]))
            if "systemRequirements" in desc['runSpec']:
                print_json_field('Sys Requirements', desc['runSpec']['systemRequirements'])
    if 'resources' in desc:
        print_field("Resources", desc['resources'])
    if 'installs' in desc:
        print_field('# Installs', str(desc['installs']))
    if 'authorizedUsers' in desc:
        print_list_field('AuthorizedUsers', desc["authorizedUsers"])
    # Fallback: dump any field not explicitly rendered above
    for field in desc:
        if field not in recognized_fields:
            print_json_field(field, desc[field])
def print_globalworkflow_desc(desc, verbose=False):
    """Print a human-readable rendering of a global workflow describe hash.

    :param desc: describe output for a globalworkflow object
    :type desc: dict
    :param verbose: if False, advanced inputs are hidden from the printed spec
    :type verbose: bool
    """
    # Fields handled explicitly below; anything else is dumped by the generic
    # JSON printer in the final loop.  'subtitle' and 'details' are included
    # here (they were previously missing) so that, when present, they are not
    # printed a second time by the fallback loop after being pretty-printed.
    recognized_fields = ['id', 'class', 'name', 'version', 'aliases', 'createdBy', 'created',
                         'modified', 'deleted', 'published', 'title', 'subtitle', 'description',
                         'categories', 'dxapi', 'billTo', 'summary', 'billing', 'developerNotes',
                         'authorizedUsers', 'regionalOptions', 'details']
    is_locked_workflow = False
    print_field("ID", desc["id"])
    print_field("Class", desc["class"])
    if 'billTo' in desc:
        # Strip a leading "user-" prefix for display
        print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:])
    print_field("Name", desc["name"])
    print_field("Version", desc["version"])
    print_list_field("Aliases", desc["aliases"])
    print_field("Created by", desc["createdBy"][5 if desc['createdBy'].startswith('user-') else 0:])
    print_field("Created", render_timestamp(desc['created']))
    print_field("Last modified", render_timestamp(desc['modified']))
    # print_json_field('Open source', desc['openSource'])
    print_json_field('Deleted', desc.get('deleted', False))
    if not desc.get('deleted', False):
        if 'published' not in desc or desc["published"] < 0:
            # Negative / missing timestamp means never published
            print_field("Published", "-")
        else:
            print_field("Published", render_timestamp(desc['published']))
        if "title" in desc and desc['title'] is not None:
            print_field("Title", desc["title"])
        if "subtitle" in desc and desc['subtitle'] is not None:
            print_field("Subtitle", desc["subtitle"])
        if 'summary' in desc and desc['summary'] is not None:
            print_field("Summary", desc['summary'])
        print_list_field("Categories", desc["categories"])
        if 'details' in desc:
            print_json_field("Details", desc["details"])
        print_field("API version", desc["dxapi"])
        # Additionally, print inputs, outputs, stages of the underlying workflow
        # from the region of the current workspace
        current_project = dxpy.WORKSPACE_ID
        if current_project:
            region = dxpy.api.project_describe(current_project, input_params={"fields": {"region": True}})["region"]
            if region and region in desc['regionalOptions']:
                workflow_desc = desc['regionalOptions'][region]['workflowDescribe']
                print_field("Workflow region", region)
                if 'id' in workflow_desc:
                    print_field("Workflow ID", workflow_desc['id'])
                # Do not display stage-level IO specs when the workflow has
                # workflow-level (locked) inputs/outputs defined.
                if workflow_desc.get('inputSpec') is not None and workflow_desc.get('inputs') is None:
                    print_nofill_field("Input Spec", get_io_spec(workflow_desc['inputSpec'], skip_fields=get_advanced_inputs(workflow_desc, verbose)))
                if workflow_desc.get('outputSpec') is not None and workflow_desc.get('outputs') is None:
                    print_nofill_field("Output Spec", get_io_spec(workflow_desc['outputSpec']))
                if workflow_desc.get('inputs') is not None:
                    is_locked_workflow = True
                    print_nofill_field("Workflow Inputs", get_io_spec(workflow_desc['inputs']))
                if workflow_desc.get('outputs') is not None:
                    print_nofill_field("Workflow Outputs", get_io_spec(workflow_desc['outputs']))
                if 'stages' in workflow_desc:
                    for i, stage in enumerate(workflow_desc["stages"]):
                        render_stage("Stage " + str(i), stage)
    if 'authorizedUsers' in desc:
        print_list_field('AuthorizedUsers', desc["authorizedUsers"])
    if is_locked_workflow:
        print_locked_workflow_note()
    # Fallback: dump any field not explicitly rendered above
    for field in desc:
        if field not in recognized_fields:
            print_json_field(field, desc[field])
def get_col_str(col_desc):
    """Render a column descriptor as ``name (type)``."""
    col_name = col_desc['name']
    col_type = col_desc['type']
    return col_name + DELIMITER(" (") + col_type + DELIMITER(")")
def print_data_obj_desc(desc, verbose=False):
    """Print a human-readable rendering of a data object describe hash
    (file, record, applet, workflow, ...).

    :param desc: describe output for a data object
    :type desc: dict
    :param verbose: if True, also print details/description and extra
                    createdBy information
    :type verbose: bool
    """
    # Fields handled explicitly below; the final loop pretty-prints a few
    # class-specific fields (media, size, length, columns) and JSON-dumps
    # the rest.
    recognized_fields = ['id', 'class', 'project', 'folder', 'name', 'properties', 'tags', 'types', 'hidden', 'details', 'links', 'created', 'modified', 'state', 'title', 'subtitle', 'description', 'inputSpec', 'outputSpec', 'runSpec', 'summary', 'dxapi', 'access', 'createdBy', 'summary', 'sponsored', 'developerNotes',
                         'stages', 'inputs', 'outputs', 'latestAnalysis', 'editVersion', 'outputFolder', 'initializedFrom', 'temporary']
    is_locked_workflow = False
    print_field("ID", desc["id"])
    print_field("Class", desc["class"])
    if 'project' in desc:
        print_field("Project", desc['project'])
    if 'folder' in desc:
        print_field("Folder", desc["folder"])
    print_field("Name", desc["name"])
    if 'state' in desc:
        print_field("State", DATA_STATES(desc['state']))
    if 'hidden' in desc:
        print_field("Visibility", ("hidden" if desc["hidden"] else "visible"))
    if 'types' in desc:
        print_list_field("Types", desc['types'])
    if 'properties' in desc:
        print_list_field("Properties", ['='.join([k, v]) for k, v in desc['properties'].items()])
    if 'tags' in desc:
        print_list_field("Tags", desc['tags'])
    if verbose and 'details' in desc:
        print_json_field("Details", desc["details"])
    if 'links' in desc:
        print_list_field("Outgoing links", desc['links'])
    print_field("Created", render_timestamp(desc['created']))
    if 'createdBy' in desc:
        # createdBy.user is "user-<name>"; slice off the 5-char prefix
        print_field("Created by", desc['createdBy']['user'][5:])
        if 'job' in desc["createdBy"]:
            print_field(" via the job", desc['createdBy']['job'])
            if verbose and 'executable' in desc['createdBy']:
                print_field(" running", desc['createdBy']['executable'])
    print_field("Last modified", render_timestamp(desc['modified']))
    if "editVersion" in desc:
        print_field("Edit Version", str(desc['editVersion']))
    if "title" in desc:
        print_field("Title", desc["title"])
    if "subtitle" in desc:
        print_field("Subtitle", desc["subtitle"])
    if 'summary' in desc:
        print_field("Summary", desc['summary'])
    if 'description' in desc and verbose:
        print_field("Description", desc["description"])
    if 'outputFolder' in desc:
        print_field("Output Folder", desc["outputFolder"] if desc["outputFolder"] is not None else "-")
    if 'access' in desc:
        print_json_field("Access", desc["access"])
    if 'dxapi' in desc:
        print_field("API version", desc["dxapi"])
    # In case of a workflow: do not display "Input/Output Specs" that show stages IO
    # when the workflow has workflow-level input/output fields defined.
    if desc.get('inputSpec') is not None and desc.get('inputs') is None:
        print_nofill_field("Input Spec", get_io_spec(desc['inputSpec'], skip_fields=get_advanced_inputs(desc, verbose)))
    if desc.get('outputSpec') is not None and desc.get('outputs') is None:
        print_nofill_field("Output Spec", get_io_spec(desc['outputSpec']))
    if desc.get('inputs') is not None:
        # Presence of workflow-level inputs marks a locked workflow
        is_locked_workflow = True
        print_nofill_field("Workflow Inputs", get_io_spec(desc['inputs']))
    if desc.get('outputs') is not None:
        print_nofill_field("Workflow Outputs", get_io_spec(desc['outputs']))
    if 'runSpec' in desc:
        print_field("Interpreter", desc["runSpec"]["interpreter"])
        if "resources" in desc['runSpec']:
            print_json_field("Resources", desc["runSpec"]["resources"])
        if "bundledDepends" in desc["runSpec"]:
            print_list_field("bundledDepends", render_bundleddepends(desc["runSpec"]["bundledDepends"]))
        if "execDepends" in desc["runSpec"]:
            print_list_field("execDepends", render_execdepends(desc["runSpec"]["execDepends"]))
        if "systemRequirements" in desc['runSpec']:
            print_json_field('Sys Requirements', desc['runSpec']['systemRequirements'])
    if 'stages' in desc:
        for i, stage in enumerate(desc["stages"]):
            render_stage("Stage " + str(i), stage)
    if 'initializedFrom' in desc:
        print_field("initializedFrom", desc["initializedFrom"]["id"])
    if 'latestAnalysis' in desc and desc['latestAnalysis'] is not None:
        print_field("Last execution", desc["latestAnalysis"]["id"])
        print_field(" run at", render_timestamp(desc["latestAnalysis"]["created"]))
        print_field(" state", JOB_STATES(desc["latestAnalysis"]["state"]))
    # Class-specific extras and generic fallback
    for field in desc:
        if field in recognized_fields:
            continue
        else:
            if field == "media":
                print_field("Media type", desc['media'])
            elif field == "size":
                if desc["class"] == "file":
                    sponsored_str = ""
                    if 'sponsored' in desc and desc['sponsored']:
                        sponsored_str = DELIMITER(", ") + "sponsored by DNAnexus"
                    print_field("Size", get_size_str(desc['size']) + sponsored_str)
                else:
                    print_field("Size", str(desc['size']))
            elif field == "length":
                print_field("Length", str(desc['length']))
            elif field == "columns":
                if len(desc['columns']) > 0:
                    # First column on the header line, the rest padded to the
                    # same 16-character field width
                    coldescs = "Columns" + DELIMITER(" " *(16-len("Columns"))) + get_col_str(desc["columns"][0])
                    for column in desc["columns"][1:]:
                        coldescs += '\n' + DELIMITER(" "*16) + get_col_str(column)
                    print(coldescs)
                else:
                    print_list_field("Columns", desc['columns'])
            else: # Unhandled prettifying
                print_json_field(field, desc[field])
    if is_locked_workflow:
        print_locked_workflow_note()
def printable_ssh_host_key(ssh_host_key):
    """Return a fingerprint rendering of an SSH host key via ``ssh-keygen -lf``.

    Falls back to the raw (stripped) key text when ssh-keygen cannot be
    invoked for any reason.
    """
    try:
        keygen_proc = subprocess.Popen(["ssh-keygen", "-lf", "/dev/stdin"],
                                       stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        # Python 3 requires bytes on the subprocess stdin pipe
        key_payload = ssh_host_key if USING_PYTHON2 else ssh_host_key.encode()
        (stdout, stderr) = keygen_proc.communicate(key_payload)
    except:
        return ssh_host_key.strip()
    else:
        if not USING_PYTHON2:
            stdout = stdout.decode()
        # Drop ssh-keygen's placeholder comment text
        return stdout.replace(" no comment", "").strip()
def print_execution_desc(desc):
    """Print a human-readable rendering of a job or analysis describe hash.

    :param desc: describe output for an execution (job or analysis)
    :type desc: dict
    """
    # Fields handled explicitly below; anything else is dumped by the
    # generic JSON printer in the final loop.
    recognized_fields = ['id', 'class', 'project', 'workspace', 'region',
                         'app', 'applet', 'executable', 'workflow',
                         'state',
                         'rootExecution', 'parentAnalysis', 'parentJob', 'originJob', 'analysis', 'stage',
                         'function', 'runInput', 'originalInput', 'input', 'output', 'folder', 'launchedBy', 'created',
                         'modified', 'failureReason', 'failureMessage', 'stdout', 'stderr', 'waitingOnChildren',
                         'dependsOn', 'resources', 'projectCache', 'details', 'tags', 'properties',
                         'name', 'instanceType', 'systemRequirements', 'executableName', 'failureFrom', 'billTo',
                         'startedRunning', 'stoppedRunning', 'stateTransitions',
                         'delayWorkspaceDestruction', 'stages', 'totalPrice', 'isFree', 'invoiceMetadata',
                         'priority', 'sshHostKey']
    print_field("ID", desc["id"])
    print_field("Class", desc["class"])
    if "name" in desc and desc['name'] is not None:
        print_field("Job name", desc['name'])
    if "executableName" in desc and desc['executableName'] is not None:
        print_field("Executable name", desc['executableName'])
    print_field("Project context", desc["project"])
    if 'region' in desc:
        print_field("Region", desc["region"])
    if 'billTo' in desc:
        # Strip a leading "user-" prefix for display
        print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:])
    if 'workspace' in desc:
        print_field("Workspace", desc["workspace"])
    if 'projectCache' in desc:
        print_field('Cache workspace', desc['projectCache'])
        print_field('Resources', desc['resources'])
    # Executable provenance: exactly one of these branches applies
    if "app" in desc:
        print_field("App", desc["app"])
    elif desc.get("executable", "").startswith("globalworkflow"):
        print_field("Workflow", desc["executable"])
    elif "applet" in desc:
        print_field("Applet", desc["applet"])
    elif "workflow" in desc:
        print_field("Workflow", desc["workflow"]["id"])
    if "instanceType" in desc and desc['instanceType'] is not None:
        print_field("Instance Type", desc["instanceType"])
    if "priority" in desc:
        print_field("Priority", desc["priority"])
    print_field("State", JOB_STATES(desc["state"]))
    if "rootExecution" in desc:
        print_field("Root execution", desc["rootExecution"])
    if "originJob" in desc:
        if desc["originJob"] is None:
            print_field("Origin job", "-")
        else:
            print_field("Origin job", desc["originJob"])
        if desc["parentJob"] is None:
            print_field("Parent job", "-")
        else:
            print_field("Parent job", desc["parentJob"])
    if "parentAnalysis" in desc:
        if desc["parentAnalysis"] is not None:
            print_field("Parent analysis", desc["parentAnalysis"])
    if "analysis" in desc and desc["analysis"] is not None:
        print_field("Analysis", desc["analysis"])
        print_field("Stage", desc["stage"])
    if "stages" in desc:
        # Pair each workflow stage with the corresponding analysis stage so
        # render_stage can show its execution
        for i, (stage, analysis_stage) in enumerate(zip(desc["workflow"]["stages"], desc["stages"])):
            stage['execution'] = analysis_stage['execution']
            render_stage("Stage " + str(i), stage, as_stage_of=desc["id"])
    if "function" in desc:
        print_field("Function", desc["function"])
    if 'runInput' in desc:
        # Inputs not overridden at run time fall back to original defaults
        default_fields = {k: v for k, v in desc["originalInput"].items() if k not in desc["runInput"]}
        print_nofill_field("Input", get_io_field(desc["runInput"], defaults=default_fields))
    else:
        print_nofill_field("Input", get_io_field(desc["originalInput"]))
    # Show how job-based object references (JBORs) were resolved
    resolved_jbors = {}
    input_with_jbors = desc.get('runInput', desc['originalInput'])
    for k in desc["input"]:
        if k in input_with_jbors and desc["input"][k] != input_with_jbors[k]:
            get_resolved_jbors(desc["input"][k], input_with_jbors[k], resolved_jbors)
    if len(resolved_jbors) != 0:
        print_nofill_field("Resolved JBORs", get_io_field(resolved_jbors, delim=(GREEN() + '=>' + ENDC())))
    print_nofill_field("Output", get_io_field(desc["output"]))
    if 'folder' in desc:
        print_field('Output folder', desc['folder'])
    print_field("Launched by", desc["launchedBy"][5:])
    print_field("Created", render_timestamp(desc['created']))
    # Timestamps are in milliseconds; elapsed times are rendered in seconds
    if 'startedRunning' in desc:
        if 'stoppedRunning' in desc:
            print_field("Started running", render_timestamp(desc['startedRunning']))
        else:
            print_field("Started running", "{t} (running for {rt})".format(t=render_timestamp(desc['startedRunning']),
                rt=datetime.timedelta(seconds=int(time.time())-desc['startedRunning']//1000)))
    if 'stoppedRunning' in desc:
        print_field("Stopped running", "{t} (Runtime: {rt})".format(
            t=render_timestamp(desc['stoppedRunning']),
            rt=datetime.timedelta(seconds=(desc['stoppedRunning']-desc['startedRunning'])//1000)))
    if desc.get('class') == 'analysis' and 'stateTransitions' in desc and desc['stateTransitions']:
        # Display finishing time of the analysis if available
        if desc['stateTransitions'][-1]['newState'] in ['done', 'failed', 'terminated']:
            print_field("Finished", "{t} (Wall-clock time: {wt})".format(
                t=render_timestamp(desc['stateTransitions'][-1]['setAt']),
                wt=datetime.timedelta(seconds=(desc['stateTransitions'][-1]['setAt']-desc['created'])//1000)))
    print_field("Last modified", render_timestamp(desc['modified']))
    if 'waitingOnChildren' in desc:
        print_list_field('Pending subjobs', desc['waitingOnChildren'])
    if 'dependsOn' in desc:
        print_list_field('Depends on', desc['dependsOn'])
    if "failureReason" in desc:
        print_field("Failure reason", desc["failureReason"])
    if "failureMessage" in desc:
        print_field("Failure message", desc["failureMessage"])
    if "failureFrom" in desc and desc['failureFrom'] is not None and desc['failureFrom']['id'] != desc['id']:
        print_field("Failure is from", desc['failureFrom']['id'])
    if 'systemRequirements' in desc:
        print_json_field("Sys Requirements", desc['systemRequirements'])
    if "tags" in desc:
        print_list_field("Tags", desc["tags"])
    if "properties" in desc:
        print_list_field("Properties", [key + '=' + value for key, value in desc["properties"].items()])
    # Re-run provenance: compare this execution with the one it was cloned from,
    # highlighting (in yellow) whatever differs
    if "details" in desc and "clonedFrom" in desc["details"]:
        cloned_hash = desc["details"]["clonedFrom"]
        if "id" in cloned_hash:
            print_field("Re-run of", cloned_hash["id"])
            print_field(" named", cloned_hash["name"])
            same_executable = cloned_hash["executable"] == desc.get("applet", desc.get("app", ""))
            print_field(" using", ("" if same_executable else YELLOW()) + \
                            cloned_hash["executable"] + \
                            (" (same)" if same_executable else ENDC()))
            same_project = cloned_hash["project"] == desc["project"]
            same_folder = cloned_hash["folder"] == desc["folder"] or not same_project
            print_field(" output folder", ("" if same_project else YELLOW()) + \
                            cloned_hash["project"] + \
                            ("" if same_project else ENDC()) + ":" + \
                            ("" if same_folder else YELLOW()) + \
                            cloned_hash["folder"] + \
                            (" (same)" if (same_project and same_folder) else "" if same_folder else ENDC()))
            different_inputs = []
            for item in cloned_hash["runInput"]:
                if cloned_hash["runInput"][item] != desc["runInput"][item]:
                    different_inputs.append(item)
            print_nofill_field(" input", get_io_field(cloned_hash["runInput"], highlight_fields=different_inputs))
            cloned_sys_reqs = cloned_hash.get("systemRequirements")
            if isinstance(cloned_sys_reqs, dict):
                if cloned_sys_reqs == desc.get('systemRequirements'):
                    print_nofill_field(" sys reqs", json.dumps(cloned_sys_reqs) + ' (same)')
                else:
                    print_nofill_field(" sys reqs", YELLOW() + json.dumps(cloned_sys_reqs) + ENDC())
    # NOTE(review): desc['currency'] is read here but is not in
    # recognized_fields, so it is also dumped by the generic loop below —
    # confirm whether that duplication is intended.
    if not desc.get('isFree') and desc.get('totalPrice') is not None:
        print_field('Total Price', format_currency(desc['totalPrice'], meta=desc['currency']))
    if desc.get('invoiceMetadata'):
        print_json_field("Invoice Metadata", desc['invoiceMetadata'])
    if desc.get('sshHostKey'):
        print_nofill_field("SSH Host Key", printable_ssh_host_key(desc['sshHostKey']))
    # Fallback: dump any field not explicitly rendered above
    for field in desc:
        if field not in recognized_fields:
            print_json_field(field, desc[field])
def locale_from_currency_code(dx_code):
    """
    This is a (temporary) hardcoded mapping between currency_list.json in nucleus and standard
    locale string useful for further formatting

    :param dx_code: An id of nucleus/commons/pricing_models/currency_list.json collection
    :return: standardised locale, eg 'en_US'; None when no mapping found
    """
    return {0: 'en_US', 1: 'en_GB'}.get(dx_code)
def format_currency_from_meta(value, meta):
    """
    Formats currency value into properly decorated currency string based on provided currency metadata.
    Please note that this is very basic solution missing some of the localisation features (such as
    negative symbol position and type.
    Better option is to use 'locale' module to reflect currency string decorations more accurately.
    See 'format_currency'

    :param value: amount to format
    :param meta: currency metadata hash with 'symbol' and 'symbolPosition' keys
    :return: decorated currency string
    """
    sign = '-' if value < 0 else ''  # .. TODO: some locales position neg symbol elsewhere, missing meta
    left_symbol = meta['symbol'] if meta['symbolPosition'] == 'left' else ''
    right_symbol = ' %s' % meta["symbol"] if meta['symbolPosition'] == 'right' else ''
    # .. TODO: take the group and decimal separators from meta into account (US & UK are the same, so far we're safe)
    amount = '{:,.2f}'.format(abs(value))
    return sign + left_symbol + amount + right_symbol
def format_currency(value, meta, currency_locale=None):
    """
    Formats currency value into properly decorated currency string based on either locale (preferred)
    or if that is not available then currency metadata. Until locale is provided from the server
    a crude mapping between `currency.dxCode` and a locale string is used instead (eg 0: 'en_US')

    :param value: amount
    :param meta: server metadata (`currency`)
    :return: formatted currency string
    """
    try:
        effective_locale = currency_locale
        if effective_locale is None:
            effective_locale = locale_from_currency_code(meta['dxCode'])
        if effective_locale is None:
            # No locale known for this currency -> format from metadata alone
            return format_currency_from_meta(value, meta)
        locale.setlocale(locale.LC_ALL, effective_locale)
        return locale.currency(value, grouping=True)
    except locale.Error:
        # .. locale is probably not available -> fallback to format manually
        return format_currency_from_meta(value, meta)
def print_user_desc(desc):
    """Print a human-readable rendering of a user describe hash."""
    print_field("ID", desc["id"])
    # Middle name is optional; include a trailing space only when present
    middle_part = (desc["middle"] + " ") if desc["middle"] != '' else ''
    print_field("Name", desc["first"] + " " + middle_part + desc["last"])
    if "email" in desc:
        print_field("Email", desc["email"])
    if "billTo" in desc:
        print_field("Default bill to", desc["billTo"])
    if "appsInstalled" in desc:
        print_list_field("Apps installed", desc["appsInstalled"])
def print_generic_desc(desc):
    """Dump every field of *desc* with the generic JSON field printer."""
    for field_name, field_value in desc.items():
        print_json_field(field_name, field_value)
def print_desc(desc, verbose=False):
    '''
    :param desc: The describe hash of a DNAnexus entity
    :type desc: dict

    Depending on the class of the entity, this method will print a
    formatted and human-readable string containing the data in *desc*.
    '''
    entity_class = desc['class']
    if entity_class in ('project', 'workspace', 'container'):
        print_project_desc(desc, verbose=verbose)
    elif entity_class == 'app':
        print_app_desc(desc, verbose=verbose)
    elif entity_class == 'globalworkflow':
        print_globalworkflow_desc(desc, verbose=verbose)
    elif entity_class in ('job', 'analysis'):
        print_execution_desc(desc)
    elif entity_class == 'user':
        print_user_desc(desc)
    elif entity_class in ('org', 'team'):
        print_generic_desc(desc)
    else:
        # Everything else is treated as a data object (file, record, ...)
        print_data_obj_desc(desc, verbose=verbose)
def get_ls_desc(desc, print_id=False):
    """Return a short ``ls``-style line for a data object.

    Applets and workflows are highlighted in bold green; with
    ``print_id=True`` the object ID is appended after the name.
    """
    id_suffix = ' : ' + desc['id'] if print_id is True else ''
    display_name = desc['name']
    if desc['class'] in ('applet', 'workflow'):
        display_name = BOLD() + GREEN() + display_name + ENDC()
    return display_name + id_suffix
def print_ls_desc(desc, **kwargs):
    """Print the short ``ls``-style line for *desc* (kwargs forwarded)."""
    line = get_ls_desc(desc, **kwargs)
    print(line)
def get_ls_l_header():
return (BOLD() +
'State' + DELIMITER(' ') +
'Last modified' + DELIMITER(' ') +
'Size' + DELIMITER(' ') +
'Name' + DELIMITER(' (') +
'ID' + DELIMITER(')') +
ENDC())
def print_ls_l_header():
    """Print the column-header line for long-format listings."""
    header = get_ls_l_header()
    print(header)
def get_ls_l_desc_fields():
    """Return the describe-field selector required by get_ls_l_desc."""
    wanted_fields = ('class', 'folder', 'id', 'length', 'modified',
                     'name', 'project', 'size', 'state')
    return {field: True for field in wanted_fields}
def get_ls_l_desc(desc, include_folder=False, include_project=False):
    """
    desc must have at least all the fields given by get_ls_l_desc_fields.
    """
    # If you make this method consume an additional field, you must add it to
    # get_ls_l_desc_fields above.
    # State column: closed objects in green, everything else in yellow
    if 'state' in desc:
        state_len = len(desc['state'])
        if desc['state'] != 'closed':
            state_str = YELLOW() + desc['state'] + ENDC()
        else:
            state_str = GREEN() + desc['state'] + ENDC()
    else:
        state_str = ''
        state_len = 0
    name_str = ''
    if include_folder:
        # Avoid a double slash when the folder is the root "/"
        name_str += desc['folder'] + ('/' if desc['folder'] != '/' else '')
    name_str += desc['name']
    if desc['class'] in ['applet', 'workflow']:
        name_str = BOLD() + GREEN() + name_str + ENDC()
    # Size column: human-readable bytes for files, row count for tables
    size_str = ''
    if 'size' in desc and desc['class'] == 'file':
        size_str = get_size_str(desc['size'])
    elif 'length' in desc:
        size_str = str(desc['length']) + ' rows'
    # Right-pad the size column to 9 characters
    size_padding = ' ' * max(0, 9 - len(size_str))
    return (state_str +
            DELIMITER(' '*(8 - state_len)) + render_short_timestamp(desc['modified']) +
            DELIMITER('  ') + size_str +
            DELIMITER(size_padding + ' ') + name_str +
            DELIMITER(' (') + ((desc['project'] + DELIMITER(':')) if include_project else '') + desc['id'] +
            DELIMITER(')'))
def print_ls_l_desc(desc, **kwargs):
    """Print the long-format listing line for *desc* (kwargs forwarded)."""
    line = get_ls_l_desc(desc, **kwargs)
    print(line)
def get_find_executions_string(desc, has_children, single_result=False, show_outputs=True,
                               is_cached_result=False):
    '''
    :param desc: hash of execution's describe output
    :param has_children: whether the execution has children to be printed
    :param single_result: whether the execution is displayed as a single result or as part of an execution tree
    :param is_cached_result: whether the execution should be formatted as a cached result
    '''
    # Top-level entries (analyses, origin jobs, single results) get a "* "
    # bullet and a tree pipe; subjobs are indented under their parent
    is_not_subjob = desc['parentJob'] is None or desc['class'] == 'analysis' or single_result
    result = ("* " if is_not_subjob and get_delimiter() is None else "")
    canonical_execution_name = desc['executableName']
    if desc['class'] == 'job':
        canonical_execution_name += ":" + desc['function']
    execution_name = desc.get('name', '<no name>')
    # Format the name of the execution
    if is_cached_result:
        result += BOLD() + "[" + ENDC()
    result += BOLD() + BLUE()
    if desc['class'] == 'analysis':
        result += UNDERLINE()
    result += execution_name + ENDC()
    # Show the canonical name in parens when the display name differs
    # (":main" entry points are considered equivalent)
    if execution_name != canonical_execution_name and execution_name+":main" != canonical_execution_name:
        result += ' (' + canonical_execution_name + ')'
    if is_cached_result:
        result += BOLD() + "]" + ENDC()
    # Format state
    result += DELIMITER(' (') + JOB_STATES(desc['state']) + DELIMITER(') ') + desc['id']
    # Add unicode pipe to child if necessary
    result += DELIMITER('\n' + (u'│ ' if is_not_subjob and has_children else (" " if is_not_subjob else "")))
    result += desc['launchedBy'][5:] + DELIMITER(' ')
    result += render_short_timestamp(desc['created'])
    cached_and_runtime_strs = []
    if is_cached_result:
        cached_and_runtime_strs.append(YELLOW() + "cached" + ENDC())
    if desc['class'] == 'job':
        # Only print runtime if it ever started running
        if desc.get('startedRunning'):
            if desc['state'] in ['done', 'failed', 'terminated', 'waiting_on_output']:
                runtime = datetime.timedelta(seconds=int(desc['stoppedRunning']-desc['startedRunning'])//1000)
                cached_and_runtime_strs.append("runtime " + str(runtime))
            elif desc['state'] == 'running':
                seconds_running = max(int(time.time()-desc['startedRunning']//1000), 0)
                msg = "running for {rt}".format(rt=datetime.timedelta(seconds=seconds_running))
                cached_and_runtime_strs.append(msg)
    if cached_and_runtime_strs:
        result += " (" + ", ".join(cached_and_runtime_strs) + ")"
    if show_outputs:
        prefix = DELIMITER('\n' + (u'│ ' if is_not_subjob and has_children else (" " if is_not_subjob else "")))
        if desc.get("output") != None:
            result += job_output_to_str(desc['output'], prefix=prefix)
        elif desc['state'] == 'failed' and 'failureReason' in desc:
            # Failed with no output: show the failure reason and wrapped message
            result += prefix + BOLD() + desc['failureReason'] + ENDC() + ": " + fill(desc.get('failureMessage', ''),
                                                                                    subsequent_indent=prefix.lstrip('\n'))
    return result
def print_locked_workflow_note():
    """Explain that a locked workflow's stage inputs cannot be overridden at run time."""
    note_text = ('This workflow has an explicit input specification (i.e. it is locked), '
                 'and as such stage inputs cannot be modified at run-time.')
    print_field('Note', note_text)
| dnanexus/dx-toolkit | src/python/dxpy/utils/describe.py | Python | apache-2.0 | 52,416 |
package fr.fablabmars.model;
import java.util.ArrayList;
import fr.fablabmars.observer.Observable;
import fr.fablabmars.observer.Observer;
/**
 * Observable that keeps track of which menu card is currently displayed.
 *
 * @author Guillaume Perouffe
 * @see Observable
 */
public class CardMenu implements Observable {

    /** Observers subscribed to this observable. */
    private ArrayList<Observer> observers = new ArrayList<Observer>();

    /** Index of the menu card currently shown. */
    private int panel;

    /**
     * Creates the observable with the default card (index 0) selected.
     *
     * @see CardMenu#panel
     */
    public CardMenu() {
        this.panel = 0;
    }

    /**
     * Switches to another card and notifies every registered observer.
     *
     * @param panel
     *            index of the new menu card.
     *
     * @see CardMenu#panel
     * @see Observable#notifyObservers()
     */
    public void setPanel(int panel) {
        this.panel = panel;
        notifyObservers();
    }

    @Override
    public void addObserver(Observer obs) {
        observers.add(obs);
    }

    @Override
    public void removeObserver(Observer obs) {
        observers.remove(obs);
    }

    @Override
    public void notifyObservers() {
        for (Observer observer : observers) {
            observer.update(this);
        }
    }

    /**
     * Returns the currently displayed card.
     *
     * @return index of the current menu card
     *
     * @see CardMenu#panel
     */
    @Override
    public int getState() {
        return panel;
    }
}
| gperouffe/FabLabUsers | src/fr/fablabmars/model/CardMenu.java | Java | apache-2.0 | 1,473 |
package ru.job4j.collections.tree;
/**
 * Binary search tree.
 *
 * @author Hincu Andrei (andreih1981@gmail.com) by 20.10.17;
 * @version $Id$
 * @since 0.1
 * @param <E> element type.
 */
public class BinaryTree<E extends Comparable<E>> extends Tree<E> {
    /**
     * Root of the tree ({@code null} while the tree is empty).
     */
    private Node<E> node;
    /**
     * Number of elements stored in the tree.
     */
    private int size;
    /**
     * Tree node.
     * Declared {@code static} so it does not capture a hidden reference to the
     * enclosing tree; its type parameter is independent of the outer {@code E}.
     * @param <E> value type.
     */
    private static class Node<E> {
        /**
         * Stored value.
         */
        private E value;
        /**
         * Left child (holds values that compare smaller than {@link #value}).
         */
        private Node<E> left;
        /**
         * Right child (holds values that compare greater than {@link #value}).
         */
        private Node<E> right;
        /**
         * Constructor.
         * @param value node value.
         */
        private Node(E value) {
            this.value = value;
        }
    }
    /**
     * Inserts a new element into the tree.
     * Duplicates (elements comparing equal to an existing one) are ignored.
     * @param e value to insert.
     */
    public void add(E e) {
        if (node == null) {
            node = new Node<>(e);
            size++;
        } else {
            addNewElement(e, node);
        }
    }
    /**
     * Recursively descends the tree to find the insertion point.
     * @param e value to insert.
     * @param n current node of the tree.
     */
    private void addNewElement(E e, Node<E> n) {
        if (e.compareTo(n.value) < 0) {
            if (n.left == null) {
                n.left = new Node<>(e);
                size++;
            } else {
                addNewElement(e, n.left);
            }
        } else if (e.compareTo(n.value) > 0) {
            if (n.right == null) {
                n.right = new Node<>(e);
                size++;
            } else {
                addNewElement(e, n.right);
            }
        }
    }
    /**
     * Getter.
     * @return number of elements stored in the tree.
     */
    public int getSize() {
        return size;
    }
}
| andreiHi/hincuA | chapter_005/src/main/java/ru/job4j/collections/tree/BinaryTree.java | Java | apache-2.0 | 2,101 |
<?php include_once('procedures.php'); ?>
<?php include("top.php"); ?>
<script src="./js/visualRound.js"></script>
<style>
#bottomBar {
position: fixed;
left: 0px;
bottom: 0px;
width: 100%;
height: 40px;
background-color: #EEE;
border-top-width: 1px;
border-top-color: #999;
border-top-style: solid;
overflow: hidden;
z-index: 100;
}
#bottomBarText {
position: relative;
left: 20px;
top: 5px;
font-size: 20px;
color: black;
opacity: 1.0;
}
.bottom_gray_text {
position: relative;
top: 9px;
font-size: 15px;
color: gray;
opacity: 1.0;
}
.footer {
margin-bottom: 50px;
}
.button {
background-color: #9999FF;
cursor: pointer;
}
.left {
float: left;
}
.right {
float:right;
}
.pointer {
cursor: pointer;
}
.timer {
top: 12px;
}
#bottomBarTextOld {
left: 40px;
}
#bottomCounter {
right: 10px;
}
#timersEditor {
right: 5px;
}
</style>
<div class = "container content">
</div>
<div id = "dataContainer" class = "container content">
<?php
$roundId = intval($_GET['round']);
$roundData = getRoundData($roundId);
?>
<div class = "centeredText">
<h2>Раунд "<?php echo $roundData['name']; ?>" игры "<?php echo $roundData['gameName']; ?>" от <?php echo $roundData['date']; ?></h2>
</div>
<h3>Результаты раунда</h3>
<table class = "table table-bordered">
<tr align = center>
<td>Пользователь</td>
<td>Счет</td>
</tr>
<?php
$result = getUsersRoundScoresNoSort($roundId);
$i = -1;
foreach ($result as $row)
{
$i++;
?>
<tr class="tableRow" title="<?php echo $i; ?>" id="r<?php echo $row['id']; ?>" align = "center">
<td>
<?php
echo $row['name'];
?></td>
<td id="c<?php echo $row['id']; ?>"><?php echo 0; ?></td>
</tr>
<?php
}
?>
</table>
<br>
</div>
<script>
    // $roundId was produced by intval() above, so this interpolation yields a
    // plain integer and cannot inject script into the page.
    round = <?php echo $roundId; ?>;
    startVisualization();
</script>
<div id="bottomBar">
<button id="pause" class="btn btn-info left" onClick="pause();">Начать</button>
<button class="btn btn-info left" onClick="step();">Далее</button>
<p id="bottomBarText" class="left"></p>
<p id="bottomBarTextOld" class="bottom_gray_text left"></p>
<p id="timersEditor" class="glyphicon glyphicon-time pointer timer right" onClick="showTimersDialog();"></p>
<p id="bottomCounter" class="bottom_gray_text right"></p>
</div>
<?php include("bottom.php"); ?> | Dovgalyuk/AIBattle | site/visualRound.php | PHP | apache-2.0 | 2,942 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.core.startree.v2.store;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.List;
import java.util.Map;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.pinot.common.segment.ReadMode;
import org.apache.pinot.core.segment.index.column.ColumnIndexContainer;
import org.apache.pinot.core.segment.index.metadata.SegmentMetadataImpl;
import org.apache.pinot.core.segment.memory.PinotDataBuffer;
import org.apache.pinot.core.startree.v2.StarTreeV2;
import org.apache.pinot.core.startree.v2.StarTreeV2Constants;
import static org.apache.pinot.core.startree.v2.store.StarTreeIndexMapUtils.IndexKey;
import static org.apache.pinot.core.startree.v2.store.StarTreeIndexMapUtils.IndexValue;
/**
 * The {@code StarTreeIndexContainer} class contains the indexes for multiple star-trees.
 *
 * <p>It owns a single {@link PinotDataBuffer} over the combined star-tree index file and exposes
 * one {@link StarTreeV2} view per star-tree described in the segment metadata. Closing this
 * container releases the underlying buffer (and its memory mapping, if any).
 */
public class StarTreeIndexContainer implements Closeable {
  private final PinotDataBuffer _dataBuffer;
  private final List<StarTreeV2> _starTrees;

  /**
   * Loads the star-tree index file and the accompanying index map from the segment directory.
   *
   * @param segmentDirectory directory containing the star-tree index files
   * @param segmentMetadata metadata describing the star-trees of this segment
   * @param indexContainerMap per-column index containers of the segment
   * @param readMode {@code heap} loads the index on heap; otherwise it is memory-mapped
   * @throws ConfigurationException if the index map file cannot be parsed
   * @throws IOException if the index files cannot be read
   */
  public StarTreeIndexContainer(File segmentDirectory, SegmentMetadataImpl segmentMetadata,
      Map<String, ColumnIndexContainer> indexContainerMap, ReadMode readMode)
      throws ConfigurationException, IOException {
    File indexFile = new File(segmentDirectory, StarTreeV2Constants.INDEX_FILE_NAME);
    if (readMode == ReadMode.heap) {
      _dataBuffer = PinotDataBuffer
          .loadFile(indexFile, 0, indexFile.length(), ByteOrder.LITTLE_ENDIAN, "Star-tree V2 data buffer");
    } else {
      _dataBuffer = PinotDataBuffer
          .mapFile(indexFile, true, 0, indexFile.length(), ByteOrder.LITTLE_ENDIAN, "Star-tree V2 data buffer");
    }
    try {
      File indexMapFile = new File(segmentDirectory, StarTreeV2Constants.INDEX_MAP_FILE_NAME);
      List<Map<IndexKey, IndexValue>> indexMapList =
          StarTreeIndexMapUtils.loadFromFile(indexMapFile, segmentMetadata.getStarTreeV2MetadataList().size());
      _starTrees = StarTreeLoaderUtils.loadStarTreeV2(_dataBuffer, indexMapList, segmentMetadata, indexContainerMap);
    } catch (ConfigurationException | IOException | RuntimeException e) {
      // BUG FIX: don't leak the (possibly memory-mapped) buffer when loading
      // the index map or the star-trees fails after the buffer was acquired.
      _dataBuffer.close();
      throw e;
    }
  }

  public List<StarTreeV2> getStarTrees() {
    return _starTrees;
  }

  /**
   * Releases the underlying data buffer.
   */
  @Override
  public void close()
      throws IOException {
    _dataBuffer.close();
  }
}
| linkedin/pinot | pinot-core/src/main/java/org/apache/pinot/core/startree/v2/store/StarTreeIndexContainer.java | Java | apache-2.0 | 3,059 |
/*
Copyright 2010-2011 Zhengmao HU (James)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.sf.jabb.util.text;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Given a text string to be tested, and list of matching strings, find out which matching string the
* text string starts with.<br>
* 给定一个待检查的文本字符串,以及一批开头匹配字符串,看看待检查的文本字符串以哪个匹配字符串开头。
* <p>
* The matching is case sensitive.
* If one matching string starts with another,
* and the text string starts with them, then the longer one will be considered to be matched.
* <p>
* 匹配时对大小写敏感。如果匹配字符串之间互相饱含,则匹配其中最长的。
*
* <p>
* If the matching need to be checked upon number segments (start number ~ end number) represented
* as strings, {@link #expandNumberMatchingRange(Map, String, String, Object)} method can be used to
* expand number segments to heading number strings.
* <p>
* 如果需要对代表数字号码(开始号码~结束号码)的字符串进行匹配,可使用
* {@link #expandNumberMatchingRange(Map, String, String, Object)} 方法
* 将号码段字符串(一个开始号码,一个结束号码)转换为号码头字符串。
*
* @author Zhengmao HU (James)
*
*/
public class StringStartWithMatcher extends StartWithMatcher {
private static final long serialVersionUID = -2501231925022032723L;
/**
* Create a new instance according to heading strings and their corresponding attachment objects.<br>
* 根据开头匹配字符串、开头匹配字符串所对应的附件对象,创建一个新的实例。
* <p>
* When initializing internal data structure, choose to consume more memory for better matching speed.
* <p>
* 在创建内部数据结构的时候,选择占用更多内存,而换取速度上的提升。
*
* @param headingDefinitions Key is the heading string, Value is its associated attachment object.
* When the heading string is matched, the attachment object will be returned
* as identifier.<p>
* Key是匹配字符串,Value是附件对象。
* 当进行匹配检查的时候,返回附件对象来标识哪一个匹配字符串被匹配上了。
*/
public StringStartWithMatcher(Map<String, ? extends Object> headingDefinitions) {
super(normalizeMatchingDefinitions(headingDefinitions));
}
/**
* Create a new instance according to heading strings and their corresponding attachment objects.<br>
* 根据开头匹配字符串、开头匹配字符串所对应的附件对象,创建一个新的实例。
*
* @param headingDefinitions Key是匹配字符串,Value是附件对象。
* 当进行匹配检查的时候,返回附件对象来标识哪一个匹配字符串被匹配上了。
* <p>
* Key is the heading string, Value is its associated attachment object.
* When the heading string is matched, the attachment object will be returned
* as identifier.
* @param moreSpaceForSpeed 是否占用更多内存,而换取速度上的提升。
* <p>Whether or not to consume
* more memory for better matching speed.
*/
public StringStartWithMatcher(Map<String, ? extends Object> headingDefinitions, boolean moreSpaceForSpeed) {
super(normalizeMatchingDefinitions(headingDefinitions), moreSpaceForSpeed);
}
/**
* Create a copy, the copy will have exactly the same matching
* definitions as the original copy.<br>
* 创建一个副本,这个副本与原先的对象具有完全相同匹配方式。
*
* @param toBeCopied 原本。<br>The original copy.
*/
public StringStartWithMatcher(StringStartWithMatcher toBeCopied) {
super(toBeCopied);
}
/**
* Normalize matching definitions according to requirements of {@link StartWithMatcher}.<br>
* 根据{@link StartWithMatcher}的需要来规范化匹配条件定义。
*
* @param headingDefinitions Key是匹配字符串,Value是附件对象。
* 当进行匹配检查的时候,返回附件对象来标识哪一个匹配字符串被匹配上了。
* <p>
* Key is the heading string, Value is its associated attachment object.
* When the heading string is matched, the attachment object will be returned
* as identifier.
* @return {@link StartWithMatcher}所需的匹配条件定义。
* <br>Matching definitions for usage of {@link StartWithMatcher}.
*/
static protected List<MatchingDefinition> normalizeMatchingDefinitions(Map<String, ? extends Object> headingDefinitions){
// exactMatchExample自动设置为与regularExpression相同
List<MatchingDefinition> l = new ArrayList<MatchingDefinition>(headingDefinitions.size());
for (Map.Entry<String, ? extends Object> e: headingDefinitions.entrySet()){
MatchingDefinition c = new MatchingDefinition();
c.setRegularExpression(escapeForRegExp(e.getKey()));
c.setAttachment(e.getValue());
c.setExactMatchExample(e.getKey());
l.add(c);
}
return l;
}
/**
* Expand number segments (such as 138000~138999 or 138000~138029) into number headings
* (such as 138 or {13800,13801,13802}).<br>
* 把号码段(类似:138000~138999或138000~138029)展开成号码头(类似:138或13800,13801,13802)。
*
* @param headingDefinitions 可用来对{@link StringStartWithMatcher}进行初始化的展开后的匹配条件
* 会被放到这个Map里。
* <br> Equivalent heading definitions that could be used to
* create instance of {@link StringStartWithMatcher} will be put into this Map.
* @param start 起始号码 <br> first/starting number
* @param end 结束号码 <br> last/ending number
* @param attachment 匹配附件<br>attachment to identify that the segment matches a string
*/
public static <T> void expandNumberMatchingRange(Map<String, T> headingDefinitions, String start, String end, T attachment){
int firstDiff; //第一个不相同字符的位置
int lastDiff; //末尾0:9对应段开始的位置
// 先强行保证起始号码与结束号码长度相同
if (start.length() > end.length()){
StringBuilder sb = new StringBuilder(end);
while (start.length() > sb.length()){
sb.append("9");
}
end = sb.toString();
} else if (end.length() > start.length()){
StringBuilder sb = new StringBuilder(start);
while (end.length() > sb.length()){
sb.append("0");
}
start = sb.toString();
}
// 然后寻找第一个不相同字符的位置
for (firstDiff = 0; firstDiff < start.length(); firstDiff++){
if (start.charAt(firstDiff) != end.charAt(firstDiff)){
break;
}
}
// 再寻找末尾0:9对应段开始的位置
for (lastDiff = start.length() - 1; lastDiff >= 0; lastDiff--){
if (start.charAt(lastDiff) != '0' || end.charAt(lastDiff) != '9'){
break;
}
}
lastDiff++;
if (firstDiff == lastDiff){ // 则表示可合并为一条
headingDefinitions.put(start.substring(0, firstDiff), attachment);
} else { // 则表示要扩展为多条
int j = Integer.parseInt(start.substring(firstDiff, lastDiff));
int k = Integer.parseInt(end.substring(firstDiff, lastDiff));
String head = start.substring(0, firstDiff);
String f = "%" + (lastDiff-firstDiff) + "d";
StringBuilder sb = new StringBuilder();
for (int i = j; i <= k; i++){
sb.setLength(0);
sb.append(head);
sb.append(String.format(f, i));
headingDefinitions.put(sb.toString(), attachment);
}
}
}
}
| james-hu/jabb-core | src/main/java/net/sf/jabb/util/text/StringStartWithMatcher.java | Java | apache-2.0 | 8,259 |
# -*- coding: utf-8 -*-
"""
Linguistic and other taggers.
Tagging each token in a sentence with supplementary information,
such as its part-of-speech (POS) tag, and named entity (NE) tag.
"""
# Public API of the pythainlp.tag package.
__all__ = [
    "PerceptronTagger",
    "pos_tag",
    "pos_tag_sents",
    "tag_provinces",
    "chunk_parse",
    "NER",
]
# Submodule implementations are re-exported here so callers can simply do
# ``from pythainlp.tag import pos_tag`` etc.
from pythainlp.tag.locations import tag_provinces
from pythainlp.tag.pos_tag import pos_tag, pos_tag_sents
from pythainlp.tag._tag_perceptron import PerceptronTagger
from pythainlp.tag.chunk import chunk_parse
from pythainlp.tag.named_entity import NER
<?php
namespace Topxia\WebBundle\Listener;
use Symfony\Component\HttpKernel\KernelEvents;
use Symfony\Component\HttpKernel\Event\GetResponseEvent;
use Symfony\Component\EventDispatcher\EventSubscriberInterface;
class LocaleListener implements EventSubscriberInterface
{
    private $defaultLocale;

    public function __construct($defaultLocale)
    {
        // Normalize the legacy 'en' value kept for backward compatibility
        // with older configurations.
        $this->defaultLocale = ($defaultLocale == 'en') ? 'en_US' : $defaultLocale;
    }

    public function onKernelRequest(GetResponseEvent $event)
    {
        $request = $event->getRequest();

        // A locale can only be restored for requests that carry a session.
        if (!$request->hasPreviousSession()) {
            return;
        }

        // Prefer the session locale; fall back to the locale recorded at the
        // last logout, and finally to the configured default.
        $fallback = $request->cookies->get('_last_logout_locale') ?: $this->defaultLocale;
        $request->setLocale($request->getSession()->get('_locale', $fallback));
    }

    public static function getSubscribedEvents()
    {
        return array(
            // must be registered after the default Locale listener
            KernelEvents::REQUEST => array(array('onKernelRequest', 15))
        );
    }
}
| 18826252059/im | src/Topxia/WebBundle/Listener/LocaleListener.php | PHP | apache-2.0 | 1,115 |
<?php
namespace BankId\Merchant\Library\Schemas\saml\metadata;
/**
 * Class representing Extensions
 *
 * Concrete, intentionally empty subclass of ExtensionsType: it exists so the
 * SAML metadata "Extensions" element has its own named class.
 */
class Extensions extends ExtensionsType
{
}
| notarynodes/idintt | hackathon-module/idintt-php-module/library/Schemas/saml/metadata/Extensions.php | PHP | apache-2.0 | 166 |
package com.dexvis.simple.transform;
import javafx.scene.web.HTMLEditor;
import org.simpleframework.xml.transform.Transform;
/**
 * Simple XML (de)serialization transform that maps a JavaFX {@link HTMLEditor}
 * to and from its HTML text content.
 */
public class HTMLEditorTransform implements Transform<HTMLEditor>
{
  /**
   * Deserializes: creates an editor whose content is the given HTML string.
   */
  @Override
  public HTMLEditor read(String value) throws Exception
  {
    HTMLEditor editor = new HTMLEditor();
    editor.setHtmlText(value);
    return editor;
  }

  /**
   * Serializes: returns the editor's current HTML content.
   */
  @Override
  public String write(HTMLEditor value) throws Exception
  {
    return value.getHtmlText();
  }
}
| PatMartin/Dex | src/com/dexvis/simple/transform/HTMLEditorTransform.java | Java | apache-2.0 | 485 |
package ecologylab.bigsemantics.service.crawler;
import java.io.IOException;
/**
 * A general framework for crawling resources.
 *
 * <p>Typical usage: {@link #queue(String)} seed URIs, then loop while
 * {@link #hasNext()} calling {@link #next()}; newly discovered resources can be
 * fed back through {@link #expand(Object)}. The count methods report progress.
 *
 * @param <T> the type of the crawled resource
 *
 * @author quyin
 */
public interface ResourceCrawler<T>
{
  /**
   * Queue a resource with the given URI so it will be crawled later.
   *
   * @param uri the URI identifying the resource to crawl
   */
  void queue(String uri);

  /**
   * If the crawler has more resources to crawl.
   *
   * @return true if there are still resources to crawl.
   */
  boolean hasNext();

  /**
   * Retrieve the next resource.
   *
   * @return The next crawled resource.
   * @throws IOException
   *           If the resource cannot be accessed.
   */
  T next() throws IOException;

  /**
   * Expand a given resource, i.e. derive further resources from it.
   *
   * @param resource the resource to expand
   */
  void expand(T resource);

  /**
   * @return The number of resources queued.
   */
  int countQueued();

  /**
   * @return The number of resources that are to be crawled.
   */
  int countWaiting();

  /**
   * @return The number of resources that have been accessed.
   */
  int countAccessed();

  /**
   * @return The number of resources that have been accessed successfully.
   */
  int countSuccess();

  /**
   * @return The number of resources that have been accessed unsuccessfully.
   */
  int countFailure();
}
package net.sf.anpr.rcp.widget;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.SwingConstants;
/**
 * A {@link JLabel}-based canvas that shows an image (via the label icon) and
 * draws a red rectangle outlining a configurable focus area on top of it.
 */
public class JCanvasPanel extends JLabel {
    private static final long serialVersionUID = 1L;
    // Area highlighted on top of the content; drawn as a red outline.
    private Rectangle focusArea = new Rectangle();
    // Image currently displayed (also installed as the label's icon).
    private BufferedImage image;

    public JCanvasPanel() {
        super();
        this.setVerticalAlignment(SwingConstants.TOP);
        this.setHorizontalAlignment(SwingConstants.LEFT);
    }

    /**
     * Paints the label content, then outlines the focus area in red.
     */
    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        if (focusArea == null) {
            return;
        }
        if (focusArea.width >= 0 && focusArea.height >= 0) {
            Color c = g.getColor();
            g.setColor(Color.RED);
            g.drawRect(focusArea.x, focusArea.y, focusArea.width, focusArea.height);
            g.setColor(c);
        }
        // BUG FIX: the previous implementation called g.dispose() here. The
        // Graphics passed to paintComponent() is owned by Swing's paint cycle;
        // disposing it breaks any painting performed after this method returns
        // (border, children). Only Graphics objects we create ourselves
        // (e.g. via g.create()) may be disposed.
    }

    protected void setImage(BufferedImage image) {
        this.image = image;
        this.setIcon(new ImageIcon(image));
    }

    public void setFocusArea(Rectangle focusArea) {
        this.focusArea = focusArea;
    }

    protected BufferedImage getImage() {
        return image;
    }
}
| alexmao86/swing-rcp | src/main/java/net/sf/anpr/rcp/widget/JCanvasPanel.java | Java | apache-2.0 | 1,173 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common Policy Engine Implementation
Policies can be expressed in one of two forms: A list of lists, or a
string written in the new policy language.
In the list-of-lists representation, each check inside the innermost
list is combined as with an "and" conjunction--for that check to pass,
all the specified checks must pass. These innermost lists are then
combined as with an "or" conjunction. This is the original way of
expressing policies, but there now exists a new way: the policy
language.
In the policy language, each check is specified the same way as in the
list-of-lists representation: a simple "a:b" pair that is matched to
the correct code to perform that check. However, conjunction
operators are available, allowing for more expressiveness in crafting
policies.
As an example, take the following rule, expressed in the list-of-lists
representation::
[["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
In the policy language, this becomes::
role:admin or (project_id:%(project_id)s and role:projectadmin)
The policy language also has the "not" operator, allowing a richer
policy rule::
project_id:%(project_id)s and not role:dunce
Finally, two special policy checks should be mentioned; the policy
check "@" will always accept an access, and the policy check "!" will
always reject an access. (Note that if a rule is either the empty
list ("[]") or the empty string, this is equivalent to the "@" policy
check.) Of these, the "!" policy check is probably the most useful,
as it allows particular rules to be explicitly disabled.
"""
import abc
import re
import urllib
import urllib2
from oslo.config import cfg
import six
from kwstandby.openstack.common import fileutils
from kwstandby.openstack.common.gettextutils import _
from kwstandby.openstack.common import jsonutils
from kwstandby.openstack.common import log as logging
# Configuration options controlling where policy rules are loaded from and
# which rule applies when a requested rule is not defined.
policy_opts = [
    cfg.StrOpt('policy_file',
               default='policy.json',
               help=_('JSON file containing policy')),
    cfg.StrOpt('policy_default_rule',
               default='default',
               help=_('Rule enforced when requested rule is not found')),
]
CONF = cfg.CONF
CONF.register_opts(policy_opts)
LOG = logging.getLogger(__name__)
# Registry mapping check kinds (e.g. 'rule', 'role', or None for the default
# handler) to the classes/functions implementing them; filled by register().
_checks = {}
class PolicyNotAuthorized(Exception):
    """Raised when a policy rule denies the requested action."""

    def __init__(self, rule):
        # 'rule' is interpolated into the message so callers see which
        # rule denied the access.
        msg = _("Policy doesn't allow %s to be performed.") % rule
        super(PolicyNotAuthorized, self).__init__(msg)
class Rules(dict):
    """A store for rules. Handles the default_rule setting directly."""
    @classmethod
    def load_json(cls, data, default_rule=None):
        """Allow loading of JSON rule data.

        :param data: JSON text mapping rule names to rule definitions
                     (policy-language strings or list-of-lists).
        :param default_rule: name of the rule to fall back to for
                             unknown keys.
        :returns: a new Rules instance with every value parsed to a Check.
        """
        # Suck in the JSON data and parse the rules
        rules = dict((k, parse_rule(v)) for k, v in
                     jsonutils.loads(data).items())
        return cls(rules, default_rule)
    def __init__(self, rules=None, default_rule=None):
        """Initialize the Rules store."""
        super(Rules, self).__init__(rules or {})
        self.default_rule = default_rule
    def __missing__(self, key):
        """Implements the default rule handling.

        Unknown keys resolve to the configured default rule; raises
        KeyError when no usable default rule exists.
        """
        # If the default rule isn't actually defined, do something
        # reasonably intelligent
        if not self.default_rule or self.default_rule not in self:
            raise KeyError(key)
        return self[self.default_rule]
    def __str__(self):
        """Dumps a string representation of the rules."""
        # Start by building the canonical strings for the rules
        out_rules = {}
        for key, value in self.items():
            # Use empty string for singleton TrueCheck instances
            if isinstance(value, TrueCheck):
                out_rules[key] = ''
            else:
                out_rules[key] = str(value)
        # Dump a pretty-printed JSON representation
        return jsonutils.dumps(out_rules, indent=4)
class Enforcer(object):
    """Responsible for loading and enforcing rules.
    :param policy_file: Custom policy file to use, if none is
                        specified, `CONF.policy_file` will be
                        used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation. If
                  `load_rules(True)`, `clear()` or `set_rules(True)`
                  is called this will be overwritten.
    :param default_rule: Default rule to use, CONF.default_rule will
                         be used if none is specified.
    """
    def __init__(self, policy_file=None, rules=None, default_rule=None):
        self.rules = Rules(rules)
        self.default_rule = default_rule or CONF.policy_default_rule
        # policy_path is resolved lazily by load_rules().
        self.policy_path = None
        self.policy_file = policy_file or CONF.policy_file
    def set_rules(self, rules, overwrite=True):
        """Create a new Rules object based on the provided dict of rules.
        :param rules: New rules to use. It should be an instance of dict.
        :param overwrite: Whether to overwrite current rules or update them
                          with the new rules.
        """
        if not isinstance(rules, dict):
            raise TypeError(_("Rules must be an instance of dict or Rules, "
                            "got %s instead") % type(rules))
        if overwrite:
            self.rules = Rules(rules)
        else:
            # BUG FIX: previously this called self.update(rules), but Enforcer
            # has no update() method, so overwrite=False always raised
            # AttributeError. The intent is to merge into the rule store.
            self.rules.update(rules)
    def clear(self):
        """Clears Enforcer rules, policy's cache and policy's path."""
        self.set_rules({})
        self.policy_path = None
    def load_rules(self, force_reload=False):
        """Loads policy_path's rules.
        Policy file is cached and will be reloaded if modified.
        :param force_reload: Whether to overwrite current rules.
        """
        if not self.policy_path:
            self.policy_path = self._get_policy_path()
        # read_cached_file only reports reloaded=True when the file changed
        # on disk (or force_reload is set), so unchanged files are cheap.
        reloaded, data = fileutils.read_cached_file(self.policy_path,
                                                    force_reload=force_reload)
        if reloaded:
            rules = Rules.load_json(data, self.default_rule)
            self.set_rules(rules)
            LOG.debug(_("Rules successfully reloaded"))
    def _get_policy_path(self):
        """Locate the policy json data file.
        :returns: The policy path
        :raises: ConfigFilesNotFoundError if the file couldn't
                 be located.
        """
        policy_file = CONF.find_file(self.policy_file)
        if policy_file:
            return policy_file
        raise cfg.ConfigFilesNotFoundError(path=CONF.policy_file)
    def enforce(self, rule, target, creds, do_raise=False,
                exc=None, *args, **kwargs):
        """Checks authorization of a rule against the target and credentials.
        :param rule: A string or BaseCheck instance specifying the rule
                     to evaluate.
        :param target: As much information about the object being operated
                       on as possible, as a dictionary.
        :param creds: As much information about the user performing the
                      action as possible, as a dictionary.
        :param do_raise: Whether to raise an exception or not if check
                         fails.
        :param exc: Class of the exception to raise if the check fails.
                    Any remaining arguments passed to enforce() (both
                    positional and keyword arguments) will be passed to
                    the exception class. If not specified,
                    PolicyNotAuthorized will be used.
        :return: Returns False if the policy does not allow the action and
                 exc is not provided; otherwise, returns a value that
                 evaluates to True.
        """
        # NOTE(flaper87): Not logging target or creds to avoid
        # potential security issues.
        LOG.debug(_("Rule %s will be now enforced") % rule)
        self.load_rules()
        # Allow the rule to be a Check tree
        if isinstance(rule, BaseCheck):
            result = rule(target, creds, self)
        elif not self.rules:
            # No rules to reference means we're going to fail closed
            result = False
        else:
            try:
                # Evaluate the rule
                result = self.rules[rule](target, creds, self)
            except KeyError:
                LOG.debug(_("Rule [%s] doesn't exist") % rule)
                # If the rule doesn't exist, fail closed
                result = False
        # If it is False, raise the exception if requested
        if do_raise and not result:
            if exc:
                raise exc(*args, **kwargs)
            raise PolicyNotAuthorized(rule)
        return result
class BaseCheck(object):
    """Abstract base class for Check classes."""
    # NOTE(review): __metaclass__ is the Python 2 spelling and has no effect
    # under Python 3 (six.add_metaclass would be needed there); the rest of
    # this module (basestring, urllib2) is also Python 2 -- confirm the
    # supported interpreter before changing.
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def __str__(self):
        """String representation of the Check tree rooted at this node."""
        pass
    @abc.abstractmethod
    def __call__(self, target, cred):
        """Triggers if instance of the class is called.
        Performs the check. Returns False to reject the access or a
        true value (not necessary True) to accept the access.
        """
        pass
class FalseCheck(BaseCheck):
    """A check that rejects every access unconditionally."""
    def __str__(self):
        """The canonical policy-language spelling of "deny all"."""
        return "!"
    def __call__(self, target, cred):
        """Always deny, regardless of target or credentials."""
        return False
class TrueCheck(BaseCheck):
    """A check that accepts every access unconditionally."""
    def __str__(self):
        """The canonical policy-language spelling of "allow all"."""
        return "@"
    def __call__(self, target, cred):
        """Always allow, regardless of target or credentials."""
        return True
class Check(BaseCheck):
    """A base class to allow for user-defined policy checks."""
    def __init__(self, kind, match):
        """Initiates Check instance.
        :param kind: The kind of the check, i.e., the field before the
                     ':'.
        :param match: The match of the check, i.e., the field after
                      the ':'.
        """
        self.kind = kind
        self.match = match
    # Subclasses supply __call__; this base only stores the parsed
    # "kind:match" pair and renders it back to policy-language form.
    def __str__(self):
        """Return a string representation of this check."""
        return "%s:%s" % (self.kind, self.match)
class NotCheck(BaseCheck):
    """Logical negation of a single wrapped check."""
    def __init__(self, rule):
        """Wrap the check whose verdict should be inverted.

        :param rule: The Check to negate.
        """
        self.rule = rule
    def __str__(self):
        """Render as "not <wrapped check>"."""
        return "not %s" % self.rule
    def __call__(self, target, cred):
        """Evaluate the wrapped check and invert its outcome."""
        verdict = self.rule(target, cred)
        return not verdict
class AndCheck(BaseCheck):
    """Conjunction of sub-checks: passes only if every sub-check passes."""
    def __init__(self, rules):
        """Store the list of sub-checks to evaluate.

        :param rules: A list of Check objects; all must accept.
        """
        self.rules = rules
    def __str__(self):
        """Render as "(a and b and ...)"."""
        joined = ' and '.join(str(rule) for rule in self.rules)
        return "(%s)" % joined
    def __call__(self, target, cred):
        """Accept only when every sub-check accepts (short-circuits)."""
        return all(rule(target, cred) for rule in self.rules)
    def add_check(self, rule):
        """Append one more sub-check.

        Returns the AndCheck object itself so calls can be chained.
        """
        self.rules.append(rule)
        return self
class OrCheck(BaseCheck):
    """Disjunction of sub-checks: passes if at least one sub-check passes."""
    def __init__(self, rules):
        """Store the list of sub-checks to evaluate.

        :param rules: A list of Check objects; any one accepting suffices.
        """
        self.rules = rules
    def __str__(self):
        """Render as "(a or b or ...)"."""
        joined = ' or '.join(str(rule) for rule in self.rules)
        return "(%s)" % joined
    def __call__(self, target, cred):
        """Accept as soon as any sub-check accepts (short-circuits)."""
        return any(rule(target, cred) for rule in self.rules)
    def add_check(self, rule):
        """Append one more sub-check.

        Returns the OrCheck object itself so calls can be chained.
        """
        self.rules.append(rule)
        return self
def _parse_check(rule):
    """Parse a single base check rule into an appropriate Check object.

    Special rules: '!' always denies, '@' always allows. Anything else is
    split into "kind:match" and dispatched through the _checks registry;
    unparseable or unknown rules fail closed (FalseCheck).
    """
    # Handle the special checks
    if rule == '!':
        return FalseCheck()
    elif rule == '@':
        return TrueCheck()
    try:
        kind, match = rule.split(':', 1)
    except Exception:
        LOG.exception(_("Failed to understand rule %s") % rule)
        # If the rule is invalid, we'll fail closed
        return FalseCheck()
    # Find what implements the check
    if kind in _checks:
        return _checks[kind](kind, match)
    elif None in _checks:
        # Fall back to the registered default handler, if any.
        return _checks[None](kind, match)
    else:
        LOG.error(_("No handler for matches of kind %s") % kind)
        return FalseCheck()
def _parse_list_rule(rule):
    """Translates the old list-of-lists syntax into a tree of Check objects.

    Inner lists are AND-ed together; the outer list is OR-ed.
    Provided for backwards compatibility.
    """
    # Empty rule defaults to True
    if not rule:
        return TrueCheck()
    # Outer list is joined by "or"; inner list by "and"
    or_list = []
    for inner_rule in rule:
        # Elide empty inner lists
        if not inner_rule:
            continue
        # Handle bare strings
        if isinstance(inner_rule, basestring):
            inner_rule = [inner_rule]
        # Parse the inner rules into Check objects
        and_list = [_parse_check(r) for r in inner_rule]
        # Append the appropriate check to the or_list
        if len(and_list) == 1:
            or_list.append(and_list[0])
        else:
            or_list.append(AndCheck(and_list))
    # If we have only one check, omit the "or"
    if not or_list:
        return FalseCheck()
    elif len(or_list) == 1:
        return or_list[0]
    return OrCheck(or_list)
# Used for tokenizing the policy language
_tokenize_re = re.compile(r'\s+')
def _parse_tokenize(rule):
    """Tokenizer for the policy language.

    Yields (token_type, value) pairs, where token_type is one of
    '(' / ')' / 'and' / 'or' / 'not' / 'string' / 'check'.

    Most of the single-character tokens are specified in the
    _tokenize_re; however, parentheses need to be handled specially,
    because they can appear inside a check string. Thankfully, those
    parentheses that appear inside a check string can never occur at
    the very beginning or end ("%(variable)s" is the correct syntax).
    """
    for tok in _tokenize_re.split(rule):
        # Skip empty tokens
        if not tok or tok.isspace():
            continue
        # Handle leading parens on the token
        clean = tok.lstrip('(')
        for i in range(len(tok) - len(clean)):
            yield '(', '('
        # If it was only parentheses, continue
        if not clean:
            continue
        else:
            tok = clean
        # Handle trailing parens on the token
        clean = tok.rstrip(')')
        trail = len(tok) - len(clean)
        # Yield the cleaned token
        lowered = clean.lower()
        if lowered in ('and', 'or', 'not'):
            # Special tokens
            yield lowered, clean
        elif clean:
            # Not a special token, but not composed solely of ')'
            if len(tok) >= 2 and ((tok[0], tok[-1]) in
                                  [('"', '"'), ("'", "'")]):
                # It's a quoted string
                yield 'string', tok[1:-1]
            else:
                # Plain check expression; parsed eagerly into a Check.
                yield 'check', _parse_check(clean)
        # Yield the trailing parens
        for i in range(trail):
            yield ')', ')'
class ParseStateMeta(type):
    """Metaclass for the ParseState class.
    Facilitates identifying reduction methods.
    """
    def __new__(mcs, name, bases, cls_dict):
        """Create the class.
        Injects the 'reducers' list, a list of tuples matching token sequences
        to the names of the corresponding reduction methods.
        """
        reducers = []
        for key, value in cls_dict.items():
            # Only methods decorated with @reducer carry a 'reducers'
            # attribute (a list of trigger token sequences).
            if not hasattr(value, 'reducers'):
                continue
            for reduction in value.reducers:
                reducers.append((reduction, key))
        cls_dict['reducers'] = reducers
        return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
def reducer(*tokens):
    """Mark a method as a reduction rule for the given token sequence.

    The decorated function gains a ``reducers`` attribute -- a list of token
    sequences; each application of this decorator appends one sequence, so
    stacking the decorator registers several trigger sequences for the same
    method. ParseStateMeta later collects these attributes.
    """
    sequence = list(tokens)
    def attach(func):
        existing = getattr(func, 'reducers', None)
        if existing is None:
            existing = []
            func.reducers = existing
        existing.append(sequence)
        return func
    return attach
class ParseState(object):
    """Implement the core of parsing the policy language.
    Uses a greedy reduction algorithm to reduce a sequence of tokens into
    a single terminal, the value of which will be the root of the Check tree.
    Note: error reporting is rather lacking.  The best we can get with
    this parser formulation is an overall "parse failed" error.
    Fortunately, the policy language is simple enough that this
    shouldn't be that big a problem.
    """
    # The 'reducers' class attribute (token-sequence -> method-name pairs)
    # is injected by ParseStateMeta from the @reducer-decorated methods below.
    __metaclass__ = ParseStateMeta
    def __init__(self):
        """Initialize the ParseState."""
        self.tokens = []
        self.values = []
    def reduce(self):
        """Perform a greedy reduction of the token stream.
        If a reducer method matches, it will be executed, then the
        reduce() method will be called recursively to search for any more
        possible reductions.
        """
        for reduction, methname in self.reducers:
            # Match the reduction pattern against the tail of the stack.
            if (len(self.tokens) >= len(reduction) and
                    self.tokens[-len(reduction):] == reduction):
                # Get the reduction method
                meth = getattr(self, methname)
                # Reduce the token stream
                results = meth(*self.values[-len(reduction):])
                # Update the tokens and values
                self.tokens[-len(reduction):] = [r[0] for r in results]
                self.values[-len(reduction):] = [r[1] for r in results]
                # Check for any more reductions
                return self.reduce()
    def shift(self, tok, value):
        """Adds one more token to the state.  Calls reduce()."""
        self.tokens.append(tok)
        self.values.append(value)
        # Do a greedy reduce...
        self.reduce()
    @property
    def result(self):
        """Obtain the final result of the parse.
        Raises ValueError if the parse failed to reduce to a single result.
        """
        if len(self.values) != 1:
            raise ValueError("Could not parse rule")
        return self.values[0]
    @reducer('(', 'check', ')')
    @reducer('(', 'and_expr', ')')
    @reducer('(', 'or_expr', ')')
    def _wrap_check(self, _p1, check, _p2):
        """Turn parenthesized expressions into a 'check' token."""
        return [('check', check)]
    @reducer('check', 'and', 'check')
    def _make_and_expr(self, check1, _and, check2):
        """Create an 'and_expr'.
        Join two checks by the 'and' operator.
        """
        return [('and_expr', AndCheck([check1, check2]))]
    @reducer('and_expr', 'and', 'check')
    def _extend_and_expr(self, and_expr, _and, check):
        """Extend an 'and_expr' by adding one more check."""
        return [('and_expr', and_expr.add_check(check))]
    @reducer('check', 'or', 'check')
    def _make_or_expr(self, check1, _or, check2):
        """Create an 'or_expr'.
        Join two checks by the 'or' operator.
        """
        return [('or_expr', OrCheck([check1, check2]))]
    @reducer('or_expr', 'or', 'check')
    def _extend_or_expr(self, or_expr, _or, check):
        """Extend an 'or_expr' by adding one more check."""
        return [('or_expr', or_expr.add_check(check))]
    @reducer('not', 'check')
    def _make_not_expr(self, _not, check):
        """Invert the result of another check."""
        return [('check', NotCheck(check))]
def _parse_text_rule(rule):
    """Parses policy to the tree.
    Translates a policy written in the policy language into a tree of
    Check objects.
    """
    # Empty rule means always accept
    if not rule:
        return TrueCheck()
    # Parse the token stream
    state = ParseState()
    for tok, value in _parse_tokenize(rule):
        state.shift(tok, value)
    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule
        # NOTE(review): interpolating with locals() is fragile -- it depends
        # on the local variable being named 'rule'; confirm before renaming.
        LOG.exception(_("Failed to understand rule %(rule)r") % locals())
        # Fail closed
        return FalseCheck()
def parse_rule(rule):
    """Parses a policy rule into a tree of Check objects.

    :param rule: either a string in the policy language or the older
                 list-of-lists policy structure
    :returns: the root Check of the parsed rule tree
    """

    # If the rule is a string, it's in the policy language.
    # six.string_types covers both str and unicode on Python 2 and str on
    # Python 3, consistent with the six usage elsewhere in this module
    # (the Python-2-only `basestring` breaks under Python 3).
    if isinstance(rule, six.string_types):
        return _parse_text_rule(rule)
    return _parse_list_rule(rule)
def register(name, func=None):
    """Register a function or Check class as a policy check.

    :param name: Gives the name of the check type, e.g., 'rule',
                 'role', etc. If name is None, a default check type
                 will be registered.
    :param func: If given, provides the function or class to register.
                 If not given, returns a function taking one argument
                 to specify the function or class to register,
                 allowing use as a decorator.
    """

    def decorator(check_impl):
        # Record the implementation under its check-type name and hand it
        # back unchanged, as the decorator protocol requires.
        _checks[name] = check_impl
        return check_impl

    # Direct-call form registers immediately; otherwise act as a decorator.
    return decorator(func) if func else decorator
@register("rule")
class RuleCheck(Check):
    """Check that delegates to another named rule in the enforcer."""

    def __call__(self, target, creds, enforcer):
        """Recursively checks credentials based on the defined rules."""

        try:
            # self.match is the name of the referenced rule.
            return enforcer.rules[self.match](target, creds, enforcer)
        except KeyError:
            # We don't have any matching rule; fail closed
            return False
@register("role")
class RoleCheck(Check):
    """Check that matches a role name case-insensitively."""

    def __call__(self, target, creds, enforcer):
        """Check that there is a matching role in the cred dict."""

        # NOTE(review): raises KeyError if creds has no 'roles' key —
        # confirm callers always supply it.
        return self.match.lower() in [x.lower() for x in creds['roles']]
@register('http')
class HttpCheck(Check):
    """Check that consults a remote HTTP server for the decision."""

    def __call__(self, target, creds, enforcer):
        """Check http: rules by calling to a remote server.

        This example implementation simply verifies that the response
        is exactly 'True'.
        """

        # self.match is a URL template filled in from the target dict.
        url = ('http:' + self.match) % target
        data = {'target': jsonutils.dumps(target),
                'credentials': jsonutils.dumps(creds)}
        post_data = urllib.urlencode(data)
        # NOTE(review): Python-2-only urllib/urllib2 APIs; no timeout is set,
        # so a slow policy server can block the caller indefinitely.
        f = urllib2.urlopen(url, post_data)
        return f.read() == "True"
@register(None)
class GenericCheck(Check):
    """Fallback check comparing a credential field against a template."""

    def __call__(self, target, creds, enforcer):
        """Check an individual match.

        Matches look like:

            tenant:%(tenant_id)s
            role:compute:admin
        """

        # TODO(termie): do dict inspection via dot syntax
        # Substitute target values into the match template, then compare
        # (as text) against the credential named by self.kind.
        match = self.match % target
        if self.kind in creds:
            return match == six.text_type(creds[self.kind])
        return False
| frossigneux/kwstandby | kwstandby/openstack/common/policy.py | Python | apache-2.0 | 25,233 |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.app.standalone.about;
import org.eclipse.jface.action.IAction;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.actions.ActionDelegate;
public class AboutBoxAction extends ActionDelegate
{
private IWorkbenchWindow window;
public AboutBoxAction(IWorkbenchWindow window) {
this.window = window;
}
@Override
public void run(IAction action)
{
// new AboutDialog(window.getShell()).open();
AboutBoxDialog dialog = new AboutBoxDialog(window.getShell());
dialog.open();
}
} | dbeaver/dbeaver | plugins/org.jkiss.dbeaver.ui.app.standalone/src/org/jkiss/dbeaver/ui/app/standalone/about/AboutBoxAction.java | Java | apache-2.0 | 1,230 |
using SolrNetLight;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using System.Text;
using SolrNetLight.Facet;
namespace SolrNetLight
{
/// <summary>
/// Strongly typed wrapper for the top level of a Solr JSON query response.
/// </summary>
[DataContract]
public class SolrResponse<T>
{
    /// <summary>The main "response" section containing the matched documents.</summary>
    [DataMember(Name="response")]
    public SolrResponseBase<T> Response { get; set; }

    /// <summary>The "facet_counts" section; presumably absent (null) when
    /// faceting was not requested — confirm against the Solr query used.</summary>
    [DataMember(Name = "facet_counts")]
    public FacetCounts Facets { get; set; }
}
}
| SolrNetLight/SolrNetLight | SolrNetLight/SolrResponse.cs | C# | apache-2.0 | 453 |
package com.action.design.pattern.chain;
/**
* 创建不同类型的记录器。赋予它们不同的错误级别,并在每个记录器中设置下一个记录器。每个记录器中的下一个记录器代表的是链的一部分。
* Created by wuyunfeng on 2017/6/15.
*/
public class ChainPatternDemo {
private static AbstractLogger getChainOfLoggers() {
AbstractLogger errorLogger = new ErrorLogger(AbstractLogger.ERROR);
AbstractLogger fileLogger = new FileLogger(AbstractLogger.DEBUG);
AbstractLogger consoleLogger = new ConsoleLogger(AbstractLogger.INFO);
errorLogger.setNextLogger(fileLogger);
fileLogger.setNextLogger(consoleLogger);
return errorLogger;
}
public static void main(String[] args) {
AbstractLogger loggerChain = getChainOfLoggers();
loggerChain.logMessage(AbstractLogger.INFO, "This is an information.");
loggerChain.logMessage(AbstractLogger.DEBUG,
"This is an debug level information.");
loggerChain.logMessage(AbstractLogger.ERROR,
"This is an error information.");
}
}
| pearpai/java_action | src/main/java/com/action/design/pattern/chain/ChainPatternDemo.java | Java | apache-2.0 | 1,140 |
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.descriptor;
import boofcv.struct.feature.TupleDesc_B;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
/**
 * JMH benchmark comparing Hamming-distance implementations for binary
 * feature descriptors: a 16-bit lookup table versus bit-twiddling equations.
 */
@SuppressWarnings("ResultOfMethodCallIgnored") @BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 2)
@Measurement(iterations = 5)
@State(Scope.Benchmark)
@Fork(value = 1)
public class BenchmarkDescriptorDistance {

    // Number of descriptor pairs scored per benchmark invocation
    static int NUM_FEATURES = 10000;

    List<TupleDesc_B> binaryA = new ArrayList<>();
    List<TupleDesc_B> binaryB = new ArrayList<>();

    // Precomputed popcount-of-XOR table for 16-bit operands
    HammingTable16 table = new HammingTable16();

    /** Fills both lists with random 512-bit descriptors using a fixed seed. */
    @Setup public void setup() {
        Random rand = new Random(234234);

        binaryA = new ArrayList<>();
        binaryB = new ArrayList<>();

        for (int i = 0; i < NUM_FEATURES; i++) {
            binaryA.add(randomFeature(rand));
            binaryB.add(randomFeature(rand));
        }
    }

    /** Scores all pairs with the 16-bit lookup-table implementation. */
    @Benchmark public void hammingTable() {
        for (int i = 0; i < binaryA.size(); i++) {
            tableScore(binaryA.get(i), binaryB.get(i));
        }
    }

    // Hamming distance via two 16-bit table lookups per 32-bit word
    private int tableScore( TupleDesc_B a, TupleDesc_B b ) {
        int score = 0;

        for (int i = 0; i < a.data.length; i++) {
            int dataA = a.data[i];
            int dataB = b.data[i];

            score += table.lookup((short)dataA, (short)dataB);
            score += table.lookup((short)(dataA >> 16), (short)(dataB >> 16));
        }

        return score;
    }

    /** Scores all pairs with the older equation-based implementation. */
    @Benchmark public void equationOld() {
        for (int i = 0; i < binaryA.size(); i++) {
            ExperimentalDescriptorDistance.hamming(binaryA.get(i), binaryB.get(i));
        }
    }

    /** Scores all pairs with the current production implementation. */
    @Benchmark public void equation() {
        for (int i = 0; i < binaryA.size(); i++) {
            DescriptorDistance.hamming(binaryA.get(i), binaryB.get(i));
        }
    }

    // Creates one random 512-bit binary descriptor
    private TupleDesc_B randomFeature( Random rand ) {
        TupleDesc_B feat = new TupleDesc_B(512);
        for (int j = 0; j < feat.data.length; j++) {
            feat.data[j] = rand.nextInt();
        }
        return feat;
    }

    public static void main( String[] args ) throws RunnerException {
        Options opt = new OptionsBuilder()
                .include(BenchmarkDescriptorDistance.class.getSimpleName())
                .warmupTime(TimeValue.seconds(1))
                .measurementTime(TimeValue.seconds(1))
                .build();

        new Runner(opt).run();
    }
}
| lessthanoptimal/BoofCV | main/boofcv-feature/src/benchmark/java/boofcv/alg/descriptor/BenchmarkDescriptorDistance.java | Java | apache-2.0 | 3,105 |
package adamin90.com.wpp.model.mostsearch;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Generated;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
/**
 * Gson-mapped response envelope for the "most searched" API:
 * a result list plus a status code.
 */
@Generated("org.jsonschema2pojo")
public class MostSearchData {

    // JSON field "data": the list of result items (never null, starts empty)
    @SerializedName("data")
    @Expose
    private List<Datum> data = new ArrayList<Datum>();

    // JSON field "code": server status code for the request
    @SerializedName("code")
    @Expose
    private Integer code;

    /**
     * @return the list of result items deserialized from the "data" field
     */
    public List<Datum> getData() {
        return data;
    }

    /**
     * @param data the list of result items to expose via the "data" field
     */
    public void setData(List<Datum> data) {
        this.data = data;
    }

    /**
     * @return the server status code from the "code" field; may be null if absent
     */
    public Integer getCode() {
        return code;
    }

    /**
     * @param code the server status code for the "code" field
     */
    public void setCode(Integer code) {
        this.code = code;
    }
}
| adamin1990/MaterialWpp | wpp/app/src/main/java/adamin90/com/wpp/model/mostsearch/MostSearchData.java | Java | apache-2.0 | 971 |
// Accumulators for discovered AF elements and the in-flight request promises.
var activeElements = [];
var sleepElements = [];
var promises = [];

// SECURITY NOTE(review): credentials are hard-coded and base64-encoded in
// client-side source — anyone loading this page can read them. Move auth
// server-side or use a token-based scheme.
// Fetch the top-level person elements of the PIFITNESS database, then fan
// out one sub-element request per person.
$.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/assetdatabases/D0EgxEhIf8KUieOFdFcX1IWQZ8qIGYDdE0m5aJCwNb4x_gSlVQSVRFUjAwMVxQSUZJVE5FU1M/elements", {
    type : 'GET',
    headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")},
    success: function(results){
        for (var i = 0; i < results.Items.length; i++) {
            var item = results.Items[i];
            getSubElements(item);
        }
    }
}).done(function(){
    // When every queued request has settled, stop the spinner and hide the
    // loading overlay.
    // NOTE(review): promises pushed after this .done() fires are not waited
    // on — assumes all getSubElements calls queue synchronously; confirm.
    $.when.apply($,promises).done(function(){
        spinner.stop(target);
        var blackout = document.getElementById('blackout');
        $('#blackout').css('opacity', '0');
        $('#blackout').css('width', '0%');
        $('#blackout').css('height', '0%');
    });
});
var getSubElements = function(personElement){
promises.push($.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/elements/" + personElement.WebId + "/elements", {
type : 'GET',
headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")},
success: function(results){
for (var i = 0; i < results.Items.length; i++) {
var innerItem = results.Items[i];
if (innerItem.TemplateName == "Fitbit Activity Template") {
getFitbitActivityAttributes({
Person : personElement.Name,
Child : "Fitbit Activity",
ChildWebId : innerItem.WebId
});
} else if (innerItem.TemplateName == "Fitbit Sleep Template") {
getFitbitSleepAttributes({
Person : personElement.Name,
Child : "Fitbit Sleep",
ChildWebId : innerItem.WebId
});
}
}}}));
}
// Loads the attribute list of a "Fitbit Activity" element, records the
// element in `activeElements`, and stores each attribute's name and WebId
// on the object. The request promise joins the shared `promises` array.
var getFitbitActivityAttributes = function(object) {
    promises.push($.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/elements/" + object.ChildWebId + "/attributes",{
        type : 'GET',
        headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")},
        success: function(results){
            object.Attributes = [];
            activeElements.push(object);
            for (var i = 0; i < results.Items.length; i++) {
                var attribute = results.Items[i];
                object.Attributes.push({
                    Attribute : attribute.Name,
                    AttributeWebId : attribute.WebId
                });
            };
        }
    }));
}
// Same as getFitbitActivityAttributes but records the element in
// `sleepElements`. NOTE(review): near-duplicate of the activity loader —
// could be one function parameterized by the target array.
var getFitbitSleepAttributes = function(object) {
    promises.push($.ajax("https://osiproghackuc2015.osisoft.com/piwebapi/elements/" + object.ChildWebId + "/attributes",{
        type : 'GET',
        headers: { "Authorization" : "Basic " + btoa("osiproghack\\hackuser051:bO2rA53P2")},
        success: function(results){
            object.Attributes = [];
            sleepElements.push(object);
            for (var i = 0; i < results.Items.length; i++) {
                var attribute = results.Items[i];
                object.Attributes.push({
                    Attribute : attribute.Name,
                    AttributeWebId : attribute.WebId
                });
            };
        }
    }));
}
| dstcontrols/UnhandledException | customJS/piWebAPI.js | JavaScript | apache-2.0 | 2,755 |
import { Seq, Set as ISet } from 'immutable';
import { atom, unwrap } from '../derivable';
import { equals } from './equals';
// Unit tests for the structural `equals` helper: primitive equality,
// reference identity for plain objects, Immutable.js value equality, and
// reference (not value) equality for derivables.
describe('util/equals', () => {
    it('should check equality of primitives', () => {
        // NaN is treated as equal to itself, unlike the `===` operator.
        expect(equals(NaN, NaN)).toBe(true);
        expect(equals(4, 2 + 2)).toBe(true);
        expect(equals(0, 0)).toBe(true);
        expect(equals('abcd', 'ab' + 'cd')).toBe(true);
    });

    it('should check identity on ordinary object', () => {
        // Distinct plain objects/arrays are never equal, even when empty.
        expect(equals({}, {})).toBe(false);
        expect(equals([], [])).toBe(false);
        const arr: never[] = [];
        const obj = {};
        expect(equals(arr, arr)).toBe(true);
        expect(equals(obj, obj)).toBe(true);
    });

    it('should check equality on immutable objects', () => {
        // Immutable.js collections compare by value, but only within the
        // same collection kind (an indexed Seq never equals a Set).
        const seq = Seq.Indexed.of(1, 2, 3);
        const set = ISet.of(1, 2, 3);
        expect(equals(seq, set)).toBe(false);
        expect(equals(seq.toSetSeq(), set)).toBe(true);
        expect(equals(seq, [1, 2, 3])).toBe(false);
    });

    it('should check the equality of derivables', () => {
        // Derivables compare by reference, not by their current value.
        const a = atom('foo');
        const b = atom('foo');
        const notA = atom('bar');
        const aDerivable = a.derive(v => v.toUpperCase());
        const bDerivable = b.derive(v => v.toUpperCase());
        const withObj1 = atom({ hello: 'world' });
        const withObj2 = atom({ hello: 'world' });
        expect(equals(a, a)).toBe(true);
        expect(equals(b, b)).toBe(true);
        expect(equals(a, notA)).toBe(false);
        expect(equals(a, b)).toBe(false);
        expect(equals(aDerivable, bDerivable)).toBe(false);
        expect(equals(withObj1, withObj1)).toBe(true);
        expect(equals(withObj1, withObj2)).toBe(false);
    });

    it('should test for reference equality, not derivable value equality', () => {
        // Swapping in a different person with an equal-valued name$ atom must
        // still propagate, because the atoms are different references.
        const personA = { name$: atom('Sherlock') };
        const personB = { name$: atom('Sherlock') };
        const person$ = atom(personA);
        const nameOfPerson$ = person$.derive(p => p.name$).derive(unwrap).autoCache();
        expect(nameOfPerson$.get()).toBe('Sherlock');
        person$.set(personB);
        expect(nameOfPerson$.get()).toBe('Sherlock');
        personB.name$.set('Moriarty');
        expect(nameOfPerson$.get()).toBe('Moriarty');
    });
});
| politie/sherlock | src/utils/equals.test.ts | TypeScript | apache-2.0 | 2,307 |
// Generated from /POI/java/org/apache/poi/hpsf/VariantBool.java
#pragma once
#include <fwd-POI.hpp>
#include <org/apache/poi/hpsf/fwd-POI.hpp>
#include <org/apache/poi/util/fwd-POI.hpp>
#include <java/lang/Object.hpp>
struct default_init_tag;
// Transpiled C++ mirror of org.apache.poi.hpsf.VariantBool: a 2-byte
// VARIANT_BOOL value read from an HPSF property set stream.
class poi::hpsf::VariantBool
    : public virtual ::java::lang::Object
{

public:
    typedef ::java::lang::Object super;

private:
    static ::poi::util::POILogger* LOG_;

public: /* package */
    // On-disk size of a VARIANT_BOOL in bytes.
    static constexpr int32_t SIZE { int32_t(2) };

private:
    bool _value { };

protected:
    // Generated default-initialization hook (replaces the Java constructor body).
    void ctor();

public: /* package */
    // Reads the 2-byte value from the stream; decoding rules live in the .cpp,
    // not visible here.
    virtual void read(::poi::util::LittleEndianByteArrayInputStream* lei);
    virtual bool getValue();
    virtual void setValue(bool value);

    // Generated
    VariantBool();

protected:
    VariantBool(const ::default_init_tag&);

public:
    static ::java::lang::Class *class_();
    static void clinit();

private:
    static ::poi::util::POILogger*& LOG();
    virtual ::java::lang::Class* getClass0();
};
| pebble2015/cpoi | src/org/apache/poi/hpsf/VariantBool.hpp | C++ | apache-2.0 | 992 |
package com.hangon.saying.viewPager;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.ImageRequest;
import com.android.volley.toolbox.NetworkImageView;
import com.example.fd.ourapplication.R;
import com.hangon.common.Constants;
import com.hangon.common.MyApplication;
import com.hangon.common.ViewHolder;
import com.hangon.common.VolleyBitmapCache;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by Administrator on 2016/5/31.
 *
 * Grid adapter that shows "saying" images in a GridView using Volley's
 * NetworkImageView for loading and caching.
 */
public class GradAdapter extends BaseAdapter implements AbsListView.OnScrollListener {
    Context context;
    List list = new ArrayList();
    private static ImageLoader mImageLoader; // ImageLoader used to drive each NetworkImageView

    /**
     * Height of each grid item, in pixels.
     */
    private int mItemHeight = 0;

    GradAdapter(Context context, List list) {
        this.context = context;
        this.list = list;
        // Build the loader from the app-wide request queue and a bitmap cache.
        mImageLoader = new ImageLoader(MyApplication.queues, new VolleyBitmapCache());
    }

    @Override
    public int getCount() {
        return list.size();
    }

    @Override
    public Object getItem(int position) {
        return list.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    // NOTE(review): holder kept as an instance field and captured by the async
    // callback in getImag(); by the time a response arrives it may point at a
    // recycled row's holder, loading the bitmap into the wrong cell. Prefer a
    // local holder per getView call — confirm before changing.
    ViewGradHolder gradHolder;

    public View getView(int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            gradHolder = new ViewGradHolder();
            convertView = LayoutInflater.from(context).inflate(R.layout.carlife_grade_content, null);
            gradHolder.img = (ImageView) convertView.findViewById(R.id.item_grida_image);
            convertView.setTag(gradHolder);
        } else {
            gradHolder = (ViewGradHolder) convertView.getTag();
        }
        NetworkImageView networkImageView = (NetworkImageView) gradHolder.img;
        // Placeholder shown while the image loads.
        networkImageView.setDefaultImageResId(R.drawable.default_photo);
        // Image shown if loading fails.
        networkImageView.setErrorImageResId(R.drawable.error_photo);
        if (list.get(position) != null && !list.get(position).equals("")) {
            //getImag(list.get(position).toString());
            // Start the network load for this cell's image.
            networkImageView.setImageUrl(Constants.LOAD_SAYING_IMG_URL + list.get(position), mImageLoader);
        }
        return convertView;
    }

    // View-holder for one grid cell.
    class ViewGradHolder {
        ImageView img;
    }

    // Legacy manual loader (unused; getView uses NetworkImageView instead).
    private void getImag(String path) {
        String url = Constants.LOAD_SAYING_IMG_URL + path;
        ImageRequest request = new ImageRequest(url, new Response.Listener<Bitmap>() {
            @Override
            public void onResponse(Bitmap bitmap) {
                gradHolder.img.setImageBitmap(bitmap);
            }
        }, 0, 0, Bitmap.Config.ARGB_8888, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError volleyError) {
                Toast.makeText(context, "说说图片加载失败", Toast.LENGTH_SHORT).show();
            }
        });
        MyApplication.getHttpQueues().add(request);
    }

    /**
     * Sets the height of each grid item; refreshes the grid when it changes.
     */
    public void setItemHeight(int height) {
        if (height == mItemHeight) {
            return;
        }
        mItemHeight = height;
        notifyDataSetChanged();
    }

    @Override
    public void onScrollStateChanged(AbsListView view, int scrollState) {
        // Only download images while the GridView is idle; cancel pending
        // work while it is scrolling (both branches currently stubbed out).
        if (scrollState == SCROLL_STATE_IDLE) {
            // loadBitmaps(mFirstVisibleItem, mVisibleItemCount);
        } else {
            // cancelAllTasks();
        }
    }

    @Override
    public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
    }
}
| TangZuopeng/OurApplication | app/src/main/java/com/hangon/saying/viewPager/GradAdapter.java | Java | apache-2.0 | 4,468 |
/*
* MaiKe Labs (2016 - 2026)
*
* Written by Jack Tan <jiankemeng@gmail.com>
*
* Connect VCC of the SSD1306 OLED to 3.3V
* Connect GND to Ground
* Connect SCL to i2c clock - GPIO21
* Connect SDA to i2c data - GPIO22
* Connect DC to GND (The scanned i2c address is 0x3C)
*
*/
#include <stdio.h>
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "esp_system.h"
#include "nvs_flash.h"
#include "U8glib.h"
U8GLIB_SSD1306_128X64 u8g(U8G_I2C_OPT_NONE); // I2C / TWI
// Renders one frame: prints "Hello World!" with the unifont at (0, 22).
// Called repeatedly from the u8g picture loop.
void draw(void)
{
    u8g.setFont(u8g_font_unifont);
    u8g.drawStr(0, 22, "Hello World!");
}
// FreeRTOS task: configures the draw color for the display's color model,
// then redraws the screen once per second via the u8g picture loop.
void ssd1306_task(void *pvParameter)
{
    // assign default color value appropriate to the display mode
    if (u8g.getMode() == U8G_MODE_R3G3B2) {
        u8g.setColorIndex(255); // white
    } else if (u8g.getMode() == U8G_MODE_GRAY2BIT) {
        u8g.setColorIndex(3); // max intensity
    } else if (u8g.getMode() == U8G_MODE_BW) {
        u8g.setColorIndex(1); // pixel on
    } else if (u8g.getMode() == U8G_MODE_HICOLOR) {
        u8g.setHiColorByRGB(255, 255, 255);
    }

    while(1) {
        // picture loop: draw() must repaint the full frame on every page
        u8g.firstPage();
        do {
            draw();
        } while (u8g.nextPage());
        // NOTE(review): extra draw() outside the picture loop looks redundant.
        draw();
        vTaskDelay(1000 / portTICK_RATE_MS);
    }
}
// ESP-IDF entry point: initializes NVS and spawns the display task
// (2048-byte stack, priority 5).
extern "C" void app_main()
{
    nvs_flash_init();
    printf("Welcome to Noduino!\r\n");
    printf("Start to test SSD1306 OLED!\r\n");
    xTaskCreate(&ssd1306_task, "ssd1306_task", 2048, NULL, 5, NULL);
}
| icamgo/esp-idf | examples/10_ssd1306_hello/main/main.cpp | C++ | apache-2.0 | 1,359 |
# encoding: UTF-8
# Copyright 2012 Twitter, Inc
# http://www.apache.org/licenses/LICENSE-2.0
# Documentation: https://github.com/hunspell/hyphen/blob/21127cc8493a68d4fe9adbb71377b469b4f2b550/doc/tb87nemeth.pdf
module TwitterCldr
  module Shared
    # Hyphenates words using Liang-style hyphenation patterns (the scheme
    # used by TeX and Hunspell's hyphen library).
    class Hyphenator
      # Raised when no hyphenation patterns exist for a requested locale.
      class UnsupportedLocaleError < StandardError; end

      BASE_RESOURCE_PATH = %w(shared hyphenation).freeze
      DEFAULT_LEFT_HYPHEN_MIN = 2
      DEFAULT_RIGHT_HYPHEN_MIN = 2
      DEFAULT_NO_HYPHEN = "-'’".freeze

      class << self
        # Returns a cached Hyphenator for the given locale, or raises
        # UnsupportedLocaleError when no patterns are available.
        def get(locale)
          supported_locale = find_supported_locale(locale)

          unless supported_locale
            # Bug fix: the original reassigned `locale` to the nil lookup
            # result before raising, so the message always rendered "''".
            # Report the locale the caller actually asked for.
            raise UnsupportedLocaleError,
              "'#{locale}' is not a supported hyphenation locale"
          end

          cache[supported_locale] ||= begin
            resource = resource_for(supported_locale)
            new(resource[:rules], supported_locale, resource[:options])
          end
        end

        def supported_locale?(locale)
          !!find_supported_locale(locale)
        end

        # Lists locales that ship a hyphenation resource file.
        def supported_locales
          @supported_locales ||= begin
            absolute_resource_path = TwitterCldr.absolute_resource_path(
              File.join(BASE_RESOURCE_PATH)
            )

            files = Dir.glob(File.join(absolute_resource_path, '*.yml'))
            files.map { |f| File.basename(f).chomp('.yml') }
          end
        end

        private

        # Maximizes the locale and returns the first permutation that has a
        # matching resource, or nil when none does.
        def find_supported_locale(locale)
          maximized_locale = Locale.parse(locale.to_s).maximize

          maximized_locale.permutations('-').find do |locale_candidate|
            TwitterCldr.resource_exists?(
              *BASE_RESOURCE_PATH, locale_candidate
            )
          end
        end

        def cache
          @cache ||= {}
        end

        def resource_for(locale)
          TwitterCldr.get_resource(*BASE_RESOURCE_PATH, locale)
        end
      end

      attr_reader :rules, :locale, :options, :trie

      def initialize(rules, locale, options)
        @rules = rules
        @locale = locale
        @options = options
        @trie = build_trie_from(rules)
      end

      # 0x00AD is a soft hyphen
      def hyphenate(text, hyphen = "\u00AD")
        each_chunk(text).to_a.join(hyphen)
      end

      # Yields each unbreakable chunk of text in order; joining the chunks
      # reproduces the original text.
      def each_chunk(text)
        if block_given?
          last_pos = 0

          each_position(text) do |pos|
            yield text[last_pos...pos].tap { last_pos = pos }
          end

          if last_pos < text.size
            yield text[last_pos..text.size]
          end
        else
          to_enum(__method__, text)
        end
      end

      # Yields each legal hyphenation index into the original text. Odd
      # break weights mark break points, per Liang's algorithm.
      def each_position(text)
        if block_given?
          # Sentinel dots mark word boundaries for boundary-anchored patterns.
          text = ".#{text}."
          break_weights = break_weights_for(text)
          left = left_hyphen_min
          right = text.size - right_hyphen_min - 2

          (left...right).each do |idx|
            yield idx if break_weights[idx].odd?
          end
        else
          to_enum(__method__, text)
        end
      end

      private

      # Computes the per-position break weights by matching every pattern
      # (via the trie) at every starting offset.
      def break_weights_for(text)
        break_weights = Array.new(text.size - 1, 0)

        text.each_char.with_index do |char, idx|
          subtrie = trie.root
          counter = idx

          while subtrie
            subtrie = subtrie.child(text[counter])
            counter += 1

            if subtrie && subtrie.has_value?
              update_break_weights(subtrie.value, break_weights, idx)
            end
          end
        end

        remove_illegal_hyphens(break_weights, text)
      end

      # Merges one pattern's digits into the weight array, keeping the
      # maximum weight seen at each position.
      def update_break_weights(pattern, break_weights, start_idx)
        pattern_idx = 0

        pattern.each_char do |segment|
          if segment =~ /\d/
            int_seg = segment.to_i
            idx = (start_idx + pattern_idx) - 1
            break if idx >= break_weights.size
            break_weights[idx] = if break_weights[idx] > int_seg
              break_weights[idx]
            else
              int_seg
            end
          else
            pattern_idx += 1
          end
        end
      end

      # Zeroes out break weights immediately after characters in the
      # nohyphen set (e.g. an existing hyphen or apostrophe).
      def remove_illegal_hyphens(break_weights, text)
        break_weights.map.with_index do |break_weight, idx|
          next break_weight if idx.zero?
          next 0 if no_hyphen.include?(text[idx - 1])
          break_weight
        end
      end

      def left_hyphen_min
        @left_hyphen_min ||=
          options.fetch(:lefthyphenmin, DEFAULT_LEFT_HYPHEN_MIN).to_i
      end

      def right_hyphen_min
        @right_hyphen_min ||=
          options.fetch(:righthyphenmin, DEFAULT_RIGHT_HYPHEN_MIN).to_i
      end

      def no_hyphen
        @no_hyphen ||= options.fetch(:nohyphen, DEFAULT_NO_HYPHEN)
      end

      # Builds a trie keyed by the pattern's letters (digits stripped),
      # storing the full pattern as the value.
      def build_trie_from(rules)
        TwitterCldr::Utils::Trie.new.tap do |trie|
          rules.each do |rule|
            trie.add(rule.gsub(/\d/, '').each_char, rule)
          end
        end
      end
    end
  end
end
| twitter/twitter-cldr-rb | lib/twitter_cldr/shared/hyphenator.rb | Ruby | apache-2.0 | 4,872 |
/**
* Copyright 2013 Agustín Miura <"agustin.miura@gmail.com">
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ar.com.imperium.common.security;
import org.springframework.stereotype.Component;
/**
 * No-op IHashService: returns the input unchanged. Intended as a stand-in
 * where real hashing is not required — must not be used where actual
 * hashing (e.g. for passwords) is expected.
 */
@Component("dummyHashService")
public class DummyHashServiceImpl implements IHashService
{
    @Override
    public String hashString(String input) throws Exception
    {
        // Identity transform: no hashing performed.
        return input;
    }
}
| agustinmiura/imperium | src/main/java/ar/com/imperium/common/security/DummyHashServiceImpl.java | Java | apache-2.0 | 925 |
<?php
namespace DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter;
/*!
* This class was generated with the PHPFHIR library (https://github.com/dcarbone/php-fhir) using
* class definitions from HL7 FHIR (https://www.hl7.org/fhir/)
*
* Class creation date: December 26th, 2019 15:44+0000
*
* PHPFHIR Copyright:
*
* Copyright 2016-2019 Daniel Carbone (daniel.p.carbone@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* FHIR Copyright Notice:
*
* Copyright (c) 2011+, HL7, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of HL7 nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* Generated on Fri, Nov 1, 2019 09:29+1100 for FHIR v4.0.1
*
* Note: the schemas & schematrons do not contain all of the rules about what makes resources
* valid. Implementers will still need to be familiar with the content of the specification and with
* any profiles that apply to the resources in order to make a conformant implementation.
*
*/
use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement;
use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept;
use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt;
use DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference;
use DCarbone\PHPFHIRGenerated\R4\PHPFHIRConstants;
use DCarbone\PHPFHIRGenerated\R4\PHPFHIRTypeInterface;
/**
* An interaction between a patient and healthcare provider(s) for the purpose of
* providing healthcare service(s) or assessing the health status of a patient.
*
* Class FHIREncounterDiagnosis
* @package \DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter
*/
class FHIREncounterDiagnosis extends FHIRBackboneElement
{
// name of FHIR type this class describes
const FHIR_TYPE_NAME = PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS;
const FIELD_CONDITION = 'condition';
const FIELD_RANK = 'rank';
const FIELD_RANK_EXT = '_rank';
const FIELD_USE = 'use';
/** @var string */
private $_xmlns = 'http://hl7.org/fhir';
/**
* A reference from one resource to another.
* If the element is present, it must have a value for at least one of the defined
* elements, an \@id referenced from the Narrative, or extensions
*
* Reason the encounter takes place, as specified using information from another
* resource. For admissions, this is the admission diagnosis. The indication will
* typically be a Condition (with other resources referenced in the
* evidence.detail), or a Procedure.
*
* @var null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference
*/
protected $condition = null;
/**
* An integer with a value that is positive (e.g. >0)
* If the element is present, it must have either a \@value, an \@id referenced from
* the Narrative, or extensions
*
* Ranking of the diagnosis (for each role type).
*
* @var null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt
*/
protected $rank = null;
/**
* A concept that may be defined by a formal reference to a terminology or ontology
* or may be provided by text.
* If the element is present, it must have a value for at least one of the defined
* elements, an \@id referenced from the Narrative, or extensions
*
* Role that this diagnosis has within the encounter (e.g. admission, billing,
* discharge …).
*
* @var null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept
*/
protected $use = null;
/**
* Validation map for fields in type Encounter.Diagnosis
* @var array
*/
private static $_validationRules = [ ];
    /**
     * FHIREncounterDiagnosis Constructor
     *
     * Accepts a decoded-JSON array and populates the condition, rank and use
     * fields, honoring the FHIR primitive extension convention ("rank" value
     * alongside an "_rank" extension array).
     *
     * @param null|array $data
     */
    public function __construct($data = null)
    {
        if (null === $data || [] === $data) {
            return;
        }
        if (!is_array($data)) {
            throw new \InvalidArgumentException(sprintf(
                'FHIREncounterDiagnosis::_construct - $data expected to be null or array, %s seen',
                gettype($data)
            ));
        }
        parent::__construct($data);
        // condition: accept a ready-made FHIRReference or raw array data.
        if (isset($data[self::FIELD_CONDITION])) {
            if ($data[self::FIELD_CONDITION] instanceof FHIRReference) {
                $this->setCondition($data[self::FIELD_CONDITION]);
            } else {
                $this->setCondition(new FHIRReference($data[self::FIELD_CONDITION]));
            }
        }
        // rank: merge the primitive value with any "_rank" extension data.
        if (isset($data[self::FIELD_RANK]) || isset($data[self::FIELD_RANK_EXT])) {
            if (isset($data[self::FIELD_RANK])) {
                $value = $data[self::FIELD_RANK];
            } else {
                $value = null;
            }
            if (isset($data[self::FIELD_RANK_EXT]) && is_array($data[self::FIELD_RANK_EXT])) {
                $ext = $data[self::FIELD_RANK_EXT];
            } else {
                $ext = [];
            }
            if (null !== $value) {
                if ($value instanceof FHIRPositiveInt) {
                    $this->setRank($value);
                } else if (is_array($value)) {
                    $this->setRank(new FHIRPositiveInt(array_merge($ext, $value)));
                } else {
                    $this->setRank(new FHIRPositiveInt([FHIRPositiveInt::FIELD_VALUE => $value] + $ext));
                }
            } else if ([] !== $ext) {
                // Extension-only rank (no primitive value present).
                $this->setRank(new FHIRPositiveInt($ext));
            }
        }
        // use: accept a ready-made FHIRCodeableConcept or raw array data.
        if (isset($data[self::FIELD_USE])) {
            if ($data[self::FIELD_USE] instanceof FHIRCodeableConcept) {
                $this->setUse($data[self::FIELD_USE]);
            } else {
                $this->setUse(new FHIRCodeableConcept($data[self::FIELD_USE]));
            }
        }
    }
    /**
     * Returns the canonical FHIR type name for this class ("Encounter.Diagnosis").
     *
     * @return string
     */
    public function _getFHIRTypeName()
    {
        return self::FHIR_TYPE_NAME;
    }
    /**
     * Returns an empty XML element for this type, including the xmlns
     * attribute when a namespace is configured.
     *
     * @return string
     */
    public function _getFHIRXMLElementDefinition()
    {
        $xmlns = $this->_getFHIRXMLNamespace();
        if (null !== $xmlns) {
            $xmlns = " xmlns=\"{$xmlns}\"";
        }
        return "<EncounterDiagnosis{$xmlns}></EncounterDiagnosis>";
    }
    /**
     * A reference from one resource to another.
     * If the element is present, it must have a value for at least one of the defined
     * elements, an \@id referenced from the Narrative, or extensions
     *
     * Reason the encounter takes place, as specified using information from another
     * resource. For admissions, this is the admission diagnosis. The indication will
     * typically be a Condition (with other resources referenced in the
     * evidence.detail), or a Procedure.
     *
     * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference
     */
    public function getCondition()
    {
        // May be null when no diagnosis reference was provided.
        return $this->condition;
    }
    /**
     * Setter for the "condition" element. Passing null clears the current value.
     *
     * A reference from one resource to another.
     * If the element is present, it must have a value for at least one of the defined
     * elements, an \@id referenced from the Narrative, or extensions
     *
     * Reason the encounter takes place, as specified using information from another
     * resource. For admissions, this is the admission diagnosis. The indication will
     * typically be a Condition (with other resources referenced in the
     * evidence.detail), or a Procedure.
     *
     * @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRReference $condition
     * @return static
     */
    public function setCondition(FHIRReference $condition = null)
    {
        $this->condition = $condition;
        return $this;
    }
    /**
     * Accessor for the "rank" element; null when not set.
     *
     * An integer with a value that is positive (e.g. >0)
     * If the element is present, it must have either a \@value, an \@id referenced from
     * the Narrative, or extensions
     *
     * Ranking of the diagnosis (for each role type).
     *
     * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt
     */
    public function getRank()
    {
        return $this->rank;
    }
/**
* An integer with a value that is positive (e.g. >0)
* If the element is present, it must have either a \@value, an \@id referenced from
* the Narrative, or extensions
*
* Ranking of the diagnosis (for each role type).
*
* @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRPositiveInt $rank
* @return static
*/
public function setRank($rank = null)
{
if (null === $rank) {
$this->rank = null;
return $this;
}
if ($rank instanceof FHIRPositiveInt) {
$this->rank = $rank;
return $this;
}
$this->rank = new FHIRPositiveInt($rank);
return $this;
}
    /**
     * Accessor for the "use" element; null when not set.
     *
     * A concept that may be defined by a formal reference to a terminology or ontology
     * or may be provided by text.
     * If the element is present, it must have a value for at least one of the defined
     * elements, an \@id referenced from the Narrative, or extensions
     *
     * Role that this diagnosis has within the encounter (e.g. admission, billing,
     * discharge …).
     *
     * @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept
     */
    public function getUse()
    {
        return $this->use;
    }
    /**
     * Setter for the "use" element. Passing null clears the current value.
     *
     * A concept that may be defined by a formal reference to a terminology or ontology
     * or may be provided by text.
     * If the element is present, it must have a value for at least one of the defined
     * elements, an \@id referenced from the Narrative, or extensions
     *
     * Role that this diagnosis has within the encounter (e.g. admission, billing,
     * discharge …).
     *
     * @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRCodeableConcept $use
     * @return static
     */
    public function setUse(FHIRCodeableConcept $use = null)
    {
        $this->use = $use;
        return $this;
    }
    /**
     * Returns the validation rules that this type's fields must comply with to be considered "valid"
     * The returned array is in ["fieldname[.offset]" => ["rule" => {constraint}]]
     *
     * @return array
     */
    public function _getValidationRules()
    {
        // Shared, statically-defined rule map for this generated type.
        return self::$_validationRules;
    }
    /**
     * Validates that this type conforms to the specifications set forth for it by FHIR. An empty array must be seen as
     * passing.
     *
     * @return array
     */
    public function _getValidationErrors()
    {
        $errs = parent::_getValidationErrors();
        $validationRules = $this->_getValidationRules();
        // First pass: recurse into each populated child element and collect its own errors.
        if (null !== ($v = $this->getCondition())) {
            if ([] !== ($fieldErrs = $v->_getValidationErrors())) {
                $errs[self::FIELD_CONDITION] = $fieldErrs;
            }
        }
        if (null !== ($v = $this->getRank())) {
            if ([] !== ($fieldErrs = $v->_getValidationErrors())) {
                $errs[self::FIELD_RANK] = $fieldErrs;
            }
        }
        if (null !== ($v = $this->getUse())) {
            if ([] !== ($fieldErrs = $v->_getValidationErrors())) {
                $errs[self::FIELD_USE] = $fieldErrs;
            }
        }
        // Second pass: apply any configured validation rules field by field, including
        // the fields inherited from BackboneElement / Element.
        if (isset($validationRules[self::FIELD_CONDITION])) {
            $v = $this->getCondition();
            foreach($validationRules[self::FIELD_CONDITION] as $rule => $constraint) {
                $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS, self::FIELD_CONDITION, $rule, $constraint, $v);
                if (null !== $err) {
                    if (!isset($errs[self::FIELD_CONDITION])) {
                        $errs[self::FIELD_CONDITION] = [];
                    }
                    $errs[self::FIELD_CONDITION][$rule] = $err;
                }
            }
        }
        if (isset($validationRules[self::FIELD_RANK])) {
            $v = $this->getRank();
            foreach($validationRules[self::FIELD_RANK] as $rule => $constraint) {
                $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS, self::FIELD_RANK, $rule, $constraint, $v);
                if (null !== $err) {
                    if (!isset($errs[self::FIELD_RANK])) {
                        $errs[self::FIELD_RANK] = [];
                    }
                    $errs[self::FIELD_RANK][$rule] = $err;
                }
            }
        }
        if (isset($validationRules[self::FIELD_USE])) {
            $v = $this->getUse();
            foreach($validationRules[self::FIELD_USE] as $rule => $constraint) {
                $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ENCOUNTER_DOT_DIAGNOSIS, self::FIELD_USE, $rule, $constraint, $v);
                if (null !== $err) {
                    if (!isset($errs[self::FIELD_USE])) {
                        $errs[self::FIELD_USE] = [];
                    }
                    $errs[self::FIELD_USE][$rule] = $err;
                }
            }
        }
        if (isset($validationRules[self::FIELD_MODIFIER_EXTENSION])) {
            $v = $this->getModifierExtension();
            foreach($validationRules[self::FIELD_MODIFIER_EXTENSION] as $rule => $constraint) {
                $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_BACKBONE_ELEMENT, self::FIELD_MODIFIER_EXTENSION, $rule, $constraint, $v);
                if (null !== $err) {
                    if (!isset($errs[self::FIELD_MODIFIER_EXTENSION])) {
                        $errs[self::FIELD_MODIFIER_EXTENSION] = [];
                    }
                    $errs[self::FIELD_MODIFIER_EXTENSION][$rule] = $err;
                }
            }
        }
        if (isset($validationRules[self::FIELD_EXTENSION])) {
            $v = $this->getExtension();
            foreach($validationRules[self::FIELD_EXTENSION] as $rule => $constraint) {
                $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ELEMENT, self::FIELD_EXTENSION, $rule, $constraint, $v);
                if (null !== $err) {
                    if (!isset($errs[self::FIELD_EXTENSION])) {
                        $errs[self::FIELD_EXTENSION] = [];
                    }
                    $errs[self::FIELD_EXTENSION][$rule] = $err;
                }
            }
        }
        if (isset($validationRules[self::FIELD_ID])) {
            $v = $this->getId();
            foreach($validationRules[self::FIELD_ID] as $rule => $constraint) {
                $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_ELEMENT, self::FIELD_ID, $rule, $constraint, $v);
                if (null !== $err) {
                    if (!isset($errs[self::FIELD_ID])) {
                        $errs[self::FIELD_ID] = [];
                    }
                    $errs[self::FIELD_ID][$rule] = $err;
                }
            }
        }
        return $errs;
    }
/**
* @param \SimpleXMLElement|string|null $sxe
* @param null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter\FHIREncounterDiagnosis $type
* @param null|int $libxmlOpts
* @return null|\DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter\FHIREncounterDiagnosis
*/
public static function xmlUnserialize($sxe = null, PHPFHIRTypeInterface $type = null, $libxmlOpts = 591872)
{
if (null === $sxe) {
return null;
}
if (is_string($sxe)) {
libxml_use_internal_errors(true);
$sxe = new \SimpleXMLElement($sxe, $libxmlOpts, false);
if ($sxe === false) {
throw new \DomainException(sprintf('FHIREncounterDiagnosis::xmlUnserialize - String provided is not parseable as XML: %s', implode(', ', array_map(function(\libXMLError $err) { return $err->message; }, libxml_get_errors()))));
}
libxml_use_internal_errors(false);
}
if (!($sxe instanceof \SimpleXMLElement)) {
throw new \InvalidArgumentException(sprintf('FHIREncounterDiagnosis::xmlUnserialize - $sxe value must be null, \\SimpleXMLElement, or valid XML string, %s seen', gettype($sxe)));
}
if (null === $type) {
$type = new FHIREncounterDiagnosis;
} elseif (!is_object($type) || !($type instanceof FHIREncounterDiagnosis)) {
throw new \RuntimeException(sprintf(
'FHIREncounterDiagnosis::xmlUnserialize - $type must be instance of \DCarbone\PHPFHIRGenerated\R4\FHIRElement\FHIRBackboneElement\FHIREncounter\FHIREncounterDiagnosis or null, %s seen.',
is_object($type) ? get_class($type) : gettype($type)
));
}
FHIRBackboneElement::xmlUnserialize($sxe, $type);
$xmlNamespaces = $sxe->getDocNamespaces(false, false);
if ([] !== $xmlNamespaces) {
$ns = reset($xmlNamespaces);
if (false !== $ns && '' !== $ns) {
$type->_xmlns = $ns;
}
}
$attributes = $sxe->attributes();
$children = $sxe->children();
if (isset($children->condition)) {
$type->setCondition(FHIRReference::xmlUnserialize($children->condition));
}
if (isset($children->rank)) {
$type->setRank(FHIRPositiveInt::xmlUnserialize($children->rank));
}
if (isset($attributes->rank)) {
$pt = $type->getRank();
if (null !== $pt) {
$pt->setValue((string)$attributes->rank);
} else {
$type->setRank((string)$attributes->rank);
}
}
if (isset($children->use)) {
$type->setUse(FHIRCodeableConcept::xmlUnserialize($children->use));
}
return $type;
}
    /**
     * Serializes this element into the given (or a freshly created) SimpleXMLElement.
     *
     * @param null|\SimpleXMLElement $sxe target element; created from this type's XML definition when null
     * @param null|int $libxmlOpts
     * @return \SimpleXMLElement
     */
    public function xmlSerialize(\SimpleXMLElement $sxe = null, $libxmlOpts = 591872)
    {
        if (null === $sxe) {
            $sxe = new \SimpleXMLElement($this->_getFHIRXMLElementDefinition(), $libxmlOpts, false);
        }
        // Parent writes the inherited BackboneElement fields first.
        parent::xmlSerialize($sxe);
        if (null !== ($v = $this->getCondition())) {
            $v->xmlSerialize($sxe->addChild(self::FIELD_CONDITION, null, $v->_getFHIRXMLNamespace()));
        }
        if (null !== ($v = $this->getRank())) {
            $v->xmlSerialize($sxe->addChild(self::FIELD_RANK, null, $v->_getFHIRXMLNamespace()));
        }
        if (null !== ($v = $this->getUse())) {
            $v->xmlSerialize($sxe->addChild(self::FIELD_USE, null, $v->_getFHIRXMLNamespace()));
        }
        return $sxe;
    }
    /**
     * Serializes this element to an array structure suitable for json_encode().
     *
     * @return array
     */
    public function jsonSerialize()
    {
        $a = parent::jsonSerialize();
        if (null !== ($v = $this->getCondition())) {
            $a[self::FIELD_CONDITION] = $v;
        }
        if (null !== ($v = $this->getRank())) {
            // Primitive fields emit their raw value under "rank"; any id/extension data
            // goes under the sibling "_rank" key, per the FHIR JSON representation.
            $a[self::FIELD_RANK] = $v->getValue();
            $enc = $v->jsonSerialize();
            $cnt = count($enc);
            if (0 < $cnt && (1 !== $cnt || (1 === $cnt && !array_key_exists(FHIRPositiveInt::FIELD_VALUE, $enc)))) {
                unset($enc[FHIRPositiveInt::FIELD_VALUE]);
                $a[self::FIELD_RANK_EXT] = $enc;
            }
        }
        if (null !== ($v = $this->getUse())) {
            $a[self::FIELD_USE] = $v;
        }
        if ([] !== ($vs = $this->_getFHIRComments())) {
            $a[PHPFHIRConstants::JSON_FIELD_FHIR_COMMENTS] = $vs;
        }
        return $a;
    }
    /**
     * String conversion yields the FHIR type name; mainly useful in logs.
     *
     * @return string
     */
    public function __toString()
    {
        return self::FHIR_TYPE_NAME;
    }
} | dcarbone/php-fhir-generated | src/DCarbone/PHPFHIRGenerated/R4/FHIRElement/FHIRBackboneElement/FHIREncounter/FHIREncounterDiagnosis.php | PHP | apache-2.0 | 21,748 |
/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// If you make changes to this file, you should also make the corresponding change in ReplicaSet.
package replication
import (
"github.com/golang/glog"
"k8s.io/kubernetes/pkg/api"
client "k8s.io/kubernetes/pkg/client/unversioned"
)
// updateReplicaCount attempts to update the Status.Replicas of the given controller,
// retrying the PUT (with a fresh GET in between) up to statusUpdateRetries times.
func updateReplicaCount(rcClient client.ReplicationControllerInterface, controller api.ReplicationController, numReplicas int) (updateErr error) {
	// This is the steady state. It happens when the rc doesn't have any expectations, since
	// we do a periodic relist every 30s. If the generations differ but the replicas are
	// the same, a caller might've resized to the same replica count.
	if controller.Status.Replicas == numReplicas &&
		controller.Generation == controller.Status.ObservedGeneration {
		return nil
	}
	// Save the generation number we acted on, otherwise we might wrongfully indicate
	// that we've seen a spec update when we retry.
	// TODO: This can clobber an update if we allow multiple agents to write to the
	// same status.
	generation := controller.Generation
	var getErr error
	// rc starts as a pointer to our local copy; on each retry it is replaced by the
	// freshly fetched controller so the PUT carries the latest resourceVersion.
	for i, rc := 0, &controller; ; i++ {
		glog.V(4).Infof("Updating replica count for rc: %v, %d->%d (need %d), sequence No: %v->%v",
			controller.Name, controller.Status.Replicas, numReplicas, controller.Spec.Replicas, controller.Status.ObservedGeneration, generation)
		rc.Status = api.ReplicationControllerStatus{Replicas: numReplicas, ObservedGeneration: generation}
		_, updateErr = rcClient.UpdateStatus(rc)
		if updateErr == nil || i >= statusUpdateRetries {
			return updateErr
		}
		// Update the controller with the latest resource version for the next poll
		if rc, getErr = rcClient.Get(controller.Name); getErr != nil {
			// If the GET fails we can't trust status.Replicas anymore. This error
			// is bound to be more interesting than the update failure.
			return getErr
		}
	}
}
// OverlappingControllers sorts a list of controllers by creation timestamp, using their names as a tie breaker.
// It implements sort.Interface.
type OverlappingControllers []api.ReplicationController

func (o OverlappingControllers) Len() int { return len(o) }
func (o OverlappingControllers) Swap(i, j int) { o[i], o[j] = o[j], o[i] }

// Less orders by CreationTimestamp, falling back to lexical Name comparison
// when two controllers were created at the same instant.
func (o OverlappingControllers) Less(i, j int) bool {
	if o[i].CreationTimestamp.Equal(o[j].CreationTimestamp) {
		return o[i].Name < o[j].Name
	}
	return o[i].CreationTimestamp.Before(o[j].CreationTimestamp)
}
| WIZARD-CXY/kubernetes | pkg/controller/replication/replication_controller_utils.go | GO | apache-2.0 | 3,070 |
package com.sadc.game.gameobject.trackobject;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.sadc.game.GameConstants;
import com.sadc.game.gameobject.GameUtils;
import com.sadc.game.gameobject.Player;
/**
 * A solid obstacle on the track: colliding with it crashes the player and
 * removes the wall from play.
 *
 * @author f536985 (Tom Farello)
 */
public class Wall extends TrackObject {
    public Wall(float distance, float angle) {
        setActive(true);
        setDistance(distance);
        setAngle(angle);
        setWidth(22);
        // NOTE(review): each Wall instance loads its own Texture; confirm that
        // TrackObject (or the owning screen) disposes it to avoid leaking GPU memory.
        setTexture(new Texture("brickWall.png"));
    }
    @Override
    public void update(float delta, Player player) {
        // On contact: crash the player, then deactivate so the wall is only hit once.
        if (collide(player)) {
            player.crash();
            setActive(false);
        }
    }
    @Override
    public void draw(float delta, float playerDistance, SpriteBatch spriteBatch) {
        // Scale factor grows exponentially as the player approaches:
        // 2^(playerDistance - wallDistance), simulating depth along the track.
        float drawDistance = (float)Math.pow(2 , playerDistance - (getDistance()));
        GameUtils.setColorByDrawDistance(drawDistance, spriteBatch);
        spriteBatch.draw(getTexture(), GameConstants.SCREEN_WIDTH / 2 - 50, 15,
                50, GameConstants.SCREEN_HEIGHT / 2 - 15, 100, 70, drawDistance, drawDistance, getAngle(), 0, 0, 100, 70, false, false);
    }
}
| jlturner85/libgdx-gradle-template | core/src/main/java/com/sadc/game/gameobject/trackobject/Wall.java | Java | apache-2.0 | 1,196 |
# Manages the single JobApplication nested under a Posting.
# Authorization is enforced via Pundit (`authorize` / `verify_authorized`).
class JobApplicationsController < ApplicationController
  after_action :verify_authorized
  after_action :verify_policy_scoped, only: [:index]
  before_action :require_login
  before_action :set_job_application, only: [:show, :edit, :update, :destroy, :followup]
  # GET /posting/1/job_application
  # GET /posting/1/job_application.json
  # def index
  #   @posting = Posting.unscoped.find(params[:posting_id])
  #   authorize @posting, :show?
  #   @job_applications = JobApplication.all
  # end
  # GET /posting/1/job_application
  # GET /posting/1/job_application.json
  def show
  end
  # GET /posting/1/job_application/new
  def new
    @job_application = JobApplication.new
    authorize @job_application
  end
  # GET /posting/1/job_application/edit
  def edit
  end
  # POST /posting/1/job_application
  # POST /posting/1/job_application.json
  def create
    @job_application = JobApplication.new(job_application_params)
    # Posting.unscoped bypasses the model's default scope when locating the parent.
    @job_application.posting = Posting.unscoped.find(params[:posting_id])
    authorize @job_application
    authorize @job_application.posting, :update?
    respond_to do |format|
      if @job_application.save
        # TODO: Is this line still needed?
        @job_application_is_new = true
        format.html {
          redirect_to @job_application.posting,
            notice: 'Job application was successfully created.'
        }
        format.json { render action: 'show', status: :created }
      else
        format.html { render action: 'new' }
        format.json {
          render json: @job_application.errors,
            status: :unprocessable_entity
        }
      end
    end
  end
  # PATCH/PUT /posting/1/job_application/followup.json
  # Stamps the application with the current time to record that a follow-up
  # message was sent (JSON-only endpoint).
  def followup
    respond_to do |format|
      if @job_application.update(followup: Time.now)
        format.json { render action: 'show' }
      else
        format.json {
          render json: @job_application.errors,
            status: :unprocessable_entity
        }
      end
    end
  end
  # PATCH/PUT /posting/1/job_application
  # PATCH/PUT /posting/1/job_application.json
  def update
    respond_to do |format|
      if @job_application.update(job_application_params)
        format.html { redirect_to @job_application.posting, notice: 'Changes saved!' }
        format.json { render action: 'show', notice: 'Changes saved!' }
      else
        format.html { render action: 'edit' }
        format.json {
          render json: @job_application.errors,
            status: :unprocessable_entity
        }
      end
    end
  end
  # DELETE /posting/1/job_application
  # DELETE /posting/1/job_application.json
  def destroy
    @job_application.destroy
    respond_to do |format|
      format.html { redirect_to @job_application.posting }
      format.json { head :no_content }
    end
  end
  private
  # Use callbacks to share common setup or constraints between actions.
  # Loads the posting's single application (Posting.unscoped bypasses the
  # model's default scope) and authorizes it.
  def set_job_application
    @job_application = Posting.unscoped.find(params[:posting_id]).job_application
    authorize @job_application
  end
  # Never trust parameters from the scary internet, only allow the white list through.
  def job_application_params
    params.require(:job_application).permit(:date_sent, :cover_letter, :posting_id)
  end
end
/*
*
* * Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://orientdb.com
*
*/
package com.orientechnologies.orient.core.sql.functions.coll;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * This operator can work as aggregate or inline. If only one argument is passed than aggregates,
 * otherwise executes, and returns, the SYMMETRIC DIFFERENCE between the collections received as
 * parameters. Works also with no collection values.
 *
 * @author Luca Garulli (l.garulli--(at)--orientdb.com)
 */
public class OSQLFunctionSymmetricDifference extends OSQLFunctionMultiValueAbstract<Set<Object>> {
  public static final String NAME = "symmetricDifference";

  /** Items seen more than once; excluded from the aggregate result. */
  private Set<Object> rejected;

  public OSQLFunctionSymmetricDifference() {
    super(NAME, 1, -1);
  }

  /**
   * Adds {@code o} to {@code accepted} on first sight; on any repeat sighting it is
   * moved to {@code rejected} and removed from the result.
   */
  private static void addItemToResult(Object o, Set<Object> accepted, Set<Object> rejected) {
    if (!accepted.contains(o) && !rejected.contains(o)) {
      accepted.add(o);
    } else {
      accepted.remove(o);
      rejected.add(o);
    }
  }

  private static void addItemsToResult(
      Collection<Object> co, Set<Object> accepted, Set<Object> rejected) {
    for (Object o : co) {
      addItemToResult(o, accepted, rejected);
    }
  }

  @SuppressWarnings("unchecked")
  public Object execute(
      Object iThis,
      OIdentifiable iCurrentRecord,
      Object iCurrentResult,
      final Object[] iParams,
      OCommandContext iContext) {
    if (iParams[0] == null) return null;
    Object value = iParams[0];
    if (iParams.length == 1) {
      // AGGREGATION MODE (STATEFUL)
      if (context == null) {
        context = new HashSet<Object>();
        rejected = new HashSet<Object>();
      }
      if (value instanceof Collection<?>) {
        addItemsToResult((Collection<Object>) value, context, rejected);
      } else {
        addItemToResult(value, context, rejected);
      }
      return null;
    } else {
      // IN-LINE MODE (STATELESS)
      final Set<Object> result = new HashSet<Object>();
      final Set<Object> rejected = new HashSet<Object>();
      for (Object iParameter : iParams) {
        if (iParameter instanceof Collection<?>) {
          addItemsToResult((Collection<Object>) iParameter, result, rejected);
        } else {
          addItemToResult(iParameter, result, rejected);
        }
      }
      return result;
    }
  }

  @Override
  public Set<Object> getResult() {
    if (returnDistributedResult()) {
      // Ship both the partial result and the rejected set so nodes can merge correctly.
      final Map<String, Object> doc = new HashMap<String, Object>();
      doc.put("result", context);
      doc.put("rejected", rejected);
      return Collections.<Object>singleton(doc);
    } else {
      return super.getResult();
    }
  }

  public String getSyntax() {
    // Bug fix: previously advertised "difference(<field>*)" (copy-paste from
    // OSQLFunctionDifference); this function is registered as NAME above.
    return "symmetricDifference(<field>*)";
  }

  @Override
  public Object mergeDistributedResult(List<Object> resultsToMerge) {
    if (returnDistributedResult()) {
      final Set<Object> result = new HashSet<Object>();
      final Set<Object> rejected = new HashSet<Object>();
      // Union all rejected sets first so re-added duplicates stay excluded.
      for (Object item : resultsToMerge) {
        rejected.addAll(unwrap(item, "rejected"));
      }
      for (Object item : resultsToMerge) {
        addItemsToResult(unwrap(item, "result"), result, rejected);
      }
      return result;
    }
    if (!resultsToMerge.isEmpty()) return resultsToMerge.get(0);
    return null;
  }

  @SuppressWarnings("unchecked")
  private Set<Object> unwrap(Object obj, String field) {
    final Set<Object> objAsSet = (Set<Object>) obj;
    final Map<String, Object> objAsMap = (Map<String, Object>) objAsSet.iterator().next();
    final Set<Object> objAsField = (Set<Object>) objAsMap.get(field);
    return objAsField;
  }
}
| orientechnologies/orientdb | core/src/main/java/com/orientechnologies/orient/core/sql/functions/coll/OSQLFunctionSymmetricDifference.java | Java | apache-2.0 | 4,574 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleemail.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * <p>
 * An empty element returned on a successful request.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/email-2010-12-01/SetReceiptRulePosition" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SetReceiptRulePositionResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // This result type carries no fields, so the representation is constant.
        return "{}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Any other instance of this field-less type is considered equal; null is not.
        return obj instanceof SetReceiptRulePositionResult;
    }

    @Override
    public int hashCode() {
        // No fields participate, so every instance hashes identically.
        return 1;
    }

    @Override
    public SetReceiptRulePositionResult clone() {
        try {
            return (SetReceiptRulePositionResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| dagnir/aws-sdk-java | aws-java-sdk-ses/src/main/java/com/amazonaws/services/simpleemail/model/SetReceiptRulePositionResult.java | Java | apache-2.0 | 2,365 |
// <copyright file="KeyEvent.cs" company="WebDriver Committers">
// Copyright 2015 Software Freedom Conservancy
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Collections.Generic;
using System.Text;
using OpenQA.Selenium;
namespace Selenium.Internal.SeleniumEmulation
{
    /// <summary>
    /// Implements the Selenium RC keyEvent-style keywords by dispatching the named
    /// event through the embedded Selenium JavaScript library.
    /// </summary>
    internal class KeyEvent : SeleneseCommand
    {
        private readonly ElementFinder finder;
        private readonly KeyState keyState;
        private readonly string eventName;

        /// <summary>
        /// Initializes a new instance of the <see cref="KeyEvent"/> class.
        /// </summary>
        /// <param name="elementFinder">An <see cref="ElementFinder"/> used to locate the target element.</param>
        /// <param name="state">A <see cref="KeyState"/> object describing which modifier keys are held.</param>
        /// <param name="eventName">The name of the JavaScript event to dispatch.</param>
        public KeyEvent(ElementFinder elementFinder, KeyState state, string eventName)
        {
            this.finder = elementFinder;
            this.keyState = state;
            this.eventName = eventName;
        }

        /// <summary>
        /// Dispatches the configured event to the located element, forwarding the
        /// current modifier-key state.
        /// </summary>
        /// <param name="driver">The driver used to execute the command.</param>
        /// <param name="locator">The element locator.</param>
        /// <param name="value">The key value to send with the event.</param>
        /// <returns>Always null; the command has no result.</returns>
        protected override object HandleSeleneseCommand(IWebDriver driver, string locator, string value)
        {
            var target = this.finder.FindElement(driver, locator);
            object[] arguments =
            {
                value,
                this.keyState.ControlKeyDown,
                this.keyState.AltKeyDown,
                this.keyState.ShiftKeyDown,
                this.keyState.MetaKeyDown
            };
            JavaScriptLibrary.CallEmbeddedSelenium(driver, this.eventName, target, arguments);
            return null;
        }
    }
}
| soundcloud/selenium | dotnet/src/webdriverbackedselenium/Internal/SeleniumEmulation/KeyEvent.cs | C# | apache-2.0 | 2,688 |
/*
* Copyright 2012-2016 JetBrains s.r.o
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.jetpad.base.edt;
public class BufferingEdtManager extends RunningEdtManager {
public BufferingEdtManager() {
super();
}
public BufferingEdtManager(String name) {
super(name);
}
@Override
protected void doSchedule(Runnable r) {
addTaskToQueue(r);
}
@Override
public String toString() {
return "BufferingEdtManager@" + Integer.toHexString(hashCode()) +
("".equals(getName()) ? "" : " (" + getName()+ ")");
}
} | timzam/jetpad-mapper | util/base/src/test/java/jetbrains/jetpad/base/edt/BufferingEdtManager.java | Java | apache-2.0 | 1,075 |
<?php
/*
 * Apolbox - Framework Production
 *
 * Apolbox is open source code serving as an application framework for building
 * websites with the "wizard" method, which makes website development faster
 * and more practical.
 *
 * (c) Ayus irfang filaras
 *
 */
require_once __DIR__.'/../bootstrap/pustaka.php';
/**
 * Apolbox - Framework Production
 *
 * Apolbox is open source code serving as an application framework for building
 * websites with the "wizard" method, which makes website development faster
 * and more practical.
 *
 * @package apolbox
 * @subpackage administrator
 *
 * @copyright (c) [29 Juni 2016]
 * @since version 1.0.0
 *
 * @author Ayus Irfang Filaras <ayus.sahabat@gmail.com>
 * @link https://github.com/apolbox/apolbox.git
 * @lisence https://github.com/apolbox/apolbox/blob/master/LICENSE
 */
// Bootstrap the framework and hand control to the front controller.
$pustaka = new Pustaka\Applications();
return $pustaka->run();
| apolbox-project/apolbox | public_html/index.php | PHP | apache-2.0 | 921 |
/*
* Copyright 2016
* Ubiquitous Knowledge Processing (UKP) Lab
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.experiments.argumentation.sequence.io.filters;
import de.tudarmstadt.ukp.experiments.argumentation.sequence.DocumentRegister;
import org.apache.uima.jcas.JCas;
import java.util.HashSet;
import java.util.Set;
/**
 * Filters documents by their {@link DocumentRegister}. A document is removed from the
 * collection when a non-empty register configuration is present and the document's
 * register is not among the configured ones.
 *
 * @author Ivan Habernal
 */
public class DocumentRegisterFilter
        implements DocumentCollectionFilter
{
    private final Set<DocumentRegister> documentRegisters = new HashSet<>();

    /**
     * @param documentRegistersString space-separated register names; {@code null} or
     *                                blank means "no filtering"
     */
    public DocumentRegisterFilter(String documentRegistersString)
    {
        // Robustness fix: treat null the same as an empty configuration instead of
        // failing with a NullPointerException.
        if (documentRegistersString != null && !documentRegistersString.isEmpty()) {
            for (String token : documentRegistersString.split(" ")) {
                String register = token.trim();
                if (!register.isEmpty()) {
                    documentRegisters.add(DocumentRegister.fromString(register));
                }
            }
        }
    }

    /**
     * @return true when filtering is enabled and the document's register is not configured
     */
    @Override
    public boolean removeFromCollection(JCas jCas)
    {
        DocumentRegister register = DocumentRegister.fromJCas(jCas);
        return !documentRegisters.isEmpty() && !documentRegisters.contains(register);
    }

    /**
     * @return true only when at least one register was configured
     */
    @Override
    public boolean applyFilter()
    {
        return !documentRegisters.isEmpty();
    }
}
| habernal/emnlp2015 | code/experiments/src/main/java/de/tudarmstadt/ukp/experiments/argumentation/sequence/io/filters/DocumentRegisterFilter.java | Java | apache-2.0 | 1,948 |
package es.npatarino.android.gotchallenge.chat.message.viewmodel;
import android.net.Uri;
import es.npatarino.android.gotchallenge.chat.message.model.Payload;
/**
 * Message payload carrying a sticker image, identified by its file path / URI string.
 */
public class StickerPayLoad implements Payload {

    /** Path (or URI string) of the sticker; parsed lazily by {@link #getSticker()}. */
    private final String stickerFilePath;

    public StickerPayLoad(String stickerFilePath) {
        this.stickerFilePath = stickerFilePath;
    }

    public String getStickerFilePath() {
        return stickerFilePath;
    }

    public Uri getSticker() {
        return Uri.parse(stickerFilePath);
    }
}
| tonilopezmr/Game-of-Thrones | app/src/main/java/es/npatarino/android/gotchallenge/chat/message/viewmodel/StickerPayLoad.java | Java | apache-2.0 | 490 |
// Controller action: delegates to the framework-provided render() with no arguments.
exports.view = function() {
  this.render();
};

// Async variant of the action; also just delegates to render().
exports.async = function() {
  this.render();
};
| alibaba/plover | packages/plover/test/fixtures/core/app/modules/helper/index.js | JavaScript | apache-2.0 | 99 |
module.exports = {
readFiles: readFiles
};
var filewalker = require('filewalker');
var _ = require('kling/kling.js');
var fs = require('fs');
function readFiles(directory, fileSuffix) {
return new Promise(function(resolve, reject) {
var files = [];
filewalker(directory)
.on('file', function(file) {
if (file.endsWith(fileSuffix)) {
files.push(file);
}
})
.on('done', function() {
var addLazyReaderCurried = _.curry(addLazyReader);
resolve(_.fmap(addLazyReaderCurried(directory), files));
})
.walk();
});
}
function addLazyReader(directory, file) {
return {
name: directory+file,
getContents: function() {
return fs.readFileSync(directory+file, 'utf8');
}
};
}
| petercrona/latmask | src/reader/reader.js | JavaScript | apache-2.0 | 880 |
/* */ package com.hundsun.network.gates.wulin.biz.service.pojo.auction;
/* */
/* */ import com.hundsun.network.gates.luosi.biz.security.ServiceException;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumActiveStatus;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumBidCheckStatus;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumBidPriceStatus;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumOperatorType;
/* */ import com.hundsun.network.gates.luosi.common.remote.ServiceResult;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.enums.EnumAuctionErrors;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.request.AuctionMulitBidRequest;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.request.SystemMessageRequest;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionBidderDAO;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionFreeBidDAO;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionHallDAO;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionLogDAO;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionBidder;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionFreeBid;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionLog;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionMulitBidProject;
/* */ import com.hundsun.network.gates.wulin.biz.domain.query.AuctionMulitBidProjectQuery;
/* */ import com.hundsun.network.gates.wulin.biz.domain.query.MulitAuctionReviewQuery;
/* */ import com.hundsun.network.gates.wulin.biz.service.BaseService;
/* */ import com.hundsun.network.gates.wulin.biz.service.auction.MulitAuctionService;
/* */ import com.hundsun.network.gates.wulin.biz.service.message.SystemMessageService;
/* */ import com.hundsun.network.gates.wulin.biz.service.project.ProjectListingService;
/* */ import com.hundsun.network.melody.common.util.StringUtil;
/* */ import java.io.IOException;
/* */ import java.util.ArrayList;
/* */ import java.util.HashMap;
/* */ import java.util.List;
/* */ import java.util.Locale;
/* */ import org.apache.commons.logging.Log;
/* */ import org.codehaus.jackson.map.ObjectMapper;
/* */ import org.springframework.beans.factory.annotation.Autowired;
/* */ import org.springframework.context.MessageSource;
/* */ import org.springframework.stereotype.Service;
/* */ import org.springframework.transaction.TransactionStatus;
/* */ import org.springframework.transaction.support.TransactionCallback;
/* */ import org.springframework.transaction.support.TransactionTemplate;
/* */
// Spring service implementing the review (rejection) workflow for "multi-bid"
// auctions. NOTE(review): this is decompiled output (JD-Core); the `/* NN */`
// markers are original source line numbers emitted by the decompiler, and the
// gaps in their numbering indicate code the decompiler dropped or folded.
/* */ @Service("mulitAuctionService")
/* */ public class MulitAuctionServiceImpl extends BaseService
/* */   implements MulitAuctionService
/* */ {
/* */
/* */   @Autowired
/* */   private ProjectListingService projectListingService;
/* */
/* */   @Autowired
/* */   private AuctionFreeBidDAO auctionFreeBidDAO;
/* */
/* */   @Autowired
/* */   private AuctionBidderDAO auctionBidderDAO;
/* */
/* */   @Autowired
/* */   private AuctionLogDAO auctionLogDAO;
/* */
/* */   @Autowired
/* */   private MessageSource messageSource;
/* */
/* */   @Autowired
/* */   private AuctionHallDAO auctionHallDAO;
/* */
/* */   @Autowired
/* */   private SystemMessageService systemMessageService;
/* */
// Rejects a bidder's current free bid on a multi-bid project. Validates the
// request, loads the unchecked project listing / top free bid / bidder row,
// then inside one transaction: records a check entry with status Fail,
// deletes the bidder, decrements the hall's priority counter when the bidder
// was a priority bidder, sends a system message to the bidder, and writes an
// audit log. Any failure rolls back to a savepoint and returns an error code.
/* */   public ServiceResult review(final AuctionMulitBidRequest request)
/* */   {
/* 70 */ ServiceResult serviceResult = new ServiceResult();
/* */
// All of bidderAccount, reviewer, projectCode and remark are mandatory.
/* 72 */ if ((null == request) || (StringUtil.isEmpty(request.getBidderAccount())) || (StringUtil.isEmpty(request.getReviewer())) || (StringUtil.isEmpty(request.getProjectCode())) || (StringUtil.isEmpty(request.getRemark())))
/* */     {
/* 76 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.PARAMETER_ERROR.getValue()), EnumAuctionErrors.PARAMETER_ERROR.getInfo());
/* */
/* 78 */ return serviceResult;
/* */     }
// The reviewer may only act on projects still awaiting check.
/* 80 */ AuctionMulitBidProjectQuery query = new AuctionMulitBidProjectQuery();
/* 81 */ query.setReviewer(request.getReviewer());
/* 82 */ query.setProjectCode(request.getProjectCode());
/* 83 */ List projectList = this.projectListingService.queryAuctionMulitBidProjectUncheckedByProjectCode(query);
/* */
/* 86 */ if ((null == projectList) || (projectList.size() <= 0)) {
/* 87 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.CHECK_PROJECT_LISTING_NULL.getValue()), EnumAuctionErrors.CHECK_PROJECT_LISTING_NULL.getInfo());
/* */
/* 89 */ return serviceResult;
/* */     }
/* */
// The bid under review: the bidder's top effective, already-passed free bid.
/* 92 */ AuctionFreeBid auctionFreeBid = queryTopUncheckFreeBid(request.getProjectCode(), request.getBidderAccount());
/* */
/* 94 */ if (null == auctionFreeBid) {
/* 95 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.PARAMETER_ERROR.getValue()), EnumAuctionErrors.PARAMETER_ERROR.getInfo());
/* */
/* 97 */ return serviceResult;
/* */     }
/* */
/* 100 */ AuctionBidder auctionBidder = this.auctionBidderDAO.selectNormalByBidderAccount(request.getProjectCode(), request.getBidderAccount());
/* */
/* 102 */ if (null == auctionBidder) {
/* 103 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.CHECK_BIDDER_NULL.getValue()), EnumAuctionErrors.CHECK_BIDDER_NULL.getInfo());
/* */
/* 105 */ return serviceResult;
/* */     }
// Snapshot the bidder as JSON for the audit log; a serialization failure is
// logged but deliberately does not abort the review.
/* 107 */ ObjectMapper mapper = new ObjectMapper();
/* 108 */ String auctionBidderJson = "";
/* */     try {
/* 110 */ auctionBidderJson = mapper.writeValueAsString(auctionBidder);
/* */     } catch (IOException e) {
/* 112 */ if (this.log.isErrorEnabled()) {
/* 113 */ this.log.error("convert auctionBidder to json format fail,", e);
/* */     }
/* */     }
// Effectively-final copies captured by the anonymous transaction callback.
/* 116 */ final String fAuctionBidderJson = auctionBidderJson;
/* 117 */ final AuctionFreeBid fAuctionFreeBid = auctionFreeBid;
/* 118 */ final String logRemark = getMessage("project.auction.mulitbid.review.log.remark", new String[] { request.getReviewer(), auctionBidder.getBidderAccount() });
/* */
/* 120 */ final AuctionBidder fAuctionBidder = auctionBidder;
/* 121 */ final AuctionMulitBidProject fAuctionMulitBidProject = (AuctionMulitBidProject)projectList.get(0);
/* */
// All mutations below run inside a single programmatic transaction with a
// savepoint so partial work can be rolled back on any failure.
/* 123 */ serviceResult = (ServiceResult)this.transactionTemplate.execute(new TransactionCallback() {
/* */       public ServiceResult doInTransaction(TransactionStatus status) {
/* 125 */ ServiceResult result = new ServiceResult();
/* 126 */ Object savePoint = status.createSavepoint();
/* */         try
/* */         {
// 1) Persist a new free-bid record copying the original bid but marked as
//    check-failed, carrying the reviewer's remark.
/* 129 */ AuctionFreeBid auctionFreeBid = new AuctionFreeBid();
/* 130 */ auctionFreeBid.setBidderAccount(fAuctionFreeBid.getBidderAccount());
/* 131 */ auctionFreeBid.setBidderTrademark(fAuctionFreeBid.getBidderTrademark());
/* 132 */ auctionFreeBid.setBidOperatorAccount(fAuctionFreeBid.getBidOperatorAccount());
/* 133 */ auctionFreeBid.setCheckRemark(request.getRemark());
/* 134 */ auctionFreeBid.setCheckStatus(EnumBidCheckStatus.Fail.getValue());
/* 135 */ auctionFreeBid.setIp(fAuctionFreeBid.getIp());
/* 136 */ auctionFreeBid.setOperator(request.getOperator());
/* 137 */ auctionFreeBid.setPrice(fAuctionFreeBid.getPrice());
/* 138 */ auctionFreeBid.setProjectCode(request.getProjectCode());
/* 139 */ auctionFreeBid.setStatus(fAuctionFreeBid.getStatus());
/* 140 */ MulitAuctionServiceImpl.this.auctionFreeBidDAO.insert(auctionFreeBid);
/* */
// 2) Remove the bidder from the project; zero rows affected is an error.
/* 143 */ if (MulitAuctionServiceImpl.this.auctionBidderDAO.deleteByBidderAccount(request.getProjectCode(), request.getBidderAccount()) <= 0)
/* */           {
/* 145 */ throw new ServiceException(EnumAuctionErrors.REVIEW_DELETE_BIDDER_FAIL.getInfo(), Integer.valueOf(EnumAuctionErrors.REVIEW_DELETE_BIDDER_FAIL.getValue()));
/* */           }
/* */
// 3) Priority bidders also decrement the auction hall's priority counter.
/* 150 */ if (EnumActiveStatus.Yes.getValue().equals(fAuctionBidder.getIsPriority())) {
/* 151 */ HashMap actionHallMap = new HashMap();
/* 152 */ actionHallMap.put("priorityNumSub", Integer.valueOf(1));
/* 153 */ actionHallMap.put("whereProjectCode", request.getProjectCode());
/* 154 */ if (MulitAuctionServiceImpl.this.auctionHallDAO.updateByMap(actionHallMap) <= 0) {
/* 155 */ throw new ServiceException(EnumAuctionErrors.REVIEW_UPDATE_HALL_FALL.getInfo(), Integer.valueOf(EnumAuctionErrors.REVIEW_UPDATE_HALL_FALL.getValue()));
/* */           }
/* */
/* */           }
/* */
// 4) Notify the bidder via a localized system message.
/* 172 */ SystemMessageRequest systemMessageRequest = new SystemMessageRequest();
/* 173 */ systemMessageRequest.setSendAccount(EnumOperatorType.SYSTEM.getValue());
/* 174 */ systemMessageRequest.setContent(MulitAuctionServiceImpl.this.getMessage("project.auction.mulitbid.review.message.content", new String[] { fAuctionMulitBidProject.getProjectTitle(), request.getRemark() }));
/* */
/* 177 */ systemMessageRequest.setTitle(MulitAuctionServiceImpl.this.getMessage("project.auction.mulitbid.review.message.title", new String[0]));
/* */
/* 179 */ List userAccountList = new ArrayList();
/* 180 */ userAccountList.add(fAuctionBidder.getBidderAccount());
/* 181 */ systemMessageRequest.setUserAccountList(userAccountList);
/* 182 */ MulitAuctionServiceImpl.this.systemMessageService.sendSystemMessage(systemMessageRequest);
/* */
// 5) Write the audit log entry carrying the JSON snapshot taken earlier.
/* 185 */ AuctionLog auctionLog = new AuctionLog();
/* 186 */ auctionLog.setDataJson(fAuctionBidderJson);
/* 187 */ auctionLog.setProjectCode(request.getProjectCode());
/* 188 */ auctionLog.setRemark(logRemark);
/* 189 */ auctionLog.setOperatorType(EnumOperatorType.REVIEWER.getValue());
/* 190 */ auctionLog.setOperator(request.getReviewer());
/* 191 */ MulitAuctionServiceImpl.this.auctionLogDAO.insert(auctionLog);
/* */         }
// Known business failures roll back to the savepoint and surface their code.
/* */         catch (ServiceException e) {
/* 194 */ status.rollbackToSavepoint(savePoint);
/* 195 */ MulitAuctionServiceImpl.this.log.error("MulitAuctionServiceImpl review fail", e);
/* 196 */ result.setErrorNO(e.getErrorNO());
/* 197 */ result.setErrorInfo(e.getErrorInfo());
// Anything unexpected rolls back and maps to a generic internal error.
/* */         } catch (Exception e) {
/* 199 */ status.rollbackToSavepoint(savePoint);
/* 200 */ MulitAuctionServiceImpl.this.log.error("MulitAuctionServiceImpl review error", e);
/* 201 */ result.setErrorNO(Integer.valueOf(EnumAuctionErrors.INTERNAL_ERROR.getValue()));
/* 202 */ result.setErrorInfo(EnumAuctionErrors.INTERNAL_ERROR.getInfo());
/* */         }
/* 204 */ return result;
/* */       }
/* */     });
/* 208 */ return serviceResult;
/* */   }
/* */
// Fetches the bidder's top free bid for the project. NOTE(review): despite
// the "Uncheck" in the name, the query filters on check status Pass and bid
// status EFFECTIVE — the name appears misleading; confirm against the DAO
// mapping before relying on it.
/* */   public AuctionFreeBid queryTopUncheckFreeBid(String projectCode, String bidderAccount)
/* */   {
/* 213 */ MulitAuctionReviewQuery query = new MulitAuctionReviewQuery();
/* 214 */ query.setBidderAccount(bidderAccount);
/* 215 */ query.setCheckStatus(EnumBidCheckStatus.Pass);
/* 216 */ query.setProjectCode(projectCode);
/* 217 */ query.setStatus(EnumBidPriceStatus.EFFECTIVE);
/* 218 */ return this.auctionFreeBidDAO.selectTopByMulitAuctionReviewQuery(query);
/* */   }
/* */
// Resolves a message-bundle entry; hard-wired to Locale.CHINA for all users.
/* */   protected String getMessage(String code, String[] args) {
/* 222 */ return this.messageSource.getMessage(code, args, Locale.CHINA);
/* */   }
/* */ }
/* Location: E:\__安装归档\linquan-20161112\deploy16\wulin\webroot\WEB-INF\classes\
* Qualified Name: com.hundsun.network.gates.wulin.biz.service.pojo.auction.MulitAuctionServiceImpl
* JD-Core Version: 0.6.0
*/ | hnccfr/ccfrweb | basecore/src/com/hundsun/network/gates/wulin/biz/service/pojo/auction/MulitAuctionServiceImpl.java | Java | apache-2.0 | 12,488 |